diff --git a/.env.sample b/.env.sample index efbdd12e..2dd99082 100644 --- a/.env.sample +++ b/.env.sample @@ -35,10 +35,6 @@ PLAYGROUND_ENABLE=false # AMQP URL AMQP_URL=amqp://guest:guest@rabbitmq -# Billing settings -BILLING_DEBUG=true -BILLING_COMPANY_EMAIL="team@hawk.so" - ### Accounting module ### # Accounting service URL # CODEX_ACCOUNTING_URL=http://accounting:3999/graphql @@ -90,3 +86,10 @@ AWS_S3_SECRET_ACCESS_KEY= AWS_S3_BUCKET_NAME= AWS_S3_BUCKET_BASE_URL= AWS_S3_BUCKET_ENDPOINT= + +# SSO Service Provider Entity ID +# Unique identifier for Hawk in SAML IdP configuration +SSO_SP_ENTITY_ID=urn:hawk:tracker:saml + +## SAML state store type (memory or redis, default: redis) +SAML_STORE_TYPE=redis diff --git a/.env.test b/.env.test index 4ae39491..19787b59 100644 --- a/.env.test +++ b/.env.test @@ -46,10 +46,6 @@ SMTP_SENDER_ADDRESS= # AMQP URL AMQP_URL=amqp://guest:guest@rabbitmq:5672/ -# Billing settings -BILLING_DEBUG=true -BILLING_COMPANY_EMAIL="team@hawk.so" - ### Accounting module ### # Accounting service URL # CODEX_ACCOUNTING_URL= @@ -101,3 +97,6 @@ AWS_S3_SECRET_ACCESS_KEY= AWS_S3_BUCKET_NAME= AWS_S3_BUCKET_BASE_URL= AWS_S3_BUCKET_ENDPOINT= + +## SAML state store type (memory or redis, default: redis) +SAML_STORE_TYPE=memory diff --git a/.github/workflows/build-and-push-docker-image.yml b/.github/workflows/build-and-push-docker-image.yml index c6acacfc..cfbb2395 100644 --- a/.github/workflows/build-and-push-docker-image.yml +++ b/.github/workflows/build-and-push-docker-image.yml @@ -48,11 +48,19 @@ jobs: type=semver,pattern={{version}} type=semver,pattern={{major}}.{{minor}} + - name: Read Node.js version from .nvmrc + id: node_version + run: | + NODE_VERSION=$(cat .nvmrc | tr -d 'v') + echo "version=${NODE_VERSION}" >> $GITHUB_OUTPUT + - name: Build and push image uses: docker/build-push-action@v3 with: context: . 
file: docker/Dockerfile.prod + build-args: | + NODE_VERSION=${{ steps.node_version.outputs.version }} tags: ${{ steps.meta.outputs.tags }} labels: ${{ steps.meta.outputs.labels }} push: ${{ github.ref == 'refs/heads/stage' || github.ref == 'refs/heads/prod' || startsWith(github.ref, 'refs/tags/v') }} diff --git a/.nvmrc b/.nvmrc index dc0bb0f4..8ef0a525 100644 --- a/.nvmrc +++ b/.nvmrc @@ -1 +1 @@ -v22.12.0 +v24.11.1 diff --git a/docker-compose.test.yml b/docker-compose.test.yml index ac51dae6..be1d3a69 100644 --- a/docker-compose.test.yml +++ b/docker-compose.test.yml @@ -11,9 +11,11 @@ services: - ./:/usr/src/app - /usr/src/app/node_modules - ./test/integration/api.env:/usr/src/app/.env + - ./test/integration/keycloak:/keycloak:ro depends_on: - mongodb - rabbitmq + - keycloak # - accounting stdin_open: true tty: true @@ -32,10 +34,20 @@ services: condition: service_healthy api: condition: service_started - command: dockerize -wait http://api:4000/.well-known/apollo/server-health -timeout 30s yarn jest --config=./test/integration/jest.config.js --runInBand test/integration + keycloak: + condition: service_healthy + environment: + - KEYCLOAK_URL=http://keycloak:8180 + entrypoint: ["/bin/bash", "-c"] + command: + - | + dockerize -wait http://api:4000/.well-known/apollo/server-health -timeout 30s -wait http://keycloak:8180/health/ready -timeout 60s && + /keycloak/setup.sh && + yarn jest --config=./test/integration/jest.config.js --runInBand test/integration volumes: - ./:/usr/src/app - /usr/src/app/node_modules + - ./test/integration/keycloak:/keycloak:ro rabbitmq: image: rabbitmq:3-management @@ -52,6 +64,29 @@ services: timeout: 3s retries: 5 + keycloak: + image: quay.io/keycloak/keycloak:23.0 + environment: + - KEYCLOAK_ADMIN=admin + - KEYCLOAK_ADMIN_PASSWORD=admin + - KC_HTTP_PORT=8180 + - KC_HOSTNAME_STRICT=false + - KC_HOSTNAME_STRICT_HTTPS=false + - KC_HTTP_ENABLED=true + - KC_HEALTH_ENABLED=true + ports: + - 8180:8180 + command: + - start-dev + volumes: + - 
keycloak-test-data:/opt/keycloak/data + - ./test/integration/keycloak:/opt/keycloak/config + healthcheck: + test: ["CMD-SHELL", "exec 3<>/dev/tcp/127.0.0.1/8180;echo -e 'GET /health/ready HTTP/1.1\r\nhost: http://localhost\r\nConnection: close\r\n\r\n' >&3;if [ $? -eq 0 ]; then echo 'Healthcheck Successful';exit 0;else echo 'Healthcheck Failed';exit 1;fi;"] + interval: 10s + timeout: 5s + retries: 10 + # accounting: # image: codexteamuser/codex-accounting:prod # env_file: @@ -61,3 +96,4 @@ services: volumes: mongodata-test: + keycloak-test-data: diff --git a/docker/Dockerfile.dev b/docker/Dockerfile.dev index 4235bb06..e28c2646 100644 --- a/docker/Dockerfile.dev +++ b/docker/Dockerfile.dev @@ -1,4 +1,5 @@ -FROM node:22-alpine as builder +ARG NODE_VERSION=24.11.1 +FROM node:${NODE_VERSION}-alpine as builder WORKDIR /usr/src/app RUN apk add --no-cache git gcc g++ python3 make musl-dev @@ -7,11 +8,11 @@ COPY package.json yarn.lock ./ RUN yarn install -FROM node:22-alpine +FROM node:${NODE_VERSION}-alpine WORKDIR /usr/src/app -RUN apk add --no-cache openssl +RUN apk add --no-cache openssl bash curl ENV DOCKERIZE_VERSION v0.6.1 RUN wget https://github.com/jwilder/dockerize/releases/download/$DOCKERIZE_VERSION/dockerize-alpine-linux-amd64-$DOCKERIZE_VERSION.tar.gz \ diff --git a/docker/Dockerfile.prod b/docker/Dockerfile.prod index 71fa971c..d922fe7d 100644 --- a/docker/Dockerfile.prod +++ b/docker/Dockerfile.prod @@ -1,4 +1,5 @@ -FROM node:22-alpine as builder +ARG NODE_VERSION=24.11.1 +FROM node:${NODE_VERSION}-alpine as builder WORKDIR /usr/src/app RUN apk add --no-cache git gcc g++ python3 make musl-dev @@ -11,7 +12,7 @@ COPY . . 
RUN yarn build -FROM node:22-alpine +FROM node:${NODE_VERSION}-alpine WORKDIR /usr/src/app diff --git a/docs/Keycloak.md b/docs/Keycloak.md new file mode 100644 index 00000000..5e8b1ae6 --- /dev/null +++ b/docs/Keycloak.md @@ -0,0 +1,212 @@ +# Keycloak for Hawk SSO Development + +This guide explains how to use Keycloak for testing Hawk's SSO implementation. + +## Quick Start + +### 1. Start Keycloak + +From the project root: + +```bash +docker-compose up keycloak +``` + +Keycloak will be available at: **http://localhost:8180** + +### 2. Run Setup Script + +The setup script will configure Keycloak with a test realm, SAML client, and test users. + +**Option 1: Run from your host machine** (recommended): + +```bash +cd api/test/integration/keycloak +KEYCLOAK_URL=http://localhost:8180 ./setup.sh +``` + +**Option 2: Run from API container** (if you don't have curl on host): + +```bash +docker-compose exec -e KEYCLOAK_URL=http://keycloak:8180 api /keycloak/setup.sh +``` + +**Note:** The setup script requires `curl` and `bash` to interact with Keycloak API. The Keycloak container doesn't have these tools, so we either run from host or from another container (like `api`). + +### 3. 
Access Keycloak Admin Console + +- URL: http://localhost:8180 +- Username: `admin` +- Password: `admin` + +## Configuration + +### Realm + +- **Name**: `hawk` +- **SAML Endpoint**: http://localhost:8180/realms/hawk/protocol/saml + +### SAML Client + +- **Client ID / Entity ID**: `urn:hawk:tracker:saml` + - This must match `SSO_SP_ENTITY_ID` environment variable in Hawk API +- **Protocol**: SAML 2.0 +- **ACS URL**: http://localhost:4000/auth/sso/saml/{workspaceId}/acs +- **Name ID Format**: email + +### Environment Variables + +Hawk API requires the following environment variable: + +- **SSO_SP_ENTITY_ID**: `urn:hawk:tracker:saml` + - Set in `docker-compose.yml` or `.env` file + - This is the Service Provider Entity ID used to identify Hawk in SAML requests + +### Test Users + +| Username | Email | Password | Department | Title | +|----------|-------|----------|------------|-------| +| testuser | testuser@hawk.local | password123 | Engineering | Software Engineer | +| alice | alice@hawk.local | password123 | Product | Product Manager | +| bob | bob@hawk.local | password123 | Engineering | Senior Developer | + +## Hawk SSO Configuration + +To configure SSO in Hawk workspace settings: + +### Get Configuration Automatically + +**Option 1: Use the helper script** (recommended): + +```bash +cd api/test/integration/keycloak +./get-config.sh +``` + +This will output all required values that you can copy-paste into Hawk SSO settings. + +**Option 2: Get values manually**: + +### Required Fields + +1. **IdP Entity ID**: + ``` + http://localhost:8180/realms/hawk + ``` + +2. **SSO URL**: + ``` + http://localhost:8180/realms/hawk/protocol/saml + ``` + +3. 
**X.509 Certificate**: + + **Via command line**: + ```bash + curl -s "http://localhost:8180/realms/hawk/protocol/saml/descriptor" | grep -oP '(?<=)[^<]+' | head -1 + ``` + + **Via Keycloak Admin Console**: + - Go to Realm Settings → Keys + - Find RS256 algorithm row + - Click "Certificate" button + - Copy the certificate (without BEGIN/END lines) + - Paste into Hawk SSO settings + +### Attribute Mapping + +Configure these mappings in Hawk: + +- **Email**: `email` +- **Name**: `name` (full name - combines firstName and lastName from Keycloak) +- **Department** (optional): `department` +- **Title** (optional): `title` + +### Name ID Format + +Select: **Email address (urn:oasis:names:tc:SAML:1.1:nameid-format:emailAddress)** + +## Testing SSO Flow + +### Manual Test + +1. Configure SSO in Hawk workspace settings with the values above +2. Enable SSO for the workspace +3. Navigate to: http://localhost:4000/auth/sso/saml/{workspaceId} +4. You'll be redirected to Keycloak login page +5. Login with any test user (e.g., `testuser@hawk.local` / `password123`) +6. After successful authentication, you'll be redirected back to Hawk with tokens + +### Automated Test + +Run integration tests: + +```bash +cd api +yarn test:integration +``` + +## Troubleshooting + +### Keycloak not starting + +Check Docker logs: +```bash +docker-compose logs keycloak +``` + +### Realm already exists + +If you need to reset: +```bash +docker-compose down -v +docker-compose up keycloak +``` + +### Certificate issues + +If SAML validation fails: +1. Verify the certificate is copied correctly (no extra spaces/newlines) +2. Ensure you copied the certificate content without BEGIN/END markers +3. Check Keycloak logs for signature errors + +### Get SAML Metadata + +You can view the full SAML metadata descriptor at: +``` +http://localhost:8180/realms/hawk/protocol/saml/descriptor +``` + +This contains all technical details about the IdP configuration. 
+ +## Files + +Files are located in `api/test/integration/keycloak/`: + +- `import/hawk-realm.json` - Keycloak realm configuration +- `setup.sh` - Automated setup script + +## Advanced Configuration + +### Custom Workspace ID + +To test with a different workspace ID, update the ACS URL in the Keycloak Admin Console: + +1. Go to Clients → hawk-sp +2. Update `saml_assertion_consumer_url_post` attribute +3. Save changes + +### Additional Users + +You can add more users through: +- Keycloak Admin Console → Users → Add User +- Or update `api/test/integration/keycloak/import/hawk-realm.json` and re-import + +### Different Port + +If you need to run Keycloak on a different port: + +1. Update `KC_HTTP_PORT` in `docker-compose.yml` +2. Update port mapping in `docker-compose.yml` +3. Update all URLs in this README +4. Update `api/test/integration/keycloak/import/hawk-realm.json` with new URLs diff --git a/jest.config.js b/jest.config.js index 7cfa2195..1e7f792d 100644 --- a/jest.config.js +++ b/jest.config.js @@ -15,11 +15,18 @@ module.exports = { */ preset: '@shelf/jest-mongodb', + /** + * Setup file to provide global APIs needed by MongoDB driver + */ + setupFilesAfterEnv: ['/test/setup.ts'], + /** * TypeScript support */ transform: { - '^.+\\.tsx?$': 'ts-jest', + '^.+\\.tsx?$': ['ts-jest', { + tsconfig: 'test/tsconfig.json', + }], }, /** diff --git a/package.json b/package.json index d710f56c..f917baad 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "hawk.api", - "version": "1.2.31", + "version": "1.3.0", "main": "index.ts", "license": "BUSL-1.1", "scripts": { @@ -20,8 +20,10 @@ "test:integration:down": "docker compose -f docker-compose.test.yml down --volumes" }, "devDependencies": { - "@shelf/jest-mongodb": "^1.2.2", + "@shelf/jest-mongodb": "^6.0.2", + "@swc/core": "^1.3.0", "@types/jest": "^26.0.8", + "@types/xml2js": "^0.4.14", "eslint": "^6.7.2", "eslint-config-codex": "1.2.4", "eslint-plugin-import": "^2.19.1", @@ -31,7 +33,8 @@ "redis-mock": 
"^0.56.3", "ts-jest": "^26.1.4", "ts-node": "^10.9.1", - "typescript": "^4.7.4" + "typescript": "^4.7.4", + "xml2js": "^0.6.2" }, "dependencies": { "@ai-sdk/openai": "^2.0.64", @@ -40,10 +43,10 @@ "@graphql-tools/schema": "^8.5.1", "@graphql-tools/utils": "^8.9.0", "@hawk.so/nodejs": "^3.1.1", - "@hawk.so/types": "^0.1.37", + "@hawk.so/types": "^0.4.2", "@n1ru4l/json-patch-plus": "^0.2.0", + "@node-saml/node-saml": "^5.0.1", "@types/amqp-connection-manager": "^2.0.4", - "@types/bson": "^4.0.5", "@types/debug": "^4.1.5", "@types/escape-html": "^1.0.0", "@types/graphql-upload": "^8.0.11", @@ -51,7 +54,6 @@ "@types/lodash.clonedeep": "^4.5.9", "@types/lodash.mergewith": "^4.6.9", "@types/mime-types": "^2.1.0", - "@types/mongodb": "^3.6.20", "@types/morgan": "^1.9.10", "@types/node": "^16.11.46", "@types/safe-regex": "^1.1.6", @@ -64,7 +66,6 @@ "aws-sdk": "^2.1174.0", "axios": "^0.27.2", "body-parser": "^1.19.0", - "bson": "^4.6.5", "cloudpayments": "^6.0.1", "codex-accounting-sdk": "https://github.com/codex-team/codex-accounting-sdk.git", "dataloader": "^2.0.0", @@ -81,7 +82,7 @@ "lodash.mergewith": "^4.6.2", "migrate-mongo": "^7.0.1", "mime-types": "^2.1.25", - "mongodb": "^3.7.3", + "mongodb": "^6.0.0", "morgan": "^1.10.1", "prom-client": "^15.1.3", "redis": "^4.7.0", @@ -89,5 +90,8 @@ "ts-node-dev": "^2.0.0", "uuid": "^8.3.2", "zod": "^3.25.76" + }, + "resolutions": { + "bson": "^6.7.0" } } diff --git a/src/dataLoaders.ts b/src/dataLoaders.ts index c41a5458..377bd702 100644 --- a/src/dataLoaders.ts +++ b/src/dataLoaders.ts @@ -1,5 +1,5 @@ import DataLoader from 'dataloader'; -import { Db, ObjectId } from 'mongodb'; +import { Db, ObjectId, WithId } from 'mongodb'; import { PlanDBScheme, UserDBScheme, WorkspaceDBScheme, ProjectDBScheme, EventData, EventAddons } from '@hawk.so/types'; type EventDbScheme = { @@ -47,7 +47,7 @@ export default class DataLoaders { */ public userByEmail = new DataLoader( (userEmails) => - this.batchByField('users', userEmails, 'email'), + 
this.batchByField('users', 'email', userEmails), { cache: false } ); @@ -69,41 +69,51 @@ export default class DataLoaders { * @param collectionName - collection name to get entities * @param ids - ids for resolving */ - private async batchByIds(collectionName: string, ids: ReadonlyArray): Promise<(T | null | Error)[]> { - return this.batchByField(collectionName, ids.map(id => new ObjectId(id)), '_id'); + private async batchByIds( + collectionName: string, + ids: ReadonlyArray + ): Promise<(WithId | null)[]> { + return this.batchByField(collectionName, '_id', ids.map(id => new ObjectId(id))); } /** * Batching function for resolving entities by certain field * @param collectionName - collection name to get entities - * @param values - values for resolving * @param fieldName - field name to resolve + * @param values - values for resolving */ private async batchByField< - // eslint-disable-next-line @typescript-eslint/no-explicit-any - T extends { [key: string]: any }, - FieldType extends ObjectId | string - >(collectionName: string, values: ReadonlyArray, fieldName: string): Promise<(T | null | Error)[]> { + T extends Record, + FieldType extends keyof T + >( + collectionName: string, + fieldName: FieldType, + values: ReadonlyArray + ): Promise<(WithId | null)[]> { + type Doc = WithId; const valuesMap = new Map(); for (const value of values) { valuesMap.set(value.toString(), value); } - const queryResult = await this.dbConnection.collection(collectionName) + const queryResult = await this.dbConnection + .collection(collectionName) .find({ [fieldName]: { $in: Array.from(valuesMap.values()) }, - }) + } as any) .toArray(); /** * Map for making associations between given id and fetched entity * It's because MongoDB `find` mixed all entities */ - const entitiesMap: Record = {}; + const entitiesMap: Record = {}; + + queryResult.forEach((entity) => { + const key = entity[fieldName as keyof Doc]; - queryResult.forEach((entity: T) => { - 
entitiesMap[entity[fieldName].toString()] = entity; + entitiesMap[key.toString()] = entity; }, {}); return values.map((field) => entitiesMap[field.toString()] || null); diff --git a/src/directives/definedOnlyForAdmins.ts b/src/directives/definedOnlyForAdmins.ts new file mode 100644 index 00000000..8a95295e --- /dev/null +++ b/src/directives/definedOnlyForAdmins.ts @@ -0,0 +1,99 @@ +import { defaultFieldResolver, GraphQLSchema } from 'graphql'; +import { mapSchema, MapperKind, getDirective } from '@graphql-tools/utils'; +import { ResolverContextWithUser, UnknownGraphQLResolverResult } from '../types/graphql'; +import WorkspaceModel from '../models/workspace'; + +/** + * Check if user is admin of workspace + * @param context - resolver context + * @param workspaceId - workspace id to check + * @returns true if user is admin, false otherwise + */ +async function isUserAdminOfWorkspace(context: ResolverContextWithUser, workspaceId: string): Promise { + try { + const workspace = await context.factories.workspacesFactory.findById(workspaceId); + + if (!workspace) { + return false; + } + + const member = await workspace.getMemberInfo(context.user.id); + + if (!member || WorkspaceModel.isPendingMember(member)) { + return false; + } + + return member.isAdmin || false; + } catch { + return false; + } +} + +/** + * Defines directive for fields that are only defined for admins + * Returns null for non-admin users instead of throwing error + * + * Works with object fields where parent object has _id field (workspace id) + * + * Usage: + * type Workspace { + * sso: WorkspaceSsoConfig @definedOnlyForAdmins + * } + */ +export default function definedOnlyForAdminsDirective(directiveName = 'definedOnlyForAdmins') { + return { + definedOnlyForAdminsDirectiveTypeDefs: ` + """ + Field is only defined for admins. Returns null for non-admin users. + Works with object fields where parent object has _id field (workspace id). 
+ """ + directive @${directiveName} on FIELD_DEFINITION + `, + definedOnlyForAdminsDirectiveTransformer: (schema: GraphQLSchema) => + mapSchema(schema, { + [MapperKind.OBJECT_FIELD]: (fieldConfig, fieldName, typeName) => { + const definedOnlyForAdminsDirective = getDirective(schema, fieldConfig, directiveName)?.[0]; + + if (definedOnlyForAdminsDirective) { + const { + resolve = defaultFieldResolver, + } = fieldConfig; + + /** + * New field resolver that checks admin rights + * @param resolverArgs - default GraphQL resolver args + */ + fieldConfig.resolve = async (...resolverArgs): UnknownGraphQLResolverResult => { + const [parent, , context] = resolverArgs; + + /** + * Get workspace ID from parent object + * Parent should have _id field (workspace) + */ + if (!parent || !parent._id) { + return null; + } + + const workspaceId = parent._id.toString(); + + /** + * Check if user is admin + */ + const isAdmin = await isUserAdminOfWorkspace(context, workspaceId); + + if (!isAdmin) { + return null; + } + + /** + * Call original resolver + */ + return resolve(...resolverArgs); + }; + } + + return fieldConfig; + }, + }), + }; +} diff --git a/src/index.ts b/src/index.ts index d84776b5..98f7a62e 100644 --- a/src/index.ts +++ b/src/index.ts @@ -30,6 +30,7 @@ import { metricsMiddleware, createMetricsServer, graphqlMetricsPlugin } from './ import { requestLogger } from './utils/logger'; import ReleasesFactory from './models/releasesFactory'; import RedisHelper from './redisHelper'; +import { appendSsoRoutes } from './sso'; /** * Option to enable playground @@ -246,6 +247,22 @@ class HawkAPI { await redis.initialize(); + /** + * Setup shared factories for SSO routes + * SSO endpoints don't require per-request DataLoaders isolation, + * so we can reuse the same factories instance + * Created here to avoid duplication with createContext + */ + // eslint-disable-next-line @typescript-eslint/no-non-null-assertion + const ssoDataLoaders = new DataLoaders(mongo.databases.hawk!); + 
const ssoFactories = HawkAPI.setupFactories(ssoDataLoaders); + + /** + * Append SSO routes to Express app using shared factories + * Note: This must be called after database connections are established + */ + appendSsoRoutes(this.app, ssoFactories); + await this.server.start(); this.app.use(graphqlUploadExpress()); this.server.applyMiddleware({ app: this.app }); diff --git a/src/integrations/vercel-ai/index.ts b/src/integrations/vercel-ai/index.ts index 48010f25..b063b4c4 100644 --- a/src/integrations/vercel-ai/index.ts +++ b/src/integrations/vercel-ai/index.ts @@ -17,19 +17,19 @@ class VercelAIApi { /** * @todo make it dynamic, get from project settings */ - this.modelId = 'gpt-4o'; + this.modelId = 'deepseek/deepseek-v3.1'; } /** * Generate AI suggestion for the event * - * @param {EventData} payload - event data + * @param {EventData} payload - event data to make suggestion * @returns {Promise} AI suggestion for the event * @todo add defence against invalid prompt injection */ public async generateSuggestion(payload: EventData) { const { text } = await generateText({ - model: openai(this.modelId), + model: this.modelId, system: ctoInstruction, prompt: eventSolvingInput(payload), }); diff --git a/src/metrics/mongodb.ts b/src/metrics/mongodb.ts index f13087b4..44fd608c 100644 --- a/src/metrics/mongodb.ts +++ b/src/metrics/mongodb.ts @@ -260,6 +260,16 @@ function logCommandFailed(event: any): void { * @param client - MongoDB client to monitor */ export function setupMongoMetrics(client: MongoClient): void { + /** + * Skip setup in test environment + */ + if ( + process.env.NODE_ENV === 'test' || + process.env.NODE_ENV === 'e2e' + ) { + return; + } + client.on('commandStarted', (event) => { storeCommandInfo(event); @@ -317,7 +327,11 @@ export function setupMongoMetrics(client: MongoClient): void { .observe(duration); // Track error - const errorCode = event.failure?.code?.toString() || 'unknown'; + /** + * MongoDB failure objects may have additional properties like 
'code' + * that aren't part of the standard Error type + */ + const errorCode = (event.failure as any)?.code?.toString() || 'unknown'; mongoCommandErrors .labels(metadata.commandName, errorCode) diff --git a/src/models/abstactModelFactory.ts b/src/models/abstactModelFactory.ts index 16d337cd..9ca87d65 100644 --- a/src/models/abstactModelFactory.ts +++ b/src/models/abstactModelFactory.ts @@ -1,10 +1,10 @@ -import { Collection, Db, ObjectID } from 'mongodb'; +import { Collection, Db, Document, ObjectId } from 'mongodb'; import AbstractModel, { ModelConstructor } from './abstractModel'; /** * Model Factory class */ -export default abstract class AbstractModelFactory> { +export default abstract class AbstractModelFactory> { /** * Database connection to interact with */ @@ -17,11 +17,8 @@ export default abstract class AbstractModelFactory; /** * Creates factory instance @@ -44,7 +41,12 @@ export default abstract class AbstractModelFactory, but Model constructor expects DBScheme. + * Since WithId is DBScheme & { _id: ObjectId } and DBScheme already + * includes _id: ObjectId, they are structurally compatible. + */ + return new this.Model(searchResult as DBScheme); } /** @@ -53,13 +55,18 @@ export default abstract class AbstractModelFactory { const searchResult = await this.collection.findOne({ - _id: new ObjectID(id), - }); + _id: new ObjectId(id), + } as any); if (!searchResult) { return null; } - return new this.Model(searchResult); + /** + * MongoDB returns WithId, but Model constructor expects DBScheme. + * Since WithId is DBScheme & { _id: ObjectId } and DBScheme already + * includes _id: ObjectId, they are structurally compatible. 
+ */ + return new this.Model(searchResult as DBScheme); } } diff --git a/src/models/abstractModel.ts b/src/models/abstractModel.ts index 2b12a4f9..9c8830c0 100644 --- a/src/models/abstractModel.ts +++ b/src/models/abstractModel.ts @@ -1,15 +1,15 @@ -import { Collection, Db } from 'mongodb'; +import { Collection, Db, Document } from 'mongodb'; import { databases } from '../mongo'; /** * Model constructor type */ -export type ModelConstructor> = new (modelData: DBScheme) => Model; +export type ModelConstructor> = new (modelData: DBScheme) => Model; /** * Base model */ -export default abstract class AbstractModel { +export default abstract class AbstractModel { /** * Database connection to interact with DB */ @@ -19,7 +19,7 @@ export default abstract class AbstractModel { /** * Model's collection */ - protected abstract collection: Collection; + protected abstract collection: Collection; /** * Creates model instance @@ -32,10 +32,16 @@ export default abstract class AbstractModel { /** * Update entity data * @param query - query to match - * @param data - update data + * @param data - update data (supports MongoDB dot notation for nested fields) * @return number of documents modified */ - public async update(query: object, data: object): Promise { - return (await this.collection.updateOne(query, { $set: data })).modifiedCount; + public async update(query: object, data: Partial | Record): Promise { + /** + * Type assertion is needed because MongoDB's updateOne accepts both + * Partial (for regular updates) and Record + * (for dot notation like 'identities.workspaceId.saml.id'), but the + * type system requires MatchKeysAndValues. 
+ */ + return (await this.collection.updateOne(query, { $set: data as any })).modifiedCount; } } diff --git a/src/models/eventsFactory.js b/src/models/eventsFactory.js index 383387be..bf3efb13 100644 --- a/src/models/eventsFactory.js +++ b/src/models/eventsFactory.js @@ -7,7 +7,7 @@ import ChartDataService from '../services/chartDataService'; const Factory = require('./modelFactory'); const mongo = require('../mongo'); const Event = require('../models/event'); -const { ObjectID } = require('mongodb'); +const { ObjectId } = require('mongodb'); const { composeEventPayloadByRepetition } = require('../utils/merge'); const MAX_DB_READ_BATCH_SIZE = Number(process.env.MAX_DB_READ_BATCH_SIZE); @@ -174,7 +174,7 @@ class EventsFactory extends Factory { /** * Find event by id * - * @param {string|ObjectID} id - event's id + * @param {string|ObjectId} id - event's id * @returns {Event|null} */ async findById(id) { @@ -282,7 +282,7 @@ class EventsFactory extends Factory { $and: [ { groupingTimestamp: paginationCursor.groupingTimestampBoundary }, { [sort]: paginationCursor.sortValueBoundary }, - { _id: { $lte: new ObjectID(paginationCursor.idBoundary) } }, + { _id: { $lte: new ObjectId(paginationCursor.idBoundary) } }, ], }, ], @@ -654,7 +654,7 @@ class EventsFactory extends Factory { /** * Returns Event repetitions * - * @param {string|ObjectID} originalEventId - id of the original event + * @param {string|ObjectId} originalEventId - id of the original event * @param {Number} limit - count limitations * @param {Number} cursor - pointer to the next repetition * @@ -663,7 +663,7 @@ class EventsFactory extends Factory { async getEventRepetitions(originalEventId, limit = 10, cursor = null) { limit = this.validateLimit(limit); - cursor = cursor ? new ObjectID(cursor) : null; + cursor = cursor ? 
new ObjectId(cursor) : null; const result = { repetitions: [], @@ -766,7 +766,7 @@ class EventsFactory extends Factory { */ const repetition = await this.getCollection(this.TYPES.REPETITIONS) .findOne({ - _id: ObjectID(repetitionId), + _id: new ObjectId(repetitionId), }); const originalEvent = await this.eventsDataLoader.load(originalEventId); @@ -828,8 +828,8 @@ class EventsFactory extends Factory { async visitEvent(eventId, userId) { const result = await this.getCollection(this.TYPES.EVENTS) .updateOne( - { _id: new ObjectID(eventId) }, - { $addToSet: { visitedBy: new ObjectID(userId) } } + { _id: new ObjectId(eventId) }, + { $addToSet: { visitedBy: new ObjectId(userId) } } ); if (result.matchedCount === 0) { @@ -856,7 +856,7 @@ class EventsFactory extends Factory { throw new Error(`Event not found for eventId: ${eventId}`); } - const query = { _id: new ObjectID(event._id) }; + const query = { _id: new ObjectId(event._id) }; const markKey = `marks.${mark}`; @@ -908,7 +908,7 @@ class EventsFactory extends Factory { async updateAssignee(eventId, assignee) { const collection = this.getCollection(this.TYPES.EVENTS); - const query = { _id: new ObjectID(eventId) }; + const query = { _id: new ObjectId(eventId) }; const update = { $set: { assignee: assignee }, diff --git a/src/models/model.js b/src/models/model.js index f2624a3c..cd4e381f 100644 --- a/src/models/model.js +++ b/src/models/model.js @@ -1,9 +1,9 @@ const mongodbDriver = require('mongodb'); -const ObjectID = mongodbDriver.ObjectID; +const ObjectId = mongodbDriver.ObjectId; /** * @typedef {Object} BaseModel - * @typedef {string|ObjectID} id - record id + * @typedef {string|ObjectId} id - record id */ /** @@ -44,7 +44,7 @@ class Model { */ static async findById(id) { const searchResult = await this.collection.findOne({ - _id: new ObjectID(id), + _id: new ObjectId(id), }); return new this({ diff --git a/src/models/notify.js b/src/models/notify.js index 729d6aef..a32d8f1a 100644 --- a/src/models/notify.js +++ 
b/src/models/notify.js @@ -7,8 +7,8 @@ /** * @typedef {Object} NotificationSettingsSchema - * @property {ObjectID|string} id - notify ID - * @property {ObjectID|string} [userId] - user ID + * @property {ObjectId|string} id - notify ID + * @property {ObjectId|string} [userId] - user ID * @property {ReceiveTypes} receiveType * @property {string} words - filter words when action type is INCLUDING * @property {ProviderSettings[]} providers - notify settings @@ -50,7 +50,7 @@ class Notify { } /** - * @return {string|ObjectID} + * @return {string|ObjectId} */ get id() { return this._id; diff --git a/src/models/paymentRequest.js b/src/models/paymentRequest.js deleted file mode 100644 index 92978f8c..00000000 --- a/src/models/paymentRequest.js +++ /dev/null @@ -1,86 +0,0 @@ -const crypto = require('crypto'); -const User = require('../models/user').default; - -const EmailCompany = process.env.BILLING_COMPANY_EMAIL; -const OSNTaxation = 'osn'; -const TaxNone = 'none'; -const PaymentDescription = 'Card check payment. 
It will be refunded.'; - -/** - * PaymentRequest model - */ -class PaymentRequest { - /** - * Return payment request JSON object - * @param {Object} paymentRequest - payment params - * @param {UserSchema} userData - user's data - * @param {String} orderId - unique order identifier - */ - static generatePaymentObject(paymentRequest, userData) { - return { - Amount: paymentRequest.amount, - OrderId: paymentRequest.orderId, - Recurrent: paymentRequest.recurrent, - Language: paymentRequest.language, - CustomerKey: userData.id, - Description: PaymentDescription, - Receipt: { - Email: userData.email, - EmailCompany, - Taxation: OSNTaxation, - Items: [ { - Name: 'Deposit', - Price: paymentRequest.amount, - Quantity: 1, - Amount: paymentRequest.amount, - Tax: TaxNone, - } ], - }, - PayType: 'T', - DATA: paymentRequest.data, - }; - } - - /** - * Generate unique payment Id - * @return {string} - */ - static generateOrderId() { - return crypto.randomBytes(8).toString('hex'); - } - - /** - * Create new payment - * @param {String} userId - user's id - * @param {Object} paymentQuery - payment query params - * @returns {Object} - payment object - */ - static async create(userId, paymentQuery) { - const userData = await User.findById(userId); - const paymentObject = PaymentRequest.generatePaymentObject(paymentQuery, userData); - - console.log('INIT =>', paymentObject); - - return paymentObject; - } - - /** - * Run API Init action - * @param {String} userId - user's id - * @param {Object} paymentInitQuery - payment params - * @return {Object} - payment response object from bank - */ - static async apiInitPayment(userId, paymentInitQuery) { - // const paymentRequest = await PaymentRequest.create(userId, paymentInitQuery); - const result = {}; // Init payment request to API - - console.log(`Got result for Init payment: ${JSON.stringify(result)}`); - if (!result.Success) { - throw Error(`Merchant API error: ${result.Message} ${result.Details}`); - } - - return result; - } -} - 
-module.exports = PaymentRequest; diff --git a/src/models/paymentTransaction.js b/src/models/paymentTransaction.js deleted file mode 100644 index fbf4ff42..00000000 --- a/src/models/paymentTransaction.js +++ /dev/null @@ -1,55 +0,0 @@ -const mongo = require('../mongo'); -const Model = require('./model'); - -/** - * @typedef {Object} PaymentTransaction - * @property {string} id - transaction unique id - * @property {string} userId - user id - * @property {string} workspaceId - workspace id - * @property {number} amount - payment amount in kopecs - * @property {string} orderId - order id (local) - * @property {string} paymentId - payment id (Tinkoff side) - * @property {number} timestamp - create timestamp - */ - -/** - * PaymentTransaction model - */ -class PaymentTransaction extends Model { - /** - * Creates PaymentTransaction instance - * @param {PaymentTransaction} transactionData - transaction data - */ - constructor(transactionData) { - super(); - this.id = transactionData.id; - this.userId = transactionData.userId; - this.workspaceId = transactionData.workspaceId; - this.amount = transactionData.amount; - this.orderId = transactionData.orderId; - this.paymentId = transactionData.paymentId; - this.paymentType = transactionData.paymentType; - this.timestamp = transactionData.timestamp; - this.status = transactionData.status; - this.cardId = transactionData.cardId; - } - - /** - * Model's collection - * @return {Collection} - */ - static get collection() { - return mongo.databases.hawk.collection('paymentTransactions'); - } - - /** - * Creates new payment transaction - * @param {PaymentTransaction} transactionData - transaction data - * @returns {Promise} - created transaction - */ - static async create(transactionData) { - return new PaymentTransaction(transactionData); - } -} - -module.exports = PaymentTransaction; diff --git a/src/models/project.ts b/src/models/project.ts index d6b7350a..ad82451c 100644 --- a/src/models/project.ts +++ b/src/models/project.ts 
@@ -393,11 +393,11 @@ export default class ProjectModel extends AbstractModel impleme }, }, { - returnOriginal: false, + returnDocument: 'after', } ); - return result.value?.notifications.find(doc => doc._id.toString() === payload.ruleId) || null; + return result?.notifications.find((doc: any) => doc._id.toString() === payload.ruleId) || null; } /** @@ -417,10 +417,10 @@ export default class ProjectModel extends AbstractModel impleme }, }, { - returnOriginal: false, + returnDocument: 'after', }); - return result.value?.notifications.find(doc => doc._id.toString() === ruleId) || null; + return result?.notifications.find((doc: any) => doc._id.toString() === ruleId) || null; } /** @@ -456,11 +456,11 @@ export default class ProjectModel extends AbstractModel impleme }, }, { - returnOriginal: false, + returnDocument: 'after', } ); - return result.value?.notifications.find(doc => doc._id.toString() === ruleId) || null; + return result?.notifications.find((doc) => doc._id.toString() === ruleId) || null; } /** @@ -476,16 +476,16 @@ export default class ProjectModel extends AbstractModel impleme { $set: projectData, }, - { returnOriginal: false } + { returnDocument: 'after' } ); } catch (e) { throw new Error('Can\'t update project'); } - if (!result.value) { + if (!result) { throw new Error('There is no project with provided id'); } - return result.value; + return result; } /** diff --git a/src/models/projectToWorkspace.js b/src/models/projectToWorkspace.js index 02e2384d..a51ba738 100644 --- a/src/models/projectToWorkspace.js +++ b/src/models/projectToWorkspace.js @@ -1,10 +1,10 @@ const mongo = require('../mongo'); -const { ObjectID } = require('mongodb'); +const { ObjectId } = require('mongodb'); /** * @typedef {Object} ProjectToWorkspaceSchema - * @property {string|ObjectID} id - ProjectWorkspace ID - * @property {string|ObjectID} projectId - project ID + * @property {string|ObjectId} id - ProjectWorkspace ID + * @property {string|ObjectId} projectId - project ID * 
@property {string} [projectUri] - project unique URI */ @@ -15,10 +15,10 @@ const { ObjectID } = require('mongodb'); class ProjectToWorkspace { /** * Creates an instance of ProjectToWorkspace - * @param {string|ObjectID} workspaceId + * @param {string|ObjectId} workspaceId */ constructor(workspaceId) { - this.workspaceId = new ObjectID(workspaceId); + this.workspaceId = new ObjectId(workspaceId); this.collection = mongo.databases.hawk.collection( 'projects:' + workspaceId ); @@ -47,12 +47,12 @@ class ProjectToWorkspace { /** * Find projectWorkspace by ID * - * @param {string|ObjectID} projectWorkspaceId + * @param {string|ObjectId} projectWorkspaceId * @returns {Promise} */ async findById(projectWorkspaceId) { const projectWorkspace = await this.collection.findOne({ - _id: new ObjectID(projectWorkspaceId), + _id: new ObjectId(projectWorkspaceId), }); if (!projectWorkspace) { @@ -68,24 +68,19 @@ class ProjectToWorkspace { /** * Creates new projects: document * - * @param {{projectId: ObjectID}} projectToWorkspaceData - * @returns {Promise} + * @param {{projectId: ObjectId}} projectToWorkspaceData + * @returns {Promise} */ async add(projectToWorkspaceData) { - const projectToWorkspace = await this.collection.insertOne( - projectToWorkspaceData - ); + const projectToWorkspaceResult = await this.collection.insertOne(projectToWorkspaceData); - return { - id: projectToWorkspace.insertedId, - ...projectToWorkspace, - }; + return projectToWorkspaceResult.acknowledged; } /** * Remove project from workspace * - * @param {ObjectID} projectId - project Id for removing + * @param {ObjectId} projectId - project Id for removing * * @return {Promise} */ @@ -97,11 +92,11 @@ class ProjectToWorkspace { * Gets projects in workspace. * If ids were not passed, returns all projects in workspace. 
* - * @param {string[]|ObjectID[]} ids - project(s) id(s) + * @param {string[]|ObjectId[]} ids - project(s) id(s) * @returns {ProjectSchema[]} */ async getProjects(ids = []) { - ids = ids.map(id => new ObjectID(id)); + ids = ids.map(id => new ObjectId(id)); const pipleine = [ { diff --git a/src/models/user.ts b/src/models/user.ts index 45fc6c17..26c696db 100644 --- a/src/models/user.ts +++ b/src/models/user.ts @@ -1,8 +1,7 @@ import argon2 from 'argon2'; import crypto from 'crypto'; import jwt, { Secret } from 'jsonwebtoken'; -import { OptionalId } from '../mongo'; -import { Collection, ObjectId } from 'mongodb'; +import { Collection, ObjectId, OptionalId } from 'mongodb'; import AbstractModel from './abstractModel'; import objectHasOnlyProps from '../utils/objectHasOnlyProps'; import { NotificationsChannelsDBScheme } from '../types/notification-channels'; @@ -80,7 +79,7 @@ type UserProjectsLastVisitDBScheme = Record; /** * User model */ -export default class UserModel extends AbstractModel implements UserDBScheme { +export default class UserModel extends AbstractModel> implements UserDBScheme { /** * User's id */ @@ -142,16 +141,35 @@ export default class UserModel extends AbstractModel implements Us */ public utm?: UserDBScheme['utm']; + /** + * External identities for SSO (keyed by workspaceId) + */ + public identities?: { + [workspaceId: string]: { + saml: { + /** + * NameID value from IdP (stable identifier) + */ + id: string; + + /** + * Email at the time of linking (for audit) + */ + email: string; + }; + }; + }; + /** * Model's collection */ - protected collection: Collection; + protected collection: Collection>; /** * Model constructor * @param modelData - user data */ - constructor(modelData: UserDBScheme) { + constructor(modelData: OptionalId) { /** * Fallback for name using email */ @@ -161,7 +179,7 @@ export default class UserModel extends AbstractModel implements Us super(modelData); - this.collection = this.dbConnection.collection('users'); + 
this.collection = this.dbConnection.collection('users'); } /** @@ -284,8 +302,15 @@ export default class UserModel extends AbstractModel implements Us /** * Generates JWT + * + * @param isSsoEnforced - if true, use shorter token lifetime (2 days instead of 30) */ - public async generateTokensPair(): Promise { + public async generateTokensPair(isSsoEnforced = false): Promise { + /** + * Use shorter refresh token expiry for SSO users to enforce re-authentication + */ + const refreshTokenExpiry = isSsoEnforced ? '2d' : '30d'; + const accessToken = await jwt.sign( { userId: this._id, @@ -299,7 +324,7 @@ export default class UserModel extends AbstractModel implements Us userId: this._id, }, process.env.JWT_SECRET_REFRESH_TOKEN as Secret, - { expiresIn: '30d' } + { expiresIn: refreshTokenExpiry } ); return { @@ -374,6 +399,7 @@ export default class UserModel extends AbstractModel implements Us if (!this.workspaces) { return []; } + return Object.keys(this.workspaces); } @@ -418,4 +444,56 @@ export default class UserModel extends AbstractModel implements Us }, }); } + + /** + * Link SAML identity to user for specific workspace + * + * @param workspaceId - workspace ID + * @param samlId - NameID value from IdP (stable identifier) + * @param email - user email at the time of linking + */ + public async linkSamlIdentity(workspaceId: string, samlId: string, email: string): Promise { + /** + * Use Record for MongoDB dot notation keys + */ + const updateData: Record = { + [`identities.${workspaceId}.saml.id`]: samlId, + [`identities.${workspaceId}.saml.email`]: email, + }; + + await this.update( + { _id: new ObjectId(this._id) }, + updateData + ); + + /** + * Update local state + */ + if (!this.identities) { + this.identities = {}; + } + if (!this.identities[workspaceId]) { + this.identities[workspaceId] = { + saml: { + id: samlId, + email, + }, + }; + } else { + this.identities[workspaceId].saml = { + id: samlId, + email, + }; + } + } + + /** + * Get SAML identity for 
workspace + * + * @param workspaceId - workspace ID + * @returns SAML identity or null if not found + */ + public getSamlIdentity(workspaceId: string): { id: string; email: string } | null { + return this.identities?.[workspaceId]?.saml || null; + } } diff --git a/src/models/userCard.js b/src/models/userCard.js deleted file mode 100644 index 09ba2249..00000000 --- a/src/models/userCard.js +++ /dev/null @@ -1,85 +0,0 @@ -const mongo = require('../mongo'); -const Model = require('./model'); -const { ObjectID } = require('mongodb'); - -/** - * @typedef {Object} UserCardSchema - * @property {string} userId - user's id - * @property {string} pan - card's pan - * @property {Number} rebillId - card's rebill id for recurrent payments - * @property {string} expDate - card's expiration date - */ - -/** - * UserCard model - */ -class UserCard extends Model { - /** - * Creates userCard instance - * @param {UserSchema} userCardData - user's card data - */ - constructor(userCardData) { - super(); - this.userId = userCardData.userId; - this.pan = userCardData.pan; - this.rebillId = userCardData.rebillId; - this.cardId = userCardData.cardId; - this.expDate = userCardData.expDate; - } - - /** - * Model's collection - * @return {Collection} - */ - static get collection() { - return mongo.databases.hawk.collection('userCards'); - } - - /** - * Get all user's cards - * @param {string} userId - user's id - * @return {Promise} - */ - static async findByUserId(userId) { - return (await this.collection.find({ userId: new ObjectID(userId) })).toArray(); - } - - /** - * Get card info - * @param {string} userId - user's id - * @param {Number} cardId - card's id - * @return {Promise} - */ - static async find(userId, cardId) { - return this.collection.findOne({ - userId: new ObjectID(userId), - cardId, - }); - } - - /** - * Creates new UserCard in DB - * @param {UserCardSchema} userCardData - user's card data - * @returns {Promise} - user details - */ - static async create(userCardData) { - 
await this.collection.insertOne(userCardData); - - return new UserCard(userCardData); - } - - /** - * Remove UserCard from DB - * @param {Number} cardNumber - user's card number - * @param {string} userId - user's ID - * @returns {Promise} - remove result - */ - static async remove({ cardNumber, userId }) { - return this.collection.deleteOne({ - cardNumber, - userId, - }); - } -} - -module.exports = UserCard; diff --git a/src/models/usersFactory.ts b/src/models/usersFactory.ts index f3c6ff70..ba3ee1de 100644 --- a/src/models/usersFactory.ts +++ b/src/models/usersFactory.ts @@ -1,6 +1,6 @@ import AbstractModelFactory from './abstactModelFactory'; import UserModel from './user'; -import { Collection, Db } from 'mongodb'; +import { Collection, Db, OptionalId } from 'mongodb'; import DataLoaders from '../dataLoaders'; import { UserDBScheme } from '@hawk.so/types'; import { Analytics, AnalyticsEventTypes } from '../utils/analytics'; @@ -8,11 +8,11 @@ import { Analytics, AnalyticsEventTypes } from '../utils/analytics'; /** * Users factory to work with User Model */ -export default class UsersFactory extends AbstractModelFactory { +export default class UsersFactory extends AbstractModelFactory, UserModel> { /** * DataBase collection to work with */ - protected collection: Collection; + protected collection: Collection>; /** * DataLoaders for fetching data from database @@ -72,7 +72,7 @@ export default class UsersFactory extends AbstractModelFactory = { + const userData: OptionalId = { email, password: hashedPassword, notifications: UserModel.generateDefaultNotificationsSettings(email), @@ -121,7 +121,7 @@ export default class UsersFactory extends AbstractModelFactory { - const { result } = await this.collection.deleteOne({ email: email }); + const result = await this.collection.deleteOne({ email: email }); + + return result.acknowledged; + } + + /** + * Find user by SAML identity + * + * @param workspaceId - workspace ID + * @param samlId - NameID value from IdP + * 
@returns UserModel or null if not found + */ + public async findBySamlIdentity(workspaceId: string, samlId: string): Promise { + const userData = await this.collection.findOne({ + [`identities.${workspaceId}.saml.id`]: samlId, + }); - return !!result.ok; + return userData ? new UserModel(userData) : null; } } diff --git a/src/models/workspace.ts b/src/models/workspace.ts index 2e6886a6..6b0cde0e 100644 --- a/src/models/workspace.ts +++ b/src/models/workspace.ts @@ -5,6 +5,11 @@ import UserModel from './user'; import { ConfirmedMemberDBScheme, MemberDBScheme, PendingMemberDBScheme, WorkspaceDBScheme } from '@hawk.so/types'; import crypto from 'crypto'; +/** + * Used for inserts into team collection which should not have '_id' field + */ +type MemberDBSchemeWithoutId = Omit | Omit; + /** * Workspace model */ @@ -46,6 +51,7 @@ export default class WorkspaceModel extends AbstractModel imp /** * Workspace balance + * @deprecated NOT USED */ public balance!: number; @@ -76,6 +82,11 @@ export default class WorkspaceModel extends AbstractModel imp */ public isDebug?: boolean; + /** + * SSO configuration + */ + public sso?: WorkspaceDBScheme['sso']; + /** * Model's collection */ @@ -84,7 +95,7 @@ export default class WorkspaceModel extends AbstractModel imp /** * Collection with information about team for workspace */ - protected teamCollection: Collection; + protected teamCollection: Collection; /** * Creates Workspace instance @@ -94,7 +105,7 @@ export default class WorkspaceModel extends AbstractModel imp constructor(workspaceData: WorkspaceDBScheme) { super(workspaceData); this.collection = this.dbConnection.collection('workspaces'); - this.teamCollection = this.dbConnection.collection('team:' + this._id.toString()); + this.teamCollection = this.dbConnection.collection('team:' + this._id.toString()); } /** @@ -103,7 +114,7 @@ export default class WorkspaceModel extends AbstractModel imp public static generateInviteHash(): string { return crypto .createHash('sha256') - 
.update(crypto.randomBytes(256)) + .update(crypto.randomBytes(256).toString('hex')) .digest('hex'); } @@ -409,6 +420,25 @@ export default class WorkspaceModel extends AbstractModel imp ); } + /** + * Update SSO configuration + * @param ssoConfig - SSO configuration to set (or undefined to remove) + */ + public async setSsoConfig(ssoConfig: WorkspaceDBScheme['sso'] | undefined): Promise { + this.sso = ssoConfig; + + await this.collection.updateOne( + { + _id: new ObjectId(this._id), + }, + { + $set: { + sso: this.sso, + }, + } + ); + } + /** * Due date of the current workspace tariff plan */ diff --git a/src/mongo.ts b/src/mongo.ts index 43b4d202..06d39ee6 100644 --- a/src/mongo.ts +++ b/src/mongo.ts @@ -54,10 +54,11 @@ export const mongoClients: MongoClients = { /** * Common params for all connections */ -const connectionConfig: MongoClientOptions = withMongoMetrics({ - useNewUrlParser: true, - useUnifiedTopology: true, -}); +/** + * Common params for all connections + * Note: useNewUrlParser and useUnifiedTopology are deprecated in mongodb 6.x and removed + */ +const connectionConfig: MongoClientOptions = withMongoMetrics({}); /** * Setups connections to the databases (hawk api and events databases) diff --git a/src/redisHelper.ts b/src/redisHelper.ts index 82e64ef0..61a82e6a 100644 --- a/src/redisHelper.ts +++ b/src/redisHelper.ts @@ -139,6 +139,15 @@ export default class RedisHelper { return Boolean(this.redisClient?.isOpen); } + /** + * Get Redis client instance + * + * @returns Redis client or null if not initialized + */ + public getClient(): RedisClientType | null { + return this.redisClient; + } + /** * Execute TS.RANGE command with aggregation * diff --git a/src/resolvers/billing.js b/src/resolvers/billing.js deleted file mode 100644 index 00c3e109..00000000 --- a/src/resolvers/billing.js +++ /dev/null @@ -1,75 +0,0 @@ -const PaymentRequest = require('../models/paymentRequest'); -const UserCard = require('../models/userCard'); -const rabbitmq = 
require('../rabbitmq'); -const PaymentTransaction = require('../models/paymentTransaction'); - -/** - * @typedef {Object} PaymentQuery - * @property {Number} amount - total payment amount in kopecs - * @property {string} workspaceId - workspace identifier - * @property {string} cardId - card identifier from bank - */ - -/** - * @typedef {Object} PaymentLink - * @property {Number} amount - total payment amount in kopecs - * @property {string} status - payment status - * @property {string} success - if the payment is successfull - * @property {string} paymentURL - URL to the payment page - */ - -/** - * See all types and fields here {@link ../typeDefs/billing.graphql} - */ -module.exports = { - Mutation: { - /** - * API Mutation method for card detach - * @param {ResolverObj} _obj - * @param {Number} cardId - card's identifier - * @param {Object} user - current user object - * @return {Promise} - */ - async removeCard(_obj, { cardId }, { user }) { - return (await UserCard.remove({ - cardId, - userId: user.id, - })).deletedCount === 1; - }, - - /** - * Mutation for getting payment link - * @param {ResolverObj} _obj - * @param {string} language - * @param {Object} user - current user object - * @return {Promise} - */ - async attachCard(_obj, { language }, { user }) { - const orderId = PaymentRequest.generateOrderId(); - const result = await PaymentRequest.apiInitPayment(user.id, { - recurrent: 'Y', - language: language || 'en', - data: { - UserId: user.id, - }, - amount: 100, - orderId: orderId, - }); - const transaction = await PaymentTransaction.create({ - userId: user.id, - amount: result.Amount, - orderId: orderId, - paymentId: result.PaymentId, - status: result.Status, - timestamp: parseInt((Date.now() / 1000).toFixed(0)), - }); - - await rabbitmq.publish('merchant', 'merchant/initialized', JSON.stringify({ - paymentURL: result.PaymentURL, - ...transaction, - })); - - return result; - }, - }, -}; diff --git a/src/resolvers/event.js b/src/resolvers/event.js index 
08aee5c7..c3c44971 100644 --- a/src/resolvers/event.js +++ b/src/resolvers/event.js @@ -131,9 +131,9 @@ module.exports = { async visitEvent(_obj, { projectId, eventId }, { user, ...context }) { const factory = getEventsFactory(context, projectId); - const { result } = await factory.visitEvent(eventId, user.id); + const result = await factory.visitEvent(eventId, user.id); - return !!result.ok; + return !!result.acknowledged; }, /** @@ -148,9 +148,9 @@ module.exports = { async toggleEventMark(_obj, { project, eventId, mark }, context) { const factory = getEventsFactory(context, project); - const { result } = await factory.toggleEventMark(eventId, mark); + const result = await factory.toggleEventMark(eventId, mark); - return !!result.ok; + return !!result.acknowledged; }, /** @@ -192,7 +192,7 @@ module.exports = { }; } - const { result } = await factory.updateAssignee(eventId, assignee); + const result = await factory.updateAssignee(eventId, assignee); const assigneeData = await factories.usersFactory.dataLoaders.userById.load(assignee); @@ -207,7 +207,7 @@ module.exports = { }); return { - success: !!result.ok, + success: !!result.acknowledged, record: assigneeData, }; }, @@ -224,10 +224,10 @@ module.exports = { const { projectId, eventId } = input; const factory = getEventsFactory(context, projectId); - const { result } = await factory.updateAssignee(eventId, ''); + const result = await factory.updateAssignee(eventId, ''); return { - success: !!result.ok, + success: !!result.acknowledged, }; }, }, diff --git a/src/resolvers/user.ts b/src/resolvers/user.ts index af62dc80..7cadb46a 100644 --- a/src/resolvers/user.ts +++ b/src/resolvers/user.ts @@ -94,7 +94,41 @@ export default { ): Promise { const user = await factories.usersFactory.findByEmail(email); - if (!user || !(await user.comparePassword(password))) { + if (!user) { + throw new AuthenticationError('Wrong email or password'); + } + + /** + * Check if there is a workspace with enforced SSO + * If user is a 
member of any workspace with enforced SSO, they must use SSO login + * This check must happen BEFORE password validation to prevent password-based login + * even if the password is correct + */ + const workspacesIds = await user.getWorkspacesIds([]); + const workspaces = await factories.workspacesFactory.findManyByIds(workspacesIds); + + const enforcedWorkspace = workspaces.find(w => w.sso?.enabled && w.sso?.enforced); + + if (enforcedWorkspace) { + const error = new AuthenticationError( + 'SSO_REQUIRED' + ); + + /** + * Add workspace info to extensions for frontend + */ + error.extensions = { + code: 'SSO_REQUIRED', + workspaceName: enforcedWorkspace.name, + workspaceId: enforcedWorkspace._id.toString(), + }; + throw error; + } + + /** + * Only validate password if SSO is not enforced + */ + if (!(await user.comparePassword(password))) { throw new AuthenticationError('Wrong email or password'); } @@ -128,7 +162,15 @@ export default { throw new ApolloError('There is no users with that id'); } - return user.generateTokensPair(); + /** + * Check if user is member of any workspace with enforced SSO + * to use shorter token lifetime + */ + const workspacesIds = await user.getWorkspacesIds([]); + const workspaces = await factories.workspacesFactory.findManyByIds(workspacesIds); + const hasEnforcedSso = workspaces.some(w => w.sso?.enabled && w.sso?.enforced); + + return user.generateTokensPair(hasEnforcedSso); }, /** diff --git a/src/resolvers/workspace.js b/src/resolvers/workspace.js index 8b46a5b1..2333099d 100644 --- a/src/resolvers/workspace.js +++ b/src/resolvers/workspace.js @@ -33,6 +33,35 @@ module.exports = { return factories.workspacesFactory.findManyByIds(await authenticatedUser.getWorkspacesIds(ids)); }, + + /** + * Get workspace public info by ID for SSO login page + * Returns only id, name, image if SSO is enabled for the workspace + * Available without authentication (@allowAnon) + * @param {ResolverObj} _obj - object that contains the result returned from 
the resolver on the parent field + * @param {String} id - workspace ID + * @param {ContextFactories} factories - factories for working with models + * @return {Object|null} Workspace public info or null if workspace not found or SSO not enabled + */ + async ssoWorkspace(_obj, { id }, { factories }) { + const workspace = await factories.workspacesFactory.findById(id); + + /** + * Check if workspace exists and has SSO enabled + */ + if (!workspace || !(workspace.sso && workspace.sso.enabled)) { + return null; + } + + /** + * Return only public fields: id, name, image + */ + return { + _id: workspace._id, + name: workspace.name, + image: workspace.image || null, + }; + }, }, Mutation: { /** @@ -329,6 +358,62 @@ module.exports = { return true; }, + /** + * Update workspace SSO configuration (admin only) + * Protected by @requireAdmin directive - admin check is done by directive + * @param {ResolverObj} _obj - object that contains the result returned from the resolver on the parent field + * @param {String} workspaceId - workspace ID + * @param {Object} config - SSO configuration + * @param {ContextFactories} factories - factories for working with models + * @return {Promise} + */ + async updateWorkspaceSso(_obj, { workspaceId, config }, { factories }) { + const workspace = await factories.workspacesFactory.findById(workspaceId); + + if (!workspace) { + throw new UserInputError('Workspace not found'); + } + + /** + * Validate configuration + */ + if (config.enabled && !config.saml) { + throw new UserInputError('SAML configuration is required when SSO is enabled'); + } + + /** + * Prepare SSO configuration + * If enabled=false, preserve existing SSO config and only update enabled flag + * If enabled=true, update full SSO configuration + */ + const ssoConfig = config.enabled ? 
{ + enabled: config.enabled, + enforced: config.enforced || false, + type: 'saml', + saml: { + idpEntityId: config.saml.idpEntityId, + ssoUrl: config.saml.ssoUrl, + x509Cert: config.saml.x509Cert, + nameIdFormat: config.saml.nameIdFormat, + attributeMapping: { + email: config.saml.attributeMapping.email, + name: config.saml.attributeMapping.name, + }, + }, + } : workspace.sso ? { + ...workspace.sso, + enabled: false, + } : undefined; + + /** + * Update SSO configuration using model method + * This method handles the update correctly without touching other fields + */ + await workspace.setSsoConfig(ssoConfig); + + return true; + }, + /** * Change workspace plan for default plan mutation implementation * @@ -493,6 +578,28 @@ module.exports = { return new PlanModel(plan); }, + + /** + * SSO configuration (admin only) + * Protected by @definedOnlyForAdmins directive - returns null for non-admin users + * @param {WorkspaceDBScheme} workspace - result from resolver above (parent workspace object) + * @param _args - empty list of args + * @param {UserInContext} context - resolver context + * @returns {Promise} + */ + async sso(workspace, _args, { factories }) { + /** + * Get workspace model to access SSO config + * Admin check is done by @definedOnlyForAdmins directive + */ + const workspaceModel = await factories.workspacesFactory.findById(workspace._id.toString()); + + if (!workspaceModel) { + return null; + } + + return workspaceModel.sso || null; + }, }, /** diff --git a/src/schema.ts b/src/schema.ts index 562e93c0..f2fe5008 100644 --- a/src/schema.ts +++ b/src/schema.ts @@ -9,6 +9,7 @@ import uploadImageDirective from './directives/uploadImageDirective'; import allowAnonDirective from './directives/allowAnon'; import requireAdminDirective from './directives/requireAdmin'; import requireUserInWorkspaceDirective from './directives/requireUserInWorkspace'; +import definedOnlyForAdminsDirective from './directives/definedOnlyForAdmins'; const { 
renameFromDirectiveTypeDefs, renameFromDirectiveTransformer } = renameFromDirective(); const { defaultValueDirectiveTypeDefs, defaultValueDirectiveTransformer } = defaultValueDirective(); @@ -17,6 +18,7 @@ const { uploadImageDirectiveTypeDefs, uploadImageDirectiveTransformer } = upload const { allowAnonDirectiveTypeDefs, allowAnonDirectiveTransformer } = allowAnonDirective(); const { requireAdminDirectiveTypeDefs, requireAdminDirectiveTransformer } = requireAdminDirective(); const { requireUserInWorkspaceDirectiveTypeDefs, requireUserInWorkspaceDirectiveTransformer } = requireUserInWorkspaceDirective(); +const { definedOnlyForAdminsDirectiveTypeDefs, definedOnlyForAdminsDirectiveTransformer } = definedOnlyForAdminsDirective(); let schema = makeExecutableSchema({ typeDefs: mergeTypeDefs([ @@ -27,6 +29,7 @@ let schema = makeExecutableSchema({ allowAnonDirectiveTypeDefs, requireAdminDirectiveTypeDefs, requireUserInWorkspaceDirectiveTypeDefs, + definedOnlyForAdminsDirectiveTypeDefs, ...typeDefs, ]), resolvers, @@ -39,5 +42,6 @@ schema = uploadImageDirectiveTransformer(schema); schema = requireAdminDirectiveTransformer(schema); schema = allowAnonDirectiveTransformer(schema); schema = requireUserInWorkspaceDirectiveTransformer(schema); +schema = definedOnlyForAdminsDirectiveTransformer(schema); export default schema; diff --git a/src/sso/index.ts b/src/sso/index.ts new file mode 100644 index 00000000..cbe28ea7 --- /dev/null +++ b/src/sso/index.ts @@ -0,0 +1,15 @@ +import express from 'express'; +import { createSamlRouter } from './saml'; +import { ContextFactories } from '../types/graphql'; + +/** + * Append SSO routes to Express app + * + * @param app - Express application instance + * @param factories - context factories for database access + */ +export function appendSsoRoutes(app: express.Application, factories: ContextFactories): void { + const samlRouter = createSamlRouter(factories); + + app.use('/auth/sso/saml', samlRouter); +} diff --git 
a/src/sso/saml/controller.ts b/src/sso/saml/controller.ts new file mode 100644 index 00000000..8470e014 --- /dev/null +++ b/src/sso/saml/controller.ts @@ -0,0 +1,573 @@ +import express from 'express'; +import { v4 as uuid } from 'uuid'; +import { ObjectId } from 'mongodb'; +import SamlService from './service'; +import { SamlStateStoreInterface } from './store/SamlStateStoreInterface'; +import { ContextFactories } from '../../types/graphql'; +import { SamlResponseData } from '../types'; +import WorkspaceModel from '../../models/workspace'; +import UserModel from '../../models/user'; +import { sgr, Effect } from '../../utils/ansi'; + +/** + * Controller for SAML SSO endpoints + */ +export default class SamlController { + /** + * SAML service instance + */ + private samlService: SamlService; + + /** + * Context factories for database access + */ + private factories: ContextFactories; + + /** + * SAML state store instance + */ + private store: SamlStateStoreInterface; + + /** + * SAML controller constructor used for DI + * + * @param factories - for working with models + * @param store - SAML state store instance + */ + constructor(factories: ContextFactories, store: SamlStateStoreInterface) { + this.samlService = new SamlService(); + this.factories = factories; + this.store = store; + } + + /** + * Initiate SSO login (GET /auth/sso/saml/:workspaceId) + * @param req - Express request + * @param res - Express response + */ + public async initiateLogin(req: express.Request, res: express.Response): Promise { + const { workspaceId } = req.params; + + try { + const returnUrl = (req.query.returnUrl as string) || `/workspace/${workspaceId}`; + + /** + * Validate workspace ID format + */ + if (!this.isValidWorkspaceId(workspaceId)) { + this.log('warn', 'Invalid workspace ID format:', sgr(workspaceId, Effect.ForegroundRed)); + res.status(400).json({ error: 'Invalid workspace ID' }); + + return; + } + + /** + * 1. 
Check if workspace has SSO enabled + */ + const workspace = await this.factories.workspacesFactory.findById(workspaceId); + + if (!workspace || !workspace.sso?.enabled) { + this.log('warn', 'SSO not enabled for workspace:', sgr(workspaceId, Effect.ForegroundCyan)); + res.status(400).json({ error: 'SSO is not enabled for this workspace' }); + + return; + } + + /** + * 2. Compose Assertion Consumer Service URL + */ + const acsUrl = this.getAcsUrl(workspaceId); + const relayStateId = uuid(); + + /** + * 3. Save RelayState to temporary storage + */ + this.log( + 'info', + '[Store] Saving RelayState:', + sgr(relayStateId.slice(0, 8), Effect.ForegroundGray), + '| Store:', + sgr(this.store.type, Effect.ForegroundBlue), + '| Workspace:', + sgr(workspaceId, Effect.ForegroundCyan) + ); + await this.store.saveRelayState(relayStateId, { + returnUrl, + workspaceId, + }); + this.log('log', '[Store] RelayState saved:', sgr(relayStateId.slice(0, 8), Effect.ForegroundGray)); + + /** + * 4. Generate AuthnRequest + */ + const spEntityId = process.env.SSO_SP_ENTITY_ID || 'NOT_SET'; + + this.log( + 'info', + 'Generating SAML AuthnRequest:', + '| Workspace:', + sgr(workspaceId, Effect.ForegroundCyan), + '| SP Entity ID:', + sgr(spEntityId, [Effect.ForegroundMagenta, Effect.Bold]), + '| ACS URL:', + sgr(acsUrl, Effect.ForegroundGray) + ); + + const { requestId, encodedRequest } = await this.samlService.generateAuthnRequest( + workspaceId, + acsUrl, + relayStateId, + workspace.sso.saml + ); + + /** + * 5. Save AuthnRequest ID for InResponseTo validation + */ + this.log( + 'info', + '[Store] Saving AuthnRequest:', + sgr(requestId.slice(0, 8), Effect.ForegroundGray), + '| Store:', + sgr(this.store.type, Effect.ForegroundBlue), + '| Workspace:', + sgr(workspaceId, Effect.ForegroundCyan) + ); + await this.store.saveAuthnRequest(requestId, workspaceId); + this.log('log', '[Store] AuthnRequest saved:', sgr(requestId.slice(0, 8), Effect.ForegroundGray)); + + /** + * 6. 
Redirect to IdP + */ + const redirectUrl = new URL(workspace.sso.saml.ssoUrl); + + redirectUrl.searchParams.set('SAMLRequest', encodedRequest); + redirectUrl.searchParams.set('RelayState', relayStateId); + + this.log( + 'log', + 'Initiating SSO login for workspace:', + sgr(workspaceId, [Effect.ForegroundCyan, Effect.Bold]), + '| Request ID:', + sgr(requestId.slice(0, 8), Effect.ForegroundGray) + ); + + res.redirect(redirectUrl.toString()); + } catch (error) { + this.log( + 'error', + 'SSO initiation error for workspace:', + sgr(workspaceId, Effect.ForegroundCyan), + '|', + sgr(error instanceof Error ? error.message : 'Unknown error', Effect.ForegroundRed) + ); + res.status(500).json({ error: 'Failed to initiate SSO login' }); + } + } + + /** + * Handle ACS callback (POST /auth/sso/saml/:workspaceId/acs) + * @param req - Express request object + * @param res - Express response object + * @returns void + */ + public async handleAcs(req: express.Request, res: express.Response): Promise { + const { workspaceId } = req.params; + + try { + const samlResponse = req.body.SAMLResponse as string; + const relayStateId = req.body.RelayState as string; + + /** + * Validate workspace ID format + */ + if (!this.isValidWorkspaceId(workspaceId)) { + this.log('warn', '[ACS] Invalid workspace ID format:', sgr(workspaceId, Effect.ForegroundRed)); + res.status(400).json({ error: 'Invalid workspace ID' }); + + return; + } + + /** + * Validate required SAML response + */ + if (!samlResponse) { + this.log('warn', '[ACS] Missing SAML response for workspace:', sgr(workspaceId, Effect.ForegroundCyan)); + res.status(400).json({ error: 'SAML response is required' }); + + return; + } + + /** + * 1. 
Get workspace SSO configuration and check if SSO is enabled + */ + const workspace = await this.factories.workspacesFactory.findById(workspaceId); + + if (!workspace || !workspace.sso?.enabled) { + this.log('warn', '[ACS] SSO not enabled for workspace:', sgr(workspaceId, Effect.ForegroundCyan)); + res.status(400).json({ error: 'SSO is not enabled for this workspace' }); + + return; + } + + /** + * 2. Validate and parse SAML Response + */ + const acsUrl = this.getAcsUrl(workspaceId); + + let samlData: SamlResponseData; + + try { + /** + * Validate and parse SAML Response + * Note: InResponseTo validation is done separately after parsing + */ + samlData = await this.samlService.validateAndParseResponse( + samlResponse, + workspaceId, + acsUrl, + workspace.sso.saml + ); + + this.log( + 'log', + '[ACS] SAML response validated for workspace:', + sgr(workspaceId, Effect.ForegroundCyan), + '| User:', + sgr(samlData.email, [Effect.ForegroundMagenta, Effect.Bold]) + ); + + /** + * Validate InResponseTo against stored AuthnRequest + */ + if (samlData.inResponseTo) { + this.log( + 'info', + '[Store] Validating AuthnRequest:', + sgr(samlData.inResponseTo.slice(0, 8), Effect.ForegroundGray), + '| Store:', + sgr(this.store.type, Effect.ForegroundBlue), + '| Workspace:', + sgr(workspaceId, Effect.ForegroundCyan) + ); + const isValidRequest = await this.store.validateAndConsumeAuthnRequest( + samlData.inResponseTo, + workspaceId + ); + + if (isValidRequest) { + this.log( + 'log', + '[Store] AuthnRequest validated and consumed:', + sgr(samlData.inResponseTo.slice(0, 8), Effect.ForegroundGray) + ); + } else { + this.log( + 'warn', + '[Store] AuthnRequest validation failed:', + sgr(samlData.inResponseTo.slice(0, 8), Effect.ForegroundRed) + ); + } + + if (!isValidRequest) { + this.log( + 'error', + '[ACS] InResponseTo validation failed for workspace:', + sgr(workspaceId, Effect.ForegroundCyan), + '| Request ID:', + sgr(samlData.inResponseTo.slice(0, 8), Effect.ForegroundGray) + ); + 
res.status(400).json({ error: 'Invalid SAML response: InResponseTo validation failed' }); + + return; + } + } + } catch (error) { + this.log( + 'error', + '[ACS] SAML validation error for workspace:', + sgr(workspaceId, Effect.ForegroundCyan), + '|', + sgr(error instanceof Error ? error.message : 'Unknown error', Effect.ForegroundRed) + ); + res.status(400).json({ error: 'Invalid SAML response' }); + + return; + } + + /** + * 3. Find or create user + */ + let user = await this.factories.usersFactory.findBySamlIdentity(workspaceId, samlData.nameId); + + if (!user) { + /** + * JIT provisioning or invite-only policy + */ + this.log( + 'info', + '[ACS] User not found, starting provisioning:', + sgr(samlData.email, Effect.ForegroundMagenta), + '| Workspace:', + sgr(workspaceId, Effect.ForegroundCyan) + ); + user = await this.handleUserProvisioning(workspaceId, samlData, workspace); + } else { + this.log( + 'log', + '[ACS] Existing user found:', + sgr(samlData.email, Effect.ForegroundMagenta), + '| User ID:', + sgr(user._id.toString().slice(0, 8), Effect.ForegroundGray) + ); + } + + /** + * 4. Get RelayState for return URL (before consuming) + * Note: RelayState is consumed after first use, so we need to get it before validation + */ + this.log( + 'info', + '[Store] Getting RelayState:', + sgr(relayStateId.slice(0, 8), Effect.ForegroundGray), + '| Store:', + sgr(this.store.type, Effect.ForegroundBlue) + ); + const relayState = await this.store.getRelayState(relayStateId); + + if (relayState) { + this.log( + 'log', + '[Store] RelayState retrieved and consumed:', + sgr(relayStateId.slice(0, 8), Effect.ForegroundGray), + '| Return URL:', + sgr(relayState.returnUrl, Effect.ForegroundGray) + ); + } else { + this.log('warn', '[Store] RelayState not found or expired:', sgr(relayStateId.slice(0, 8), Effect.ForegroundRed)); + } + + const finalReturnUrl = relayState?.returnUrl || `/workspace/${workspaceId}`; + + /** + * 5. 
Create Hawk session + * Use shorter token lifetime for enforced SSO workspaces + */ + const tokens = await user.generateTokensPair(workspace.sso?.enforced || false); + + /** + * 6. Redirect to Garage SSO callback page with tokens + * The SSO callback page will save tokens to store and redirect to finalReturnUrl + */ + const callbackPath = `/login/sso/${workspaceId}`; + const frontendUrl = new URL(callbackPath, process.env.GARAGE_URL || 'http://localhost:3000'); + + frontendUrl.searchParams.set('access_token', tokens.accessToken); + frontendUrl.searchParams.set('refresh_token', tokens.refreshToken); + frontendUrl.searchParams.set('returnUrl', finalReturnUrl); + + this.log( + 'success', + '[ACS] ✓ SSO login successful:', + sgr(samlData.email, [Effect.ForegroundMagenta, Effect.Bold]), + '| Workspace:', + sgr(workspaceId, Effect.ForegroundCyan), + '| Redirecting to:', + sgr(callbackPath, Effect.ForegroundGray), + '→', + sgr(finalReturnUrl, Effect.ForegroundGray) + ); + + res.redirect(frontendUrl.toString()); + } catch (error) { + /** + * Handle specific error types + */ + if (error instanceof Error && error.message.includes('SAML')) { + this.log( + 'error', + '[ACS] SAML processing error for workspace:', + sgr(workspaceId, Effect.ForegroundCyan), + '|', + sgr(error.message, Effect.ForegroundRed) + ); + res.status(400).json({ error: 'Invalid SAML response' }); + + return; + } + + this.log( + 'error', + '[ACS] ACS callback error for workspace:', + sgr(workspaceId, Effect.ForegroundCyan), + '|', + sgr(error instanceof Error ? 
error.message : 'Unknown error', Effect.ForegroundRed) + ); + res.status(500).json({ error: 'Failed to process SSO callback' }); + } + } + + /** + * Log message with SSO prefix + * + * @param level - log level ('log', 'warn', 'error', 'info', 'success') + * @param args - arguments to log + */ + private log(level: 'log' | 'warn' | 'error' | 'info' | 'success', ...args: unknown[]): void { + /** + * Disable logging in test environment + */ + if (process.env.NODE_ENV === 'test') { + return; + } + + const colors = { + log: Effect.ForegroundGreen, + warn: Effect.ForegroundYellow, + error: Effect.ForegroundRed, + info: Effect.ForegroundBlue, + success: [Effect.ForegroundGreen, Effect.Bold], + }; + + let logger: typeof console.log; + + if (level === 'error') { + logger = console.error; + } else if (level === 'warn') { + logger = console.warn; + } else { + logger = console.log; + } + + logger(sgr('[SSO]', colors[level]), ...args); + } + + /** + * Validate workspace ID format + * + * @param workspaceId - workspace ID to validate + * @returns true if valid, false otherwise + */ + private isValidWorkspaceId(workspaceId: string): boolean { + return ObjectId.isValid(workspaceId); + } + + /** + * Compose Assertion Consumer Service URL for workspace + * + * @param workspaceId - workspace ID + * @returns ACS URL + */ + private getAcsUrl(workspaceId: string): string { + const apiUrl = process.env.API_URL || 'https://api.hawk.so'; + + return `${apiUrl}/auth/sso/saml/${workspaceId}/acs`; + } + + /** + * Handle user provisioning (JIT or invite-only) + * + * @param workspaceId - workspace ID + * @param samlData - parsed SAML response data + * @param workspace - workspace model + * @returns UserModel instance + */ + private async handleUserProvisioning( + workspaceId: string, + samlData: SamlResponseData, + workspace: WorkspaceModel + ): Promise { + try { + /** + * Find user by email + */ + let user = await this.factories.usersFactory.findByEmail(samlData.email); + + if (!user) { + /** + 
* Create new user (JIT provisioning) + * Password is not set - only SSO login is allowed + */ + this.log( + 'info', + '[Provisioning] Creating new user:', + sgr(samlData.email, [Effect.ForegroundMagenta, Effect.Bold]), + '| Workspace:', + sgr(workspaceId, Effect.ForegroundCyan) + ); + user = await this.factories.usersFactory.create(samlData.email, undefined, undefined); + } + + /** + * Link SAML identity to user + */ + this.log( + 'info', + '[Provisioning] Linking SAML identity for user:', + sgr(samlData.email, Effect.ForegroundMagenta), + '| NameID:', + sgr(samlData.nameId.slice(0, 16) + '...', Effect.ForegroundGray) + ); + await user.linkSamlIdentity(workspaceId, samlData.nameId, samlData.email); + + /** + * Check if user is a member of the workspace + */ + const member = await workspace.getMemberInfo(user._id.toString()); + + if (!member) { + /** + * Add user to workspace (JIT provisioning) + */ + this.log( + 'log', + '[Provisioning] Adding user to workspace:', + sgr(samlData.email, Effect.ForegroundMagenta), + '| Workspace:', + sgr(workspaceId, Effect.ForegroundCyan) + ); + await workspace.addMember(user._id.toString()); + await user.addWorkspace(workspaceId); + } else if (WorkspaceModel.isPendingMember(member)) { + /** + * Confirm pending membership + */ + this.log( + 'log', + '[Provisioning] Confirming pending membership:', + sgr(samlData.email, Effect.ForegroundMagenta), + '| Workspace:', + sgr(workspaceId, Effect.ForegroundCyan) + ); + await workspace.confirmMembership(user); + await user.confirmMembership(workspaceId); + } else { + this.log( + 'log', + '[Provisioning] User already member of workspace:', + sgr(samlData.email, Effect.ForegroundMagenta) + ); + } + + this.log( + 'success', + '[Provisioning] ✓ User provisioning completed:', + sgr(samlData.email, [Effect.ForegroundMagenta, Effect.Bold]), + '| User ID:', + sgr(user._id.toString(), Effect.ForegroundGray) + ); + + return user; + } catch (error) { + this.log( + 'error', + '[Provisioning] 
Provisioning error for user:', + sgr(samlData.email, Effect.ForegroundMagenta), + '| Workspace:', + sgr(workspaceId, Effect.ForegroundCyan), + '|', + sgr(error instanceof Error ? error.message : 'Unknown error', Effect.ForegroundRed) + ); + throw error; + } + } +} diff --git a/src/sso/saml/index.ts b/src/sso/saml/index.ts new file mode 100644 index 00000000..f7b09308 --- /dev/null +++ b/src/sso/saml/index.ts @@ -0,0 +1,42 @@ +import express from 'express'; +import SamlController from './controller'; +import { createSamlStateStore } from './storeFactory'; +import { ContextFactories } from '../../types/graphql'; + +/** + * Create SAML router + * + * @param factories - context factories for database access + * @returns Express router with SAML endpoints + */ +export function createSamlRouter(factories: ContextFactories): express.Router { + const router = express.Router(); + const store = createSamlStateStore(); + const controller = new SamlController(factories, store); + + /** + * SSO login initiation + * GET /auth/sso/saml/:workspaceId + */ + router.get('/:workspaceId', async (req, res, next) => { + try { + await controller.initiateLogin(req, res); + } catch (error) { + next(error); + } + }); + + /** + * ACS callback + * POST /auth/sso/saml/:workspaceId/acs + */ + router.post('/:workspaceId/acs', async (req, res, next) => { + try { + await controller.handleAcs(req, res); + } catch (error) { + next(error); + } + }); + + return router; +} diff --git a/src/sso/saml/service.ts b/src/sso/saml/service.ts new file mode 100644 index 00000000..b7236f6a --- /dev/null +++ b/src/sso/saml/service.ts @@ -0,0 +1,258 @@ +import { SAML, SamlConfig as NodeSamlConfig, Profile } from '@node-saml/node-saml'; +import { inflateRawSync } from 'zlib'; +import { SamlConfig, SamlResponseData } from '../types'; +import { SamlValidationError, SamlValidationErrorType } from './types'; +import { extractAttribute } from './utils'; + +/** + * Service for SAML SSO operations + */ +export default 
class SamlService { + /** + * Generate SAML AuthnRequest + * + * AuthnRequest - a SAML-message that Hawk sends to IdP to initiate auth process. + * + * @param workspaceId - workspace ID + * @param acsUrl - Assertion Consumer Service URL + * @param relayState - context of user returning (url + relay state id) + * @param samlConfig - SAML configuration + * @returns AuthnRequest ID and encoded SAML request + */ + public async generateAuthnRequest( + workspaceId: string, + acsUrl: string, + relayState: string, + samlConfig: SamlConfig + ): Promise<{ requestId: string; encodedRequest: string }> { + const saml = this.createSamlInstance(acsUrl, samlConfig); + + /** + * Generate AuthnRequest message + * node-saml returns object with SAMLRequest (deflated + base64 encoded) + */ + const authorizeMessage = await saml.getAuthorizeMessageAsync(relayState, undefined, {}); + + const encodedRequest = authorizeMessage.SAMLRequest as string; + + if (!encodedRequest) { + throw new Error('Failed to generate SAML AuthnRequest'); + } + + /** + * Extract request ID from the generated request + * node-saml generates unique ID internally using generateUniqueId option + * We need to decode and parse to get the ID for InResponseTo validation + */ + const requestId = this.extractRequestIdFromEncodedRequest(encodedRequest); + + return { + requestId, + encodedRequest, + }; + } + + /** + * Validate and parse SAML Response + * + * @param samlResponse - base64-encoded SAML Response + * @param workspaceId - workspace ID + * @param acsUrl - expected Assertion Consumer Service URL + * @param samlConfig - SAML configuration + * @param expectedRequestId - optional expected InResponseTo value (if provided, validates that response matches) + * @returns parsed SAML response data + * @throws SamlValidationError if validation fails + */ + public async validateAndParseResponse( + samlResponse: string, + workspaceId: string, + acsUrl: string, + samlConfig: SamlConfig, + expectedRequestId?: string + ): Promise 
{ + const saml = this.createSamlInstance(acsUrl, samlConfig); + + let profile: Profile; + + try { + /** + * node-saml validates: + * - XML signature using x509Cert + * - Audience (via idpIssuer option) + * - Time conditions (NotBefore, NotOnOrAfter with clock skew) + */ + const result = await saml.validatePostResponseAsync({ + SAMLResponse: samlResponse, + }); + + if (!result.profile) { + throw new SamlValidationError( + SamlValidationErrorType.INVALID_SIGNATURE, + 'SAML response validation failed: no profile returned' + ); + } + + profile = result.profile; + } catch (error) { + const message = error instanceof Error ? error.message : 'Unknown SAML validation error'; + + /** + * Determine specific error type based on error message + */ + if (message.includes('signature')) { + throw new SamlValidationError( + SamlValidationErrorType.INVALID_SIGNATURE, + `SAML signature validation failed: ${message}` + ); + } + + if (message.includes('expired') || message.includes('NotOnOrAfter') || message.includes('NotBefore')) { + throw new SamlValidationError( + SamlValidationErrorType.EXPIRED_ASSERTION, + `SAML assertion time validation failed: ${message}` + ); + } + + if (message.includes('audience') || message.includes('Audience')) { + throw new SamlValidationError( + SamlValidationErrorType.INVALID_AUDIENCE, + `SAML audience validation failed: ${message}` + ); + } + + /** + * Fallback for unknown error types + * Note: Error classification relies on message text which may change between library versions + */ + throw new SamlValidationError( + SamlValidationErrorType.VALIDATION_FAILED, + `SAML validation failed: ${message}` + ); + } + + /** + * Extract NameID (Profile type defines nameID as required string) + */ + const nameId = profile.nameID; + + if (!nameId) { + throw new SamlValidationError( + SamlValidationErrorType.INVALID_NAME_ID, + 'SAML response does not contain NameID' + ); + } + + /** + * Extract InResponseTo and validate if expectedRequestId provided + * Profile 
uses index signature [attributeName: string]: unknown for additional properties
+     */
+    const inResponseTo = profile.inResponseTo as string | undefined;
+
+    if (expectedRequestId && inResponseTo !== expectedRequestId) {
+      throw new SamlValidationError(
+        SamlValidationErrorType.INVALID_IN_RESPONSE_TO,
+        `InResponseTo mismatch: expected ${expectedRequestId}, got ${inResponseTo}`,
+        {
+          expected: expectedRequestId,
+          received: inResponseTo,
+        }
+      );
+    }
+
+    /**
+     * Extract attributes from profile
+     * node-saml puts SAML attributes directly on the profile object via index signature
+     */
+    const attributes = profile as unknown as Record<string, unknown>;
+
+    /**
+     * Extract email using attributeMapping
+     */
+    const email = extractAttribute(attributes, samlConfig.attributeMapping.email);
+
+    if (!email) {
+      throw new SamlValidationError(
+        SamlValidationErrorType.MISSING_EMAIL,
+        `Email attribute not found in SAML response. Expected attribute: ${samlConfig.attributeMapping.email}`,
+        { attributeMapping: samlConfig.attributeMapping }
+      );
+    }
+
+    /**
+     * Extract name using attributeMapping (optional)
+     */
+    let name: string | undefined;
+
+    if (samlConfig.attributeMapping.name) {
+      name = extractAttribute(attributes, samlConfig.attributeMapping.name);
+    }
+
+    return {
+      nameId,
+      email,
+      name,
+      inResponseTo,
+    };
+  }
+
+  /**
+   * Extract request ID from encoded SAML AuthnRequest
+   *
+   * @param encodedRequest - deflated and base64 encoded SAML request
+   * @returns request ID
+   */
+  private extractRequestIdFromEncodedRequest(encodedRequest: string): string {
+    /**
+     * Decode base64 and inflate
+     */
+    const decoded = Buffer.from(encodedRequest, 'base64');
+    const inflated = inflateRawSync(decoded as unknown as Uint8Array).toString('utf-8');
+
+    /**
+     * Extract ID attribute from AuthnRequest XML
+     * Format: <samlp:AuthnRequest xmlns:samlp="urn:oasis:names:tc:SAML:2.0:protocol" ID="_generatedUniqueId" ...>
+     */
+    const idMatch = inflated.match(/ID="([^"]+)"/);
+
+    if (!idMatch || !idMatch[1]) {
+      throw new Error('Failed to extract request ID from AuthnRequest');
+    }
+
+    return idMatch[1];
+  
} + + /** + * Create node-saml SAML instance with given configuration + * + * @param acsUrl - Assertion Consumer Service URL + * @param samlConfig - SAML configuration from workspace + * @returns configured SAML instance + */ + private createSamlInstance(acsUrl: string, samlConfig: SamlConfig): SAML { + const spEntityId = process.env.SSO_SP_ENTITY_ID; + + if (!spEntityId) { + throw new Error('SSO_SP_ENTITY_ID environment variable is not set'); + } + + const options: NodeSamlConfig = { + callbackUrl: acsUrl, + entryPoint: samlConfig.ssoUrl, + issuer: spEntityId, + idpIssuer: samlConfig.idpEntityId, + idpCert: samlConfig.x509Cert, + wantAssertionsSigned: true, + wantAuthnResponseSigned: false, + /** + * Allow 2 minutes clock skew for time validation + */ + acceptedClockSkewMs: 2 * 60 * 1000, + }; + + if (samlConfig.nameIdFormat) { + options.identifierFormat = samlConfig.nameIdFormat; + } + + return new SAML(options); + } +} diff --git a/src/sso/saml/store/SamlStateStoreInterface.ts b/src/sso/saml/store/SamlStateStoreInterface.ts new file mode 100644 index 00000000..b2607095 --- /dev/null +++ b/src/sso/saml/store/SamlStateStoreInterface.ts @@ -0,0 +1,61 @@ +/** + * Interface for SAML state store implementations + * + * Defines contract for storing temporary SAML authentication state: + * - RelayState: maps state ID to return URL and workspace ID + * - AuthnRequests: maps request ID to workspace ID for InResponseTo validation + */ +export interface SamlStateStoreInterface { + /** + * Store type identifier + * Used for logging and debugging purposes + * + * @example "redis" or "memory" + */ + readonly type: string; + + /** + * Save RelayState data + * + * @param stateId - unique state identifier (usually UUID) + * @param data - relay state data (returnUrl, workspaceId) + */ + saveRelayState(stateId: string, data: { returnUrl: string; workspaceId: string }): Promise; + + /** + * Get and consume RelayState data + * + * @param stateId - state identifier + * @returns relay 
state data or null if not found/expired + */ + getRelayState(stateId: string): Promise<{ returnUrl: string; workspaceId: string } | null>; + + /** + * Save AuthnRequest for InResponseTo validation + * + * @param requestId - SAML AuthnRequest ID + * @param workspaceId - workspace ID + */ + saveAuthnRequest(requestId: string, workspaceId: string): Promise; + + /** + * Validate and consume AuthnRequest + * + * @param requestId - SAML AuthnRequest ID (from InResponseTo) + * @param workspaceId - expected workspace ID + * @returns true if request is valid and matches workspace + */ + validateAndConsumeAuthnRequest(requestId: string, workspaceId: string): Promise; + + /** + * Stop cleanup timer (for testing) + * Optional method - only needed for in-memory store + */ + stopCleanupTimer?(): void; + + /** + * Clear all stored state (for testing) + * Optional method - only needed for in-memory store + */ + clear?(): void; +} diff --git a/src/sso/saml/store/memory.store.ts b/src/sso/saml/store/memory.store.ts new file mode 100644 index 00000000..b05f0e11 --- /dev/null +++ b/src/sso/saml/store/memory.store.ts @@ -0,0 +1,199 @@ +import { AuthnRequestState, RelayStateData } from '../types'; +import { SamlStateStoreInterface } from './SamlStateStoreInterface'; + +/** + * In-memory store for SAML state + * + * Stores temporary data needed for SAML authentication flow: + * - RelayState: maps state ID to return URL and workspace ID + * - AuthnRequests: maps request ID to workspace ID for InResponseTo validation + * + * Note: This implementation is not suitable for multi-instance deployments. + * Use Redis store for production environments with multiple API instances. 
+ */
+export class MemorySamlStateStore implements SamlStateStoreInterface {
+  /**
+   * Store type identifier
+   */
+  public readonly type = 'memory';
+
+  private relayStates: Map<string, RelayStateData> = new Map();
+  private authnRequests: Map<string, AuthnRequestState> = new Map();
+
+  /**
+   * Time-to-live for stored state (5 minutes)
+   */
+  private readonly TTL = 5 * 60 * 1000;
+
+  /**
+   * Interval for cleanup of expired entries (1 minute)
+   */
+  private readonly CLEANUP_INTERVAL = 60 * 1000;
+
+  /**
+   * Cleanup timer reference
+   */
+  private cleanupTimer: NodeJS.Timeout | null = null;
+
+  /**
+   * Store constructor
+   */
+  constructor() {
+    this.startCleanupTimer();
+  }
+
+  /**
+   * Save RelayState data
+   *
+   * @param stateId - unique state identifier (usually UUID)
+   * @param data - relay state data (returnUrl, workspaceId)
+   */
+  public async saveRelayState(stateId: string, data: { returnUrl: string; workspaceId: string }): Promise<void> {
+    this.relayStates.set(stateId, {
+      ...data,
+      expiresAt: Date.now() + this.TTL,
+    });
+  }
+
+  /**
+   * Get and consume RelayState data
+   *
+   * @param stateId - state identifier
+   * @returns relay state data or null if not found/expired
+   */
+  public async getRelayState(stateId: string): Promise<{ returnUrl: string; workspaceId: string } | null> {
+    const state = this.relayStates.get(stateId);
+
+    if (!state) {
+      return null;
+    }
+
+    /**
+     * Check expiration
+     */
+    if (Date.now() > state.expiresAt) {
+      this.relayStates.delete(stateId);
+
+      return null;
+    }
+
+    /**
+     * Consume (delete after use to prevent replay)
+     */
+    this.relayStates.delete(stateId);
+
+    return {
+      returnUrl: state.returnUrl,
+      workspaceId: state.workspaceId,
+    };
+  }
+
+  /**
+   * Save AuthnRequest for InResponseTo validation
+   *
+   * @param requestId - SAML AuthnRequest ID
+   * @param workspaceId - workspace ID
+   */
+  public async saveAuthnRequest(requestId: string, workspaceId: string): Promise<void> {
+    this.authnRequests.set(requestId, {
+      workspaceId,
+      expiresAt: Date.now() + this.TTL,
+    });
+  }
+
+  /**
+   * Validate 
and consume AuthnRequest + * + * @param requestId - SAML AuthnRequest ID (from InResponseTo) + * @param workspaceId - expected workspace ID + * @returns true if request is valid and matches workspace + */ + public async validateAndConsumeAuthnRequest(requestId: string, workspaceId: string): Promise { + const request = this.authnRequests.get(requestId); + + if (!request) { + return false; + } + + /** + * Check expiration + */ + if (Date.now() > request.expiresAt) { + this.authnRequests.delete(requestId); + + return false; + } + + /** + * Check workspace match + */ + if (request.workspaceId !== workspaceId) { + return false; + } + + /** + * Consume (delete after use to prevent replay attacks) + */ + this.authnRequests.delete(requestId); + + return true; + } + + /** + * Stop cleanup timer (for testing) + */ + public stopCleanupTimer(): void { + if (this.cleanupTimer) { + clearInterval(this.cleanupTimer); + this.cleanupTimer = null; + } + } + + /** + * Clear all stored state (for testing) + */ + public clear(): void { + this.relayStates.clear(); + this.authnRequests.clear(); + } + + /** + * Start periodic cleanup of expired entries + */ + private startCleanupTimer(): void { + /** + * Don't start timer in test environment + */ + if (process.env.NODE_ENV === 'test') { + return; + } + + this.cleanupTimer = setInterval(() => { + this.cleanup(); + }, this.CLEANUP_INTERVAL); + + /** + * Don't prevent process from exiting + */ + this.cleanupTimer.unref(); + } + + /** + * Clean up expired entries + */ + private cleanup(): void { + const now = Date.now(); + + for (const [key, value] of this.relayStates) { + if (now > value.expiresAt) { + this.relayStates.delete(key); + } + } + + for (const [key, value] of this.authnRequests) { + if (now > value.expiresAt) { + this.authnRequests.delete(key); + } + } + } +} diff --git a/src/sso/saml/store/redis.store.ts b/src/sso/saml/store/redis.store.ts new file mode 100644 index 00000000..a4c2a347 --- /dev/null +++ 
b/src/sso/saml/store/redis.store.ts @@ -0,0 +1,177 @@ +import { RedisClientType } from 'redis'; +import RedisHelper from '../../../redisHelper'; +import { SamlStateStoreInterface } from './SamlStateStoreInterface'; + +/** + * Redis-based store for SAML state + * + * Stores temporary data needed for SAML authentication flow in Redis: + * - RelayState: maps state ID to return URL and workspace ID + * - AuthnRequests: maps request ID to workspace ID for InResponseTo validation + * + * This implementation is suitable for multi-instance deployments as it uses + * Redis as the shared state store. TTLs are handled by Redis automatically. + */ +export class RedisSamlStateStore implements SamlStateStoreInterface { + /** + * Store type identifier + */ + public readonly type = 'redis'; + + /** + * Redis helper instance + */ + private redisHelper: RedisHelper; + + /** + * Time-to-live for stored state in seconds (5 minutes) + */ + private readonly TTL_SECONDS = 5 * 60; + + /** + * Prefix for RelayState keys in Redis + */ + private readonly RELAY_STATE_PREFIX = 'saml:relayState:'; + + /** + * Prefix for AuthnRequest keys in Redis + */ + private readonly AUTHN_REQUEST_PREFIX = 'saml:authnRequest:'; + + /** + * Store constructor + * + * @param redisHelper - Redis helper instance (defaults to singleton) + */ + constructor(redisHelper?: RedisHelper) { + this.redisHelper = redisHelper || RedisHelper.getInstance(); + } + + /** + * Save RelayState data + * + * @param stateId - unique state identifier (usually UUID) + * @param data - relay state data (returnUrl, workspaceId) + */ + public async saveRelayState(stateId: string, data: { returnUrl: string; workspaceId: string }): Promise { + const client = this.getClient(); + const key = `${this.RELAY_STATE_PREFIX}${stateId}`; + const value = JSON.stringify(data); + + await client.setEx(key, this.TTL_SECONDS, value); + } + + /** + * Get and consume RelayState data + * + * @param stateId - state identifier + * @returns relay state data or 
null if not found/expired + */ + public async getRelayState(stateId: string): Promise<{ returnUrl: string; workspaceId: string } | null> { + const client = this.getClient(); + const key = `${this.RELAY_STATE_PREFIX}${stateId}`; + + /** + * Get and delete atomically to prevent race conditions + * This ensures the state can only be consumed once + * Using MULTI/EXEC for atomic operation (compatible with Redis 5.0+) + */ + const results = await client + .multi() + .get(key) + .del(key) + .exec(); + + if (!results || results.length < 2) { + return null; + } + + const value = results[0] as string | null; + + if (!value) { + return null; + } + + try { + return JSON.parse(value) as { returnUrl: string; workspaceId: string }; + } catch (error) { + console.error('[Redis SAML Store] Failed to parse RelayState:', error); + + return null; + } + } + + /** + * Save AuthnRequest for InResponseTo validation + * + * @param requestId - SAML AuthnRequest ID + * @param workspaceId - workspace ID + */ + public async saveAuthnRequest(requestId: string, workspaceId: string): Promise { + const client = this.getClient(); + const key = `${this.AUTHN_REQUEST_PREFIX}${requestId}`; + + /** + * Store workspaceId as value + */ + await client.setEx(key, this.TTL_SECONDS, workspaceId); + } + + /** + * Validate and consume AuthnRequest + * + * @param requestId - SAML AuthnRequest ID (from InResponseTo) + * @param workspaceId - expected workspace ID + * @returns true if request is valid and matches workspace + */ + public async validateAndConsumeAuthnRequest(requestId: string, workspaceId: string): Promise { + const client = this.getClient(); + const key = `${this.AUTHN_REQUEST_PREFIX}${requestId}`; + + /** + * Get and delete atomically to prevent replay attacks + * This ensures the request can only be validated once + * Using MULTI/EXEC for atomic operation (compatible with Redis 5.0+) + */ + const results = await client + .multi() + .get(key) + .del(key) + .exec(); + + if (!results || 
results.length < 2) { + return false; + } + + const storedWorkspaceId = results[0] as string | null; + + if (!storedWorkspaceId) { + return false; + } + + /** + * Check workspace match + */ + return storedWorkspaceId === workspaceId; + } + + /** + * Get Redis client + * + * @returns Redis client instance + * @throws Error if Redis client is not available + */ + private getClient(): RedisClientType { + const client = this.redisHelper.getClient(); + + if (!client) { + throw new Error('Redis client is not available. Make sure Redis is initialized.'); + } + + if (!client.isOpen) { + throw new Error('Redis client is not connected. Make sure Redis connection is established.'); + } + + return client; + } +} diff --git a/src/sso/saml/storeFactory.ts b/src/sso/saml/storeFactory.ts new file mode 100644 index 00000000..1c3af998 --- /dev/null +++ b/src/sso/saml/storeFactory.ts @@ -0,0 +1,49 @@ +import RedisHelper from '../../redisHelper'; +import { MemorySamlStateStore } from './store/memory.store'; +import { RedisSamlStateStore } from './store/redis.store'; +import { SamlStateStoreInterface } from './store/SamlStateStoreInterface'; + +/** + * Create SAML state store instance based on configuration + * + * Store type is determined by SAML_STORE_TYPE environment variable: + * - 'redis' (default): Uses Redis store for multi-instance support + * - 'memory': Uses in-memory store (single instance only) + * + * @returns SAML state store instance + */ +export function createSamlStateStore(): SamlStateStoreInterface { + const storeType = (process.env.SAML_STORE_TYPE || 'redis').toLowerCase(); + + if (storeType === 'memory') { + return new MemorySamlStateStore(); + } + + if (storeType === 'redis') { + const redisHelper = RedisHelper.getInstance(); + + if (!redisHelper.isConnected()) { + console.warn( + '[SAML Store] Redis store requested but Redis is not connected. Falling back to memory store.' 
+      );
+
+      return new MemorySamlStateStore();
+    }
+
+    return new RedisSamlStateStore(redisHelper);
+  }
+
+  /**
+   * Unknown store type, default to Redis
+   */
+  console.warn(
+    `[SAML Store] Unknown store type "${storeType}". Defaulting to Redis.`
+  );
+  const redisHelper = RedisHelper.getInstance();
+
+  if (redisHelper.isConnected()) {
+    return new RedisSamlStateStore(redisHelper);
+  }
+
+  return new MemorySamlStateStore();
+}
diff --git a/src/sso/saml/types.ts b/src/sso/saml/types.ts
new file mode 100644
index 00000000..3a964969
--- /dev/null
+++ b/src/sso/saml/types.ts
@@ -0,0 +1,85 @@
+/**
+ * Internal types for SAML module
+ * These types are used only within the SAML module implementation
+ */
+
+/**
+ * Error types for SAML validation
+ */
+export enum SamlValidationErrorType {
+  INVALID_SIGNATURE = 'INVALID_SIGNATURE',
+  INVALID_AUDIENCE = 'INVALID_AUDIENCE',
+  INVALID_RECIPIENT = 'INVALID_RECIPIENT',
+  INVALID_IN_RESPONSE_TO = 'INVALID_IN_RESPONSE_TO',
+  EXPIRED_ASSERTION = 'EXPIRED_ASSERTION',
+  INVALID_NAME_ID = 'INVALID_NAME_ID',
+  MISSING_EMAIL = 'MISSING_EMAIL',
+  /**
+   * Generic validation error when specific type cannot be determined
+   * Used as fallback when library error messages don't match known patterns
+   */
+  VALIDATION_FAILED = 'VALIDATION_FAILED',
+}
+
+/**
+ * SAML validation error
+ */
+export class SamlValidationError extends Error {
+  /**
+   * Error type
+   */
+  public readonly type: SamlValidationErrorType;
+
+  /**
+   * Additional error context
+   */
+  public readonly context?: Record<string, unknown>;
+
+  /**
+   * Error constructor
+   * @param type - error kind, see SamlValidationErrorType
+   * @param message - string message
+   * @param context - additional data
+   */
+  constructor(type: SamlValidationErrorType, message: string, context?: Record<string, unknown>) {
+    super(message);
+    this.name = 'SamlValidationError';
+    this.type = type;
+    this.context = context;
+  }
+}
+
+/**
+ * Stored AuthnRequest state
+ */
+export interface AuthnRequestState {
+  /**
+   * Workspace ID
+   */
+  
workspaceId: string; + + /** + * Expiration timestamp + */ + expiresAt: number; +} + +/** + * Stored RelayState data + */ +export interface RelayStateData { + /** + * Return URL after SSO login + */ + returnUrl: string; + + /** + * Workspace ID + */ + workspaceId: string; + + /** + * Expiration timestamp + */ + expiresAt: number; +} diff --git a/src/sso/saml/utils.ts b/src/sso/saml/utils.ts new file mode 100644 index 00000000..cee5828b --- /dev/null +++ b/src/sso/saml/utils.ts @@ -0,0 +1,24 @@ +/** + * Utility functions for SAML operations + */ + +/** + * Extract attribute value from SAML Assertion attributes + * + * @param attributes - SAML attributes object + * @param attributeName - name of the attribute to extract + * @returns attribute value or undefined if not found + */ +export function extractAttribute(attributes: Record, attributeName: string): string | undefined { + const value = attributes[attributeName]; + + if (typeof value === 'string') { + return value; + } + + if (Array.isArray(value) && value.length > 0) { + return value[0]; + } + + return undefined; +} diff --git a/src/sso/types.ts b/src/sso/types.ts new file mode 100644 index 00000000..aef70830 --- /dev/null +++ b/src/sso/types.ts @@ -0,0 +1,35 @@ +/** + * Re-export SSO types from @hawk.so/types + */ +export type { + SamlAttributeMapping, + SamlConfig, + WorkspaceSsoConfig +} from '@hawk.so/types'; + +/** + * Data extracted from SAML Response + */ +export interface SamlResponseData { + /** + * NameID value (user identifier in IdP) + */ + nameId: string; + + /** + * User email + */ + email: string; + + /** + * User name (optional) + */ + name?: string; + + /** + * Identifier that should match AuthnRequest ID + * + * @example "_a8f7c3..." + */ + inResponseTo?: string; +} diff --git a/src/typeDefs/workspace.ts b/src/typeDefs/workspace.ts index cb9e3a6b..a40ad0f7 100644 --- a/src/typeDefs/workspace.ts +++ b/src/typeDefs/workspace.ts @@ -136,6 +136,189 @@ export default gql` """ ids: [ID!] 
= [] ): [Project!] + + """ + SSO configuration (admin only, returns null for non-admin users) + """ + sso: WorkspaceSsoConfig @definedOnlyForAdmins + } + + """ + SAML attribute mapping configuration + """ + type SamlAttributeMapping { + """ + Attribute name for email in SAML Assertion + Used to map the email attribute from the SAML response to the email attribute in the Hawk database + """ + email: String! + + """ + Attribute name for user name in SAML Assertion + Used to map the name attribute from the SAML response to the name attribute in the Hawk database + """ + name: String + } + + """ + SAML SSO configuration + """ + type SamlConfig { + """ + IdP Entity ID + Used to ensure that the SAML response is coming from the correct IdP + """ + idpEntityId: String! + + """ + SSO URL + Used to redirect user to the correct IdP + """ + ssoUrl: String! + + """ + X.509 certificate (masked for security) + Used to verify the signature of the SAML response + """ + x509Cert: String! + + """ + NameID format + Used to specify the format of the NameID in the SAML response + """ + nameIdFormat: String + + """ + Attribute mapping + Used to map the attributes from the SAML response to the attributes in the Hawk database + """ + attributeMapping: SamlAttributeMapping! + } + + """ + SSO configuration (admin only) + """ + type WorkspaceSsoConfig { + """ + Is SSO enabled + Used to enable or disable SSO for the workspace + """ + enabled: Boolean! + + """ + Is SSO enforced + Used to enforce SSO login for the workspace. If true, only SSO login is allowed. + """ + enforced: Boolean! + + """ + SSO provider type + Used to specify the type of the SSO provider for the workspace + """ + type: String! + + """ + SAML-specific configuration + Used to configure the SAML-specific settings for the workspace + """ + saml: SamlConfig! 
+ } + + """ + SAML attribute mapping input + """ + input SamlAttributeMappingInput { + """ + Attribute name for email in SAML Assertion + Used to map the email attribute from the SAML response to the email attribute in the Hawk database + """ + email: String! + + """ + Attribute name for user name in SAML Assertion + Used to map the name attribute from the SAML response to the name attribute in the Hawk database + """ + name: String + } + + """ + SAML SSO configuration input + """ + input SamlConfigInput { + """ + IdP Entity ID + Used to ensure that the SAML response is coming from the correct IdP + """ + idpEntityId: String! + + """ + SSO URL for redirecting user to IdP + Used to redirect user to the correct IdP + """ + ssoUrl: String! + + """ + X.509 certificate for signature verification (PEM format) + Used to verify the signature of the SAML response + """ + x509Cert: String! + + """ + Desired NameID format + Used to specify the format of the NameID in the SAML response + """ + nameIdFormat: String + + """ + Attribute mapping configuration + Used to map the attributes from the SAML response to the attributes in the Hawk database + """ + attributeMapping: SamlAttributeMappingInput! + } + + """ + SSO configuration input + """ + input WorkspaceSsoConfigInput { + """ + Is SSO enabled + Used to enable or disable SSO for the workspace + """ + enabled: Boolean! + + """ + Is SSO enforced (only SSO login allowed) + Used to enforce SSO login for the workspace. If true, only SSO login is allowed. + """ + enforced: Boolean! + + """ + SAML-specific configuration + Used to configure the SAML-specific settings for the workspace + """ + saml: SamlConfigInput! + } + + """ + Workspace preview with basic public info + Contains only basic fields: id, name, image + Used for public-facing features like SSO login page + """ + type WorkspacePreview { + """ + Workspace ID + """ + id: ID! @renameFrom(name: "_id") + + """ + Workspace name + """ + name: String! 
+ + """ + Workspace image/logo URL + """ + image: String } extend type Query { @@ -144,6 +327,13 @@ export default gql` If ids = [] returns all user's workspaces """ workspaces("Workspace(s) id(s)" ids: [ID] = []): [Workspace] + + """ + Get workspace public info by ID for SSO login page + Returns only id, name, image if SSO is enabled for the workspace + Available without authentication + """ + ssoWorkspace("Workspace ID" id: ID!): WorkspacePreview @allowAnon } extend type Mutation { @@ -286,5 +476,13 @@ export default gql` """ workspaceId: ID! ): Boolean! + + """ + Update workspace SSO configuration (admin only) + """ + updateWorkspaceSso( + workspaceId: ID! + config: WorkspaceSsoConfigInput! + ): Boolean! @requireAdmin } `; diff --git a/src/types/env.d.ts b/src/types/env.d.ts index 82eb4ce9..d57b460f 100644 --- a/src/types/env.d.ts +++ b/src/types/env.d.ts @@ -30,5 +30,32 @@ declare namespace NodeJS { * Secret string for encoding/decoding user's tokens */ JWT_SECRET_AUTH: string; + + /** + * SSO Service Provider Entity ID + * Unique identifier for Hawk in SAML IdP configuration + * + * @example "urn:hawk:tracker:saml" + */ + SSO_SP_ENTITY_ID: string; + + /** + * SAML state store type + * Determines which store implementation to use for SAML authentication state + * - 'redis': Uses Redis store for multi-instance support (default) + * - 'memory': Uses in-memory store (single instance only) + * + * @default 'redis' + * @example "redis" or "memory" + */ + SAML_STORE_TYPE?: string; + + /** + * Redis connection URL + * Used for caching and TimeSeries data + * + * @example "redis://redis:6379" (Docker) or "redis://localhost:6379" (local) + */ + REDIS_URL?: string; } } diff --git a/test/integration/api.env b/test/integration/api.env index 8598b55f..fe3384e4 100644 --- a/test/integration/api.env +++ b/test/integration/api.env @@ -53,10 +53,6 @@ SMTP_SENDER_ADDRESS= # AMQP URL AMQP_URL=amqp://guest:guest@rabbitmq:5672/ -# Billing settings -BILLING_DEBUG=true 
-BILLING_COMPANY_EMAIL="team@hawk.so" - ### Accounting module ### # Accounting service URL CODEX_ACCOUNTING_URL=http://accounting:3999/graphql @@ -76,13 +72,16 @@ GITHUB_CLIENT_ID=fakedata GITHUB_CLIENT_SECRET=fakedata ## Hawk API public url (used in OAuth to redirect to callback, should match OAuth app callback URL) -API_URL=http://127.0.0.1:4000 +API_URL=http://localhost:4000 ## Garage url -GARAGE_URL=http://127.0.0.1:8080 +GARAGE_URL=http://localhost:8080 ## Garage login url -GARAGE_LOGIN_URL=http://127.0.0.1:8080/login +GARAGE_LOGIN_URL=http://localhost:8080/login + +## SSO Service Provider Entity ID (must match Keycloak client ID) +SSO_SP_ENTITY_ID=urn:hawk:tracker:saml ## Upload dir UPLOAD_DIR=uploads diff --git a/test/integration/cases/billing/check.test.ts b/test/integration/cases/billing/check.test.ts index 2af26fcd..ad199354 100644 --- a/test/integration/cases/billing/check.test.ts +++ b/test/integration/cases/billing/check.test.ts @@ -20,8 +20,8 @@ describe('Check webhook', () => { let businessOperationsCollection: Collection; let workspacesCollection: Collection; - let plans: Collection; - let users: Collection; + let plans: Collection>; + let users: Collection>; let workspace: WorkspaceDBScheme; let externalUser: UserDBScheme; @@ -33,48 +33,77 @@ describe('Check webhook', () => { accountsDb = await global.mongoClient.db('hawk'); workspacesCollection = await accountsDb.collection('workspaces'); - users = await accountsDb.collection('users'); - plans = await accountsDb.collection('plans'); + users = await accountsDb.collection>('users'); + plans = await accountsDb.collection>('plans'); businessOperationsCollection = await accountsDb.collection('businessOperations'); }); beforeEach(async () => { - const currentPlan = (await plans.insertOne({ + const currentPlanId = (await plans.insertOne({ name: 'CurrentTestPlan', monthlyCharge: 10, monthlyChargeCurrency: 'USD', eventsLimit: 1000, isDefault: false, - })).ops[0]; + })).insertedId; + const currentPlan = 
await plans.findOne({ _id: currentPlanId }); + if (!currentPlan) { + throw new Error('Failed to create currentPlan'); + } - workspace = (await workspacesCollection.insertOne({ + const workspaceId = (await workspacesCollection.insertOne({ name: 'BillingTest', accountId: '123', tariffPlanId: currentPlan._id, - } as WorkspaceDBScheme)).ops[0]; - - externalUser = (await users.insertOne({ + } as WorkspaceDBScheme)).insertedId; + const workspaceResult = await workspacesCollection.findOne({ _id: workspaceId }); + if (!workspaceResult) { + throw new Error('Failed to create workspace'); + } + workspace = workspaceResult as WorkspaceDBScheme; + + const externalUserId = (await users.insertOne({ email: 'user@billing.test', - })).ops[0]; - - member = (await users.insertOne({ + })).insertedId; + const externalUserResult = await users.findOne({ _id: externalUserId }); + if (!externalUserResult) { + throw new Error('Failed to create externalUser'); + } + externalUser = externalUserResult as UserDBScheme; + + const memberId = (await users.insertOne({ email: 'member@billing.test', - })).ops[0]; - - admin = (await users.insertOne({ + })).insertedId; + const memberResult = await users.findOne({ _id: memberId }); + if (!memberResult) { + throw new Error('Failed to create member'); + } + member = memberResult as UserDBScheme; + + const adminId = (await users.insertOne({ email: 'admin@billing.test', - })).ops[0]; - - planToChange = (await plans.insertOne({ + })).insertedId; + const adminResult = await users.findOne({ _id: adminId }); + if (!adminResult) { + throw new Error('Failed to create admin'); + } + admin = adminResult as UserDBScheme; + + const planToChangeId = (await plans.insertOne({ name: 'BasicTest', monthlyCharge: 20, monthlyChargeCurrency: 'USD', eventsLimit: 10000, isDefault: false, - })).ops[0]; - - const team = await accountsDb.collection(`team:${workspace._id.toString()}`); + })).insertedId; + const planToChangeResult = await plans.findOne({ _id: planToChangeId }); + if 
(!planToChangeResult) { + throw new Error('Failed to create planToChange'); + } + planToChange = planToChangeResult as PlanDBScheme; + + const team = await accountsDb.collection>(`team:${workspace._id.toString()}`); await team.insertOne({ userId: member._id, diff --git a/test/integration/cases/sso.test.ts b/test/integration/cases/sso.test.ts new file mode 100644 index 00000000..59c0e975 --- /dev/null +++ b/test/integration/cases/sso.test.ts @@ -0,0 +1,529 @@ +import { + apiInstance, + waitForKeycloak, + getKeycloakSamlConfig, + createMockSamlResponse, + testUsers, + createTestWorkspace, + createTestUser, + cleanupWorkspace, + cleanupUser +} from '../utils'; +import { ObjectId } from 'mongodb'; + +/** + * Integration tests for SSO functionality + * + * These tests verify the full SSO flow with Keycloak as IdP. + * For some tests, we use mock SAML Response for faster execution. + */ +describe('SSO Integration Tests', () => { + let testWorkspaceId: string; + let keycloakConfig: Awaited>; + + /** + * Setup: Wait for Keycloak and get configuration + */ + beforeAll(async () => { + /** + * Wait for Keycloak to be ready + */ + await waitForKeycloak(); + + /** + * Get Keycloak SAML configuration + */ + keycloakConfig = await getKeycloakSamlConfig(); + }, 60000); + + /** + * Create test workspace before each test + */ + beforeEach(async () => { + testWorkspaceId = await createTestWorkspace({ + name: 'Test SSO Workspace', + sso: { + enabled: true, + enforced: false, + type: 'saml', + saml: { + idpEntityId: keycloakConfig.idpEntityId, + ssoUrl: keycloakConfig.ssoUrl, + x509Cert: keycloakConfig.x509Cert, + nameIdFormat: 'urn:oasis:names:tc:SAML:1.1:nameid-format:emailAddress', + attributeMapping: { + email: 'email', + name: 'name', + }, + }, + }, + }); + }); + + /** + * Cleanup after each test + */ + afterEach(async () => { + if (testWorkspaceId) { + await cleanupWorkspace(testWorkspaceId); + } + + /** + * Cleanup test users + */ + for (const user of Object.values(testUsers)) { 
+ try { + await cleanupUser(user.email); + } catch (error) { + /** + * Ignore errors if user doesn't exist + */ + } + } + }); + + describe('SSO Login Initiation', () => { + test('Should redirect to IdP when SSO is enabled', async () => { + /** + * Test Plan: + * 1. Call GET /auth/sso/saml/:workspaceId with SSO-enabled workspace + * 2. Verify 302 redirect response + * 3. Verify redirect location contains IdP SSO URL + * 4. Verify redirect contains SAMLRequest and RelayState parameters + * + * Expected: User is redirected to Keycloak login page + */ + + /** + * Step 1: Call SSO initiation endpoint + */ + const response = await apiInstance.get( + `/auth/sso/saml/${testWorkspaceId}`, + { + maxRedirects: 0, + validateStatus: () => true, + } + ); + + /** + * Step 2-4: Verify redirect to Keycloak with proper SAML parameters + */ + expect(response.status).toBe(302); + expect(response.headers.location).toBeDefined(); + expect(response.headers.location).toContain(keycloakConfig.ssoUrl); + expect(response.headers.location).toContain('SAMLRequest'); + expect(response.headers.location).toContain('RelayState'); + }); + + test('Should return 400 if SSO is not enabled for workspace', async () => { + /** + * Test Plan: + * 1. Create a workspace without SSO configuration + * 2. Call GET /auth/sso/saml/:workspaceId for that workspace + * 3. 
Verify 400 error response with appropriate message + * + * Expected: API returns error indicating SSO is not enabled + */ + + /** + * Step 1: Create workspace without SSO + */ + const workspaceWithoutSso = await createTestWorkspace({ + name: 'Workspace Without SSO', + }); + + try { + /** + * Step 2: Try to initiate SSO for workspace without SSO + */ + const response = await apiInstance.get( + `/auth/sso/saml/${workspaceWithoutSso}`, + { + validateStatus: () => true, + } + ); + + /** + * Step 3: Verify error response + */ + expect(response.status).toBe(400); + expect(response.data.error).toContain('SSO is not enabled'); + } finally { + await cleanupWorkspace(workspaceWithoutSso); + } + }); + + test('Should return 400 if workspace does not exist', async () => { + /** + * Test Plan: + * 1. Generate a random workspace ID that doesn't exist in database + * 2. Call GET /auth/sso/saml/:workspaceId with non-existent ID + * 3. Verify 400 error response + * + * Expected: API returns error for non-existent workspace + */ + + /** + * Step 1: Generate non-existent workspace ID + */ + const nonExistentWorkspaceId = new ObjectId().toString(); + + /** + * Step 2: Try to initiate SSO for non-existent workspace + */ + const response = await apiInstance.get( + `/auth/sso/saml/${nonExistentWorkspaceId}`, + { + validateStatus: () => true, + } + ); + + expect(response.status).toBe(400); + expect(response.data.error).toBeDefined(); + }); + }); + + describe('ACS (Assertion Consumer Service)', () => { + /** + * This test requires full E2E flow with browser automation + * + * 1. Initiate SSO login + * 2. Follow redirects to Keycloak + * 3. Submit login form + * 4. Receive SAML Response from Keycloak + * 5. Return to Hawk ACS endpoint + * 6. Verify user was created (JIT provisioning) + * 7. Verify user was logged in + * 8. 
Verify user was redirected to the correct return URL with tokens + */ + test.todo('Should process valid SAML Response and create user session'); + + test('Should reject invalid SAML Response', async () => { + /** + * Test Plan: + * 1. Create an invalid SAML Response (not properly encoded) + * 2. POST invalid SAMLResponse to ACS endpoint + * 3. Verify 400 error response + * + * Expected: API rejects invalid SAML Response + */ + + /** + * Step 1-2: Send invalid SAML Response (not base64 encoded) + */ + const response = await apiInstance.post( + `/auth/sso/saml/${testWorkspaceId}/acs`, + new URLSearchParams({ + SAMLResponse: 'invalid-saml-response', + }), + { + headers: { + 'Content-Type': 'application/x-www-form-urlencoded', + }, + validateStatus: () => true, + } + ); + + expect(response.status).toBe(400); + expect(response.data.error).toBeDefined(); + }); + + test('Should link SAML identity to existing user', async () => { + /** + * Test Plan: + * 1. Create a user in database first (pre-existing user) + * 2. Create mock SAML Response for that user's email + * 3. POST SAMLResponse to ACS endpoint + * 4. 
Verify SAML identity is linked to existing user (not creating new user) + * + * Expected: Existing user gets SAML identity linked + */ + + const testUser = testUsers.alice; + + /** + * Step 1: Create user first (pre-existing user) + */ + await createTestUser({ + email: testUser.email, + name: testUser.firstName, + workspaces: [ testWorkspaceId ], + }); + + /** + * Step 2: Create mock SAML Response for existing user + */ + const samlResponse = createMockSamlResponse( + testUser.email, + testUser.email, + { + name: `${testUser.firstName} ${testUser.lastName}`, + acsUrl: `http://api:4000/auth/sso/saml/${testWorkspaceId}/acs`, + } + ); + + /** + * Step 3: POST SAML Response to ACS endpoint + */ + const response = await apiInstance.post( + `/auth/sso/saml/${testWorkspaceId}/acs`, + new URLSearchParams({ + SAMLResponse: samlResponse, + }), + { + headers: { + 'Content-Type': 'application/x-www-form-urlencoded', + }, + maxRedirects: 0, + validateStatus: () => true, + } + ); + + /** + * Step 4: Verify response + * + * Note: Mock SAML Response will fail validation (400) + * In a real scenario with valid SAML: + * - Existing user would have SAML identity linked + * - User would be logged in (302 redirect) + */ + expect([302, 400]).toContain(response.status); + }); + + test('Should respect RelayState and redirect correctly', async () => { + /** + * Test Plan: + * 1. Call SSO initiation with returnUrl parameter + * 2. Extract RelayState from redirect + * 3. POST SAML Response with same RelayState + * 4. Verify final redirect includes original returnUrl + * + * Note: This test requires full E2E flow with browser automation + * Placeholder for now - to be implemented with puppeteer/playwright + * + * Expected: RelayState is preserved throughout SSO flow + */ + expect(true).toBe(true); + }); + }); + + describe('SSO Enforcement', () => { + test('Should block email/password login when SSO is enforced', async () => { + /** + * Test Plan: + * 1. 
Create workspace with SSO enabled and enforced + * 2. Create user in that workspace + * 3. Try to login via email/password through GraphQL mutation + * 4. Verify login is blocked with SSO_REQUIRED error + * + * Expected: Email/password login is blocked, user must use SSO + */ + + /** + * Step 1: Create workspace with enforced SSO + */ + const enforcedWorkspace = await createTestWorkspace({ + name: 'Enforced SSO Workspace', + sso: { + enabled: true, + enforced: true, + type: 'saml', + saml: { + idpEntityId: keycloakConfig.idpEntityId, + ssoUrl: keycloakConfig.ssoUrl, + x509Cert: keycloakConfig.x509Cert, + nameIdFormat: 'urn:oasis:names:tc:SAML:1.1:nameid-format:emailAddress', + attributeMapping: { + email: 'email', + name: 'firstName', + }, + }, + }, + }); + + /** + * Step 2: Create user with password in enforced workspace + */ + const testUser = testUsers.bob; + + await createTestUser({ + email: testUser.email, + password: testUser.password, + name: testUser.firstName, + workspaces: [ enforcedWorkspace ], + }); + + /** + * Step 3: Try to login with email/password via GraphQL mutation + */ + const loginMutation = ` + mutation Login($email: String!, $password: String!) { + login(email: $email, password: $password) { + accessToken + refreshToken + } + } + `; + + const response = await apiInstance.post( + '/graphql', + { + query: loginMutation, + variables: { + email: testUser.email, + password: testUser.password, + }, + }, + { + validateStatus: () => true, + } + ); + + /** + * Step 4: Verify login is blocked with SSO error + */ + expect(response.data.errors).toBeDefined(); + expect(response.data.errors[0].message).toContain('SSO'); + + await cleanupWorkspace(enforcedWorkspace); + }); + + test('Should allow SSO login even when enforced', async () => { + /** + * Test Plan: + * 1. Create workspace with SSO enabled and enforced + * 2. Call GET /auth/sso/saml/:workspaceId (SSO initiation) + * 3. 
Verify redirect to IdP works correctly + * + * Expected: SSO login works even when enforced (only email/password is blocked) + */ + + /** + * Step 1: Create workspace with enforced SSO + */ + const enforcedWorkspace = await createTestWorkspace({ + name: 'Enforced SSO Workspace', + sso: { + enabled: true, + enforced: true, + type: 'saml', + saml: { + idpEntityId: keycloakConfig.idpEntityId, + ssoUrl: keycloakConfig.ssoUrl, + x509Cert: keycloakConfig.x509Cert, + nameIdFormat: 'urn:oasis:names:tc:SAML:1.1:nameid-format:emailAddress', + attributeMapping: { + email: 'email', + name: 'firstName', + }, + }, + }, + }); + + /** + * Step 2: Initiate SSO login for enforced workspace + */ + const response = await apiInstance.get( + `/auth/sso/saml/${enforcedWorkspace}`, + { + maxRedirects: 0, + validateStatus: (status) => status === 302, + } + ); + + /** + * Step 3: Verify SSO initiation works + */ + expect(response.status).toBe(302); + expect(response.headers.location).toContain(keycloakConfig.ssoUrl); + + await cleanupWorkspace(enforcedWorkspace); + }); + }); + + describe('Error Handling', () => { + test('Should handle missing SAML configuration gracefully', async () => { + /** + * Test Plan: + * 1. Create workspace with SSO enabled but empty configuration + * 2. Try to initiate SSO login + * 3. 
Verify error response (400 or 500) + * + * Expected: API handles incomplete config gracefully with error + */ + + /** + * Step 1: Create workspace with incomplete SSO config + */ + const incompleteWorkspace = await createTestWorkspace({ + name: 'Incomplete SSO Workspace', + sso: { + enabled: true, + enforced: false, + type: 'saml', + saml: { + idpEntityId: '', + ssoUrl: '', + x509Cert: '', + nameIdFormat: 'urn:oasis:names:tc:SAML:1.1:nameid-format:emailAddress', + attributeMapping: { + email: 'email', + }, + }, + }, + }); + + /** + * Step 2: Try to initiate SSO with incomplete config + */ + const response = await apiInstance.get( + `/auth/sso/saml/${incompleteWorkspace}`, + { + validateStatus: () => true, + } + ); + + /** + * Step 3: Verify error response + */ + expect([400, 500]).toContain(response.status); + + await cleanupWorkspace(incompleteWorkspace); + }); + + test('Should handle IdP errors gracefully', async () => { + /** + * Test Plan: + * 1. Mock IdP returning error in SAML Response + * 2. POST error SAML Response to ACS + * 3. Verify API handles IdP errors gracefully + * + * Note: This would require mocking various SAML error responses + * (e.g., authentication failure, request denied, etc.) + * To be implemented with proper SAML error response mocks + * + * Expected: API gracefully handles and displays IdP errors + */ + expect(true).toBe(true); + }); + }); +}); + +/** + * NOTE: Integration tests with Keycloak + * + * These tests verify: + * 1. SSO initiation and redirect to Keycloak + * 2. ACS endpoint behavior (with mocked SAML Response) + * 3. SSO enforcement + * 4. 
Error handling + * + * Limitations: + * - Mock SAML Response won't pass signature validation + * - For full end-to-end tests with real Keycloak SAML Response, + * browser automation (puppeteer/playwright) is needed + * + * Manual Testing: + * - See docs/Keycloak.md for manual testing instructions + * - Use Keycloak admin console to view test users and SAML configuration + */ diff --git a/test/integration/jestEnv.js b/test/integration/jestEnv.js index 0249f971..cad69c67 100644 --- a/test/integration/jestEnv.js +++ b/test/integration/jestEnv.js @@ -13,6 +13,14 @@ class CustomEnvironment extends NodeEnvironment { */ async setup() { await super.setup(); + + /** + * Add performance API polyfill for MongoDB driver + * MongoDB driver uses performance.now() which is not available in Jest environment by default + */ + const { performance } = require('perf_hooks'); + this.global.performance = performance; + const mongoClient = new mongodb.MongoClient('mongodb://mongodb:27017', { useUnifiedTopology: true }); await mongoClient.connect(); diff --git a/test/integration/keycloak/import/hawk-realm.json b/test/integration/keycloak/import/hawk-realm.json new file mode 100644 index 00000000..854c02ff --- /dev/null +++ b/test/integration/keycloak/import/hawk-realm.json @@ -0,0 +1,156 @@ +{ + "realm": "hawk", + "enabled": true, + "sslRequired": "none", + "registrationAllowed": false, + "loginWithEmailAllowed": true, + "duplicateEmailsAllowed": false, + "resetPasswordAllowed": false, + "editUsernameAllowed": false, + "bruteForceProtected": false, + "users": [ + { + "username": "testuser", + "enabled": true, + "email": "testuser@hawk.local", + "firstName": "Test", + "lastName": "User", + "credentials": [ + { + "type": "password", + "value": "password123", + "temporary": false + } + ], + "attributes": { + "department": ["Engineering"], + "title": ["Software Engineer"] + } + }, + { + "username": "alice", + "enabled": true, + "email": "alice@hawk.local", + "firstName": "Alice", + 
"lastName": "Johnson", + "credentials": [ + { + "type": "password", + "value": "password123", + "temporary": false + } + ], + "attributes": { + "department": ["Product"], + "title": ["Product Manager"] + } + }, + { + "username": "bob", + "enabled": true, + "email": "bob@hawk.local", + "firstName": "Bob", + "lastName": "Smith", + "credentials": [ + { + "type": "password", + "value": "password123", + "temporary": false + } + ], + "attributes": { + "department": ["Engineering"], + "title": ["Senior Developer"] + } + } + ], + "clients": [ + { + "clientId": "urn:hawk:tracker:saml", + "name": "Hawk Service Provider", + "enabled": true, + "protocol": "saml", + "frontchannelLogout": true, + "attributes": { + "saml.assertion.signature": "true", + "saml.authnstatement": "true", + "saml.client.signature": "false", + "saml.encrypt": "false", + "saml.force.post.binding": "true", + "saml.multivalued.roles": "false", + "saml.onetimeuse.condition": "false", + "saml.server.signature": "true", + "saml.server.signature.keyinfo.ext": "false", + "saml_force_name_id_format": "false", + "saml_name_id_format": "email", + "saml_signature_algorithm": "RSA_SHA256", + "saml.assertion.lifespan": "300" + }, + "defaultClientScopes": [], + "optionalClientScopes": [], + "protocolMappers": [ + { + "name": "email", + "protocol": "saml", + "protocolMapper": "saml-user-property-mapper", + "consentRequired": false, + "config": { + "attribute.nameformat": "Basic", + "user.attribute": "email", + "attribute.name": "email" + } + }, + { + "name": "name", + "protocol": "saml", + "protocolMapper": "saml-javascript-mapper", + "consentRequired": false, + "config": { + "attribute.nameformat": "Basic", + "attribute.name": "name", + "single": "true", + "script": "var firstName = user.getFirstName() || ''; var lastName = user.getLastName() || ''; firstName + (firstName && lastName ? 
' ' : '') + lastName;" + } + }, + { + "name": "department", + "protocol": "saml", + "protocolMapper": "saml-user-attribute-mapper", + "consentRequired": false, + "config": { + "attribute.nameformat": "Basic", + "user.attribute": "department", + "attribute.name": "department" + } + }, + { + "name": "title", + "protocol": "saml", + "protocolMapper": "saml-user-attribute-mapper", + "consentRequired": false, + "config": { + "attribute.nameformat": "Basic", + "user.attribute": "title", + "attribute.name": "title" + } + } + ], + "redirectUris": [ + "http://localhost:4000/*", + "http://127.0.0.1:4000/*", + "http://localhost:8080/*", + "http://127.0.0.1:8080/*", + "http://api:4000/*" + ], + "webOrigins": [ + "http://localhost:4000", + "http://127.0.0.1:4000", + "http://localhost:8080", + "http://127.0.0.1:8080", + "http://api:4000" + ], + "adminUrl": "", + "baseUrl": "" + } + ] +} diff --git a/test/integration/keycloak/setup.sh b/test/integration/keycloak/setup.sh new file mode 100755 index 00000000..28c95092 --- /dev/null +++ b/test/integration/keycloak/setup.sh @@ -0,0 +1,129 @@ +#!/bin/bash + +# Keycloak Setup Script for Hawk SSO Development +# This script configures Keycloak with realm, client, and test users + +set -e + +KEYCLOAK_URL="${KEYCLOAK_URL:-http://localhost:8180}" +ADMIN_USER="${KEYCLOAK_ADMIN:-admin}" +ADMIN_PASSWORD="${KEYCLOAK_ADMIN_PASSWORD:-admin}" +REALM_NAME="hawk" + +# Determine the script directory and realm file path +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +REALM_FILE="${SCRIPT_DIR}/import/hawk-realm.json" + +# Check if realm file exists +if [ ! -f "$REALM_FILE" ]; then + echo "❌ Realm configuration file not found: $REALM_FILE" + exit 1 +fi + +echo "🔧 Setting up Keycloak for Hawk SSO..." +echo "Keycloak URL: $KEYCLOAK_URL" + +# Wait for Keycloak to be ready +echo "⏳ Waiting for Keycloak to start..." 
+MAX_RETRIES=30 +RETRY_COUNT=0 + +while [ $RETRY_COUNT -lt $MAX_RETRIES ]; do + if curl -s -f "$KEYCLOAK_URL/health/ready" > /dev/null 2>&1; then + echo "✓ Keycloak is ready!" + break + fi + RETRY_COUNT=$((RETRY_COUNT + 1)) + echo "Waiting for Keycloak... ($RETRY_COUNT/$MAX_RETRIES)" + sleep 2 +done + +if [ $RETRY_COUNT -eq $MAX_RETRIES ]; then + echo "❌ Keycloak failed to start in time" + exit 1 +fi + +# Get admin token +echo "🔑 Obtaining admin token..." +TOKEN_RESPONSE=$(curl -s -X POST "$KEYCLOAK_URL/realms/master/protocol/openid-connect/token" \ + -H "Content-Type: application/x-www-form-urlencoded" \ + -d "username=$ADMIN_USER" \ + -d "password=$ADMIN_PASSWORD" \ + -d "grant_type=password" \ + -d "client_id=admin-cli") + +ACCESS_TOKEN=$(echo "$TOKEN_RESPONSE" | grep -o '"access_token":"[^"]*' | cut -d'"' -f4) + +if [ -z "$ACCESS_TOKEN" ]; then + echo "❌ Failed to obtain admin token" + echo "Response: $TOKEN_RESPONSE" + exit 1 +fi + +echo "✓ Admin token obtained" + +# Check if realm already exists +echo "🔍 Checking if realm '$REALM_NAME' exists..." +REALM_EXISTS=$(curl -s -o /dev/null -w "%{http_code}" "$KEYCLOAK_URL/admin/realms/$REALM_NAME" \ + -H "Authorization: Bearer $ACCESS_TOKEN") + +if [ "$REALM_EXISTS" = "200" ]; then + echo "⚠️ Realm '$REALM_NAME' already exists. Skipping realm creation." + echo " To reconfigure, delete the realm manually or remove Keycloak data volume." +else + echo "📦 Importing realm from configuration..." + + # Import realm + IMPORT_RESPONSE=$(curl -s -w "\n%{http_code}" -X POST "$KEYCLOAK_URL/admin/realms" \ + -H "Authorization: Bearer $ACCESS_TOKEN" \ + -H "Content-Type: application/json" \ + -d @"$REALM_FILE") + + HTTP_CODE=$(echo "$IMPORT_RESPONSE" | tail -n1) + RESPONSE_BODY=$(echo "$IMPORT_RESPONSE" | sed '$d') + + if [ "$HTTP_CODE" = "201" ]; then + echo "✓ Realm '$REALM_NAME' created successfully!" 
+ else + echo "❌ Failed to create realm (HTTP $HTTP_CODE)" + echo "Response: $RESPONSE_BODY" + exit 1 + fi +fi + +# Get realm's SAML descriptor for reference +echo "📋 Fetching SAML metadata..." +SAML_DESCRIPTOR=$(curl -s "$KEYCLOAK_URL/realms/$REALM_NAME/protocol/saml/descriptor") + +if echo "$SAML_DESCRIPTOR" | grep -q "EntityDescriptor"; then + echo "✓ SAML metadata is available" + echo "" + echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━" + echo "🎉 Keycloak setup completed successfully!" + echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━" + echo "" + echo "📍 Configuration Details:" + echo " Keycloak Admin Console: $KEYCLOAK_URL" + echo " Admin credentials: $ADMIN_USER / $ADMIN_PASSWORD" + echo " Realm: $REALM_NAME" + echo " Client ID: hawk-sp" + echo "" + echo "👥 Test Users:" + echo " - testuser@hawk.local / password123" + echo " - alice@hawk.local / password123" + echo " - bob@hawk.local / password123" + echo "" + echo "🔗 SSO URLs for Hawk configuration:" + echo " IdP Entity ID: $KEYCLOAK_URL/realms/$REALM_NAME" + echo " SSO URL: $KEYCLOAK_URL/realms/$REALM_NAME/protocol/saml" + echo " SAML Metadata: $KEYCLOAK_URL/realms/$REALM_NAME/protocol/saml/descriptor" + echo "" + echo "📝 Next steps:" + echo " 1. Open Hawk SSO settings in workspace" + echo " 2. Configure SSO with the URLs above" + echo " 3. Copy X.509 certificate from Keycloak admin console" + echo " (Realm Settings → Keys → RS256 → Certificate)" + echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━" +else + echo "⚠️ SAML metadata not available yet. Keycloak may still be initializing." 
+fi diff --git a/test/integration/utils.ts b/test/integration/utils.ts index 26701341..53e1fcc7 100644 --- a/test/integration/utils.ts +++ b/test/integration/utils.ts @@ -18,3 +18,13 @@ export const accountingEnv = dotenv.config({ path: path.join(__dirname, './accou export const apiInstance = axios.create({ baseURL: `http://api:${apiEnv.PORT}`, }); + +/** + * Export Keycloak utilities + */ +export * from './utils/keycloak'; + +/** + * Export Workspace utilities + */ +export * from './utils/workspace'; diff --git a/test/integration/utils/keycloak.ts b/test/integration/utils/keycloak.ts new file mode 100644 index 00000000..48d1d808 --- /dev/null +++ b/test/integration/utils/keycloak.ts @@ -0,0 +1,297 @@ +import axios from 'axios'; +import { parseString } from 'xml2js'; +import { promisify } from 'util'; + +const parseXml = promisify(parseString); + +/** + * Keycloak configuration + */ +export const keycloakConfig = { + baseUrl: process.env.KEYCLOAK_URL || 'http://keycloak:8180', + realm: 'hawk', + clientId: 'hawk-sp', + adminUser: 'admin', + adminPassword: 'admin', +}; + +/** + * Test user credentials + */ +export const testUsers = { + testuser: { + username: 'testuser', + password: 'password123', + email: 'testuser@hawk.local', + firstName: 'Test', + lastName: 'User', + }, + alice: { + username: 'alice', + password: 'password123', + email: 'alice@hawk.local', + firstName: 'Alice', + lastName: 'Johnson', + }, + bob: { + username: 'bob', + password: 'password123', + email: 'bob@hawk.local', + firstName: 'Bob', + lastName: 'Smith', + }, +}; + +/** + * Keycloak SAML configuration for Hawk + */ +export interface KeycloakSamlConfig { + /** + * IdP Entity ID + */ + idpEntityId: string; + + /** + * SSO URL + */ + ssoUrl: string; + + /** + * X.509 Certificate (PEM format, without headers) + */ + x509Cert: string; +} + +/** + * Get Keycloak admin token + */ +export async function getAdminToken(): Promise { + const response = await axios.post( + 
`${keycloakConfig.baseUrl}/realms/master/protocol/openid-connect/token`, + new URLSearchParams({ + username: keycloakConfig.adminUser, + password: keycloakConfig.adminPassword, + grant_type: 'password', + client_id: 'admin-cli', + }), + { + headers: { + 'Content-Type': 'application/x-www-form-urlencoded', + }, + } + ); + + return response.data.access_token; +} + +/** + * Get Keycloak SAML configuration for Hawk + */ +export async function getKeycloakSamlConfig(): Promise { + /** + * Fetch SAML metadata descriptor + */ + const descriptorUrl = `${keycloakConfig.baseUrl}/realms/${keycloakConfig.realm}/protocol/saml/descriptor`; + const response = await axios.get(descriptorUrl); + + /** + * Parse XML to extract certificate + * xml2js handles namespaces by creating keys with namespace prefixes + * Keycloak uses 'md:' prefix for metadata elements and 'ds:' for signature elements + */ + const parsed: any = await parseXml(response.data); + + /** + * Access EntityDescriptor with namespace prefix + */ + const entityDescriptor = parsed['md:EntityDescriptor'] || parsed.EntityDescriptor; + + if (!entityDescriptor) { + throw new Error('EntityDescriptor not found in SAML metadata'); + } + + /** + * Access IDPSSODescriptor with namespace prefix + */ + const idpDescriptor = + entityDescriptor['md:IDPSSODescriptor']?.[0] || entityDescriptor.IDPSSODescriptor?.[0]; + + if (!idpDescriptor) { + throw new Error('IDPSSODescriptor not found in SAML metadata'); + } + + /** + * Find signing certificate from KeyDescriptor elements + */ + let x509Cert = ''; + const keyDescriptors = idpDescriptor['md:KeyDescriptor'] || idpDescriptor.KeyDescriptor || []; + + for (const kd of keyDescriptors) { + if (!kd.$?.use || kd.$?.use === 'signing') { + /** + * Try different possible paths for X509Certificate with namespace prefixes + */ + const keyInfo = kd['ds:KeyInfo']?.[0] || kd.KeyInfo?.[0]; + + if (keyInfo) { + const x509Data = keyInfo['ds:X509Data']?.[0] || keyInfo.X509Data?.[0]; + + if (x509Data) { + 
x509Cert = x509Data['ds:X509Certificate']?.[0] || x509Data.X509Certificate?.[0] || ''; + } + } + + if (x509Cert) { + break; + } + } + } + + if (!x509Cert) { + throw new Error('X509 Certificate not found in SAML metadata'); + } + + return { + idpEntityId: `${keycloakConfig.baseUrl}/realms/${keycloakConfig.realm}`, + ssoUrl: `${keycloakConfig.baseUrl}/realms/${keycloakConfig.realm}/protocol/saml`, + x509Cert: x509Cert.trim(), + }; +} + +/** + * Simulate SSO login flow and get SAML Response + * + * This function performs browser-like login to Keycloak and extracts the SAML Response + * + * @param username - Keycloak username + * @param password - Keycloak password + * @param acsUrl - ACS URL where SAML Response should be sent + * @returns SAML Response and RelayState + */ +export async function performKeycloakLogin( + username: string, + password: string, + acsUrl: string +): Promise<{ samlResponse: string; relayState?: string }> { + /** + * This is a simplified version. In a real test, you would need to: + * 1. Make a request to Hawk's SSO initiation endpoint + * 2. Follow redirects to Keycloak + * 3. Submit login form + * 4. Extract SAML Response from the POST to ACS + * + * For now, this is a placeholder that would require additional libraries + * like puppeteer or playwright for full browser automation. + */ + throw new Error('Browser automation not implemented. Use mock SAML Response for tests.'); +} + +/** + * Create a mock SAML Response for testing + * + * NOTE: This is a simplified mock. 
For real tests, you should either: + * - Use actual Keycloak-generated SAML Response (via browser automation) + * - Use a proper SAML Response generator library + * + * @param email - User email + * @param nameId - Name ID (usually email) + * @param attributes - Additional SAML attributes + * @returns Base64-encoded SAML Response + */ +export function createMockSamlResponse( + email: string, + nameId: string, + attributes: Record = {} +): string { + const now = new Date().toISOString(); + const notOnOrAfter = new Date(Date.now() + 300000).toISOString(); // 5 minutes + const issueInstant = now; + const sessionNotOnOrAfter = new Date(Date.now() + 3600000).toISOString(); // 1 hour + + /** + * This is a minimal SAML Response structure + * In production, this would be generated by the IdP (Keycloak) + */ + const samlResponse = ` + + ${keycloakConfig.baseUrl}/realms/${keycloakConfig.realm} + + + + + ${keycloakConfig.baseUrl}/realms/${keycloakConfig.realm} + + ${nameId} + + + + + + + hawk-sp + + + + + urn:oasis:names:tc:SAML:2.0:ac:classes:PasswordProtectedTransport + + + + + ${email} + + ${attributes.name ? 
` + + ${attributes.name} + ` : ''} + + +`; + + /** + * Base64 encode the SAML Response + */ + return Buffer.from(samlResponse).toString('base64'); +} + +/** + * Generate random ID for SAML messages + */ +function generateId(): string { + return '_' + Array.from({ length: 32 }, () => Math.random().toString(36)[2]).join(''); +} + +/** + * Wait for Keycloak to be ready + * + * @param maxRetries - Maximum number of retries + * @param retryInterval - Interval between retries in ms + */ +export async function waitForKeycloak(maxRetries = 30, retryInterval = 2000): Promise { + for (let i = 0; i < maxRetries; i++) { + try { + await axios.get(`${keycloakConfig.baseUrl}/health/ready`); + return; + } catch (error) { + if (i === maxRetries - 1) { + throw new Error('Keycloak failed to start in time'); + } + await new Promise(resolve => setTimeout(resolve, retryInterval)); + } + } +} diff --git a/test/integration/utils/workspace.ts b/test/integration/utils/workspace.ts new file mode 100644 index 00000000..0437a98a --- /dev/null +++ b/test/integration/utils/workspace.ts @@ -0,0 +1,205 @@ +import { MongoClient, ObjectId } from 'mongodb'; +import type { WorkspaceDBScheme, UserDBScheme } from '@hawk.so/types'; +import argon2 from 'argon2'; + +/** + * Polyfill for performance API (required by MongoDB driver) + */ +if (typeof global.performance === 'undefined') { + const { performance } = require('perf_hooks'); + + global.performance = performance as any; +} + +/** + * Get MongoDB connection for tests + * Uses the same database as API (from MONGO_HAWK_DB_URL) to ensure data consistency + */ +export async function getMongoConnection(): Promise { + const mongoUrl = process.env.MONGO_HAWK_DB_URL || 'mongodb://mongodb:27017/hawk'; + const client = new MongoClient(mongoUrl); + + await client.connect(); + + return client; +} + +/** + * Create test workspace with SSO configuration + * + * @param config - Workspace configuration + * @returns Created workspace ID + */ +export async function 
createTestWorkspace(config: { + name?: string; + sso?: WorkspaceDBScheme['sso']; + members?: string[]; +}): Promise { + const client = await getMongoConnection(); + const db = client.db(); + const workspacesCollection = db.collection('workspaces'); + + /** + * Create minimal workspace data for tests + * Only required fields + SSO config + */ + const workspaceData: any = { + name: config.name || 'Test Workspace', + inviteHash: new ObjectId().toString(), + }; + + /** + * Add SSO config if provided + */ + if (config.sso) { + workspaceData.sso = config.sso; + } + + const result = await workspacesCollection.insertOne(workspaceData as WorkspaceDBScheme); + + await client.close(); + + return result.insertedId.toString(); +} + +/** + * Create test user + * + * @param config - User configuration + * @returns Created user ID + */ +export async function createTestUser(config: { + email: string; + password?: string; + name?: string; + workspaces?: string[]; +}): Promise { + const client = await getMongoConnection(); + const db = client.db(); + const usersCollection = db.collection('users'); + + /** + * Hash password if provided + */ + const hashedPassword = config.password ? await argon2.hash(config.password) : undefined; + + /** + * Build workspaces membership object + * Format: { [workspaceId]: { isPending: false } } + */ + const workspaces: Record = {}; + + if (config.workspaces && config.workspaces.length > 0) { + for (const workspaceId of config.workspaces) { + workspaces[workspaceId] = { isPending: false }; + } + } + + const userData: Partial = { + email: config.email, + password: hashedPassword, + name: config.name || config.email, + workspaces: Object.keys(workspaces).length > 0 ? 
workspaces : undefined, + notifications: { + channels: { + email: { + endpoint: config.email, + isEnabled: true, + minPeriod: 0, + }, + }, + whatToReceive: { + IssueAssigning: true, + WeeklyDigest: true, + SystemMessages: true, + }, + }, + }; + + const result = await usersCollection.insertOne(userData as UserDBScheme); + const userId = result.insertedId; + + /** + * Add user to workspace teams if workspaces specified + */ + if (config.workspaces && config.workspaces.length > 0) { + for (const workspaceId of config.workspaces) { + const teamCollection = db.collection(`team:${workspaceId}`); + + await teamCollection.insertOne({ + userId, + isConfirmed: true, + }); + } + } + + await client.close(); + + return userId.toString(); +} + +/** + * Get workspace by ID + * + * @param workspaceId - Workspace ID + * @returns Workspace data or null + */ +export async function getWorkspace(workspaceId: string): Promise { + const client = await getMongoConnection(); + const db = client.db(); + const workspacesCollection = db.collection('workspaces'); + + const workspace = await workspacesCollection.findOne({ _id: new ObjectId(workspaceId) }); + + await client.close(); + + return workspace; +} + +/** + * Get user by email + * + * @param email - User email + * @returns User data or null + */ +export async function getUserByEmail(email: string): Promise { + const client = await getMongoConnection(); + const db = client.db(); + const usersCollection = db.collection('users'); + + const user = await usersCollection.findOne({ email }); + + await client.close(); + + return user; +} + +/** + * Clean up test workspace + * + * @param workspaceId - Workspace ID to delete + */ +export async function cleanupWorkspace(workspaceId: string): Promise { + const client = await getMongoConnection(); + const db = client.db(); + const workspacesCollection = db.collection('workspaces'); + + await workspacesCollection.deleteOne({ _id: new ObjectId(workspaceId) }); + + await client.close(); +} + +/** + * 
Clean up test user + * + * @param email - User email to delete + */ +export async function cleanupUser(email: string): Promise { + const client = await getMongoConnection(); + const db = client.db(); + const usersCollection = db.collection('users'); + + await usersCollection.deleteOne({ email }); + + await client.close(); +} diff --git a/test/models/user.test.ts b/test/models/user.test.ts new file mode 100644 index 00000000..aebee095 --- /dev/null +++ b/test/models/user.test.ts @@ -0,0 +1,170 @@ +import '../../src/env-test'; +import UserModel from '../../src/models/user'; +import UsersFactory from '../../src/models/usersFactory'; +import * as mongo from '../../src/mongo'; +import DataLoaders from '../../src/dataLoaders'; +import { generateTestString } from '../utils/testData'; + +beforeAll(async () => { + await mongo.setupConnections(); +}); + +describe('UserModel SSO identities', () => { + let usersFactory: UsersFactory; + let testUser: UserModel; + + beforeEach(async () => { + /** + * Create factory instance with fresh DataLoaders + */ + usersFactory = new UsersFactory( + mongo.databases.hawk as any, + new DataLoaders(mongo.databases.hawk as any) + ); + }); + + afterEach(async () => { + if (testUser?.email) { + await usersFactory.deleteByEmail(testUser.email); + } + }); + + describe('linkSamlIdentity', () => { + it('should link SAML identity to user and update local state', async () => { + const testWorkspaceId = '507f1f77bcf86cd799439011'; + const testSamlId = generateTestString('model-link'); + const testEmail = generateTestString('model-test-sso@example.com'); + + testUser = await usersFactory.create(testEmail, 'test-password-123'); + /** + * Initially, user should not have any identities + */ + expect(testUser.identities).toBeUndefined(); + + /** + * Link SAML identity + */ + await testUser.linkSamlIdentity(testWorkspaceId, testSamlId, testEmail); + + /** + * Check that local state is updated + */ + expect(testUser.identities).toBeDefined(); + 
expect(testUser.identities![testWorkspaceId]).toBeDefined(); + expect(testUser.identities![testWorkspaceId].saml).toEqual({ + id: testSamlId, + email: testEmail, + }); + }); + + it('should persist SAML identity in database', async () => { + const testWorkspaceId = '507f1f77bcf86cd799439011'; + const testSamlId = generateTestString('model-persist'); + const testEmail = generateTestString('model-test-sso@example.com'); + + testUser = await usersFactory.create(testEmail, 'test-password-123'); + /** + * Link SAML identity + */ + await testUser.linkSamlIdentity(testWorkspaceId, testSamlId, testEmail); + + /** + * Reload user from database to verify persistence + */ + const reloadedUser = await usersFactory.findById(testUser._id.toString()); + + expect(reloadedUser).not.toBeNull(); + expect(reloadedUser!.identities).toBeDefined(); + expect(reloadedUser!.identities![testWorkspaceId]).toBeDefined(); + expect(reloadedUser!.identities![testWorkspaceId].saml).toEqual({ + id: testSamlId, + email: testEmail, + }); + }); + + it('should update existing SAML identity for the same workspace', async () => { + const testWorkspaceId = '507f1f77bcf86cd799439011'; + const testEmail = generateTestString('model-test-sso@example.com'); + testUser = await usersFactory.create(testEmail, 'test-password-123'); + + /** + * Use unique SAML IDs for this test + */ + const initialSamlId = generateTestString('initial-identity'); + const newSamlId = generateTestString('updated-identity'); + const newEmail = 'updated-email@example.com'; + + /** + * Link initial identity + */ + await testUser.linkSamlIdentity(testWorkspaceId, initialSamlId, testEmail); + + /** + * Update identity for the same workspace + */ + await testUser.linkSamlIdentity(testWorkspaceId, newSamlId, newEmail); + + /** + * Check that identity is updated (not duplicated) + */ + expect(testUser.identities![testWorkspaceId].saml).toEqual({ + id: newSamlId, + email: newEmail, + }); + + /** + * Verify in database + */ + const reloadedUser 
= await usersFactory.findById(testUser._id.toString()); + expect(reloadedUser!.identities![testWorkspaceId].saml).toEqual({ + id: newSamlId, + email: newEmail, + }); + }); + }); + + describe('getSamlIdentity', () => { + it('should return null when identity does not exist', async () => { + const testWorkspaceId = '507f1f77bcf86cd799439011'; + const testEmail = generateTestString('model-test-sso@example.com'); + testUser = await usersFactory.create(testEmail, 'test-password-123'); + /** + * User without any identities + */ + const identity = testUser.getSamlIdentity(testWorkspaceId); + expect(identity).toBeNull(); + }); + + it('should return SAML identity when it exists', async () => { + const testWorkspaceId = '507f1f77bcf86cd799439011'; + const testSamlId = generateTestString('model-get'); + const testEmail = generateTestString('model-test-sso@example.com'); + + testUser = await usersFactory.create(testEmail, 'test-password-123'); + /** + * Link SAML identity + */ + await testUser.linkSamlIdentity(testWorkspaceId, testSamlId, testEmail); + + /** + * Get identity + */ + const identity = testUser.getSamlIdentity(testWorkspaceId); + + expect(identity).not.toBeNull(); + expect(identity).toEqual({ + id: testSamlId, + email: testEmail, + }); + }); + }); + +}); + +afterAll(async done => { + await mongo.mongoClients.hawk?.close(); + await mongo.mongoClients.events?.close(); + + done(); +}); + diff --git a/test/models/usersFactory.test.ts b/test/models/usersFactory.test.ts new file mode 100644 index 00000000..bc6fbfc6 --- /dev/null +++ b/test/models/usersFactory.test.ts @@ -0,0 +1,139 @@ +import '../../src/env-test'; +import UsersFactory from '../../src/models/usersFactory'; +import * as mongo from '../../src/mongo'; +import DataLoaders from '../../src/dataLoaders'; +import { generateTestString } from '../utils/testData'; + +beforeAll(async () => { + await mongo.setupConnections(); +}); + +describe('UsersFactory SSO identities', () => { + let usersFactory: UsersFactory; + 
let emailsToCleanup: string[] = []; + + const createUsersFactory = (): UsersFactory => { + return new UsersFactory( + mongo.databases.hawk as any, + new DataLoaders(mongo.databases.hawk as any) + ); + }; + + beforeEach(() => { + usersFactory = createUsersFactory(); + emailsToCleanup = []; + }); + + afterEach(async () => { + /** + * Cleanup only data created by this test. + * Do NOT drop/delete whole collections: tests can run in parallel across files. + */ + const uniqueEmails = Array.from(new Set(emailsToCleanup)); + + for (const email of uniqueEmails) { + try { + await usersFactory.deleteByEmail(email); + } catch { + /** + * Ignore cleanup errors (e.g. already deleted by the test itself) + */ + } + } + }); + + describe('findBySamlIdentity', () => { + it('should return null when user with SAML identity does not exist', async () => { + const testWorkspaceId = '507f1f77bcf86cd799439011'; + /** + * Use unique SAML ID to avoid conflicts with other tests + */ + const uniqueSamlId = generateTestString('non-existent'); + + /** + * Try to find user with non-existent SAML identity + */ + const foundUser = await usersFactory.findBySamlIdentity( + testWorkspaceId, + uniqueSamlId + ); + + expect(foundUser).toBeNull(); + }); + + it('should find user by SAML identity', async () => { + const testWorkspaceId = '507f1f77bcf86cd799439011'; + const testEmail = generateTestString('factory-test-sso@example.com'); + /** + * Use unique SAML ID for this specific test + */ + const uniqueSamlId = generateTestString('find-test'); + + /** + * Create test user for this test + */ + const testUser = await usersFactory.create(testEmail, 'test-password-123'); + emailsToCleanup.push(testEmail); + + /** + * Link SAML identity to test user + */ + await testUser.linkSamlIdentity(testWorkspaceId, uniqueSamlId, testEmail); + + /** + * Find user by SAML identity using factory method + */ + const foundUser = await usersFactory.findBySamlIdentity( + testWorkspaceId, + uniqueSamlId + ); + + 
expect(foundUser).not.toBeNull(); + expect(foundUser!._id.toString()).toBe(testUser._id.toString()); + expect(foundUser!.email).toBe(testEmail); + expect(foundUser!.identities![testWorkspaceId].saml).toEqual({ + id: uniqueSamlId, + email: testEmail, + }); + }); + + it('should return null for different workspace even if SAML ID matches', async () => { + const testWorkspaceId = '507f1f77bcf86cd799439011'; + const workspaceId2 = '507f1f77bcf86cd799439012'; + const testEmail = generateTestString('factory-test-sso@example.com'); + /** + * Use unique SAML ID for this specific test + */ + const uniqueSamlId = generateTestString('workspace-test'); + + /** + * Create test user for this test + */ + const testUser = await usersFactory.create(testEmail, 'test-password-123'); + emailsToCleanup.push(testEmail); + + /** + * Link identity for first workspace + */ + await testUser.linkSamlIdentity(testWorkspaceId, uniqueSamlId, testEmail); + + /** + * Try to find user by same SAML ID but different workspace + */ + const foundUser = await usersFactory.findBySamlIdentity( + workspaceId2, + uniqueSamlId + ); + + expect(foundUser).toBeNull(); + }); + }); +}); + +afterAll(async done => { + await mongo.mongoClients.hawk?.close(); + await mongo.mongoClients.events?.close(); + + done(); +}); + diff --git a/test/setup.ts b/test/setup.ts new file mode 100644 index 00000000..7e4a13a0 --- /dev/null +++ b/test/setup.ts @@ -0,0 +1,12 @@ +/** + * Jest setup file to provide global APIs needed by MongoDB driver + */ + +import { performance } from 'perf_hooks'; + +/** + * MongoDB 6.x requires global performance API + * Node.js provides it via perf_hooks module + */ +global.performance = performance as any; + diff --git a/test/sso/saml/controller.test.ts b/test/sso/saml/controller.test.ts new file mode 100644 index 00000000..d3d07706 --- /dev/null +++ b/test/sso/saml/controller.test.ts @@ -0,0 +1,646 @@ +import '../../../src/env-test'; +import { Request, Response } from 'express'; +import { ObjectId 
} from 'mongodb'; +import SamlController from '../../../src/sso/saml/controller'; +import { ContextFactories } from '../../../src/types/graphql'; +import { WorkspaceSsoConfig } from '../../../src/sso/types'; +import { WorkspaceDBScheme, UserDBScheme } from '@hawk.so/types'; +import SamlService from '../../../src/sso/saml/service'; +import { MemorySamlStateStore } from '../../../src/sso/saml/store/memory.store'; +import * as mongo from '../../../src/mongo'; +import WorkspaceModel from '../../../src/models/workspace'; +import UserModel from '../../../src/models/user'; + +/** + * Mock dependencies + */ +jest.mock('../../../src/sso/saml/service'); + +beforeAll(async () => { + /** + * Ensure MONGO_HAWK_DB_URL is set from MONGO_URL (set by @shelf/jest-mongodb) + * This is a fallback in case setup.ts didn't run or MONGO_URL wasn't available then + */ + if (process.env.MONGO_URL && !process.env.MONGO_HAWK_DB_URL) { + process.env.MONGO_HAWK_DB_URL = process.env.MONGO_URL; + } + if (process.env.MONGO_URL && !process.env.MONGO_EVENTS_DB_URL) { + process.env.MONGO_EVENTS_DB_URL = process.env.MONGO_URL; + } + + await mongo.setupConnections(); + + /** + * Verify that databases are initialized + */ + if (!mongo.databases.hawk) { + throw new Error( + `Failed to initialize MongoDB connection for tests. 
` + + `MONGO_URL: ${process.env.MONGO_URL}, ` + + `MONGO_HAWK_DB_URL: ${process.env.MONGO_HAWK_DB_URL}` + ); + } +}); + +describe('SamlController', () => { + let controller: SamlController; + let mockFactories: ContextFactories; + let mockWorkspacesFactory: any; + let mockUsersFactory: any; + let mockSamlService: jest.Mocked; + let mockReq: Partial; + let mockRes: Partial; + let samlStore: MemorySamlStateStore; + + const testWorkspaceId = new ObjectId().toString(); + const testUserId = new ObjectId().toString(); + const testSamlConfig: WorkspaceSsoConfig['saml'] = { + idpEntityId: 'urn:test:idp', + ssoUrl: 'https://idp.example.com/sso', + x509Cert: '-----BEGIN CERTIFICATE-----\nTEST_CERTIFICATE\n-----END CERTIFICATE-----', + nameIdFormat: 'urn:oasis:names:tc:SAML:1.1:nameid-format:emailAddress', + attributeMapping: { + email: 'http://schemas.xmlsoap.org/ws/2005/05/identity/claims/emailaddress', + name: 'http://schemas.xmlsoap.org/ws/2005/05/identity/claims/name', + }, + }; + + /** + * Create mock workspace with SSO enabled + * Using partial mock object instead of real instance to avoid MongoDB connection issues in tests + */ + function createMockWorkspace(overrides?: Partial): Partial & { _id: ObjectId } { + const workspaceData: WorkspaceDBScheme = { + _id: new ObjectId(testWorkspaceId), + name: 'Test Workspace', + accountId: 'test-account-id', + balance: 0, + billingPeriodEventsCount: 0, + isBlocked: false, + lastChargeDate: new Date(), + tariffPlanId: new ObjectId(), + inviteHash: 'test-invite-hash', + subscriptionId: undefined, + sso: { + enabled: true, + enforced: false, + type: 'saml', + saml: testSamlConfig, + }, + ...overrides, + }; + + return { + ...workspaceData, + getMemberInfo: jest.fn(), + addMember: jest.fn(), + confirmMembership: jest.fn(), + } as any; + } + + /** + * Create mock user + * Using partial mock object instead of real instance to avoid MongoDB connection issues in tests + */ + function createMockUser(overrides?: Partial): Partial & { _id: 
ObjectId; email?: string } { + const userData: UserDBScheme = { + _id: new ObjectId(testUserId), + email: 'test@example.com', + notifications: { + channels: { + email: { + isEnabled: true, + endpoint: 'test@example.com', + minPeriod: 60, + }, + }, + whatToReceive: {} as any, + }, + ...overrides, + }; + + return { + ...userData, + linkSamlIdentity: jest.fn(), + addWorkspace: jest.fn(), + confirmMembership: jest.fn(), + generateTokensPair: jest.fn().mockResolvedValue({ + accessToken: 'test-access-token', + refreshToken: 'test-refresh-token', + }), + } as any; + } + + beforeEach(() => { + /** + * Clear all mocks + */ + jest.clearAllMocks(); + + /** + * Create fresh store instance for each test + */ + samlStore = new MemorySamlStateStore(); + samlStore.clear(); + + /** + * Setup environment variables + */ + process.env.API_URL = 'https://api.example.com'; + process.env.GARAGE_URL = 'https://garage.example.com'; + process.env.SSO_SP_ENTITY_ID = 'urn:hawk:tracker:saml'; + + /** + * Mock factories + */ + mockWorkspacesFactory = { + findById: jest.fn(), + }; + + mockUsersFactory = { + findBySamlIdentity: jest.fn(), + findByEmail: jest.fn(), + create: jest.fn(), + }; + + mockFactories = { + workspacesFactory: mockWorkspacesFactory as any, + usersFactory: mockUsersFactory as any, + projectsFactory: {} as any, + plansFactory: {} as any, + businessOperationsFactory: {} as any, + releasesFactory: {} as any, + }; + + /** + * Mock SamlService + */ + mockSamlService = { + generateAuthnRequest: jest.fn(), + validateAndParseResponse: jest.fn(), + } as any; + + (SamlService as jest.Mock).mockImplementation(() => mockSamlService); + + /** + * Create controller with store + */ + controller = new SamlController(mockFactories, samlStore); + + /** + * Mock Express Request + */ + mockReq = { + params: {}, + query: {}, + body: {}, + }; + + /** + * Mock Express Response + */ + mockRes = { + status: jest.fn().mockReturnThis(), + json: jest.fn().mockReturnThis(), + redirect: 
jest.fn().mockReturnThis(), + }; + }); + + afterEach(() => { + /** + * Clean up environment + */ + Reflect.deleteProperty(process.env, 'API_URL'); + Reflect.deleteProperty(process.env, 'GARAGE_URL'); + Reflect.deleteProperty(process.env, 'SSO_SP_ENTITY_ID'); + }); + + describe('initiateLogin', () => { + const testReturnUrl = '/workspace/test'; + + beforeEach(() => { + mockReq.params = { workspaceId: testWorkspaceId }; + mockReq.query = { returnUrl: testReturnUrl }; + }); + + it('should redirect to IdP with SAMLRequest and RelayState when SSO is enabled', async () => { + const workspace = createMockWorkspace(); + mockWorkspacesFactory.findById.mockResolvedValue(workspace); + + const mockRequestId = '_test-request-id-12345'; + const mockEncodedRequest = 'encoded-saml-request'; + mockSamlService.generateAuthnRequest.mockResolvedValue({ + requestId: mockRequestId, + encodedRequest: mockEncodedRequest, + }); + + await controller.initiateLogin(mockReq as Request, mockRes as Response); + + /** + * Verify workspace was fetched + */ + expect(mockWorkspacesFactory.findById).toHaveBeenCalledWith(testWorkspaceId); + + /** + * Verify AuthnRequest was generated + */ + expect(mockSamlService.generateAuthnRequest).toHaveBeenCalledWith( + testWorkspaceId, + expect.stringContaining(`/auth/sso/saml/${testWorkspaceId}/acs`), + expect.any(String), + testSamlConfig + ); + + /** + * Verify redirect to IdP with SAMLRequest and RelayState + */ + expect(mockRes.redirect).toHaveBeenCalledWith( + expect.stringContaining('https://idp.example.com/sso') // got from testSamlConfig.ssoUrl + ); + + const redirectUrl = new URL((mockRes.redirect as jest.Mock).mock.calls[0][0]); + expect(redirectUrl.searchParams.get('SAMLRequest')).toBe(mockEncodedRequest); + expect(redirectUrl.searchParams.get('RelayState')).toBeTruthy(); + + /** + * Verify AuthnRequest ID was saved by checking it can be validated + */ + expect(await samlStore.validateAndConsumeAuthnRequest(mockRequestId, 
testWorkspaceId)).toBe(true); + }); + + it('should use default returnUrl when not provided', async () => { + const workspace = createMockWorkspace(); + mockWorkspacesFactory.findById.mockResolvedValue(workspace); + mockReq.query = {}; + + const mockRequestId = '_test-request-id-12345'; + mockSamlService.generateAuthnRequest.mockResolvedValue({ + requestId: mockRequestId, + encodedRequest: 'encoded-saml-request', + }); + + await controller.initiateLogin(mockReq as Request, mockRes as Response); + + /** + * Verify redirect contains RelayState + */ + const redirectCall = (mockRes.redirect as jest.Mock).mock.calls[0][0]; + const redirectUrl = new URL(redirectCall); + const relayStateId = redirectUrl.searchParams.get('RelayState'); + expect(relayStateId).toBeTruthy(); + + /** + * Verify that default returnUrl was saved in RelayState + * Default returnUrl is `/workspace/${workspaceId}` + */ + const relayState = await samlStore.getRelayState(relayStateId!); + expect(relayState).not.toBeNull(); + expect(relayState?.returnUrl).toBe(`/workspace/${testWorkspaceId}`); + expect(relayState?.workspaceId).toBe(testWorkspaceId); + }); + + it('should return 400 error when workspace is not found', async () => { + mockWorkspacesFactory.findById.mockResolvedValue(null); + + await controller.initiateLogin(mockReq as Request, mockRes as Response); + + expect(mockRes.status).toHaveBeenCalledWith(400); + expect(mockRes.redirect).not.toHaveBeenCalled(); + }); + + it('should return 400 error when workspace exists but SSO is not configured', async () => { + const workspace = createMockWorkspace({ sso: undefined }); + mockWorkspacesFactory.findById.mockResolvedValue(workspace); + + await controller.initiateLogin(mockReq as Request, mockRes as Response); + + expect(mockRes.status).toHaveBeenCalledWith(400); + expect(mockRes.json).toHaveBeenCalledWith({ + error: 'SSO is not enabled for this workspace', + }); + expect(mockRes.redirect).not.toHaveBeenCalled(); + }); + + it('should return 400 error 
when SSO is disabled', async () => { + const workspace = createMockWorkspace({ + sso: { + enabled: false, + enforced: false, + type: 'saml', + saml: testSamlConfig, + }, + }); + mockWorkspacesFactory.findById.mockResolvedValue(workspace); + + await controller.initiateLogin(mockReq as Request, mockRes as Response); + + expect(mockRes.status).toHaveBeenCalledWith(400); + expect(mockRes.json).toHaveBeenCalledWith({ + error: 'SSO is not enabled for this workspace', + }); + }); + }); + + describe('handleAcs', () => { + const testSamlResponse = 'base64-encoded-saml-response'; + const testRelayStateId = 'test-relay-state-id'; + const testNameId = 'user@idp.example.com'; + const testEmail = 'user@example.com'; + const testRequestId = '_test-request-id-12345'; + + const mockSamlResponseData = { + nameId: testNameId, + email: testEmail, + name: 'Test User', + inResponseTo: testRequestId, + }; + + beforeEach(() => { + mockReq.params = { workspaceId: testWorkspaceId }; + mockReq.body = { + SAMLResponse: testSamlResponse, + RelayState: testRelayStateId, + }; + }); + + it('should process SAML response and redirect to frontend with tokens', async () => { + const workspace = createMockWorkspace(); + const user = createMockUser(); + + /** + * Setup test data + */ + const testReturnUrl = '/workspace/test'; + const expectedCallbackPath = `/login/sso/${testWorkspaceId}`; + + mockWorkspacesFactory.findById.mockResolvedValue(workspace); + mockUsersFactory.findBySamlIdentity.mockResolvedValue(user); + mockSamlService.validateAndParseResponse.mockResolvedValue(mockSamlResponseData); + + /** + * Setup samlStore to return valid state for tests + */ + await samlStore.saveRelayState(testRelayStateId, { + returnUrl: testReturnUrl, + workspaceId: testWorkspaceId, + }); + await samlStore.saveAuthnRequest(testRequestId, testWorkspaceId); + + await controller.handleAcs(mockReq as Request, mockRes as Response); + + /** + * Verify workspace was fetched + */ + 
expect(mockWorkspacesFactory.findById).toHaveBeenCalledWith(testWorkspaceId); + + /** + * Verify SAML response was validated + */ + expect(mockSamlService.validateAndParseResponse).toHaveBeenCalledWith( + testSamlResponse, + testWorkspaceId, + expect.stringContaining(`/auth/sso/saml/${testWorkspaceId}/acs`), + testSamlConfig + ); + + /** + * Verify InResponseTo validation was performed + * (samlStore is singleton, validation happens internally) + */ + + /** + * Verify user lookup + */ + expect(mockUsersFactory.findBySamlIdentity).toHaveBeenCalledWith( + testWorkspaceId, + testNameId + ); + + /** + * Verify tokens were generated + */ + expect(user.generateTokensPair).toHaveBeenCalled(); + + /** + * Verify redirect to Garage SSO callback page with tokens and returnUrl + * GARAGE_URL is set in beforeEach: 'https://garage.example.com' + */ + expect(mockRes.redirect).toHaveBeenCalledWith( + expect.stringContaining(expectedCallbackPath) + ); + + const redirectUrl = new URL((mockRes.redirect as jest.Mock).mock.calls[0][0]); + expect(redirectUrl.pathname).toBe(expectedCallbackPath); + expect(redirectUrl.searchParams.get('access_token')).toBe('test-access-token'); + expect(redirectUrl.searchParams.get('refresh_token')).toBe('test-refresh-token'); + expect(redirectUrl.searchParams.get('returnUrl')).toBe(testReturnUrl); + }); + + it('should return 400 error when workspace is not found', async () => { + mockWorkspacesFactory.findById.mockResolvedValue(null); + + await controller.handleAcs(mockReq as Request, mockRes as Response); + + expect(mockRes.status).toHaveBeenCalledWith(400); + expect(mockRes.json).toHaveBeenCalledWith({ + error: 'SSO is not enabled for this workspace', + }); + }); + + it('should return 400 error when SSO is not enabled', async () => { + const workspace = createMockWorkspace({ sso: undefined }); + mockWorkspacesFactory.findById.mockResolvedValue(workspace); + + await controller.handleAcs(mockReq as Request, mockRes as Response); + + 
expect(mockRes.status).toHaveBeenCalledWith(400); + expect(mockRes.json).toHaveBeenCalledWith({ + error: 'SSO is not enabled for this workspace', + }); + }); + + it('should return 400 error when SAML validation fails', async () => { + const workspace = createMockWorkspace(); + mockWorkspacesFactory.findById.mockResolvedValue(workspace); + mockSamlService.validateAndParseResponse.mockRejectedValue( + new Error('Invalid signature') + ); + + await controller.handleAcs(mockReq as Request, mockRes as Response); + + expect(mockRes.status).toHaveBeenCalledWith(400); + expect(mockRes.json).toHaveBeenCalledWith({ + error: 'Invalid SAML response', + }); + expect(mockRes.redirect).not.toHaveBeenCalled(); + }); + + it('should return 400 error when InResponseTo validation fails', async () => { + const workspace = createMockWorkspace(); + mockWorkspacesFactory.findById.mockResolvedValue(workspace); + mockSamlService.validateAndParseResponse.mockResolvedValue(mockSamlResponseData); + + /** + * Don't save AuthnRequest, so validation will fail + */ + + await controller.handleAcs(mockReq as Request, mockRes as Response); + + expect(mockRes.status).toHaveBeenCalledWith(400); + expect(mockRes.json).toHaveBeenCalledWith({ + error: 'Invalid SAML response: InResponseTo validation failed', + }); + }); + + it('should create user with JIT provisioning when user not found', async () => { + const workspace = createMockWorkspace(); + const newUser = createMockUser({ email: testEmail }); + + mockWorkspacesFactory.findById.mockResolvedValue(workspace); + mockUsersFactory.findBySamlIdentity.mockResolvedValue(null); + mockUsersFactory.findByEmail.mockResolvedValue(null); + mockUsersFactory.create.mockResolvedValue(newUser); + mockSamlService.validateAndParseResponse.mockResolvedValue(mockSamlResponseData); + + /** + * Setup samlStore with valid state + */ + await samlStore.saveRelayState(testRelayStateId, { + returnUrl: '/workspace/test', + workspaceId: testWorkspaceId, + }); + await 
samlStore.saveAuthnRequest(testRequestId, testWorkspaceId); + (workspace.getMemberInfo as jest.Mock).mockResolvedValue(null); + + await controller.handleAcs(mockReq as Request, mockRes as Response); + + /** + * Verify user was created + */ + expect(mockUsersFactory.create).toHaveBeenCalledWith(testEmail, undefined, undefined); + + /** + * Verify SAML identity was linked + */ + expect(newUser.linkSamlIdentity).toHaveBeenCalledWith( + testWorkspaceId, + testNameId, + testEmail + ); + + /** + * Verify user was added to workspace + */ + expect(workspace.addMember).toHaveBeenCalledWith(newUser._id.toString()); + expect(newUser.addWorkspace).toHaveBeenCalledWith(testWorkspaceId); + + expect(mockRes.redirect).toHaveBeenCalled(); + }); + + it('should link existing user when found by email', async () => { + const workspace = createMockWorkspace(); + const existingUser = createMockUser({ email: testEmail }); + + mockWorkspacesFactory.findById.mockResolvedValue(workspace); + mockUsersFactory.findBySamlIdentity.mockResolvedValue(null); + mockUsersFactory.findByEmail.mockResolvedValue(existingUser); + mockSamlService.validateAndParseResponse.mockResolvedValue(mockSamlResponseData); + + /** + * Setup samlStore with valid state + */ + await samlStore.saveRelayState(testRelayStateId, { + returnUrl: '/workspace/test', + workspaceId: testWorkspaceId, + }); + await samlStore.saveAuthnRequest(testRequestId, testWorkspaceId); + (workspace.getMemberInfo as jest.Mock).mockResolvedValue(null); + + await controller.handleAcs(mockReq as Request, mockRes as Response); + + /** + * Verify user was not created + */ + expect(mockUsersFactory.create).not.toHaveBeenCalled(); + + /** + * Verify SAML identity was linked to existing user + */ + expect(existingUser.linkSamlIdentity).toHaveBeenCalledWith( + testWorkspaceId, + testNameId, + testEmail + ); + }); + + it('should confirm pending membership when user is pending', async () => { + const workspace = createMockWorkspace(); + const user = 
createMockUser(); + + mockWorkspacesFactory.findById.mockResolvedValue(workspace); + mockUsersFactory.findBySamlIdentity.mockResolvedValue(null); + mockUsersFactory.findByEmail.mockResolvedValue(user); + mockSamlService.validateAndParseResponse.mockResolvedValue(mockSamlResponseData); + + /** + * Setup samlStore with valid state + */ + await samlStore.saveRelayState(testRelayStateId, { + returnUrl: '/workspace/test', + workspaceId: testWorkspaceId, + }); + await samlStore.saveAuthnRequest(testRequestId, testWorkspaceId); + (workspace.getMemberInfo as jest.Mock).mockResolvedValue({ + userEmail: testEmail, + }); + + /** + * Mock isPendingMember static method + */ + const isPendingMemberSpy = jest.spyOn(WorkspaceModel, 'isPendingMember').mockReturnValue(true); + + await controller.handleAcs(mockReq as Request, mockRes as Response); + + /** + * Restore mock after test + */ + isPendingMemberSpy.mockRestore(); + + /** + * Verify pending membership was confirmed + */ + expect(workspace.confirmMembership).toHaveBeenCalledWith(user); + expect(user.confirmMembership).toHaveBeenCalledWith(testWorkspaceId); + }); + + it('should use default returnUrl when RelayState is not found', async () => { + const workspace = createMockWorkspace(); + const user = createMockUser(); + + mockWorkspacesFactory.findById.mockResolvedValue(workspace); + mockUsersFactory.findBySamlIdentity.mockResolvedValue(user); + mockSamlService.validateAndParseResponse.mockResolvedValue(mockSamlResponseData); + + /** + * Setup samlStore with AuthnRequest but no RelayState + */ + await samlStore.saveAuthnRequest(testRequestId, testWorkspaceId); + + await controller.handleAcs(mockReq as Request, mockRes as Response); + + /** + * Verify redirect to Garage SSO callback page with default returnUrl + */ + const expectedCallbackPath = `/login/sso/${testWorkspaceId}`; + const defaultReturnUrl = `/workspace/${testWorkspaceId}`; + + expect(mockRes.redirect).toHaveBeenCalledWith( + 
expect.stringContaining(expectedCallbackPath) + ); + + const redirectUrl = new URL((mockRes.redirect as jest.Mock).mock.calls[0][0]); + expect(redirectUrl.pathname).toBe(expectedCallbackPath); + expect(redirectUrl.searchParams.get('returnUrl')).toBe(defaultReturnUrl); + }); + }); +}); diff --git a/test/sso/saml/service.test.ts b/test/sso/saml/service.test.ts new file mode 100644 index 00000000..425442e5 --- /dev/null +++ b/test/sso/saml/service.test.ts @@ -0,0 +1,392 @@ +import '../../../src/env-test'; +import SamlService from '../../../src/sso/saml/service'; +import { SamlConfig } from '../../../src/sso/types'; +import { SamlValidationError, SamlValidationErrorType } from '../../../src/sso/saml/types'; +import * as nodeSaml from '@node-saml/node-saml'; + +/** + * Mock @node-saml/node-saml + */ +jest.mock('@node-saml/node-saml'); + +describe('SamlService', () => { + let samlService: SamlService; + const testWorkspaceId = '507f1f77bcf86cd799439011'; + const testAcsUrl = 'https://api.example.com/auth/sso/saml/507f1f77bcf86cd799439011/acs'; + + /** + * Test SAML configuration + */ + const testSamlConfig: SamlConfig = { + idpEntityId: 'urn:test:idp', + ssoUrl: 'https://idp.example.com/sso', + x509Cert: '-----BEGIN CERTIFICATE-----\nTEST_CERTIFICATE\n-----END CERTIFICATE-----', + nameIdFormat: 'urn:oasis:names:tc:SAML:1.1:nameid-format:emailAddress', + attributeMapping: { + email: 'http://schemas.xmlsoap.org/ws/2005/05/identity/claims/emailaddress', + name: 'http://schemas.xmlsoap.org/ws/2005/05/identity/claims/name', + }, + }; + + const mockSamlInstance = { + validatePostResponseAsync: jest.fn(), + getAuthorizeMessageAsync: jest.fn(), + }; + + beforeEach(() => { + jest.clearAllMocks(); + (nodeSaml.SAML as jest.Mock).mockImplementation(() => mockSamlInstance); + process.env.SSO_SP_ENTITY_ID = 'urn:hawk:tracker:saml'; + samlService = new SamlService(); + }); + + afterEach(() => { + /** + * Restore env + */ + Reflect.deleteProperty(process.env, 'SSO_SP_ENTITY_ID'); + }); 
+ + describe('generateAuthnRequest', () => { + const testRelayState = 'test-relay-state-123'; + + /** + * Helper to create a mock SAML AuthnRequest (deflated + base64 encoded) + */ + function createMockEncodedRequest(requestId: string): string { + const zlib = require('zlib'); + const xml = ` + + urn:hawk:tracker:saml + `; + + const deflated = zlib.deflateRawSync(xml); + + return deflated.toString('base64'); + } + + it('should generate AuthnRequest and return request ID', async () => { + const mockRequestId = '_test-request-id-12345'; + const mockEncodedRequest = createMockEncodedRequest(mockRequestId); + + mockSamlInstance.getAuthorizeMessageAsync.mockResolvedValue({ + SAMLRequest: mockEncodedRequest, + RelayState: testRelayState, + }); + + const result = await samlService.generateAuthnRequest( + testWorkspaceId, + testAcsUrl, + testRelayState, + testSamlConfig + ); + + expect(result.requestId).toBe(mockRequestId); + expect(result.encodedRequest).toBe(mockEncodedRequest); + }); + + it('should call getAuthorizeMessageAsync with correct relay state', async () => { + const mockRequestId = '_another-request-id'; + const mockEncodedRequest = createMockEncodedRequest(mockRequestId); + + mockSamlInstance.getAuthorizeMessageAsync.mockResolvedValue({ + SAMLRequest: mockEncodedRequest, + }); + + await samlService.generateAuthnRequest( + testWorkspaceId, + testAcsUrl, + testRelayState, + testSamlConfig + ); + + expect(mockSamlInstance.getAuthorizeMessageAsync).toHaveBeenCalledWith( + testRelayState, + undefined, + {} + ); + }); + + it('should throw error when SAMLRequest is not returned', async () => { + mockSamlInstance.getAuthorizeMessageAsync.mockResolvedValue({ + /** + * No SAMLRequest in response + */ + }); + + await expect( + samlService.generateAuthnRequest( + testWorkspaceId, + testAcsUrl, + testRelayState, + testSamlConfig + ) + ).rejects.toThrow('Failed to generate SAML AuthnRequest'); + }); + + it('should throw error when request ID cannot be extracted', async () 
=> { + const zlib = require('zlib'); + /** + * Invalid XML without ID attribute + */ + const invalidXml = 'no id here'; + const deflated = zlib.deflateRawSync(invalidXml); + const invalidEncodedRequest = deflated.toString('base64'); + + mockSamlInstance.getAuthorizeMessageAsync.mockResolvedValue({ + SAMLRequest: invalidEncodedRequest, + }); + + await expect( + samlService.generateAuthnRequest( + testWorkspaceId, + testAcsUrl, + testRelayState, + testSamlConfig + ) + ).rejects.toThrow('Failed to extract request ID from AuthnRequest'); + }); + }); + + describe('validateAndParseResponse', () => { + const testSamlResponse = 'base64EncodedSamlResponse'; + + it('should parse valid SAML Response and extract user data', async () => { + /** + * Mock successful SAML validation with all required attributes + */ + mockSamlInstance.validatePostResponseAsync.mockResolvedValue({ + profile: { + nameID: 'user-name-id-123', + inResponseTo: '_request-id-123', + 'http://schemas.xmlsoap.org/ws/2005/05/identity/claims/emailaddress': 'user@example.com', + 'http://schemas.xmlsoap.org/ws/2005/05/identity/claims/name': 'John Doe', + }, + }); + + const result = await samlService.validateAndParseResponse( + testSamlResponse, + testWorkspaceId, + testAcsUrl, + testSamlConfig + ); + + expect(result).toEqual({ + nameId: 'user-name-id-123', + email: 'user@example.com', + name: 'John Doe', + inResponseTo: '_request-id-123', + }); + }); + + it('should work without optional name attribute', async () => { + mockSamlInstance.validatePostResponseAsync.mockResolvedValue({ + profile: { + nameID: 'user-name-id-123', + 'http://schemas.xmlsoap.org/ws/2005/05/identity/claims/emailaddress': 'user@example.com', + /** + * name attribute is not provided by IdP + */ + 'http://schemas.xmlsoap.org/ws/2005/05/identity/claims/name': undefined, + }, + }); + + const result = await samlService.validateAndParseResponse( + testSamlResponse, + testWorkspaceId, + testAcsUrl, + testSamlConfig + ); + + 
expect(result.nameId).toBe('user-name-id-123'); + expect(result.email).toBe('user@example.com'); + expect(result.name).toBeUndefined(); + }); + + it('should throw INVALID_SIGNATURE error when signature validation fails', async () => { + mockSamlInstance.validatePostResponseAsync.mockRejectedValue( + new Error('Invalid signature') + ); + + await expect( + samlService.validateAndParseResponse( + testSamlResponse, + testWorkspaceId, + testAcsUrl, + testSamlConfig + ) + ).rejects.toThrow(SamlValidationError); + + try { + await samlService.validateAndParseResponse( + testSamlResponse, + testWorkspaceId, + testAcsUrl, + testSamlConfig + ); + } catch (error) { + expect(error).toBeInstanceOf(SamlValidationError); + expect((error as SamlValidationError).type).toBe(SamlValidationErrorType.INVALID_SIGNATURE); + } + }); + + it('should throw EXPIRED_ASSERTION error when assertion is expired', async () => { + mockSamlInstance.validatePostResponseAsync.mockRejectedValue( + new Error('SAML assertion NotOnOrAfter condition not met') + ); + + const promise = samlService.validateAndParseResponse( + testSamlResponse, + testWorkspaceId, + testAcsUrl, + testSamlConfig + ); + + await expect(promise).rejects.toThrow(SamlValidationError); + await expect(promise).rejects.toMatchObject({ + type: SamlValidationErrorType.EXPIRED_ASSERTION, + }); + }); + + it('should throw INVALID_AUDIENCE error when audience validation fails', async () => { + mockSamlInstance.validatePostResponseAsync.mockRejectedValue( + new Error('SAML Audience not valid') + ); + + const promise = samlService.validateAndParseResponse( + testSamlResponse, + testWorkspaceId, + testAcsUrl, + testSamlConfig + ); + + await expect(promise).rejects.toThrow(SamlValidationError); + await expect(promise).rejects.toMatchObject({ + type: SamlValidationErrorType.INVALID_AUDIENCE, + }); + }); + + it('should throw INVALID_NAME_ID error when NameID is missing', async () => { + mockSamlInstance.validatePostResponseAsync.mockResolvedValue({ + 
profile: { + /** + * No nameID in profile + */ + 'http://schemas.xmlsoap.org/ws/2005/05/identity/claims/emailaddress': 'user@example.com', + }, + }); + + const promise = samlService.validateAndParseResponse( + testSamlResponse, + testWorkspaceId, + testAcsUrl, + testSamlConfig + ); + + await expect(promise).rejects.toThrow(SamlValidationError); + await expect(promise).rejects.toMatchObject({ + type: SamlValidationErrorType.INVALID_NAME_ID, + }); + }); + + it('should throw MISSING_EMAIL error when email attribute is not found', async () => { + mockSamlInstance.validatePostResponseAsync.mockResolvedValue({ + profile: { + nameID: 'user-name-id-123', + /** + * Wrong attribute name, email attribute is missing + */ + 'wrong-attribute': 'user@example.com', + }, + }); + + const promise = samlService.validateAndParseResponse( + testSamlResponse, + testWorkspaceId, + testAcsUrl, + testSamlConfig + ); + + await expect(promise).rejects.toThrow(SamlValidationError); + await expect(promise).rejects.toMatchObject({ + type: SamlValidationErrorType.MISSING_EMAIL, + }); + }); + + it('should throw INVALID_IN_RESPONSE_TO when InResponseTo does not match expected request ID', async () => { + mockSamlInstance.validatePostResponseAsync.mockResolvedValue({ + profile: { + nameID: 'user-name-id-123', + inResponseTo: '_different-request-id', + 'http://schemas.xmlsoap.org/ws/2005/05/identity/claims/emailaddress': 'user@example.com', + }, + }); + + const promise = samlService.validateAndParseResponse( + testSamlResponse, + testWorkspaceId, + testAcsUrl, + testSamlConfig, + '_expected-request-id' + ); + + await expect(promise).rejects.toThrow(SamlValidationError); + await expect(promise).rejects.toMatchObject({ + type: SamlValidationErrorType.INVALID_IN_RESPONSE_TO, + context: { + expected: '_expected-request-id', + received: '_different-request-id', + }, + }); + }); + + it('should validate InResponseTo when expectedRequestId is provided', async () => { + 
mockSamlInstance.validatePostResponseAsync.mockResolvedValue({ + profile: { + nameID: 'user-name-id-123', + inResponseTo: '_expected-request-id', + 'http://schemas.xmlsoap.org/ws/2005/05/identity/claims/emailaddress': 'user@example.com', + }, + }); + + const result = await samlService.validateAndParseResponse( + testSamlResponse, + testWorkspaceId, + testAcsUrl, + testSamlConfig, + '_expected-request-id' + ); + + expect(result.inResponseTo).toBe('_expected-request-id'); + }); + + it('should handle email as array attribute', async () => { + mockSamlInstance.validatePostResponseAsync.mockResolvedValue({ + profile: { + nameID: 'user-name-id-123', + /** + * Some IdPs return attributes as arrays + */ + 'http://schemas.xmlsoap.org/ws/2005/05/identity/claims/emailaddress': ['user@example.com', 'secondary@example.com'], + }, + }); + + const result = await samlService.validateAndParseResponse( + testSamlResponse, + testWorkspaceId, + testAcsUrl, + testSamlConfig + ); + + /** + * Should use first email from array + */ + expect(result.email).toBe('user@example.com'); + }); + }); +}); diff --git a/test/sso/saml/store.test.ts b/test/sso/saml/store.test.ts new file mode 100644 index 00000000..e648c8c7 --- /dev/null +++ b/test/sso/saml/store.test.ts @@ -0,0 +1,178 @@ +import '../../../src/env-test'; +import { MemorySamlStateStore } from '../../../src/sso/saml/store/memory.store'; + +describe('SamlStateStore', () => { + let SamlStateStore: MemorySamlStateStore; + + beforeEach(() => { + /** + * Create fresh instance for each test + */ + SamlStateStore = new MemorySamlStateStore(); + SamlStateStore.clear(); + }); + + afterEach(() => { + SamlStateStore.stopCleanupTimer(); + }); + + describe('RelayState', () => { + const testStateId = 'test-state-id-123'; + const testData = { + returnUrl: '/workspace/abc123', + workspaceId: '507f1f77bcf86cd799439011', + }; + + it('should save and retrieve RelayState', async () => { + await SamlStateStore.saveRelayState(testStateId, testData); + + 
const result = await SamlStateStore.getRelayState(testStateId); + + expect(result).toEqual(testData); + }); + + it('should return null for non-existent RelayState', async () => { + const result = await SamlStateStore.getRelayState('non-existent-id'); + + expect(result).toBeNull(); + }); + + it('should consume (delete) RelayState after retrieval (prevent replay)', async () => { + await SamlStateStore.saveRelayState(testStateId, testData); + + /** + * First retrieval should return data + */ + const firstResult = await SamlStateStore.getRelayState(testStateId); + + expect(firstResult).toEqual(testData); + + /** + * Second retrieval should return null (consumed) + */ + const secondResult = await SamlStateStore.getRelayState(testStateId); + + expect(secondResult).toBeNull(); + }); + + it('should return null for expired RelayState', async () => { + /** + * Mock Date.now to simulate expiration + */ + const originalDateNow = Date.now; + const startTime = 1000000000000; + + Date.now = jest.fn().mockReturnValue(startTime); + await SamlStateStore.saveRelayState(testStateId, testData); + + /** + * Move time forward by 6 minutes (past 5 min TTL) + */ + Date.now = jest.fn().mockReturnValue(startTime + 6 * 60 * 1000); + const result = await SamlStateStore.getRelayState(testStateId); + + expect(result).toBeNull(); + + /** + * Restore Date.now + */ + Date.now = originalDateNow; + }); + }); + + describe('AuthnRequest', () => { + const testRequestId = '_request-id-abc123'; + const testWorkspaceId = '507f1f77bcf86cd799439011'; + + it('should save and validate AuthnRequest', async () => { + await SamlStateStore.saveAuthnRequest(testRequestId, testWorkspaceId); + + const result = await SamlStateStore.validateAndConsumeAuthnRequest( + testRequestId, + testWorkspaceId + ); + + expect(result).toBe(true); + }); + + it('should return false for non-existent AuthnRequest', async () => { + const result = await SamlStateStore.validateAndConsumeAuthnRequest( + 'non-existent-request', + 
testWorkspaceId + ); + + expect(result).toBe(false); + }); + + it('should return false for wrong workspace ID', async () => { + await SamlStateStore.saveAuthnRequest(testRequestId, testWorkspaceId); + + const result = await SamlStateStore.validateAndConsumeAuthnRequest( + testRequestId, + 'different-workspace-id' + ); + + expect(result).toBe(false); + }); + + it('should consume (delete) AuthnRequest after validation (prevent replay)', async () => { + await SamlStateStore.saveAuthnRequest(testRequestId, testWorkspaceId); + + /** + * First validation should succeed + */ + const firstResult = await SamlStateStore.validateAndConsumeAuthnRequest( + testRequestId, + testWorkspaceId + ); + + expect(firstResult).toBe(true); + + /** + * Second validation should fail (consumed) + */ + const secondResult = await SamlStateStore.validateAndConsumeAuthnRequest( + testRequestId, + testWorkspaceId + ); + + expect(secondResult).toBe(false); + }); + + it('should return false for expired AuthnRequest', async () => { + const originalDateNow = Date.now; + const startTime = 1000000000000; + + Date.now = jest.fn().mockReturnValue(startTime); + await SamlStateStore.saveAuthnRequest(testRequestId, testWorkspaceId); + + /** + * Move time forward by 6 minutes (past 5 min TTL) + */ + Date.now = jest.fn().mockReturnValue(startTime + 6 * 60 * 1000); + const result = await SamlStateStore.validateAndConsumeAuthnRequest( + testRequestId, + testWorkspaceId + ); + + expect(result).toBe(false); + + Date.now = originalDateNow; + }); + }); + + describe('clear', () => { + it('should clear all stored state', async () => { + await SamlStateStore.saveRelayState('state-1', { + returnUrl: '/test', + workspaceId: 'ws-1', + }); + await SamlStateStore.saveAuthnRequest('request-1', 'ws-1'); + + SamlStateStore.clear(); + + expect(await SamlStateStore.getRelayState('state-1')).toBeNull(); + expect(await SamlStateStore.validateAndConsumeAuthnRequest('request-1', 'ws-1')).toBe(false); + }); + }); +}); diff --git 
a/test/sso/saml/utils.test.ts b/test/sso/saml/utils.test.ts new file mode 100644 index 00000000..d9d0b51b --- /dev/null +++ b/test/sso/saml/utils.test.ts @@ -0,0 +1,65 @@ +import '../../../src/env-test'; +import { extractAttribute } from '../../../src/sso/saml/utils'; + +describe('SAML Utils', () => { + describe('extractAttribute', () => { + it('should return string value when attribute is a string', () => { + const attributes = { + email: 'user@example.com', + }; + + const result = extractAttribute(attributes, 'email'); + + expect(result).toBe('user@example.com'); + }); + + it('should return first element when attribute is an array', () => { + const attributes = { + email: ['primary@example.com', 'secondary@example.com'], + }; + + const result = extractAttribute(attributes, 'email'); + + expect(result).toBe('primary@example.com'); + }); + + it('should return undefined when attribute does not exist', () => { + const attributes = { + name: 'John Doe', + }; + + const result = extractAttribute(attributes, 'email'); + + expect(result).toBeUndefined(); + }); + + it('should return undefined when array is empty', () => { + const attributes = { + email: [] as string[], + }; + + const result = extractAttribute(attributes, 'email'); + + expect(result).toBeUndefined(); + }); + + it('should work with SAML-style attribute names', () => { + const attributes = { + 'http://schemas.xmlsoap.org/ws/2005/05/identity/claims/emailaddress': 'user@example.com', + 'http://schemas.xmlsoap.org/ws/2005/05/identity/claims/name': 'John Doe', + }; + + const email = extractAttribute( + attributes, + 'http://schemas.xmlsoap.org/ws/2005/05/identity/claims/emailaddress' + ); + const name = extractAttribute( + attributes, + 'http://schemas.xmlsoap.org/ws/2005/05/identity/claims/name' + ); + + expect(email).toBe('user@example.com'); + expect(name).toBe('John Doe'); + }); + }); +}); diff --git a/test/tsconfig.json b/test/tsconfig.json new file mode 100644 index 00000000..7764034b --- /dev/null +++ 
b/test/tsconfig.json @@ -0,0 +1,15 @@ +{ + "extends": "../tsconfig.json", + "compilerOptions": { + "types": ["jest"], + "noEmit": true + }, + "include": [ + "../src/**/*", + "./**/*" + ], + "exclude": [ + "./integration/**/*" + ] +} + diff --git a/test/utils/testData.ts b/test/utils/testData.ts new file mode 100644 index 00000000..4eb4a4ae --- /dev/null +++ b/test/utils/testData.ts @@ -0,0 +1,25 @@ +/** + * Generic test data generators. + * + * Keep these helpers narrowly scoped and named by intent to avoid mixing concerns + * (e.g. do not use SAML ID generator for emails). + */ + +/** + * Generates a unique test string. + * + * Useful when tests run in parallel and share the same DB: unique values prevent + * collisions and accidental cross-test matches. + * + * Format: `{prefix}-{timestamp}-{random}` + * + * @example const testEmail = generateTestString('factory-test-sso@example.com'); + */ +export function generateTestString(prefix: string): string { + const timestamp = Date.now(); + const random = Math.random().toString(36).substring(2, 9); + + return `${prefix}-${timestamp}-${random}`; +} + + diff --git a/yarn.lock b/yarn.lock index 0782faeb..4f3e19a1 100644 --- a/yarn.lock +++ b/yarn.lock @@ -491,12 +491,12 @@ dependencies: "@types/mongodb" "^3.5.34" -"@hawk.so/types@^0.1.37": - version "0.1.37" - resolved "https://registry.yarnpkg.com/@hawk.so/types/-/types-0.1.37.tgz#e68d822957d86aac4fa1fdec7927a046ce0cf8c8" - integrity sha512-34C+TOWA5oJyOL3W+NXlSyY7u0OKkRu2+tIZ4jSJp0c1/5v+qpEPeo07FlOOHqDRRhMG4/2PAgQCronfF2qWPg== +"@hawk.so/types@^0.4.2": + version "0.4.2" + resolved "https://registry.yarnpkg.com/@hawk.so/types/-/types-0.4.2.tgz#85482495a951de47ba8be88725d56ab2d72184bc" + integrity sha512-0eY/XYhloRiTq3M7d76WrRToVsDjSxXP/gMtBbbNc0qo9RFjmrS4JKsNx52EQVPdBBdi6s6njcGF1DrhYCrQUA== dependencies: - "@types/mongodb" "^3.5.34" + bson "^7.0.0" "@istanbuljs/load-nyc-config@^1.0.0": version "1.1.0" @@ -560,6 +560,13 @@ slash "^3.0.0" strip-ansi "^6.0.0" 
+"@jest/create-cache-key-function@^30.0.0": + version "30.2.0" + resolved "https://registry.yarnpkg.com/@jest/create-cache-key-function/-/create-cache-key-function-30.2.0.tgz#86dbaf8cce43e8a0266180a5236b6f0b3be9d09b" + integrity sha512-44F4l4Enf+MirJN8X/NhdGkl71k5rBYiwdVlo4HxOwbu0sHV8QKrGEedb1VUU4K3W7fBKE0HGfbn7eZm0Ti3zg== + dependencies: + "@jest/types" "30.2.0" + "@jest/environment@^26.6.2": version "26.6.2" resolved "https://registry.yarnpkg.com/@jest/environment/-/environment-26.6.2.tgz#ba364cc72e221e79cc8f0a99555bf5d7577cf92c" @@ -591,6 +598,14 @@ "@jest/types" "^26.6.2" expect "^26.6.2" +"@jest/pattern@30.0.1": + version "30.0.1" + resolved "https://registry.yarnpkg.com/@jest/pattern/-/pattern-30.0.1.tgz#d5304147f49a052900b4b853dedb111d080e199f" + integrity sha512-gWp7NfQW27LaBQz3TITS8L7ZCQ0TLvtmI//4OwlQRx4rnWxcPNIYjxZpDcN4+UlGxgm3jS5QPz8IPTCkb59wZA== + dependencies: + "@types/node" "*" + jest-regex-util "30.0.1" + "@jest/reporters@^26.6.2": version "26.6.2" resolved "https://registry.yarnpkg.com/@jest/reporters/-/reporters-26.6.2.tgz#1f518b99637a5f18307bd3ecf9275f6882a667f6" @@ -623,6 +638,13 @@ optionalDependencies: node-notifier "^8.0.0" +"@jest/schemas@30.0.5": + version "30.0.5" + resolved "https://registry.yarnpkg.com/@jest/schemas/-/schemas-30.0.5.tgz#7bdf69fc5a368a5abdb49fd91036c55225846473" + integrity sha512-DmdYgtezMkh3cpU8/1uyXakv3tJRcmcXxBOcO0tbaozPwpmh4YMsnWrQm9ZmZMfa5ocbxzbFk6O4bDPEc/iAnA== + dependencies: + "@sinclair/typebox" "^0.34.0" + "@jest/source-map@^26.6.2": version "26.6.2" resolved "https://registry.yarnpkg.com/@jest/source-map/-/source-map-26.6.2.tgz#29af5e1e2e324cafccc936f218309f54ab69d535" @@ -674,6 +696,19 @@ source-map "^0.6.1" write-file-atomic "^3.0.0" +"@jest/types@30.2.0": + version "30.2.0" + resolved "https://registry.yarnpkg.com/@jest/types/-/types-30.2.0.tgz#1c678a7924b8f59eafd4c77d56b6d0ba976d62b8" + integrity sha512-H9xg1/sfVvyfU7o3zMfBEjQ1gcsdeTMgqHoYdN79tuLqfTtuu7WckRA1R5whDwOzxaZAeMKTYWqP+WCAi0CHsg== + dependencies: 
+ "@jest/pattern" "30.0.1" + "@jest/schemas" "30.0.5" + "@types/istanbul-lib-coverage" "^2.0.6" + "@types/istanbul-reports" "^3.0.4" + "@types/node" "*" + "@types/yargs" "^17.0.33" + chalk "^4.1.2" + "@jest/types@^26.6.2": version "26.6.2" resolved "https://registry.yarnpkg.com/@jest/types/-/types-26.6.2.tgz#bef5a532030e1d88a2f5a6d933f84e97226ed48e" @@ -753,11 +788,36 @@ semver "^7.3.5" tar "^6.1.11" +"@mongodb-js/saslprep@^1.3.0": + version "1.4.4" + resolved "https://registry.yarnpkg.com/@mongodb-js/saslprep/-/saslprep-1.4.4.tgz#34a946ff6ae142e8f2259b87f2935f8284ba874d" + integrity sha512-p7X/ytJDIdwUfFL/CLOhKgdfJe1Fa8uw9seJYvdOmnP9JBWGWHW69HkOixXS6Wy9yvGf1MbhcS6lVmrhy4jm2g== + dependencies: + sparse-bitfield "^3.0.3" + "@n1ru4l/json-patch-plus@^0.2.0": version "0.2.0" resolved "https://registry.yarnpkg.com/@n1ru4l/json-patch-plus/-/json-patch-plus-0.2.0.tgz#b8fa09fd980c3460dfdc109a7c4cc5590157aa6b" integrity sha512-pLkJy83/rVfDTyQgDSC8GeXAHEdXNHGNJrB1b7wAyGQu0iv7tpMXntKVSqj0+XKNVQbco40SZffNfVALzIt0SQ== +"@node-saml/node-saml@^5.0.1": + version "5.1.0" + resolved "https://registry.yarnpkg.com/@node-saml/node-saml/-/node-saml-5.1.0.tgz#43d61d4ea882f2960a44c7be5ae0030dafea2382" + integrity sha512-t3cJnZ4aC7HhPZ6MGylGZULvUtBOZ6FzuUndaHGXjmIZHXnLfC/7L8a57O9Q9V7AxJGKAiRM5zu2wNm9EsvQpw== + dependencies: + "@types/debug" "^4.1.12" + "@types/qs" "^6.9.18" + "@types/xml-encryption" "^1.2.4" + "@types/xml2js" "^0.4.14" + "@xmldom/is-dom-node" "^1.0.1" + "@xmldom/xmldom" "^0.8.10" + debug "^4.4.0" + xml-crypto "^6.1.2" + xml-encryption "^3.1.0" + xml2js "^0.6.2" + xmlbuilder "^15.1.1" + xpath "^0.0.34" + "@opentelemetry/api@1.9.0", "@opentelemetry/api@^1.4.0": version "1.9.0" resolved "https://registry.yarnpkg.com/@opentelemetry/api/-/api-1.9.0.tgz#d03eba68273dc0f7509e2a3d5cba21eae10379fe" @@ -855,14 +915,19 @@ resolved "https://registry.yarnpkg.com/@redis/time-series/-/time-series-1.1.0.tgz#cba454c05ec201bd5547aaf55286d44682ac8eb5" integrity 
sha512-c1Q99M5ljsIuc4YdaCwfUEXsofakb9c8+Zse2qxTadu8TalLXuAESzLvFAvNVbkmSlvlzIQOLpBCmWI9wTOt+g== -"@shelf/jest-mongodb@^1.2.2": - version "1.3.4" - resolved "https://registry.yarnpkg.com/@shelf/jest-mongodb/-/jest-mongodb-1.3.4.tgz#200bac386cf513bed2d41952b1857689f0b88f31" - integrity sha512-PQe/5jN8wHr30d8422+2CV+XzbJTCFLGxzb0OrwbxrRiNdZA+FFXOqVak1vd3dqk4qogmmqEVQFkwQ4PNHzNgA== +"@shelf/jest-mongodb@^6.0.2": + version "6.0.2" + resolved "https://registry.yarnpkg.com/@shelf/jest-mongodb/-/jest-mongodb-6.0.2.tgz#4a78de55120071b1cbf57cd9f153b1501ecb2586" + integrity sha512-lULA6h73jdKjUwVDQu6r0qzQ8mgLGsBAXdqgNZoAQiEpKcom+RuyDbbMfmn4b5aqImUKnc7wC/AbYpGjdMEX0Q== dependencies: - debug "4.3.2" - mongodb-memory-server "6.9.6" - uuid "8.3.2" + "@swc/jest" "0.2.39" + debug "4.4.1" + mongodb-memory-server "10.3.0" + +"@sinclair/typebox@^0.34.0": + version "0.34.41" + resolved "https://registry.yarnpkg.com/@sinclair/typebox/-/typebox-0.34.41.tgz#aa51a6c1946df2c5a11494a2cdb9318e026db16c" + integrity sha512-6gS8pZzSXdyRHTIqoqSVknxolr1kzfy4/CeDnrzsVz8TTIWUbOBr6gnzOmTYJ3eXQNh4IYHIGi5aIL7sOZ2G/g== "@sinonjs/commons@^1.7.0": version "1.8.3" @@ -883,6 +948,96 @@ resolved "https://registry.yarnpkg.com/@standard-schema/spec/-/spec-1.0.0.tgz#f193b73dc316c4170f2e82a881da0f550d551b9c" integrity sha512-m2bOd0f2RT9k8QJx1JN85cZYyH1RqFBdlwtkSlf4tBDYLCiiZnv1fIIwacK6cqwXavOydf0NPToMQgpKq+dVlA== +"@swc/core-darwin-arm64@1.15.7": + version "1.15.7" + resolved "https://registry.yarnpkg.com/@swc/core-darwin-arm64/-/core-darwin-arm64-1.15.7.tgz#b7ac4660d2d4df324f4f5a6f58a0c3d44d71ff1c" + integrity sha512-+hNVUfezUid7LeSHqnhoC6Gh3BROABxjlDNInuZ/fie1RUxaEX4qzDwdTgozJELgHhvYxyPIg1ro8ibnKtgO4g== + +"@swc/core-darwin-x64@1.15.7": + version "1.15.7" + resolved "https://registry.yarnpkg.com/@swc/core-darwin-x64/-/core-darwin-x64-1.15.7.tgz#4500d361b389459e32a5acacf89426d4865761c2" + integrity sha512-ZAFuvtSYZTuXPcrhanaD5eyp27H8LlDzx2NAeVyH0FchYcuXf0h5/k3GL9ZU6Jw9eQ63R1E8KBgpXEJlgRwZUQ== + 
+"@swc/core-linux-arm-gnueabihf@1.15.7": + version "1.15.7" + resolved "https://registry.yarnpkg.com/@swc/core-linux-arm-gnueabihf/-/core-linux-arm-gnueabihf-1.15.7.tgz#2f7a8dcdf1880a03f38f0a4a2814a254bdf07684" + integrity sha512-K3HTYocpqnOw8KcD8SBFxiDHjIma7G/X+bLdfWqf+qzETNBrzOub/IEkq9UaeupaJiZJkPptr/2EhEXXWryS/A== + +"@swc/core-linux-arm64-gnu@1.15.7": + version "1.15.7" + resolved "https://registry.yarnpkg.com/@swc/core-linux-arm64-gnu/-/core-linux-arm64-gnu-1.15.7.tgz#dba63e6a5ae6139588312049f1897208c2d94c14" + integrity sha512-HCnVIlsLnCtQ3uXcXgWrvQ6SAraskLA9QJo9ykTnqTH6TvUYqEta+TdTdGjzngD6TOE7XjlAiUs/RBtU8Z0t+Q== + +"@swc/core-linux-arm64-musl@1.15.7": + version "1.15.7" + resolved "https://registry.yarnpkg.com/@swc/core-linux-arm64-musl/-/core-linux-arm64-musl-1.15.7.tgz#f23572b6d2f6d18e691111f1f238d32861e22528" + integrity sha512-/OOp9UZBg4v2q9+x/U21Jtld0Wb8ghzBScwhscI7YvoSh4E8RALaJ1msV8V8AKkBkZH7FUAFB7Vbv0oVzZsezA== + +"@swc/core-linux-x64-gnu@1.15.7": + version "1.15.7" + resolved "https://registry.yarnpkg.com/@swc/core-linux-x64-gnu/-/core-linux-x64-gnu-1.15.7.tgz#7efa998cb3199f282583f5055ae3396daba0ba81" + integrity sha512-VBbs4gtD4XQxrHuQ2/2+TDZpPQQgrOHYRnS6SyJW+dw0Nj/OomRqH+n5Z4e/TgKRRbieufipeIGvADYC/90PYQ== + +"@swc/core-linux-x64-musl@1.15.7": + version "1.15.7" + resolved "https://registry.yarnpkg.com/@swc/core-linux-x64-musl/-/core-linux-x64-musl-1.15.7.tgz#d5c88a8c367baf9f6b4215e8f6c9eae680451087" + integrity sha512-kVuy2unodso6p0rMauS2zby8/bhzoGRYxBDyD6i2tls/fEYAE74oP0VPFzxIyHaIjK1SN6u5TgvV9MpyJ5xVug== + +"@swc/core-win32-arm64-msvc@1.15.7": + version "1.15.7" + resolved "https://registry.yarnpkg.com/@swc/core-win32-arm64-msvc/-/core-win32-arm64-msvc-1.15.7.tgz#08e175725f0796337e3299b94b701ebb7bfdc14f" + integrity sha512-uddYoo5Xmo1XKLhAnh4NBIyy5d0xk33x1sX3nIJboFySLNz878ksCFCZ3IBqrt1Za0gaoIWoOSSSk0eNhAc/sw== + +"@swc/core-win32-ia32-msvc@1.15.7": + version "1.15.7" + resolved 
"https://registry.yarnpkg.com/@swc/core-win32-ia32-msvc/-/core-win32-ia32-msvc-1.15.7.tgz#6ad4aeff1bd39f0482e905719f8390d53b072854" + integrity sha512-rqq8JjNMLx3QNlh0aPTtN/4+BGLEHC94rj9mkH1stoNRf3ra6IksNHMHy+V1HUqElEgcZyx+0yeXx3eLOTcoFw== + +"@swc/core-win32-x64-msvc@1.15.7": + version "1.15.7" + resolved "https://registry.yarnpkg.com/@swc/core-win32-x64-msvc/-/core-win32-x64-msvc-1.15.7.tgz#52f7d558144cee9a3cc43ba7e6bd8b83a8acae77" + integrity sha512-4BK06EGdPnuplgcNhmSbOIiLdRgHYX3v1nl4HXo5uo4GZMfllXaCyBUes+0ePRfwbn9OFgVhCWPcYYjMT6hycQ== + +"@swc/core@^1.3.0": + version "1.15.7" + resolved "https://registry.yarnpkg.com/@swc/core/-/core-1.15.7.tgz#630a90c5801c7ed3dfd25d94da6b1eab22e52971" + integrity sha512-kTGB8XI7P+pTKW83tnUEDVP4zduF951u3UAOn5eTi0vyW6MvL56A3+ggMdfuVFtDI0/DsbSzf5z34HVBbuScWw== + dependencies: + "@swc/counter" "^0.1.3" + "@swc/types" "^0.1.25" + optionalDependencies: + "@swc/core-darwin-arm64" "1.15.7" + "@swc/core-darwin-x64" "1.15.7" + "@swc/core-linux-arm-gnueabihf" "1.15.7" + "@swc/core-linux-arm64-gnu" "1.15.7" + "@swc/core-linux-arm64-musl" "1.15.7" + "@swc/core-linux-x64-gnu" "1.15.7" + "@swc/core-linux-x64-musl" "1.15.7" + "@swc/core-win32-arm64-msvc" "1.15.7" + "@swc/core-win32-ia32-msvc" "1.15.7" + "@swc/core-win32-x64-msvc" "1.15.7" + +"@swc/counter@^0.1.3": + version "0.1.3" + resolved "https://registry.yarnpkg.com/@swc/counter/-/counter-0.1.3.tgz#cc7463bd02949611c6329596fccd2b0ec782b0e9" + integrity sha512-e2BR4lsJkkRlKZ/qCHPw9ZaSxc0MVUd7gtbtaB7aMvHeJVYe8sOB8DBZkP2DtISHGSku9sCK6T6cnY0CtXrOCQ== + +"@swc/jest@0.2.39": + version "0.2.39" + resolved "https://registry.yarnpkg.com/@swc/jest/-/jest-0.2.39.tgz#482bee0adb0726fab1487a4f902a278ec563a6b7" + integrity sha512-eyokjOwYd0Q8RnMHri+8/FS1HIrIUKK/sRrFp8c1dThUOfNeCWbLmBP1P5VsKdvmkd25JaH+OKYwEYiAYg9YAA== + dependencies: + "@jest/create-cache-key-function" "^30.0.0" + "@swc/counter" "^0.1.3" + jsonc-parser "^3.2.0" + +"@swc/types@^0.1.25": + version "0.1.25" + resolved 
"https://registry.yarnpkg.com/@swc/types/-/types-0.1.25.tgz#b517b2a60feb37dd933e542d93093719e4cf1078" + integrity sha512-iAoY/qRhNH8a/hBvm3zKj9qQ4oc2+3w1unPJa2XvTK3XjeLXtzcCingVPw/9e5mn1+0yPqxcBGp9Jf0pkfMb1g== + dependencies: + "@swc/counter" "^0.1.3" + "@tootallnate/once@1": version "1.1.2" resolved "https://registry.yarnpkg.com/@tootallnate/once/-/once-1.1.2.tgz#ccb91445360179a04e7fe6aff78c00ffc1eeaf82" @@ -976,7 +1131,7 @@ "@types/connect" "*" "@types/node" "*" -"@types/bson@*", "@types/bson@^4.0.5": +"@types/bson@*": version "4.2.0" resolved "https://registry.yarnpkg.com/@types/bson/-/bson-4.2.0.tgz#a2f71e933ff54b2c3bf267b67fa221e295a33337" integrity sha512-ELCPqAdroMdcuxqwMgUpifQyRoTpyYCNr1V9xKyF40VsBobsj+BbWNRvwGchMgBPGqkw655ypkjj2MEF5ywVwg== @@ -1010,6 +1165,13 @@ resolved "https://registry.yarnpkg.com/@types/cors/-/cors-2.8.12.tgz#6b2c510a7ad7039e98e7b8d3d6598f4359e5c080" integrity sha512-vt+kDhq/M2ayberEtJcIN/hxXy1Pk+59g2FV/ZQceeaTyCtCucjL2Q7FXlFjtWn4n15KCr1NE2lNNFhp0lEThw== +"@types/debug@^4.1.12": + version "4.1.12" + resolved "https://registry.yarnpkg.com/@types/debug/-/debug-4.1.12.tgz#a155f21690871953410df4b6b6f53187f0500917" + integrity sha512-vIChWdVG3LG1SMxEvI/AK+FWJthlrqlTu7fbrlywTkkaONwk/UAGaULXRlf8vkzFBLVm0zkMdCquhL5aOjhXPQ== + dependencies: + "@types/ms" "*" + "@types/debug@^4.1.5": version "4.1.7" resolved "https://registry.yarnpkg.com/@types/debug/-/debug-4.1.7.tgz#7cc0ea761509124709b8b2d1090d8f6c17aadb82" @@ -1094,6 +1256,11 @@ resolved "https://registry.yarnpkg.com/@types/istanbul-lib-coverage/-/istanbul-lib-coverage-2.0.4.tgz#8467d4b3c087805d63580480890791277ce35c44" integrity sha512-z/QT1XN4K4KYuslS23k62yDIDLwLFkzxOuMplDtObz0+y7VqJCaO2o+SPwHCvLFZh7xazvvoor2tA/hPz9ee7g== +"@types/istanbul-lib-coverage@^2.0.6": + version "2.0.6" + resolved "https://registry.yarnpkg.com/@types/istanbul-lib-coverage/-/istanbul-lib-coverage-2.0.6.tgz#7739c232a1fee9b4d3ce8985f314c0c6d33549d7" + integrity 
sha512-2QF/t/auWm0lsy8XtKVPG19v3sSOQlJe/YHZgfjb/KBBHOGSV+J2q/S671rcq9uTBrLAXmZpqJiaQbMT+zNU1w== + "@types/istanbul-lib-report@*": version "3.0.0" resolved "https://registry.yarnpkg.com/@types/istanbul-lib-report/-/istanbul-lib-report-3.0.0.tgz#c14c24f18ea8190c118ee7562b7ff99a36552686" @@ -1108,6 +1275,13 @@ dependencies: "@types/istanbul-lib-report" "*" +"@types/istanbul-reports@^3.0.4": + version "3.0.4" + resolved "https://registry.yarnpkg.com/@types/istanbul-reports/-/istanbul-reports-3.0.4.tgz#0f03e3d2f670fbdac586e34b433783070cc16f54" + integrity sha512-pk2B1NWalF9toCRu6gjBzR69syFjP4Od8WRAX+0mmf9lAjCRicLOWc+ZrxZHx/0XRjotgkF9t6iaMJ+aXcOdZQ== + dependencies: + "@types/istanbul-lib-report" "*" + "@types/jest@^26.0.8": version "26.0.24" resolved "https://registry.yarnpkg.com/@types/jest/-/jest-26.0.24.tgz#943d11976b16739185913a1936e0de0c4a7d595a" @@ -1193,7 +1367,7 @@ resolved "https://registry.yarnpkg.com/@types/mime/-/mime-3.0.0.tgz#e9a9903894405c6a6551f1774df4e64d9804d69c" integrity sha512-fccbsHKqFDXClBZTDLA43zl0+TbxyIwyzIzwwhvoJvhNjOErCdeX2xJbURimv2EbSVUGav001PaCJg4mZxMl4w== -"@types/mongodb@^3.5.34", "@types/mongodb@^3.6.20": +"@types/mongodb@^3.5.34": version "3.6.20" resolved "https://registry.yarnpkg.com/@types/mongodb/-/mongodb-3.6.20.tgz#b7c5c580644f6364002b649af1c06c3c0454e1d2" integrity sha512-WcdpPJCakFzcWWD9juKoZbRtQxKIMYF/JIAM4JrNHrMcnJL6/a2NWjXxW7fo9hxboxxkg+icff8d7+WIEvKgYQ== @@ -1266,6 +1440,11 @@ resolved "https://registry.yarnpkg.com/@types/qs/-/qs-6.9.7.tgz#63bb7d067db107cc1e457c303bc25d511febf6cb" integrity sha512-FGa1F62FT09qcrueBA6qYTrJPVDzah9a+493+o2PCXsesWHIn27G98TsSMs3WPNbZIEj4+VJf6saSFpvD+3Zsw== +"@types/qs@^6.9.18": + version "6.14.0" + resolved "https://registry.yarnpkg.com/@types/qs/-/qs-6.14.0.tgz#d8b60cecf62f2db0fb68e5e006077b9178b85de5" + integrity sha512-eOunJqu0K1923aExK6y8p6fsihYEn/BYuQ4g0CxAAgFc4b/ZLN4CrsRZ55srTdqoiLzU2B2evC+apEIxprEzkQ== + "@types/qs@^6.9.7": version "6.9.17" resolved 
"https://registry.yarnpkg.com/@types/qs/-/qs-6.9.17.tgz#fc560f60946d0aeff2f914eb41679659d3310e1a" @@ -1314,6 +1493,32 @@ resolved "https://registry.yarnpkg.com/@types/uuid/-/uuid-8.3.4.tgz#bd86a43617df0594787d38b735f55c805becf1bc" integrity sha512-c/I8ZRb51j+pYGAu5CrFMRxqZ2ke4y2grEBO5AUjgSkSk+qT2Ea+OdWElz/OiMf5MNpn2b17kuVBwZLQJXzihw== +"@types/webidl-conversions@*": + version "7.0.3" + resolved "https://registry.yarnpkg.com/@types/webidl-conversions/-/webidl-conversions-7.0.3.tgz#1306dbfa53768bcbcfc95a1c8cde367975581859" + integrity sha512-CiJJvcRtIgzadHCYXw7dqEnMNRjhGZlYK05Mj9OyktqV8uVT8fD2BFOB7S1uwBE3Kj2Z+4UyPmFw/Ixgw/LAlA== + +"@types/whatwg-url@^11.0.2": + version "11.0.5" + resolved "https://registry.yarnpkg.com/@types/whatwg-url/-/whatwg-url-11.0.5.tgz#aaa2546e60f0c99209ca13360c32c78caf2c409f" + integrity sha512-coYR071JRaHa+xoEvvYqvnIHaVqaYrLPbsufM9BF63HkwI5Lgmy2QR8Q5K/lYDYo5AK82wOvSOS0UsLTpTG7uQ== + dependencies: + "@types/webidl-conversions" "*" + +"@types/xml-encryption@^1.2.4": + version "1.2.4" + resolved "https://registry.yarnpkg.com/@types/xml-encryption/-/xml-encryption-1.2.4.tgz#0eceea58c82a89f62c0a2dc383a6461dfc2fe1ba" + integrity sha512-I69K/WW1Dv7j6O3jh13z0X8sLWJRXbu5xnHDl9yHzUNDUBtUoBY058eb5s+x/WG6yZC1h8aKdI2EoyEPjyEh+Q== + dependencies: + "@types/node" "*" + +"@types/xml2js@^0.4.14": + version "0.4.14" + resolved "https://registry.yarnpkg.com/@types/xml2js/-/xml2js-0.4.14.tgz#5d462a2a7330345e2309c6b549a183a376de8f9a" + integrity sha512-4YnrRemBShWRO2QjvUin8ESA41rH+9nQGLUGZV/1IDhi3SL9OhdpNC/MrulTWuptXKwhx/aDxE7toV0f/ypIXQ== + dependencies: + "@types/node" "*" + "@types/yargs-parser@*": version "21.0.0" resolved "https://registry.yarnpkg.com/@types/yargs-parser/-/yargs-parser-21.0.0.tgz#0c60e537fa790f5f9472ed2776c2b71ec117351b" @@ -1326,6 +1531,13 @@ dependencies: "@types/yargs-parser" "*" +"@types/yargs@^17.0.33": + version "17.0.35" + resolved 
"https://registry.yarnpkg.com/@types/yargs/-/yargs-17.0.35.tgz#07013e46aa4d7d7d50a49e15604c1c5340d4eb24" + integrity sha512-qUHkeCyQFxMXg79wQfTtfndEC+N9ZZg76HJftDJp+qH2tV7Gj4OJi7l+PiWwJ+pWtW8GwSmqsDj/oymhrTWXjg== + dependencies: + "@types/yargs-parser" "*" + "@typescript-eslint/eslint-plugin@^2.12.0": version "2.34.0" resolved "https://registry.yarnpkg.com/@typescript-eslint/eslint-plugin/-/eslint-plugin-2.34.0.tgz#6f8ce8a46c7dea4a6f1d171d2bb8fbae6dac2be9" @@ -1374,6 +1586,16 @@ resolved "https://registry.yarnpkg.com/@vercel/oidc/-/oidc-3.0.3.tgz#82c2b6dd4d5c3b37dcb1189718cdeb9db402d052" integrity sha512-yNEQvPcVrK9sIe637+I0jD6leluPxzwJKx/Haw6F4H77CdDsszUn5V3o96LPziXkSNE2B83+Z3mjqGKBK/R6Gg== +"@xmldom/is-dom-node@^1.0.1": + version "1.0.1" + resolved "https://registry.yarnpkg.com/@xmldom/is-dom-node/-/is-dom-node-1.0.1.tgz#83b9f3e1260fb008061c6fa787b93a00f9be0629" + integrity sha512-CJDxIgE5I0FH+ttq/Fxy6nRpxP70+e2O048EPe85J2use3XKdatVM7dDVvFNjQudd9B49NPoZ+8PG49zj4Er8Q== + +"@xmldom/xmldom@^0.8.10", "@xmldom/xmldom@^0.8.5": + version "0.8.11" + resolved "https://registry.yarnpkg.com/@xmldom/xmldom/-/xmldom-0.8.11.tgz#b79de2d67389734c57c52595f7a7305e30c2d608" + integrity sha512-cQzWCtO6C8TQiYl1ruKNn2U6Ao4o4WBBcbL61yJl84x+j5sOWWFU9X7DpND8XZG3daDppSsigMdfAIl2upQBRw== + abab@^2.0.3, abab@^2.0.5: version "2.0.6" resolved "https://registry.yarnpkg.com/abab/-/abab-2.0.6.tgz#41b80f2c871d19686216b82309231cfd3cb3d291" @@ -1432,6 +1654,11 @@ agent-base@6: dependencies: debug "4" +agent-base@^7.1.2: + version "7.1.4" + resolved "https://registry.yarnpkg.com/agent-base/-/agent-base-7.1.4.tgz#e3cd76d4c548ee895d3c3fd8dc1f6c5b9032e7a8" + integrity sha512-MnA+YT8fwfJPgBx3m60MNqakm30XOkyIoH1y6huTQvC0PwZG7ki8NacLBcrPbNoo8vEZy7Jpuk7+jMO+CUovTQ== + ai@^5.0.89: version "5.0.89" resolved "https://registry.yarnpkg.com/ai/-/ai-5.0.89.tgz#8929fbc18f247aa9e4442836a12aa84191edf2a4" @@ -1704,6 +1931,13 @@ async-mutex@^0.3.0: dependencies: tslib "^2.3.1" +async-mutex@^0.5.0: + version "0.5.0" + 
resolved "https://registry.yarnpkg.com/async-mutex/-/async-mutex-0.5.0.tgz#353c69a0b9e75250971a64ac203b0ebfddd75482" + integrity sha512-1A94B18jkJ3DYq284ohPxoXbfTA5HsQ7/Mf4DEhcyLx3Bz27Rh59iScbB6EPiP+B+joue6YCxcMXSbFC1tZKwA== + dependencies: + tslib "^2.4.0" + async-retry@^1.2.1: version "1.3.3" resolved "https://registry.yarnpkg.com/async-retry/-/async-retry-1.3.3.tgz#0e7f36c04d8478e7a58bdbed80cedf977785f280" @@ -1769,6 +2003,11 @@ axios@^0.27.2: follow-redirects "^1.14.9" form-data "^4.0.0" +b4a@^1.6.4: + version "1.7.3" + resolved "https://registry.yarnpkg.com/b4a/-/b4a-1.7.3.tgz#24cf7ccda28f5465b66aec2bac69e32809bf112f" + integrity sha512-5Q2mfq2WfGuFp3uS//0s6baOJLMoVduPYVeNmDYxu5OUA1/cBfvr2RIS7vi62LdNj/urk1hfmj867I3qt6uZ7Q== + babel-jest@^26.6.3: version "26.6.3" resolved "https://registry.yarnpkg.com/babel-jest/-/babel-jest-26.6.3.tgz#d87d25cb0037577a0c89f82e5755c5d293c01056" @@ -1835,6 +2074,11 @@ balanced-match@^1.0.0: resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.2.tgz#e83e3a7e3f300b34cb9d87f615fa0cbf357690ee" integrity sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw== +bare-events@^2.7.0: + version "2.8.2" + resolved "https://registry.yarnpkg.com/bare-events/-/bare-events-2.8.2.tgz#7b3e10bd8e1fc80daf38bb516921678f566ab89f" + integrity sha512-riJjyv1/mHLIPX4RwiK+oW9/4c3TEUeORHKefKAKnZ5kyslbN+HXowtbaVEqt4IMUB7OXlfixcs6gsFeo/jhiQ== + base64-js@^1.0.2, base64-js@^1.3.1: version "1.5.1" resolved "https://registry.yarnpkg.com/base64-js/-/base64-js-1.5.1.tgz#1b1b440160a5bf7ad40b650f095963481903930a" @@ -1979,17 +2223,10 @@ bser@2.1.1: dependencies: node-int64 "^0.4.0" -bson@*, bson@^4.6.5: - version "4.6.5" - resolved "https://registry.yarnpkg.com/bson/-/bson-4.6.5.tgz#1a410148c20eef4e40d484878a037a7036e840fb" - integrity sha512-uqrgcjyOaZsHfz7ea8zLRCLe1u+QGUSzMZmvXqO24CDW7DWoW1qiN9folSwa7hSneTSgM2ykDIzF5kcQQ8cwNw== - dependencies: - buffer "^5.6.0" - -bson@^1.1.4: - version "1.1.6" - 
resolved "https://registry.yarnpkg.com/bson/-/bson-1.1.6.tgz#fb819be9a60cd677e0853aee4ca712a785d6618a" - integrity sha512-EvVNVeGo4tHxwi8L6bPj3y3itEvStdwvvlojVxxbyYfoaxJ6keLgrTuKdyfEAszFK+H3olzBuafE0yoh0D1gdg== +bson@*, bson@^1.1.4, bson@^6.10.4, bson@^6.7.0, bson@^7.0.0: + version "6.10.4" + resolved "https://registry.yarnpkg.com/bson/-/bson-6.10.4.tgz#d530733bb5bb16fb25c162e01a3344fab332fd2b" + integrity sha512-WIsKqkSC0ABoBJuT1LEX+2HEvNmNKKgnTAyd0fL8qzK4SH2i9NXg+t08YtdZp/V9IZ33cxe3iV4yM0qg8lMQng== buffer-crc32@~0.2.3: version "0.2.13" @@ -2020,7 +2257,7 @@ buffer@4.9.2: ieee754 "^1.1.4" isarray "^1.0.0" -buffer@^5.5.0, buffer@^5.6.0: +buffer@^5.5.0: version "5.7.1" resolved "https://registry.yarnpkg.com/buffer/-/buffer-5.7.1.tgz#ba62e7c13133053582197160851a8f648e99eed0" integrity sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ== @@ -2089,7 +2326,7 @@ camelcase@^5.0.0, camelcase@^5.3.1: resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-5.3.1.tgz#e3c9b31569e106811df242f715725a1f4c494320" integrity sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg== -camelcase@^6.0.0, camelcase@^6.1.0: +camelcase@^6.0.0, camelcase@^6.1.0, camelcase@^6.3.0: version "6.3.0" resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-6.3.0.tgz#5685b95eb209ac9c0c177467778c9c84df58ba9a" integrity sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA== @@ -2115,7 +2352,7 @@ chalk@^2.0.0, chalk@^2.1.0: escape-string-regexp "^1.0.5" supports-color "^5.3.0" -chalk@^4.0.0, chalk@^4.1.0: +chalk@^4.0.0, chalk@^4.1.0, chalk@^4.1.2: version "4.1.2" resolved "https://registry.yarnpkg.com/chalk/-/chalk-4.1.2.tgz#aac4e2b7734a740867aeb16bf02aad556a1e7a01" integrity sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA== @@ -2396,15 +2633,6 @@ cross-spawn@^7.0.0: shebang-command "^2.0.0" which "^2.0.1" 
-cross-spawn@^7.0.3: - version "7.0.6" - resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-7.0.6.tgz#8a58fe78f00dcd70c370451759dfbfaf03e8ee9f" - integrity sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA== - dependencies: - path-key "^3.1.0" - shebang-command "^2.0.0" - which "^2.0.1" - cssfilter@0.0.10: version "0.0.10" resolved "https://registry.yarnpkg.com/cssfilter/-/cssfilter-0.0.10.tgz#c6d2672632a2e5c83e013e6864a42ce8defd20ae" @@ -2460,12 +2688,12 @@ debug@4, debug@^4.0.1, debug@^4.1.0, debug@^4.1.1: dependencies: ms "2.1.2" -debug@4.3.2: - version "4.3.2" - resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.2.tgz#f0a49c18ac8779e31d4a0c6029dfb76873c7428b" - integrity sha512-mOp8wKcvj7XxC78zLgw/ZA+6TSgkoE2C/ienthhRD298T7UNwAg9diBpLRxC0mOezLl4B0xV7M0cCO6P/O0Xhw== +debug@4.4.1: + version "4.4.1" + resolved "https://registry.yarnpkg.com/debug/-/debug-4.4.1.tgz#e5a8bc6cbc4c6cd3e64308b0693a3d4fa550189b" + integrity sha512-KcKCqiftBJcZr++7ykoDIEwSa3XWowTfNPo92BYxjXiyYEVrUQh2aLyhxBCwww+heortUFxEJYcRzosstTEBYQ== dependencies: - ms "2.1.2" + ms "^2.1.3" debug@^3.2.7: version "3.2.7" @@ -2481,6 +2709,13 @@ debug@^4.2.0: dependencies: ms "^2.1.3" +debug@^4.3.4, debug@^4.4.0, debug@^4.4.3: + version "4.4.3" + resolved "https://registry.yarnpkg.com/debug/-/debug-4.4.3.tgz#c6ae432d9bd9662582fce08709b038c58e9e3d6a" + integrity sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA== + dependencies: + ms "^2.1.3" + decamelize@^1.2.0: version "1.2.0" resolved "https://registry.yarnpkg.com/decamelize/-/decamelize-1.2.0.tgz#f6534d15148269b20352e7bee26f501f9a191290" @@ -3007,6 +3242,13 @@ etag@~1.8.1: resolved "https://registry.yarnpkg.com/etag/-/etag-1.8.1.tgz#41ae2eeb65efa62268aebfea83ac7d79299b0887" integrity sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg== +events-universal@^1.0.0: + version "1.0.1" + resolved 
"https://registry.yarnpkg.com/events-universal/-/events-universal-1.0.1.tgz#b56a84fd611b6610e0a2d0f09f80fdf931e2dfe6" + integrity sha512-LUd5euvbMLpwOF8m6ivPCbhQeSiYVNb8Vs0fQ8QjXo0JTkEHpz8pxdQf0gStltaPpw0Cca8b39KxvK9cfKRiAw== + dependencies: + bare-events "^2.7.0" + events@1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/events/-/events-1.1.1.tgz#9ebdb7635ad099c70dcc4c2a1f5004288e8bd924" @@ -3160,6 +3402,11 @@ fast-deep-equal@^3.1.1: resolved "https://registry.yarnpkg.com/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz#3a7d56b559d6cbc3eb512325244e619a65c6c525" integrity sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q== +fast-fifo@^1.2.0, fast-fifo@^1.3.2: + version "1.3.2" + resolved "https://registry.yarnpkg.com/fast-fifo/-/fast-fifo-1.3.2.tgz#286e31de96eb96d38a97899815740ba2a4f3640c" + integrity sha512-/d9sfos4yxzpwkDkuN7k2SqFKtYNmCTzgfEpz82x34IM9/zc8KGxQoXg1liNC/izpRM/MBdt44Nmx41ZWqk+FQ== + fast-json-stable-stringify@2.x, fast-json-stable-stringify@^2.0.0, fast-json-stable-stringify@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz#874bf69c6f404c2b5d99c481341399fd55892633" @@ -3228,7 +3475,7 @@ finalhandler@1.2.0: statuses "2.0.1" unpipe "~1.0.0" -find-cache-dir@^3.3.1: +find-cache-dir@^3.3.1, find-cache-dir@^3.3.2: version "3.3.2" resolved "https://registry.yarnpkg.com/find-cache-dir/-/find-cache-dir-3.3.2.tgz#b30c5b6eff0730731aea9bbd9dbecbd80256d64b" integrity sha512-wXZV5emFEjrridIgED11OoUKLxiYjAcqot/NJdAkOhlJ+vGzwhOAfcG5OX1jP+S0PcjEn8bdMJv+g2jwQ3Onig== @@ -3286,6 +3533,11 @@ follow-redirects@^1.14.0, follow-redirects@^1.14.9: resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.15.1.tgz#0ca6a452306c9b276e4d3127483e29575e207ad5" integrity sha512-yLAMQs+k0b2m7cVxpS1VKJVvoz7SS9Td1zss3XRwXj+ZDH00RJgnuLx7E44wx02kQLrdM3aOOy+FpzS7+8OizA== +follow-redirects@^1.15.11: + version "1.15.11" + resolved 
"https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.15.11.tgz#777d73d72a92f8ec4d2e410eb47352a56b8e8340" + integrity sha512-deG2P0JfjrTxl50XGCDyfI97ZGVCxIpfKYmfyrQ54n5FO/0gfIES8C/Psl6kWVDolizcaaxZJnTS0QSMxvnsBQ== + for-each@^0.3.3: version "0.3.3" resolved "https://registry.yarnpkg.com/for-each/-/for-each-0.3.3.tgz#69b447e88a0a5d32c3e7084f3f1710034b21376e" @@ -3725,6 +3977,14 @@ https-proxy-agent@^5.0.0: agent-base "6" debug "4" +https-proxy-agent@^7.0.6: + version "7.0.6" + resolved "https://registry.yarnpkg.com/https-proxy-agent/-/https-proxy-agent-7.0.6.tgz#da8dfeac7da130b05c2ba4b59c9b6cd66611a6b9" + integrity sha512-vK9P5/iUfdl95AI+JVyUuIcVtd4ofvtrOr3HNtM2yxC9bnMbEdp3x01OhQNnjb8IJYi38VlTE3mBXwcfvywuSw== + dependencies: + agent-base "^7.1.2" + debug "4" + human-signals@^1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/human-signals/-/human-signals-1.1.1.tgz#c5b1cd14f50aeae09ab6c59fe63ba3395fe4dfa3" @@ -4382,6 +4642,11 @@ jest-pnp-resolver@^1.2.2: resolved "https://registry.yarnpkg.com/jest-pnp-resolver/-/jest-pnp-resolver-1.2.2.tgz#b704ac0ae028a89108a4d040b3f919dfddc8e33c" integrity sha512-olV41bKSMm8BdnuMsewT4jqlZ8+3TCARAXjZGT9jcoSnrfUnRCqnMoF9XEeoWjbzObpqF9dRhHQj0Xb9QdF6/w== +jest-regex-util@30.0.1: + version "30.0.1" + resolved "https://registry.yarnpkg.com/jest-regex-util/-/jest-regex-util-30.0.1.tgz#f17c1de3958b67dfe485354f5a10093298f2a49b" + integrity sha512-jHEQgBXAgc+Gh4g0p3bCevgRCVRkB4VB70zhoAE48gxeSr1hfUOsM/C2WoJgVL7Eyg//hudYENbm3Ne+/dRVVA== + jest-regex-util@^26.0.0: version "26.0.0" resolved "https://registry.yarnpkg.com/jest-regex-util/-/jest-regex-util-26.0.0.tgz#d25e7184b36e39fd466c3bc41be0971e821fee28" @@ -4642,6 +4907,11 @@ json5@^1.0.1: dependencies: minimist "^1.2.0" +jsonc-parser@^3.2.0: + version "3.3.1" + resolved "https://registry.yarnpkg.com/jsonc-parser/-/jsonc-parser-3.3.1.tgz#f2a524b4f7fd11e3d791e559977ad60b98b798b4" + integrity 
sha512-HUgH65KyejrUFPvHFPbqOY0rsFip3Bo5wb4ngvdi1EpCYWUQDC5V+Y7mZws+DLkr4M//zQJoanu1SP+87Dv1oQ== + jsonfile@^6.0.1: version "6.1.0" resolved "https://registry.yarnpkg.com/jsonfile/-/jsonfile-6.1.0.tgz#bc55b2634793c679ec6403094eb13698a6ec0aae" @@ -4756,13 +5026,6 @@ locate-path@^5.0.0: dependencies: p-locate "^4.1.0" -lockfile@^1.0.4: - version "1.0.4" - resolved "https://registry.yarnpkg.com/lockfile/-/lockfile-1.0.4.tgz#07f819d25ae48f87e538e6578b6964a4981a5609" - integrity sha512-cvbTwETRfsFh4nHsL1eGWapU1XFi5Ot9E85sWAwia7Y7EgB7vfqcZhTKZ+l7hCGxSPoushMv5GKhT5PdLv03WA== - dependencies: - signal-exit "^3.0.2" - lodash.clonedeep@^4.5.0: version "4.5.0" resolved "https://registry.yarnpkg.com/lodash.clonedeep/-/lodash.clonedeep-4.5.0.tgz#e23f3f9c4f8fbdde872529c1071857a086e5ccef" @@ -5021,6 +5284,32 @@ mkdirp@^0.5.1: dependencies: minimist "^1.2.6" +mongodb-connection-string-url@^3.0.2: + version "3.0.2" + resolved "https://registry.yarnpkg.com/mongodb-connection-string-url/-/mongodb-connection-string-url-3.0.2.tgz#e223089dfa0a5fa9bf505f8aedcbc67b077b33e7" + integrity sha512-rMO7CGo/9BFwyZABcKAWL8UJwH/Kc2x0g72uhDWzG48URRax5TCIcJ7Rc3RZqffZzO/Gwff/jyKwCU9TN8gehA== + dependencies: + "@types/whatwg-url" "^11.0.2" + whatwg-url "^14.1.0 || ^13.0.0" + +mongodb-memory-server-core@10.3.0: + version "10.3.0" + resolved "https://registry.yarnpkg.com/mongodb-memory-server-core/-/mongodb-memory-server-core-10.3.0.tgz#4d7aace17395525e899d5d57477339fe3b693a86" + integrity sha512-tp+ZfTBAPqHXjROhAFg6HcVVzXaEhh/iHcbY7QPOIiLwr94OkBFAw4pixyGSfP5wI2SZeEA13lXyRmBAhugWgA== + dependencies: + async-mutex "^0.5.0" + camelcase "^6.3.0" + debug "^4.4.3" + find-cache-dir "^3.3.2" + follow-redirects "^1.15.11" + https-proxy-agent "^7.0.6" + mongodb "^6.9.0" + new-find-package-json "^2.0.0" + semver "^7.7.3" + tar-stream "^3.1.7" + tslib "^2.8.1" + yauzl "^3.2.0" + mongodb-memory-server-core@6.10.0: version "6.10.0" resolved 
"https://registry.yarnpkg.com/mongodb-memory-server-core/-/mongodb-memory-server-core-6.10.0.tgz#9239c7941e5b0a225b50494563f0fc528c056690" @@ -5044,36 +5333,13 @@ mongodb-memory-server-core@6.10.0: uuid "^8.3.1" yauzl "^2.10.0" -mongodb-memory-server-core@6.9.6: - version "6.9.6" - resolved "https://registry.yarnpkg.com/mongodb-memory-server-core/-/mongodb-memory-server-core-6.9.6.tgz#90ef0562bea675ef68bd687533792da02bcc81f3" - integrity sha512-ZcXHTI2TccH3L5N9JyAMGm8bbAsfLn8SUWOeYGHx/vDx7vu4qshyaNXTIxeHjpUQA29N+Z1LtTXA6vXjl1eg6w== - dependencies: - "@types/tmp" "^0.2.0" - camelcase "^6.0.0" - cross-spawn "^7.0.3" - debug "^4.2.0" - find-cache-dir "^3.3.1" - find-package-json "^1.2.0" - get-port "^5.1.1" - https-proxy-agent "^5.0.0" - lockfile "^1.0.4" - md5-file "^5.0.0" - mkdirp "^1.0.4" - semver "^7.3.2" - tar-stream "^2.1.4" - tmp "^0.2.1" - uuid "^8.3.0" - yauzl "^2.10.0" - optionalDependencies: - mongodb "^3.6.2" - -mongodb-memory-server@6.9.6: - version "6.9.6" - resolved "https://registry.yarnpkg.com/mongodb-memory-server/-/mongodb-memory-server-6.9.6.tgz#ced1a100f58363317a562efaf8821726c433cfd2" - integrity sha512-BjGPPh5f61lMueG7px9DneBIrRR/GoWUHDvLWVAXhQhKVcwMMXxgeEba6zdDolZHfYAu6aYGPzhOuYKIKPgpBQ== +mongodb-memory-server@10.3.0: + version "10.3.0" + resolved "https://registry.yarnpkg.com/mongodb-memory-server/-/mongodb-memory-server-10.3.0.tgz#c8496c73ea73c8d87fe168b3c8b89434c2fb43b8" + integrity sha512-dRNr2uEhMgjEe6kgqS+ITBKBbl2cz0DNBjNZ12BGUckvEOAHbhd3R7q/lFPSZrZ6AMKa2EOUJdAmFF1WlqSbsA== dependencies: - mongodb-memory-server-core "6.9.6" + mongodb-memory-server-core "10.3.0" + tslib "^2.8.1" mongodb-memory-server@^6.6.1: version "6.10.0" @@ -5096,7 +5362,7 @@ mongodb@3.5.9: optionalDependencies: saslprep "^1.0.0" -mongodb@^3.6.2, mongodb@^3.6.9: +mongodb@^3.6.9: version "3.7.4" resolved "https://registry.yarnpkg.com/mongodb/-/mongodb-3.7.4.tgz#119530d826361c3e12ac409b769796d6977037a4" integrity 
sha512-K5q8aBqEXMwWdVNh94UQTwZ6BejVbFhh1uB6c5FKtPE9eUMZPUO3sRZdgIEcHSrAWmxzpG/FeODDKL388sqRmw== @@ -5109,18 +5375,14 @@ mongodb@^3.6.2, mongodb@^3.6.9: optionalDependencies: saslprep "^1.0.0" -mongodb@^3.7.3: - version "3.7.3" - resolved "https://registry.yarnpkg.com/mongodb/-/mongodb-3.7.3.tgz#b7949cfd0adc4cc7d32d3f2034214d4475f175a5" - integrity sha512-Psm+g3/wHXhjBEktkxXsFMZvd3nemI0r3IPsE0bU+4//PnvNWKkzhZcEsbPcYiWqe8XqXJJEg4Tgtr7Raw67Yw== +mongodb@^6.0.0, mongodb@^6.9.0: + version "6.21.0" + resolved "https://registry.yarnpkg.com/mongodb/-/mongodb-6.21.0.tgz#f83355905900f2e7a912593f0315d5e2e0bda576" + integrity sha512-URyb/VXMjJ4da46OeSXg+puO39XH9DeQpWCslifrRn9JWugy0D+DvvBvkm2WxmHe61O/H19JM66p1z7RHVkZ6A== dependencies: - bl "^2.2.1" - bson "^1.1.4" - denque "^1.4.1" - optional-require "^1.1.8" - safe-buffer "^5.1.2" - optionalDependencies: - saslprep "^1.0.0" + "@mongodb-js/saslprep" "^1.3.0" + bson "^6.10.4" + mongodb-connection-string-url "^3.0.2" morgan@^1.10.1: version "1.10.1" @@ -5180,6 +5442,13 @@ negotiator@0.6.3: resolved "https://registry.yarnpkg.com/negotiator/-/negotiator-0.6.3.tgz#58e323a72fedc0d6f9cd4d31fe49f51479590ccd" integrity sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg== +new-find-package-json@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/new-find-package-json/-/new-find-package-json-2.0.0.tgz#96553638781db35061f351e8ccb4d07126b6407d" + integrity sha512-lDcBsjBSMlj3LXH2v/FW3txlh2pYTjmbOXPYJD93HI5EwuLzI11tdHSIpUMmfq/IOsldj4Ps8M8flhm+pCK4Ew== + dependencies: + debug "^4.3.4" + nice-try@^1.0.4: version "1.0.5" resolved "https://registry.yarnpkg.com/nice-try/-/nice-try-1.0.5.tgz#a3378a7696ce7d223e88fc9b764bd7ef1089e366" @@ -5689,6 +5958,11 @@ punycode@^2.1.0, punycode@^2.1.1: resolved "https://registry.yarnpkg.com/punycode/-/punycode-2.1.1.tgz#b58b010ac40c22c5657616c8d2c2c02c7bf479ec" integrity 
sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A== +punycode@^2.3.1: + version "2.3.1" + resolved "https://registry.yarnpkg.com/punycode/-/punycode-2.3.1.tgz#027422e2faec0b25e1549c3e1bd8309b9133b6e5" + integrity sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg== + qs@6.10.3: version "6.10.3" resolved "https://registry.yarnpkg.com/qs/-/qs-6.10.3.tgz#d6cde1b2ffca87b5aa57889816c5f81535e22e8e" @@ -6086,6 +6360,11 @@ semver@^6.0.0, semver@^6.1.0, semver@^6.1.2, semver@^6.3.0: resolved "https://registry.yarnpkg.com/semver/-/semver-6.3.0.tgz#ee0a64c8af5e8ceea67687b133761e1becbd1d3d" integrity sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw== +semver@^7.7.3: + version "7.7.3" + resolved "https://registry.yarnpkg.com/semver/-/semver-7.7.3.tgz#4b5f4143d007633a8dc671cd0a6ef9147b8bb946" + integrity sha512-SdsKMrI9TdgjdweUSR9MweHA4EJ8YxHn8DFaDisvhVlUOe4BF1tLD7GAj0lIqWVl+dPb/rExr0Btby5loQm20Q== + semver@~7.0.0: version "7.0.0" resolved "https://registry.yarnpkg.com/semver/-/semver-7.0.0.tgz#5f3ca35761e47e05b206c6daff2cf814f0316b8e" @@ -6406,6 +6685,15 @@ streamsearch@0.1.2: resolved "https://registry.yarnpkg.com/streamsearch/-/streamsearch-0.1.2.tgz#808b9d0e56fc273d809ba57338e929919a1a9f1a" integrity sha512-jos8u++JKm0ARcSUTAZXOVC0mSox7Bhn6sBgty73P1f3JGf7yG2clTbBNHUdde/kdvP2FESam+vM6l8jBrNxHA== +streamx@^2.15.0: + version "2.23.0" + resolved "https://registry.yarnpkg.com/streamx/-/streamx-2.23.0.tgz#7d0f3d00d4a6c5de5728aecd6422b4008d66fd0b" + integrity sha512-kn+e44esVfn2Fa/O0CPFcex27fjIL6MkVae0Mm6q+E6f0hWv578YCERbv+4m02cjxvDsPKLnmxral/rR6lBMAg== + dependencies: + events-universal "^1.0.0" + fast-fifo "^1.3.2" + text-decoder "^1.1.0" + string-length@^4.0.1: version "4.0.2" resolved "https://registry.yarnpkg.com/string-length/-/string-length-4.0.2.tgz#a8a8dc7bd5c1a82b9b3c8b87e125f66871b6e57a" @@ -6566,6 +6854,15 @@ tar-stream@^2.1.4: inherits 
"^2.0.3" readable-stream "^3.1.1" +tar-stream@^3.1.7: + version "3.1.7" + resolved "https://registry.yarnpkg.com/tar-stream/-/tar-stream-3.1.7.tgz#24b3fb5eabada19fe7338ed6d26e5f7c482e792b" + integrity sha512-qJj60CXt7IU1Ffyc3NJMjh6EkuCFej46zUqJ4J7pqYlThyd9bO0XBTmcOIhSzZJVWfsLks0+nle/j538YAW9RQ== + dependencies: + b4a "^1.6.4" + fast-fifo "^1.2.0" + streamx "^2.15.0" + tar@^6.1.11: version "6.1.11" resolved "https://registry.yarnpkg.com/tar/-/tar-6.1.11.tgz#6760a38f003afa1b2ffd0ffe9e9abbd0eab3d621" @@ -6602,6 +6899,13 @@ test-exclude@^6.0.0: glob "^7.1.4" minimatch "^3.0.4" +text-decoder@^1.1.0: + version "1.2.3" + resolved "https://registry.yarnpkg.com/text-decoder/-/text-decoder-1.2.3.tgz#b19da364d981b2326d5f43099c310cc80d770c65" + integrity sha512-3/o9z3X0X0fTupwsYvR03pJ/DjWuqqrfwBgTQzdWDiQSm9KitAyz/9WqsT2JQW7KV2m+bC2ol/zqpW37NHxLaA== + dependencies: + b4a "^1.6.4" + text-table@^0.2.0: version "0.2.0" resolved "https://registry.yarnpkg.com/text-table/-/text-table-0.2.0.tgz#7f5ee823ae805207c00af2df4a84ec3fcfa570b4" @@ -6699,6 +7003,13 @@ tr46@^2.1.0: dependencies: punycode "^2.1.1" +tr46@^5.1.0: + version "5.1.1" + resolved "https://registry.yarnpkg.com/tr46/-/tr46-5.1.1.tgz#96ae867cddb8fdb64a49cc3059a8d428bcf238ca" + integrity sha512-hdF5ZgjTqgAntKkklYw0R03MG2x/bSzTtkxmIRw/sTNV8YXsCJ1tfLAX23lhxhHJlEf3CRCOCGGWw3vI3GaSPw== + dependencies: + punycode "^2.3.1" + tr46@~0.0.3: version "0.0.3" resolved "https://registry.yarnpkg.com/tr46/-/tr46-0.0.3.tgz#8184fd347dac9cdc185992f3a6622e14b9d9ab6a" @@ -6790,7 +7101,7 @@ tslib@^2.1.0, tslib@^2.4.0: resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.4.0.tgz#7cecaa7f073ce680a05847aa77be941098f36dc3" integrity sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ== -tslib@^2.3.0, tslib@^2.3.1: +tslib@^2.3.0, tslib@^2.3.1, tslib@^2.8.1: version "2.8.1" resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.8.1.tgz#612efe4ed235d567e8aba5f2a5fab70280ade83f" integrity 
sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w== @@ -6980,7 +7291,7 @@ uuid@8.0.0: resolved "https://registry.yarnpkg.com/uuid/-/uuid-8.0.0.tgz#bc6ccf91b5ff0ac07bbcdbf1c7c4e150db4dbb6c" integrity sha512-jOXGuXZAWdsTH7eZLtyXMqUb9EcWMGZNbL9YcGBJl4MH4nrxHmZJhEHvyLFrkxo+28uLb/NYRcStH48fnD0Vzw== -uuid@8.3.2, uuid@^8.0.0, uuid@^8.3.0, uuid@^8.3.1, uuid@^8.3.2: +uuid@^8.0.0, uuid@^8.3.0, uuid@^8.3.1, uuid@^8.3.2: version "8.3.2" resolved "https://registry.yarnpkg.com/uuid/-/uuid-8.3.2.tgz#80d5b5ced271bb9af6c445f21a1a04c606cefbe2" integrity sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg== @@ -7058,6 +7369,11 @@ webidl-conversions@^6.1.0: resolved "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-6.1.0.tgz#9111b4d7ea80acd40f5270d666621afa78b69514" integrity sha512-qBIvFLGiBpLjfwmYAaHPXsn+ho5xZnGvyGvsarywGNc8VyQJUMHJ8OBKGGrPER0okBeMDaan4mNBlgBROxuI8w== +webidl-conversions@^7.0.0: + version "7.0.0" + resolved "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-7.0.0.tgz#256b4e1882be7debbf01d05f0aa2039778ea080a" + integrity sha512-VwddBukDzu71offAQR975unBIGqfKZpM+8ZX6ySk8nYhVoo5CYaZyzt3YBvYtRtO+aoGlqxPg/B87NGVZ/fu6g== + whatwg-encoding@^1.0.5: version "1.0.5" resolved "https://registry.yarnpkg.com/whatwg-encoding/-/whatwg-encoding-1.0.5.tgz#5abacf777c32166a51d085d6b4f3e7d27113ddb0" @@ -7075,6 +7391,14 @@ whatwg-mimetype@^3.0.0: resolved "https://registry.yarnpkg.com/whatwg-mimetype/-/whatwg-mimetype-3.0.0.tgz#5fa1a7623867ff1af6ca3dc72ad6b8a4208beba7" integrity sha512-nt+N2dzIutVRxARx1nghPKGv1xHikU7HKdfafKkLNLindmPU/ch3U31NOCGGA/dmPcmb1VlofO0vnKAcsm0o/Q== +"whatwg-url@^14.1.0 || ^13.0.0": + version "14.2.0" + resolved "https://registry.yarnpkg.com/whatwg-url/-/whatwg-url-14.2.0.tgz#4ee02d5d725155dae004f6ae95c73e7ef5d95663" + integrity sha512-De72GdQZzNTUBBChsXueQUnPKDkg/5A5zp7pFDuQAj5UFoENpiACU0wlCvzpAGnTkj++ihpKwKyYewn/XNUbKw== + dependencies: + 
tr46 "^5.1.0" + webidl-conversions "^7.0.0" + whatwg-url@^5.0.0: version "5.0.0" resolved "https://registry.yarnpkg.com/whatwg-url/-/whatwg-url-5.0.0.tgz#966454e8765462e37644d3626f6742ce8b70965d" @@ -7182,6 +7506,24 @@ ws@^7.4.6: resolved "https://registry.yarnpkg.com/ws/-/ws-7.5.9.tgz#54fa7db29f4c7cec68b1ddd3a89de099942bb591" integrity sha512-F+P9Jil7UiSKSkppIiD94dN07AwvFixvLIj1Og1Rl9GGMuNipJnV9JzjD6XuqmAeiswGvUmNLjr5cFuXwNS77Q== +xml-crypto@^6.1.2: + version "6.1.2" + resolved "https://registry.yarnpkg.com/xml-crypto/-/xml-crypto-6.1.2.tgz#ed93e87d9538f92ad1ad2db442e9ec586723d07d" + integrity sha512-leBOVQdVi8FvPJrMYoum7Ici9qyxfE4kVi+AkpUoYCSXaQF4IlBm1cneTK9oAxR61LpYxTx7lNcsnBIeRpGW2w== + dependencies: + "@xmldom/is-dom-node" "^1.0.1" + "@xmldom/xmldom" "^0.8.10" + xpath "^0.0.33" + +xml-encryption@^3.1.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/xml-encryption/-/xml-encryption-3.1.0.tgz#f3e91c4508aafd0c21892151ded91013dcd51ca2" + integrity sha512-PV7qnYpoAMXbf1kvQkqMScLeQpjCMixddAKq9PtqVrho8HnYbBOWNfG0kA4R7zxQDo7w9kiYAyzS/ullAyO55Q== + dependencies: + "@xmldom/xmldom" "^0.8.5" + escape-html "^1.0.3" + xpath "0.0.32" + xml-name-validator@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/xml-name-validator/-/xml-name-validator-3.0.0.tgz#6ae73e06de4d8c6e47f9fb181f78d648ad457c6a" @@ -7195,6 +7537,24 @@ xml2js@0.4.19: sax ">=0.6.0" xmlbuilder "~9.0.1" +xml2js@^0.6.2: + version "0.6.2" + resolved "https://registry.yarnpkg.com/xml2js/-/xml2js-0.6.2.tgz#dd0b630083aa09c161e25a4d0901e2b2a929b499" + integrity sha512-T4rieHaC1EXcES0Kxxj4JWgaUQHDk+qwHcYOCFHfiwKz7tOVPLq7Hjq9dM1WCMhylqMEfP7hMcOIChvotiZegA== + dependencies: + sax ">=0.6.0" + xmlbuilder "~11.0.0" + +xmlbuilder@^15.1.1: + version "15.1.1" + resolved "https://registry.yarnpkg.com/xmlbuilder/-/xmlbuilder-15.1.1.tgz#9dcdce49eea66d8d10b42cae94a79c3c8d0c2ec5" + integrity sha512-yMqGBqtXyeN1e3TGYvgNgDVZ3j84W4cwkOXQswghol6APgZWaff9lnbvN7MHYJOiXsvGPXtjTYJEiC9J2wv9Eg== + +xmlbuilder@~11.0.0: 
+ version "11.0.1" + resolved "https://registry.yarnpkg.com/xmlbuilder/-/xmlbuilder-11.0.1.tgz#be9bae1c8a046e76b31127726347d0ad7002beb3" + integrity sha512-fDlsI/kFEx7gLvbecc0/ohLG50fugQp8ryHzMTuW9vSa1GJ0XYWKnhsUx7oie3G98+r56aTQIUB4kht42R3JvA== + xmlbuilder@~9.0.1: version "9.0.7" resolved "https://registry.yarnpkg.com/xmlbuilder/-/xmlbuilder-9.0.7.tgz#132ee63d2ec5565c557e20f4c22df9aca686b10d" @@ -7205,6 +7565,21 @@ xmlchars@^2.2.0: resolved "https://registry.yarnpkg.com/xmlchars/-/xmlchars-2.2.0.tgz#060fe1bcb7f9c76fe2a17db86a9bc3ab894210cb" integrity sha512-JZnDKK8B0RCDw84FNdDAIpZK+JuJw+s7Lz8nksI7SIuU3UXJJslUthsi+uWBUYOwPFwW7W7PRLRfUKpxjtjFCw== +xpath@0.0.32: + version "0.0.32" + resolved "https://registry.yarnpkg.com/xpath/-/xpath-0.0.32.tgz#1b73d3351af736e17ec078d6da4b8175405c48af" + integrity sha512-rxMJhSIoiO8vXcWvSifKqhvV96GjiD5wYb8/QHdoRyQvraTpp4IEv944nhGausZZ3u7dhQXteZuZbaqfpB7uYw== + +xpath@^0.0.33: + version "0.0.33" + resolved "https://registry.yarnpkg.com/xpath/-/xpath-0.0.33.tgz#5136b6094227c5df92002e7c3a13516a5074eb07" + integrity sha512-NNXnzrkDrAzalLhIUc01jO2mOzXGXh1JwPgkihcLLzw98c0WgYDmmjSh1Kl3wzaxSVWMuA+fe0WTWOBDWCBmNA== + +xpath@^0.0.34: + version "0.0.34" + resolved "https://registry.yarnpkg.com/xpath/-/xpath-0.0.34.tgz#a769255e8816e0938e1e0005f2baa7279be8be12" + integrity sha512-FxF6+rkr1rNSQrhUNYrAFJpRXNzlDoMxeXN5qI84939ylEv3qqPFKa85Oxr6tDaJKqwW6KKyo2v26TSv3k6LeA== + xss@^1.0.8: version "1.0.13" resolved "https://registry.yarnpkg.com/xss/-/xss-1.0.13.tgz#6e48f616128b39f366dfadc57411e1eb5b341c6c" @@ -7266,6 +7641,14 @@ yauzl@^2.10.0: buffer-crc32 "~0.2.3" fd-slicer "~1.1.0" +yauzl@^3.2.0: + version "3.2.0" + resolved "https://registry.yarnpkg.com/yauzl/-/yauzl-3.2.0.tgz#7b6cb548f09a48a6177ea0be8ece48deb7da45c0" + integrity sha512-Ow9nuGZE+qp1u4JIPvg+uCiUr7xGQWdff7JQSk5VGYTAZMDe2q8lxJ10ygv10qmSj031Ty/6FNJpLO4o1Sgc+w== + dependencies: + buffer-crc32 "~0.2.3" + pend "~1.2.0" + yn@3.1.1: version "3.1.1" resolved 
"https://registry.yarnpkg.com/yn/-/yn-3.1.1.tgz#1e87401a09d767c1d5eab26a6e4c185182d2eb50"