diff --git a/.env.example b/.env.example index 044613856e..402cb79cba 100644 --- a/.env.example +++ b/.env.example @@ -36,16 +36,22 @@ CLIENT_ID_HEROKU= CLIENT_ID_VERCEL= CLIENT_ID_NETLIFY= CLIENT_ID_GITHUB= +CLIENT_ID_GITHUB_APP= +CLIENT_SLUG_GITHUB_APP= CLIENT_ID_GITLAB= CLIENT_ID_BITBUCKET= CLIENT_SECRET_HEROKU= CLIENT_SECRET_VERCEL= CLIENT_SECRET_NETLIFY= CLIENT_SECRET_GITHUB= +CLIENT_SECRET_GITHUB_APP= CLIENT_SECRET_GITLAB= CLIENT_SECRET_BITBUCKET= CLIENT_SLUG_VERCEL= +CLIENT_PRIVATE_KEY_GITHUB_APP= +CLIENT_APP_ID_GITHUB_APP= + # Sentry (optional) for monitoring errors SENTRY_DSN= @@ -75,3 +81,10 @@ OTEL_OTLP_PUSH_INTERVAL= OTEL_COLLECTOR_BASIC_AUTH_USERNAME= OTEL_COLLECTOR_BASIC_AUTH_PASSWORD= + +PLAIN_API_KEY= +PLAIN_WISH_LABEL_IDS= + +SSL_CLIENT_CERTIFICATE_HEADER_KEY= + +ENABLE_MSSQL_SECRET_ROTATION_ENCRYPT=true diff --git a/.env.migration.example b/.env.migration.example index 4d1c8f9ef5..2c5f5b9570 100644 --- a/.env.migration.example +++ b/.env.migration.example @@ -1 +1,2 @@ DB_CONNECTION_URI= +AUDIT_LOGS_DB_CONNECTION_URI= diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md index 8ec62ef24d..16a8285782 100644 --- a/.github/pull_request_template.md +++ b/.github/pull_request_template.md @@ -6,6 +6,7 @@ - [ ] Bug fix - [ ] New feature +- [ ] Improvement - [ ] Breaking change - [ ] Documentation diff --git a/.github/workflows/build-binaries.yml b/.github/workflows/build-binaries.yml new file mode 100644 index 0000000000..552eb56e46 --- /dev/null +++ b/.github/workflows/build-binaries.yml @@ -0,0 +1,189 @@ +name: Build Binaries and Deploy + +on: + workflow_dispatch: + inputs: + version: + description: "Version number" + required: true + type: string +defaults: + run: + working-directory: ./backend + +jobs: + build-and-deploy: + strategy: + matrix: + arch: [x64, arm64] + os: [linux, win] + include: + - os: linux + target: node20-linux + - os: win + target: node20-win + runs-on: ${{ (matrix.arch == 'arm64' && matrix.os == 'linux') && 'ubuntu24-arm64' || 'ubuntu-latest' }} + + steps: + - name: Checkout code + uses: actions/checkout@v3 + + - name: Set up Node.js + uses: actions/setup-node@v3 + with: + node-version: 20 + + - name: Install pkg + run: npm install -g @yao-pkg/pkg + + - name: Install dependencies (backend) + run: npm install + + - name: Install dependencies (frontend) + run: npm install --prefix ../frontend + + - name: Prerequisites for pkg + run: npm run binary:build + + - name: Package into node binary + run: | + if [ "${{ matrix.os }}" != "linux" ]; then + pkg --no-bytecode --public-packages "*" --public --compress GZip --target ${{ matrix.target }}-${{ matrix.arch }} --output ./binary/infisical-core-${{ matrix.os }}-${{ matrix.arch }} . + else + pkg --no-bytecode --public-packages "*" --public --compress GZip --target ${{ matrix.target }}-${{ matrix.arch }} --output ./binary/infisical-core . 
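+            # bare output name: the .deb/.rpm packaging steps below copy ./binary/infisical-core directly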
+          fi
+
+      # Set up .deb package structure (Debian/Ubuntu only)
+      - name: Set up .deb package structure
+        if: matrix.os == 'linux'
+        run: |
+          mkdir -p infisical-core/DEBIAN
+          mkdir -p infisical-core/usr/local/bin
+          cp ./binary/infisical-core infisical-core/usr/local/bin/
+          chmod +x infisical-core/usr/local/bin/infisical-core
+
+      - name: Create control file
+        if: matrix.os == 'linux'
+        run: |
+          cat <<EOF > infisical-core/DEBIAN/control
+          Package: infisical-core
+          Version: ${{ github.event.inputs.version }}
+          Section: base
+          Priority: optional
+          Architecture: ${{ matrix.arch == 'x64' && 'amd64' || matrix.arch }}
+          Maintainer: Infisical
+          Description: Infisical Core standalone executable (app.infisical.com)
+          EOF
+
+      # Build .deb file (Debian/Ubuntu only)
+      - name: Build .deb package
+        if: matrix.os == 'linux'
+        run: |
+          dpkg-deb --build infisical-core
+          mv infisical-core.deb ./binary/infisical-core-${{matrix.arch}}.deb
+
+      ### RPM
+
+      # Set up .rpm package structure
+      - name: Set up .rpm package structure
+        if: matrix.os == 'linux'
+        run: |
+          mkdir -p infisical-core-rpm/usr/local/bin
+          cp ./binary/infisical-core infisical-core-rpm/usr/local/bin/
+          chmod +x infisical-core-rpm/usr/local/bin/infisical-core
+
+      # Install RPM build tools
+      - name: Install RPM build tools
+        if: matrix.os == 'linux'
+        run: sudo apt-get update && sudo apt-get install -y rpm
+
+      # Create .spec file for RPM
+      - name: Create .spec file for RPM
+        if: matrix.os == 'linux'
+        run: |
+          cat <<EOF > infisical-core.spec
+
+          %global _enable_debug_package 0
+          %global debug_package %{nil}
+          %global __os_install_post /usr/lib/rpm/brp-compress %{nil}
+
+          Name: infisical-core
+          Version: ${{ github.event.inputs.version }}
+          Release: 1%{?dist}
+          Summary: Infisical Core standalone executable
+          License: Proprietary
+          URL: https://app.infisical.com
+
+          %description
+          Infisical Core standalone executable (app.infisical.com)
+
+          %install
+          mkdir -p %{buildroot}/usr/local/bin
+          cp %{_sourcedir}/infisical-core %{buildroot}/usr/local/bin/
+
+          %files
+          /usr/local/bin/infisical-core
+
+          %pre
+
+          %post
+
+          %preun
+
+          %postun
+          EOF
+
+      # Build .rpm file
+      - name: Build .rpm package
+        if: matrix.os == 'linux'
+        run: |
+          # Create necessary directories
+          mkdir -p rpmbuild/{BUILD,RPMS,SOURCES,SPECS,SRPMS}
+
+          # Copy the binary directly to SOURCES
+          cp ./binary/infisical-core rpmbuild/SOURCES/
+
+          # Run rpmbuild with verbose output
+          rpmbuild -vv -bb \
+            --define "_topdir $(pwd)/rpmbuild" \
+            --define "_sourcedir $(pwd)/rpmbuild/SOURCES" \
+            --define "_rpmdir $(pwd)/rpmbuild/RPMS" \
+            --target ${{ matrix.arch == 'x64' && 'x86_64' || 'aarch64' }} \
+            infisical-core.spec
+
+          # Try to find the RPM file
+          find rpmbuild -name "*.rpm"
+
+          # Move the RPM file if found
+          if [ -n "$(find rpmbuild -name '*.rpm')" ]; then
+            mv $(find rpmbuild -name '*.rpm') ./binary/infisical-core-${{matrix.arch}}.rpm
+          else
+            echo "RPM file not found!"
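+            # fail the job so the Cloudsmith publish steps below never run without an artifact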
+ exit 1 + fi + + - uses: actions/setup-python@v4 + with: + python-version: "3.x" # Specify the Python version you need + - name: Install Python dependencies + run: | + python -m pip install --upgrade pip + pip install --upgrade cloudsmith-cli + + # Publish .deb file to Cloudsmith (Debian/Ubuntu only) + - name: Publish to Cloudsmith (Debian/Ubuntu) + if: matrix.os == 'linux' + working-directory: ./backend + run: cloudsmith push deb --republish --no-wait-for-sync --api-key=${{ secrets.CLOUDSMITH_API_KEY }} infisical/infisical-core/any-distro/any-version ./binary/infisical-core-${{ matrix.arch }}.deb + + # Publish .rpm file to Cloudsmith (Red Hat-based systems only) + - name: Publish .rpm to Cloudsmith + if: matrix.os == 'linux' + working-directory: ./backend + run: cloudsmith push rpm --republish --no-wait-for-sync --api-key=${{ secrets.CLOUDSMITH_API_KEY }} infisical/infisical-core/any-distro/any-version ./binary/infisical-core-${{ matrix.arch }}.rpm + + # Publish .exe file to Cloudsmith (Windows only) + - name: Publish to Cloudsmith (Windows) + if: matrix.os == 'win' + working-directory: ./backend + run: cloudsmith push raw infisical/infisical-core ./binary/infisical-core-${{ matrix.os }}-${{ matrix.arch }}.exe --republish --no-wait-for-sync --version ${{ github.event.inputs.version }} --api-key ${{ secrets.CLOUDSMITH_API_KEY }} diff --git a/.github/workflows/check-api-for-breaking-changes.yml b/.github/workflows/check-api-for-breaking-changes.yml index dadd6c8605..c4cb6e4519 100644 --- a/.github/workflows/check-api-for-breaking-changes.yml +++ b/.github/workflows/check-api-for-breaking-changes.yml @@ -22,20 +22,20 @@ jobs: # uncomment this when testing locally using nektos/act - uses: KengoTODA/actions-setup-docker-compose@v1 if: ${{ env.ACT }} - name: Install `docker-compose` for local simulations + name: Install `docker compose` for local simulations with: version: "2.14.2" - name: 📦Build the latest image run: docker build --tag infisical-api . working-directory: backend - name: Start postgres and redis - run: touch .env && docker-compose -f docker-compose.dev.yml up -d db redis + run: touch .env && docker compose -f docker-compose.dev.yml up -d db redis - name: Start the server run: | echo "SECRET_SCANNING_GIT_APP_ID=793712" >> .env echo "SECRET_SCANNING_PRIVATE_KEY=some-random" >> .env echo "SECRET_SCANNING_WEBHOOK_SECRET=some-random" >> .env - docker run --name infisical-api -d -p 4000:4000 -e DB_CONNECTION_URI=$DB_CONNECTION_URI -e REDIS_URL=$REDIS_URL -e JWT_AUTH_SECRET=$JWT_AUTH_SECRET --env-file .env --entrypoint '/bin/sh' infisical-api -c "npm run migration:latest && ls && node dist/main.mjs" + docker run --name infisical-api -d -p 4000:4000 -e DB_CONNECTION_URI=$DB_CONNECTION_URI -e REDIS_URL=$REDIS_URL -e JWT_AUTH_SECRET=$JWT_AUTH_SECRET -e ENCRYPTION_KEY=$ENCRYPTION_KEY --env-file .env --entrypoint '/bin/sh' infisical-api -c "npm run migration:latest && ls && node dist/main.mjs" env: REDIS_URL: redis://172.17.0.1:6379 DB_CONNECTION_URI: postgres://infisical:infisical@172.17.0.1:5432/infisical?sslmode=disable @@ -47,7 +47,7 @@ jobs: - name: Wait for container to be stable and check logs run: | SECONDS=0 - r HEALTHY=0 + HEALTHY=0 while [ $SECONDS -lt 60 ]; do if docker ps | grep infisical-api | grep -q healthy; then echo "Container is healthy." 
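The hunk above is cut off mid-loop. For context, a minimal sketch of the health-gate pattern the step implements; the poll interval and the timeout branch are assumptions rather than lines quoted from the workflow:

```bash
# Poll docker ps until the API container reports healthy, or give up after 60s
SECONDS=0
HEALTHY=0
while [ $SECONDS -lt 60 ]; do
  # docker ps shows "(healthy)" in STATUS once the image's HEALTHCHECK passes
  if docker ps | grep infisical-api | grep -q healthy; then
    echo "Container is healthy."
    HEALTHY=1
    break
  fi
  sleep 2 # assumed poll interval
done

if [ "$HEALTHY" -ne 1 ]; then
  echo "Container did not become healthy in time."
  docker logs infisical-api # assumed: surface logs before failing
  exit 1
fi
```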
@@ -72,6 +72,6 @@ jobs: run: oasdiff breaking https://app.infisical.com/api/docs/json http://localhost:4000/api/docs/json --fail-on ERR - name: cleanup run: | - docker-compose -f "docker-compose.dev.yml" down + docker compose -f "docker-compose.dev.yml" down docker stop infisical-api docker remove infisical-api diff --git a/.github/workflows/check-migration-file-edited.yml b/.github/workflows/check-migration-file-edited.yml new file mode 100644 index 0000000000..e94a573c61 --- /dev/null +++ b/.github/workflows/check-migration-file-edited.yml @@ -0,0 +1,25 @@ +name: Check migration file edited + +on: + pull_request: + types: [opened, synchronize] + paths: + - 'backend/src/db/migrations/**' + +jobs: + rename: + runs-on: ubuntu-latest + steps: + - name: Check out repository + uses: actions/checkout@v4 + with: + fetch-depth: 0 + + - name: Check any migration files are modified, renamed or duplicated. + run: | + git diff --name-status HEAD^ HEAD backend/src/db/migrations | grep '^M\|^R\|^C' || true | cut -f2 | xargs -r -n1 basename > edited_files.txt + if [ -s edited_files.txt ]; then + echo "Exiting migration files cannot be modified." + cat edited_files.txt + exit 1 + fi diff --git a/.github/workflows/build-staging-and-deploy-aws.yml b/.github/workflows/deployment-pipeline.yml similarity index 60% rename from .github/workflows/build-staging-and-deploy-aws.yml rename to .github/workflows/deployment-pipeline.yml index a9b2046ae6..70fe640881 100644 --- a/.github/workflows/build-staging-and-deploy-aws.yml +++ b/.github/workflows/deployment-pipeline.yml @@ -6,9 +6,15 @@ permissions: contents: read jobs: + infisical-tests: + name: Integration tests + # https://docs.github.com/en/actions/using-workflows/reusing-workflows#overview + uses: ./.github/workflows/run-backend-tests.yml + infisical-image: - name: Build backend image + name: Build runs-on: ubuntu-latest + needs: [infisical-tests] steps: - name: ☁️ Checkout source uses: actions/checkout@v3 @@ -50,6 +56,13 @@ jobs: environment: name: Gamma steps: + - uses: twingate/github-action@v1 + with: + # The Twingate Service Key used to connect Twingate to the proper service + # Learn more about [Twingate Services](https://docs.twingate.com/docs/services) + # + # Required + service-key: ${{ secrets.TWINGATE_SERVICE_KEY }} - name: Checkout code uses: actions/checkout@v2 - name: Setup Node.js environment @@ -74,30 +87,37 @@ jobs: uses: pr-mpt/actions-commit-hash@v2 - name: Download task definition run: | - aws ecs describe-task-definition --task-definition infisical-core-platform --query taskDefinition > task-definition.json + aws ecs describe-task-definition --task-definition infisical-core-gamma-stage --query taskDefinition > task-definition.json - name: Render Amazon ECS task definition id: render-web-container uses: aws-actions/amazon-ecs-render-task-definition@v1 with: task-definition: task-definition.json - container-name: infisical-core-platform + container-name: infisical-core image: infisical/staging_infisical:${{ steps.commit.outputs.short }} environment-variables: "LOG_LEVEL=info" - name: Deploy to Amazon ECS service uses: aws-actions/amazon-ecs-deploy-task-definition@v1 with: task-definition: ${{ steps.render-web-container.outputs.task-definition }} - service: infisical-core-platform - cluster: infisical-core-platform + service: infisical-core-gamma-stage + cluster: infisical-gamma-stage wait-for-service-stability: true - production-postgres-deployment: - name: Deploy to production + production-us: + name: US production deploy runs-on: 
ubuntu-latest needs: [gamma-deployment] environment: name: Production steps: + - uses: twingate/github-action@v1 + with: + # The Twingate Service Key used to connect Twingate to the proper service + # Learn more about [Twingate Services](https://docs.twingate.com/docs/services) + # + # Required + service-key: ${{ secrets.TWINGATE_SERVICE_KEY }} - name: Checkout code uses: actions/checkout@v2 - name: Setup Node.js environment @@ -107,6 +127,7 @@ jobs: - name: Change directory to backend and install dependencies env: DB_CONNECTION_URI: ${{ secrets.DB_CONNECTION_URI }} + AUDIT_LOGS_DB_CONNECTION_URI: ${{ secrets.AUDIT_LOGS_DB_CONNECTION_URI }} run: | cd backend npm install @@ -138,3 +159,54 @@ jobs: service: infisical-core-platform cluster: infisical-core-platform wait-for-service-stability: true + + production-eu: + name: EU production deploy + runs-on: ubuntu-latest + needs: [production-us] + environment: + name: production-eu + steps: + - uses: twingate/github-action@v1 + with: + service-key: ${{ secrets.TWINGATE_SERVICE_KEY }} + - name: Configure AWS Credentials + uses: aws-actions/configure-aws-credentials@v4 + with: + audience: sts.amazonaws.com + aws-region: eu-central-1 + role-to-assume: arn:aws:iam::345594589636:role/gha-make-prod-deployment + - name: Checkout code + uses: actions/checkout@v2 + - name: Setup Node.js environment + uses: actions/setup-node@v2 + with: + node-version: "20" + - name: Change directory to backend and install dependencies + env: + DB_CONNECTION_URI: ${{ secrets.DB_CONNECTION_URI }} + run: | + cd backend + npm install + npm run migration:latest + - name: Save commit hashes for tag + id: commit + uses: pr-mpt/actions-commit-hash@v2 + - name: Download task definition + run: | + aws ecs describe-task-definition --task-definition infisical-core-platform --query taskDefinition > task-definition.json + - name: Render Amazon ECS task definition + id: render-web-container + uses: aws-actions/amazon-ecs-render-task-definition@v1 + with: + task-definition: task-definition.json + container-name: infisical-core-platform + image: infisical/staging_infisical:${{ steps.commit.outputs.short }} + environment-variables: "LOG_LEVEL=info" + - name: Deploy to Amazon ECS service + uses: aws-actions/amazon-ecs-deploy-task-definition@v1 + with: + task-definition: ${{ steps.render-web-container.outputs.task-definition }} + service: infisical-core-platform + cluster: infisical-core-platform + wait-for-service-stability: true diff --git a/.github/workflows/release-standalone-docker-img-postgres-offical.yml b/.github/workflows/release-standalone-docker-img-postgres-offical.yml index f08e882aaa..7a73288cb1 100644 --- a/.github/workflows/release-standalone-docker-img-postgres-offical.yml +++ b/.github/workflows/release-standalone-docker-img-postgres-offical.yml @@ -1,62 +1,115 @@ name: Release standalone docker image on: - push: - tags: - - "infisical/v*.*.*-postgres" + push: + tags: + - "infisical/v*.*.*-postgres" jobs: - infisical-tests: - name: Run tests before deployment - # https://docs.github.com/en/actions/using-workflows/reusing-workflows#overview - uses: ./.github/workflows/run-backend-tests.yml - infisical-standalone: - name: Build infisical standalone image postgres - runs-on: ubuntu-latest - needs: [infisical-tests] - steps: - - name: Extract version from tag - id: extract_version - run: echo "::set-output name=version::${GITHUB_REF_NAME#infisical/}" - - name: ☁️ Checkout source - uses: actions/checkout@v3 - with: - fetch-depth: 0 - - name: 📦 Install dependencies to test all 
dependencies - run: npm ci --only-production - working-directory: backend - - name: version output - run: | - echo "Output Value: ${{ steps.version.outputs.major }}" - echo "Output Value: ${{ steps.version.outputs.minor }}" - echo "Output Value: ${{ steps.version.outputs.patch }}" - echo "Output Value: ${{ steps.version.outputs.version }}" - echo "Output Value: ${{ steps.version.outputs.version_type }}" - echo "Output Value: ${{ steps.version.outputs.increment }}" - - name: Save commit hashes for tag - id: commit - uses: pr-mpt/actions-commit-hash@v2 - - name: 🔧 Set up Docker Buildx - uses: docker/setup-buildx-action@v2 - - name: 🐋 Login to Docker Hub - uses: docker/login-action@v2 - with: - username: ${{ secrets.DOCKERHUB_USERNAME }} - password: ${{ secrets.DOCKERHUB_TOKEN }} - - name: Set up Depot CLI - uses: depot/setup-action@v1 - - name: 📦 Build backend and export to Docker - uses: depot/build-push-action@v1 - with: - project: 64mmf0n610 - token: ${{ secrets.DEPOT_PROJECT_TOKEN }} - push: true - context: . - tags: | - infisical/infisical:latest-postgres - infisical/infisical:${{ steps.commit.outputs.short }} - infisical/infisical:${{ steps.extract_version.outputs.version }} - platforms: linux/amd64,linux/arm64 - file: Dockerfile.standalone-infisical - build-args: | - POSTHOG_API_KEY=${{ secrets.PUBLIC_POSTHOG_API_KEY }} - INFISICAL_PLATFORM_VERSION=${{ steps.extract_version.outputs.version }} + infisical-tests: + name: Run tests before deployment + # https://docs.github.com/en/actions/using-workflows/reusing-workflows#overview + uses: ./.github/workflows/run-backend-tests.yml + + infisical-standalone: + name: Build infisical standalone image postgres + runs-on: ubuntu-latest + needs: [infisical-tests] + steps: + - name: Extract version from tag + id: extract_version + run: echo "::set-output name=version::${GITHUB_REF_NAME#infisical/}" + - name: ☁️ Checkout source + uses: actions/checkout@v3 + with: + fetch-depth: 0 + - name: 📦 Install dependencies to test all dependencies + run: npm ci --only-production + working-directory: backend + - name: version output + run: | + echo "Output Value: ${{ steps.version.outputs.major }}" + echo "Output Value: ${{ steps.version.outputs.minor }}" + echo "Output Value: ${{ steps.version.outputs.patch }}" + echo "Output Value: ${{ steps.version.outputs.version }}" + echo "Output Value: ${{ steps.version.outputs.version_type }}" + echo "Output Value: ${{ steps.version.outputs.increment }}" + - name: Save commit hashes for tag + id: commit + uses: pr-mpt/actions-commit-hash@v2 + - name: 🔧 Set up Docker Buildx + uses: docker/setup-buildx-action@v2 + - name: 🐋 Login to Docker Hub + uses: docker/login-action@v2 + with: + username: ${{ secrets.DOCKERHUB_USERNAME }} + password: ${{ secrets.DOCKERHUB_TOKEN }} + - name: Set up Depot CLI + uses: depot/setup-action@v1 + - name: 📦 Build backend and export to Docker + uses: depot/build-push-action@v1 + with: + project: 64mmf0n610 + token: ${{ secrets.DEPOT_PROJECT_TOKEN }} + push: true + context: . 
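+          # multi-arch build (platforms below) pushed to Docker Hub as a single manifest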
+ tags: | + infisical/infisical:latest-postgres + infisical/infisical:${{ steps.commit.outputs.short }} + infisical/infisical:${{ steps.extract_version.outputs.version }} + platforms: linux/amd64,linux/arm64 + file: Dockerfile.standalone-infisical + build-args: | + POSTHOG_API_KEY=${{ secrets.PUBLIC_POSTHOG_API_KEY }} + INFISICAL_PLATFORM_VERSION=${{ steps.extract_version.outputs.version }} + + infisical-fips-standalone: + name: Build infisical standalone image postgres + runs-on: ubuntu-latest + needs: [infisical-tests] + steps: + - name: Extract version from tag + id: extract_version + run: echo "::set-output name=version::${GITHUB_REF_NAME#infisical/}" + - name: ☁️ Checkout source + uses: actions/checkout@v3 + with: + fetch-depth: 0 + - name: 📦 Install dependencies to test all dependencies + run: npm ci --only-production + working-directory: backend + - name: version output + run: | + echo "Output Value: ${{ steps.version.outputs.major }}" + echo "Output Value: ${{ steps.version.outputs.minor }}" + echo "Output Value: ${{ steps.version.outputs.patch }}" + echo "Output Value: ${{ steps.version.outputs.version }}" + echo "Output Value: ${{ steps.version.outputs.version_type }}" + echo "Output Value: ${{ steps.version.outputs.increment }}" + - name: Save commit hashes for tag + id: commit + uses: pr-mpt/actions-commit-hash@v2 + - name: 🔧 Set up Docker Buildx + uses: docker/setup-buildx-action@v2 + - name: 🐋 Login to Docker Hub + uses: docker/login-action@v2 + with: + username: ${{ secrets.DOCKERHUB_USERNAME }} + password: ${{ secrets.DOCKERHUB_TOKEN }} + - name: Set up Depot CLI + uses: depot/setup-action@v1 + - name: 📦 Build backend and export to Docker + uses: depot/build-push-action@v1 + with: + project: 64mmf0n610 + token: ${{ secrets.DEPOT_PROJECT_TOKEN }} + push: true + context: . 
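+          # mirrors the standard job above, but builds Dockerfile.fips.standalone-infisical and tags infisical/infisical-fips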
+ tags: | + infisical/infisical-fips:latest-postgres + infisical/infisical-fips:${{ steps.commit.outputs.short }} + infisical/infisical-fips:${{ steps.extract_version.outputs.version }} + platforms: linux/amd64,linux/arm64 + file: Dockerfile.fips.standalone-infisical + build-args: | + POSTHOG_API_KEY=${{ secrets.PUBLIC_POSTHOG_API_KEY }} + INFISICAL_PLATFORM_VERSION=${{ steps.extract_version.outputs.version }} diff --git a/.github/workflows/release_build_infisical_cli.yml b/.github/workflows/release_build_infisical_cli.yml index e4a5945e04..3a3b384f39 100644 --- a/.github/workflows/release_build_infisical_cli.yml +++ b/.github/workflows/release_build_infisical_cli.yml @@ -10,8 +10,7 @@ on: permissions: contents: write - # packages: write - # issues: write + jobs: cli-integration-tests: name: Run tests before deployment @@ -22,6 +21,66 @@ jobs: CLI_TESTS_SERVICE_TOKEN: ${{ secrets.CLI_TESTS_SERVICE_TOKEN }} CLI_TESTS_PROJECT_ID: ${{ secrets.CLI_TESTS_PROJECT_ID }} CLI_TESTS_ENV_SLUG: ${{ secrets.CLI_TESTS_ENV_SLUG }} + CLI_TESTS_USER_EMAIL: ${{ secrets.CLI_TESTS_USER_EMAIL }} + CLI_TESTS_USER_PASSWORD: ${{ secrets.CLI_TESTS_USER_PASSWORD }} + CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE: ${{ secrets.CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE }} + + npm-release: + runs-on: ubuntu-20.04 + env: + working-directory: ./npm + needs: + - cli-integration-tests + - goreleaser + steps: + - uses: actions/checkout@v3 + with: + fetch-depth: 0 + + - name: Extract version + run: | + VERSION=$(echo ${{ github.ref_name }} | sed 's/infisical-cli\/v//') + echo "Version extracted: $VERSION" + echo "CLI_VERSION=$VERSION" >> $GITHUB_ENV + + - name: Print version + run: echo ${{ env.CLI_VERSION }} + + - name: Setup Node + uses: actions/setup-node@8f152de45cc393bb48ce5d89d36b731f54556e65 # v4.0.0 + with: + node-version: 20 + cache: "npm" + cache-dependency-path: ./npm/package-lock.json + - name: Install dependencies + working-directory: ${{ env.working-directory }} + run: npm install --ignore-scripts + + - name: Set NPM version + working-directory: ${{ env.working-directory }} + run: npm version ${{ env.CLI_VERSION }} --allow-same-version --no-git-tag-version + + - name: Setup NPM + working-directory: ${{ env.working-directory }} + run: | + echo 'registry="https://registry.npmjs.org/"' > ./.npmrc + echo "//registry.npmjs.org/:_authToken=$NPM_TOKEN" >> ./.npmrc + + echo 'registry="https://registry.npmjs.org/"' > ~/.npmrc + echo "//registry.npmjs.org/:_authToken=$NPM_TOKEN" >> ~/.npmrc + env: + NPM_TOKEN: ${{ secrets.NPM_TOKEN }} + + - name: Pack NPM + working-directory: ${{ env.working-directory }} + run: npm pack + + - name: Publish NPM + working-directory: ${{ env.working-directory }} + run: npm publish --tarball=./infisical-sdk-${{github.ref_name}} --access public --registry=https://registry.npmjs.org/ + env: + NPM_TOKEN: ${{ secrets.NPM_TOKEN }} + NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} goreleaser: runs-on: ubuntu-20.04 @@ -56,7 +115,7 @@ jobs: - uses: goreleaser/goreleaser-action@v4 with: distribution: goreleaser-pro - version: latest + version: v1.26.2-pro args: release --clean env: GITHUB_TOKEN: ${{ secrets.GO_RELEASER_GITHUB_TOKEN }} diff --git a/.github/workflows/run-backend-tests.yml b/.github/workflows/run-backend-tests.yml index edb58f9a67..1fc9deff67 100644 --- a/.github/workflows/run-backend-tests.yml +++ b/.github/workflows/run-backend-tests.yml @@ -20,7 +20,7 @@ jobs: uses: actions/checkout@v3 - uses: KengoTODA/actions-setup-docker-compose@v1 if: ${{ env.ACT }} - name: Install `docker-compose` for 
local simulations + name: Install `docker compose` for local simulations with: version: "2.14.2" - name: 🔧 Setup Node 20 @@ -33,7 +33,7 @@ jobs: run: npm install working-directory: backend - name: Start postgres and redis - run: touch .env && docker-compose -f docker-compose.dev.yml up -d db redis + run: touch .env && docker compose -f docker-compose.dev.yml up -d db redis - name: Start integration test run: npm run test:e2e working-directory: backend @@ -44,4 +44,4 @@ jobs: ENCRYPTION_KEY: 4bnfe4e407b8921c104518903515b218 - name: cleanup run: | - docker-compose -f "docker-compose.dev.yml" down \ No newline at end of file + docker compose -f "docker-compose.dev.yml" down \ No newline at end of file diff --git a/.github/workflows/run-cli-tests.yml b/.github/workflows/run-cli-tests.yml index e814f9143f..da6f507a7e 100644 --- a/.github/workflows/run-cli-tests.yml +++ b/.github/workflows/run-cli-tests.yml @@ -20,7 +20,12 @@ on: required: true CLI_TESTS_ENV_SLUG: required: true - + CLI_TESTS_USER_EMAIL: + required: true + CLI_TESTS_USER_PASSWORD: + required: true + CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE: + required: true jobs: test: defaults: @@ -43,5 +48,8 @@ jobs: CLI_TESTS_SERVICE_TOKEN: ${{ secrets.CLI_TESTS_SERVICE_TOKEN }} CLI_TESTS_PROJECT_ID: ${{ secrets.CLI_TESTS_PROJECT_ID }} CLI_TESTS_ENV_SLUG: ${{ secrets.CLI_TESTS_ENV_SLUG }} + CLI_TESTS_USER_EMAIL: ${{ secrets.CLI_TESTS_USER_EMAIL }} + CLI_TESTS_USER_PASSWORD: ${{ secrets.CLI_TESTS_USER_PASSWORD }} + # INFISICAL_VAULT_FILE_PASSPHRASE: ${{ secrets.CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE }} run: go test -v -count=1 ./test diff --git a/.github/workflows/update-be-new-migration-latest-timestamp.yml b/.github/workflows/update-be-new-migration-latest-timestamp.yml deleted file mode 100644 index 684c786541..0000000000 --- a/.github/workflows/update-be-new-migration-latest-timestamp.yml +++ /dev/null @@ -1,59 +0,0 @@ -name: Rename Migrations - -on: - pull_request: - types: [closed] - paths: - - 'backend/src/db/migrations/**' - -jobs: - rename: - runs-on: ubuntu-latest - if: github.event.pull_request.merged == true - - steps: - - name: Check out repository - uses: actions/checkout@v4 - with: - fetch-depth: 0 - - - name: Get list of newly added files in migration folder - run: | - git diff --name-status HEAD^ HEAD backend/src/db/migrations | grep '^A' | cut -f2 | xargs -n1 basename > added_files.txt - if [ ! -s added_files.txt ]; then - echo "No new files added. 
Skipping" - echo "SKIP_RENAME=true" >> $GITHUB_ENV - fi - - - name: Script to rename migrations - if: env.SKIP_RENAME != 'true' - run: python .github/resources/rename_migration_files.py - - - name: Commit and push changes - if: env.SKIP_RENAME != 'true' - run: | - git config user.name github-actions - git config user.email github-actions@github.com - git add ./backend/src/db/migrations - rm added_files.txt - git commit -m "chore: renamed new migration files to latest timestamp (gh-action)" - - - name: Get PR details - id: pr_details - run: | - PR_NUMBER=${{ github.event.pull_request.number }} - PR_MERGER=$(curl -s "https://api.github.com/repos/${{ github.repository }}/pulls/$PR_NUMBER" | jq -r '.merged_by.login') - - echo "PR Number: $PR_NUMBER" - echo "PR Merger: $PR_MERGER" - echo "pr_merger=$PR_MERGER" >> $GITHUB_OUTPUT - - - name: Create Pull Request - if: env.SKIP_RENAME != 'true' - uses: peter-evans/create-pull-request@v6 - with: - token: ${{ secrets.GITHUB_TOKEN }} - commit-message: 'chore: renamed new migration files to latest UTC (gh-action)' - title: 'GH Action: rename new migration file timestamp' - branch-suffix: timestamp - reviewers: ${{ steps.pr_details.outputs.pr_merger }} diff --git a/.gitignore b/.gitignore index b048600710..f2a23324b8 100644 --- a/.gitignore +++ b/.gitignore @@ -63,9 +63,13 @@ yarn-error.log* # Editor specific .vscode/* +.idea/* frontend-build *.tgz cli/infisical-merge cli/test/infisical-merge +/backend/binary + +/npm/bin diff --git a/.infisicalignore b/.infisicalignore index 855047fe4c..4c19af70c5 100644 --- a/.infisicalignore +++ b/.infisicalignore @@ -5,3 +5,5 @@ frontend/src/views/Project/MembersPage/components/MemberListTab/MemberRoleForm/M frontend/src/views/Project/MembersPage/components/MemberListTab/MemberRoleForm/SpecificPrivilegeSection.tsx:generic-api-key:292 docs/self-hosting/configuration/envars.mdx:generic-api-key:106 frontend/src/views/Project/MembersPage/components/MemberListTab/MemberRoleForm/SpecificPrivilegeSection.tsx:generic-api-key:451 +docs/mint.json:generic-api-key:651 +backend/src/ee/services/hsm/hsm-service.ts:generic-api-key:134 diff --git a/Dockerfile.fips.standalone-infisical b/Dockerfile.fips.standalone-infisical new file mode 100644 index 0000000000..dfcb87deb3 --- /dev/null +++ b/Dockerfile.fips.standalone-infisical @@ -0,0 +1,167 @@ +ARG POSTHOG_HOST=https://app.posthog.com +ARG POSTHOG_API_KEY=posthog-api-key +ARG INTERCOM_ID=intercom-id +ARG CAPTCHA_SITE_KEY=captcha-site-key + +FROM node:20-slim AS base + +FROM base AS frontend-dependencies +WORKDIR /app + +COPY frontend/package.json frontend/package-lock.json frontend/next.config.js ./ + +# Install dependencies +RUN npm ci --only-production --ignore-scripts + +# Rebuild the source code only when needed +FROM base AS frontend-builder +WORKDIR /app + +# Copy dependencies +COPY --from=frontend-dependencies /app/node_modules ./node_modules +# Copy all files +COPY /frontend . 
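+# Frontend source is layered on top of the node_modules copied from frontend-dependencies above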
+ +ENV NODE_ENV production +ENV NEXT_PUBLIC_ENV production +ARG POSTHOG_HOST +ENV NEXT_PUBLIC_POSTHOG_HOST $POSTHOG_HOST +ARG POSTHOG_API_KEY +ENV NEXT_PUBLIC_POSTHOG_API_KEY $POSTHOG_API_KEY +ARG INTERCOM_ID +ENV NEXT_PUBLIC_INTERCOM_ID $INTERCOM_ID +ARG INFISICAL_PLATFORM_VERSION +ENV NEXT_PUBLIC_INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION +ARG CAPTCHA_SITE_KEY +ENV NEXT_PUBLIC_CAPTCHA_SITE_KEY $CAPTCHA_SITE_KEY + +# Build +RUN npm run build + +# Production image +FROM base AS frontend-runner +WORKDIR /app + +RUN groupadd -r -g 1001 nodejs && useradd -r -u 1001 -g nodejs non-root-user + +RUN mkdir -p /app/.next/cache/images && chown non-root-user:nodejs /app/.next/cache/images +VOLUME /app/.next/cache/images + +COPY --chown=non-root-user:nodejs --chmod=555 frontend/scripts ./scripts +COPY --from=frontend-builder /app/public ./public +RUN chown non-root-user:nodejs ./public/data + +COPY --from=frontend-builder --chown=non-root-user:nodejs /app/.next/standalone ./ +COPY --from=frontend-builder --chown=non-root-user:nodejs /app/.next/static ./.next/static + +USER non-root-user + +ENV NEXT_TELEMETRY_DISABLED 1 + +## +## BACKEND +## +FROM base AS backend-build + +ENV ChrystokiConfigurationPath=/usr/safenet/lunaclient/ + +RUN groupadd -r -g 1001 nodejs && useradd -r -u 1001 -g nodejs non-root-user + +WORKDIR /app + +# Required for pkcs11js +RUN apt-get update && apt-get install -y \ + python3 \ + make \ + g++ \ + && rm -rf /var/lib/apt/lists/* + +COPY backend/package*.json ./ +RUN npm ci --only-production + +COPY /backend . +COPY --chown=non-root-user:nodejs standalone-entrypoint.sh standalone-entrypoint.sh +RUN npm i -D tsconfig-paths +RUN npm run build + +# Production stage +FROM base AS backend-runner + +ENV ChrystokiConfigurationPath=/usr/safenet/lunaclient/ + +WORKDIR /app + +# Required for pkcs11js +RUN apt-get update && apt-get install -y \ + python3 \ + make \ + g++ \ + && rm -rf /var/lib/apt/lists/* + +COPY backend/package*.json ./ +RUN npm ci --only-production + +COPY --from=backend-build /app . 
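+# Pull the compiled backend output from the build stage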
+ +RUN mkdir frontend-build + +# Production stage +FROM base AS production + +# Install necessary packages +RUN apt-get update && apt-get install -y \ + ca-certificates \ + curl \ + git \ + && rm -rf /var/lib/apt/lists/* + +# Install Infisical CLI +RUN curl -1sLf 'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.deb.sh' | bash \ + && apt-get update && apt-get install -y infisical=0.31.1 \ + && rm -rf /var/lib/apt/lists/* + +RUN groupadd -r -g 1001 nodejs && useradd -r -u 1001 -g nodejs non-root-user + +# Give non-root-user permission to update SSL certs +RUN chown -R non-root-user /etc/ssl/certs +RUN chown non-root-user /etc/ssl/certs/ca-certificates.crt +RUN chmod -R u+rwx /etc/ssl/certs +RUN chmod u+rw /etc/ssl/certs/ca-certificates.crt +RUN chown non-root-user /usr/sbin/update-ca-certificates +RUN chmod u+rx /usr/sbin/update-ca-certificates + +## set pre baked keys +ARG POSTHOG_API_KEY +ENV NEXT_PUBLIC_POSTHOG_API_KEY=$POSTHOG_API_KEY \ + BAKED_NEXT_PUBLIC_POSTHOG_API_KEY=$POSTHOG_API_KEY +ARG INTERCOM_ID=intercom-id +ENV NEXT_PUBLIC_INTERCOM_ID=$INTERCOM_ID \ + BAKED_NEXT_PUBLIC_INTERCOM_ID=$INTERCOM_ID +ARG CAPTCHA_SITE_KEY +ENV NEXT_PUBLIC_CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY \ + BAKED_NEXT_PUBLIC_CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY + +WORKDIR / + +COPY --from=backend-runner /app /backend + +COPY --from=frontend-runner /app ./backend/frontend-build + +ENV PORT 8080 +ENV HOST=0.0.0.0 +ENV HTTPS_ENABLED false +ENV NODE_ENV production +ENV STANDALONE_BUILD true +ENV STANDALONE_MODE true +ENV ChrystokiConfigurationPath=/usr/safenet/lunaclient/ + +WORKDIR /backend + +ENV TELEMETRY_ENABLED true + +EXPOSE 8080 +EXPOSE 443 + +USER non-root-user + +CMD ["./standalone-entrypoint.sh"] \ No newline at end of file diff --git a/Dockerfile.standalone-infisical b/Dockerfile.standalone-infisical index 8ffe7e3dea..41c898b793 100644 --- a/Dockerfile.standalone-infisical +++ b/Dockerfile.standalone-infisical @@ -72,6 +72,9 @@ RUN addgroup --system --gid 1001 nodejs \ WORKDIR /app +# Required for pkcs11js +RUN apk add --no-cache python3 make g++ + COPY backend/package*.json ./ RUN npm ci --only-production @@ -85,6 +88,9 @@ FROM base AS backend-runner WORKDIR /app +# Required for pkcs11js +RUN apk add --no-cache python3 make g++ + COPY backend/package*.json ./ RUN npm ci --only-production @@ -95,6 +101,10 @@ RUN mkdir frontend-build # Production stage FROM base AS production RUN apk add --upgrade --no-cache ca-certificates +RUN apk add --no-cache bash curl && curl -1sLf \ + 'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.alpine.sh' | bash \ + && apk add infisical=0.31.1 && apk add --no-cache git + RUN addgroup --system --gid 1001 nodejs \ && adduser --system --uid 1001 non-root-user diff --git a/Makefile b/Makefile index 11143162ee..aec2dad745 100644 --- a/Makefile +++ b/Makefile @@ -15,3 +15,16 @@ up-prod: down: docker compose -f docker-compose.dev.yml down + +reviewable-ui: + cd frontend && \ + npm run lint:fix && \ + npm run type:check + +reviewable-api: + cd backend && \ + npm run lint:fix && \ + npm run type:check + +reviewable: reviewable-ui reviewable-api + diff --git a/README.md b/README.md index a5ed5c6213..d68481428e 100644 --- a/README.md +++ b/README.md @@ -48,35 +48,52 @@ ## Introduction -**[Infisical](https://infisical.com)** is the open source secret management platform that teams use to centralize their secrets like API keys, database credentials, and configurations. 
+**[Infisical](https://infisical.com)** is the open source secret management platform that teams use to centralize their application configuration and secrets like API keys and database credentials as well as manage their internal PKI. -We're on a mission to make secret management more accessible to everyone, not just security teams, and that means redesigning the entire developer experience from ground up. +We're on a mission to make security tooling more accessible to everyone, not just security teams, and that means redesigning the entire developer experience from ground up. ## Features -- **[User-friendly dashboard](https://infisical.com/docs/documentation/platform/project)** to manage secrets across projects and environments (e.g. development, production, etc.). -- **[Client SDKs](https://infisical.com/docs/sdks/overview)** to fetch secrets for your apps and infrastructure on demand. -- **[Infisical CLI](https://infisical.com/docs/cli/overview)** to fetch and inject secrets into any framework in local development and CI/CD. -- **[Infisical API](https://infisical.com/docs/api-reference/overview/introduction)** to perform CRUD operation on secrets, users, projects, and any other resource in Infisical. -- **[Native integrations](https://infisical.com/docs/integrations/overview)** with platforms like [GitHub](https://infisical.com/docs/integrations/cicd/githubactions), [Vercel](https://infisical.com/docs/integrations/cloud/vercel), [AWS](https://infisical.com/docs/integrations/cloud/aws-secret-manager), and tools like [Terraform](https://infisical.com/docs/integrations/frameworks/terraform), [Ansible](https://infisical.com/docs/integrations/platforms/ansible), and more. -- **[Infisical Kubernetes operator](https://infisical.com/docs/documentation/getting-started/kubernetes)** to managed secrets in k8s, automatically reload deployments, and more. -- **[Infisical Agent](https://infisical.com/docs/infisical-agent/overview)** to inject secrets into your applications without modifying any code logic. -- **[Self-hosting and on-prem](https://infisical.com/docs/self-hosting/overview)** to get complete control over your data. -- **[Secret versioning](https://infisical.com/docs/documentation/platform/secret-versioning)** and **[Point-in-Time Recovery](https://infisical.com/docs/documentation/platform/pit-recovery)** to version every secret and project state. -- **[Audit logs](https://infisical.com/docs/documentation/platform/audit-logs)** to record every action taken in a project. -- **[Role-based Access Controls](https://infisical.com/docs/documentation/platform/role-based-access-controls)** to create permission sets on any resource in Infisica and assign those to user or machine identities. -- **[Simple on-premise deployments](https://infisical.com/docs/self-hosting/overview)** to AWS, Digital Ocean, and more. -- **[Secret Scanning and Leak Prevention](https://infisical.com/docs/cli/scanning-overview)** to prevent secrets from leaking to git. +### Secrets Management: -And much more. +- **[Dashboard](https://infisical.com/docs/documentation/platform/project)**: Manage secrets across projects and environments (e.g. development, production, etc.) through a user-friendly interface. 
+- **[Native Integrations](https://infisical.com/docs/integrations/overview)**: Sync secrets to platforms like [GitHub](https://infisical.com/docs/integrations/cicd/githubactions), [Vercel](https://infisical.com/docs/integrations/cloud/vercel), [AWS](https://infisical.com/docs/integrations/cloud/aws-secret-manager), and use tools like [Terraform](https://infisical.com/docs/integrations/frameworks/terraform), [Ansible](https://infisical.com/docs/integrations/platforms/ansible), and more.
+- **[Secret versioning](https://infisical.com/docs/documentation/platform/secret-versioning)** and **[Point-in-Time Recovery](https://infisical.com/docs/documentation/platform/pit-recovery)**: Keep track of every secret and project state; roll back when needed.
+- **[Secret Rotation](https://infisical.com/docs/documentation/platform/secret-rotation/overview)**: Rotate secrets at regular intervals for services like [PostgreSQL](https://infisical.com/docs/documentation/platform/secret-rotation/postgres), [MySQL](https://infisical.com/docs/documentation/platform/secret-rotation/mysql), [AWS IAM](https://infisical.com/docs/documentation/platform/secret-rotation/aws-iam), and more.
+- **[Dynamic Secrets](https://infisical.com/docs/documentation/platform/dynamic-secrets/overview)**: Generate ephemeral secrets on-demand for services like [PostgreSQL](https://infisical.com/docs/documentation/platform/dynamic-secrets/postgresql), [MySQL](https://infisical.com/docs/documentation/platform/dynamic-secrets/mysql), [RabbitMQ](https://infisical.com/docs/documentation/platform/dynamic-secrets/rabbit-mq), and more.
+- **[Secret Scanning and Leak Prevention](https://infisical.com/docs/cli/scanning-overview)**: Prevent secrets from leaking to git.
+- **[Infisical Kubernetes Operator](https://infisical.com/docs/documentation/getting-started/kubernetes)**: Deliver secrets to your Kubernetes workloads and automatically reload deployments.
+- **[Infisical Agent](https://infisical.com/docs/infisical-agent/overview)**: Inject secrets into applications without modifying any code logic.
+
+### Internal PKI:
+
+- **[Private Certificate Authority](https://infisical.com/docs/documentation/platform/pki/private-ca)**: Create CA hierarchies, configure [certificate templates](https://infisical.com/docs/documentation/platform/pki/certificates#guide-to-issuing-certificates) for policy enforcement, and start issuing X.509 certificates.
+- **[Certificate Management](https://infisical.com/docs/documentation/platform/pki/certificates)**: Manage the certificate lifecycle from [issuance](https://infisical.com/docs/documentation/platform/pki/certificates#guide-to-issuing-certificates) to [revocation](https://infisical.com/docs/documentation/platform/pki/certificates#guide-to-revoking-certificates) with support for CRL.
+- **[Alerting](https://infisical.com/docs/documentation/platform/pki/alerting)**: Configure alerting for expiring CA and end-entity certificates.
+- **[Infisical PKI Issuer for Kubernetes](https://infisical.com/docs/documentation/platform/pki/pki-issuer)**: Deliver TLS certificates to your Kubernetes workloads with automatic renewal.
+- **[Enrollment over Secure Transport](https://infisical.com/docs/documentation/platform/pki/est)**: Enroll and manage certificates via the EST protocol.
+
+### Key Management (KMS):
+
+- **[Cryptographic Keys](https://infisical.com/docs/documentation/platform/kms)**: Centrally manage keys across projects through a user-friendly interface or via the API.
+- **[Encrypt and Decrypt Data](https://infisical.com/docs/documentation/platform/kms#guide-to-encrypting-data)**: Use symmetric keys to encrypt and decrypt data. + +### General Platform: +- **Authentication Methods**: Authenticate machine identities with Infisical using a cloud-native or platform agnostic authentication method ([Kubernetes Auth](https://infisical.com/docs/documentation/platform/identities/kubernetes-auth), [GCP Auth](https://infisical.com/docs/documentation/platform/identities/gcp-auth), [Azure Auth](https://infisical.com/docs/documentation/platform/identities/azure-auth), [AWS Auth](https://infisical.com/docs/documentation/platform/identities/aws-auth), [OIDC Auth](https://infisical.com/docs/documentation/platform/identities/oidc-auth/general), [Universal Auth](https://infisical.com/docs/documentation/platform/identities/universal-auth)). +- **[Access Controls](https://infisical.com/docs/documentation/platform/access-controls/overview)**: Define advanced authorization controls for users and machine identities with [RBAC](https://infisical.com/docs/documentation/platform/access-controls/role-based-access-controls), [additional privileges](https://infisical.com/docs/documentation/platform/access-controls/additional-privileges), [temporary access](https://infisical.com/docs/documentation/platform/access-controls/temporary-access), [access requests](https://infisical.com/docs/documentation/platform/access-controls/access-requests), [approval workflows](https://infisical.com/docs/documentation/platform/pr-workflows), and more. +- **[Audit logs](https://infisical.com/docs/documentation/platform/audit-logs)**: Track every action taken on the platform. +- **[Self-hosting](https://infisical.com/docs/self-hosting/overview)**: Deploy Infisical on-prem or cloud with ease; keep data on your own infrastructure. +- **[Infisical SDK](https://infisical.com/docs/sdks/overview)**: Interact with Infisical via client SDKs ([Node](https://infisical.com/docs/sdks/languages/node), [Python](https://github.com/Infisical/python-sdk-official?tab=readme-ov-file#infisical-python-sdk), [Go](https://infisical.com/docs/sdks/languages/go), [Ruby](https://infisical.com/docs/sdks/languages/ruby), [Java](https://infisical.com/docs/sdks/languages/java), [.NET](https://infisical.com/docs/sdks/languages/csharp)) +- **[Infisical CLI](https://infisical.com/docs/cli/overview)**: Interact with Infisical via CLI; useful for injecting secrets into local development and CI/CD pipelines. +- **[Infisical API](https://infisical.com/docs/api-reference/overview/introduction)**: Interact with Infisical via API. ## Getting started Check out the [Quickstart Guides](https://infisical.com/docs/getting-started/introduction) -| Use Infisical Cloud | Deploy Infisical on premise | -| ------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| The fastest and most reliable way to
get started with Infisical is signing up <br> for free to [Infisical Cloud](https://app.infisical.com/login). | View all [deployment options](https://infisical.com/docs/self-hosting/overview) |
+| Use Infisical Cloud | Deploy Infisical on premise |
+| ------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------ |
+| The fastest and most reliable way to <br> get started with Infisical is signing up <br> for free to [Infisical Cloud](https://app.infisical.com/login). |
View all [deployment options](https://infisical.com/docs/self-hosting/overview) | ### Run Infisical locally @@ -118,9 +135,7 @@ Lean about Infisical's code scanning feature [here](https://infisical.com/docs/c This repo available under the [MIT expat license](https://github.com/Infisical/infisical/blob/main/LICENSE), with the exception of the `ee` directory which will contain premium enterprise features requiring a Infisical license. -If you are interested in managed Infisical Cloud of self-hosted Enterprise Offering, take a look at [our website](https://infisical.com/) or [book a meeting with us](https://infisical.cal.com/vlad/infisical-demo): - -Schedule a meeting +If you are interested in managed Infisical Cloud of self-hosted Enterprise Offering, take a look at [our website](https://infisical.com/) or [book a meeting with us](https://infisical.cal.com/vlad/infisical-demo). ## Security @@ -137,7 +152,6 @@ Whether it's big or small, we love contributions. Check out our guide to see how Not sure where to get started? You can: - Join our Slack, and ask us any questions there. -- Join our [community calls](https://us06web.zoom.us/j/82623506356) every Wednesday at 11am EST to ask any questions, provide feedback, hangout and more. ## Resources @@ -147,37 +161,3 @@ Not sure where to get started? You can: - [Twitter](https://twitter.com/infisical) for fast news - [YouTube](https://www.youtube.com/@infisical_os) for videos on secret management - [Blog](https://infisical.com/blog) for secret management insights, articles, tutorials, and updates -- [Roadmap](https://www.notion.so/infisical/be2d2585a6694e40889b03aef96ea36b?v=5b19a8127d1a4060b54769567a8785fa) for planned features - -## Acknowledgements - -[//]: contributor-faces - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/backend/Dockerfile b/backend/Dockerfile index 2153ba33a1..5822649461 100644 --- a/backend/Dockerfile +++ b/backend/Dockerfile @@ -3,6 +3,12 @@ FROM node:20-alpine AS build WORKDIR /app +# Required for pkcs11js +RUN apk --update add \ + python3 \ + make \ + g++ + COPY package*.json ./ RUN npm ci --only-production @@ -11,12 +17,17 @@ RUN npm run build # Production stage FROM node:20-alpine - WORKDIR /app ENV npm_config_cache /home/node/.npm COPY package*.json ./ + +RUN apk --update add \ + python3 \ + make \ + g++ + RUN npm ci --only-production && npm cache clean --force COPY --from=build /app . diff --git a/backend/Dockerfile.dev b/backend/Dockerfile.dev index ec34f63d7b..97bc2c6a36 100644 --- a/backend/Dockerfile.dev +++ b/backend/Dockerfile.dev @@ -1,5 +1,44 @@ FROM node:20-alpine +# ? Setup a test SoftHSM module. In production a real HSM is used. + +ARG SOFTHSM2_VERSION=2.5.0 + +ENV SOFTHSM2_VERSION=${SOFTHSM2_VERSION} \ + SOFTHSM2_SOURCES=/tmp/softhsm2 + +# install build dependencies including python3 +RUN apk --update add \ + alpine-sdk \ + autoconf \ + automake \ + git \ + libtool \ + openssl-dev \ + python3 \ + make \ + g++ + +# build and install SoftHSM2 +RUN git clone https://github.com/opendnssec/SoftHSMv2.git ${SOFTHSM2_SOURCES} +WORKDIR ${SOFTHSM2_SOURCES} + +RUN git checkout ${SOFTHSM2_VERSION} -b ${SOFTHSM2_VERSION} \ + && sh autogen.sh \ + && ./configure --prefix=/usr/local --disable-gost \ + && make \ + && make install + +WORKDIR /root +RUN rm -fr ${SOFTHSM2_SOURCES} + +# install pkcs11-tool +RUN apk --update add opensc + +RUN softhsm2-util --init-token --slot 0 --label "auth-app" --pin 1234 --so-pin 0000 + +# ? 
App setup + RUN apk add --no-cache bash curl && curl -1sLf \ 'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.alpine.sh' | bash \ && apk add infisical=0.8.1 && apk add --no-cache git diff --git a/backend/babel.config.json b/backend/babel.config.json new file mode 100644 index 0000000000..59480d0848 --- /dev/null +++ b/backend/babel.config.json @@ -0,0 +1,4 @@ +{ + "presets": ["@babel/preset-env", "@babel/preset-react"], + "plugins": ["@babel/plugin-syntax-import-attributes", "babel-plugin-transform-import-meta"] +} diff --git a/backend/e2e-test/routes/v1/identity.spec.ts b/backend/e2e-test/routes/v1/identity.spec.ts index ccb530c796..f1cc6cf054 100644 --- a/backend/e2e-test/routes/v1/identity.spec.ts +++ b/backend/e2e-test/routes/v1/identity.spec.ts @@ -34,7 +34,7 @@ describe("Identity v1", async () => { test("Create identity", async () => { const newIdentity = await createIdentity("mac1", OrgMembershipRole.Admin); expect(newIdentity.name).toBe("mac1"); - expect(newIdentity.authMethod).toBeNull(); + expect(newIdentity.authMethods).toEqual([]); await deleteIdentity(newIdentity.id); }); @@ -42,7 +42,7 @@ describe("Identity v1", async () => { test("Update identity", async () => { const newIdentity = await createIdentity("mac1", OrgMembershipRole.Admin); expect(newIdentity.name).toBe("mac1"); - expect(newIdentity.authMethod).toBeNull(); + expect(newIdentity.authMethods).toEqual([]); const updatedIdentity = await testServer.inject({ method: "PATCH", diff --git a/backend/e2e-test/routes/v1/login.spec.ts b/backend/e2e-test/routes/v1/login.spec.ts index cd6ec31947..0b2123f4e2 100644 --- a/backend/e2e-test/routes/v1/login.spec.ts +++ b/backend/e2e-test/routes/v1/login.spec.ts @@ -39,8 +39,6 @@ describe("Login V1 Router", async () => { }); expect(res.statusCode).toBe(200); const payload = JSON.parse(res.payload); - expect(payload).toHaveProperty("mfaEnabled"); expect(payload).toHaveProperty("token"); - expect(payload.mfaEnabled).toBeFalsy(); }); }); diff --git a/backend/e2e-test/routes/v1/project-env.spec.ts b/backend/e2e-test/routes/v1/project-env.spec.ts index ec06d64748..0726f3a507 100644 --- a/backend/e2e-test/routes/v1/project-env.spec.ts +++ b/backend/e2e-test/routes/v1/project-env.spec.ts @@ -123,7 +123,7 @@ describe("Project Environment Router", async () => { id: deletedProjectEnvironment.id, name: mockProjectEnv.name, slug: mockProjectEnv.slug, - position: 4, + position: 5, createdAt: expect.any(String), updatedAt: expect.any(String) }) diff --git a/backend/e2e-test/routes/v1/secret-approval-policy.spec.ts b/backend/e2e-test/routes/v1/secret-approval-policy.spec.ts new file mode 100644 index 0000000000..6244cf735a --- /dev/null +++ b/backend/e2e-test/routes/v1/secret-approval-policy.spec.ts @@ -0,0 +1,36 @@ +import { seedData1 } from "@app/db/seed-data"; +import { ApproverType } from "@app/ee/services/access-approval-policy/access-approval-policy-types"; + +const createPolicy = async (dto: { name: string; secretPath: string; approvers: {type: ApproverType.User, id: string}[]; approvals: number }) => { + const res = await testServer.inject({ + method: "POST", + url: `/api/v1/secret-approvals`, + headers: { + authorization: `Bearer ${jwtAuthToken}` + }, + body: { + workspaceId: seedData1.project.id, + environment: seedData1.environment.slug, + name: dto.name, + secretPath: dto.secretPath, + approvers: dto.approvers, + approvals: dto.approvals + } + }); + + expect(res.statusCode).toBe(200); + return res.json().approval; +}; + +describe("Secret approval policy router", async () => 
{ + test("Create policy", async () => { + const policy = await createPolicy({ + secretPath: "/", + approvals: 1, + approvers: [{id:seedData1.id, type: ApproverType.User}], + name: "test-policy" + }); + + expect(policy.name).toBe("test-policy"); + }); +}); diff --git a/backend/e2e-test/routes/v1/secret-import.spec.ts b/backend/e2e-test/routes/v1/secret-import.spec.ts index c184e44e5b..dd4f9c4ca2 100644 --- a/backend/e2e-test/routes/v1/secret-import.spec.ts +++ b/backend/e2e-test/routes/v1/secret-import.spec.ts @@ -1,73 +1,61 @@ +import { createFolder, deleteFolder } from "e2e-test/testUtils/folders"; +import { createSecretImport, deleteSecretImport } from "e2e-test/testUtils/secret-imports"; +import { createSecretV2, deleteSecretV2, getSecretByNameV2, getSecretsV2 } from "e2e-test/testUtils/secrets"; + import { seedData1 } from "@app/db/seed-data"; -const createSecretImport = async (importPath: string, importEnv: string) => { - const res = await testServer.inject({ - method: "POST", - url: `/api/v1/secret-imports`, - headers: { - authorization: `Bearer ${jwtAuthToken}` - }, - body: { - workspaceId: seedData1.project.id, - environment: seedData1.environment.slug, - path: "/", - import: { - environment: importEnv, - path: importPath - } - } - }); - - expect(res.statusCode).toBe(200); - const payload = JSON.parse(res.payload); - expect(payload).toHaveProperty("secretImport"); - return payload.secretImport; -}; - -const deleteSecretImport = async (id: string) => { - const res = await testServer.inject({ - method: "DELETE", - url: `/api/v1/secret-imports/${id}`, - headers: { - authorization: `Bearer ${jwtAuthToken}` - }, - body: { - workspaceId: seedData1.project.id, - environment: seedData1.environment.slug, - path: "/" - } - }); - - expect(res.statusCode).toBe(200); - const payload = JSON.parse(res.payload); - expect(payload).toHaveProperty("secretImport"); - return payload.secretImport; -}; - describe("Secret Import Router", async () => { test.each([ { importEnv: "prod", importPath: "/" }, // one in root { importEnv: "staging", importPath: "/" } // then create a deep one creating intermediate ones ])("Create secret import $importEnv with path $importPath", async ({ importPath, importEnv }) => { // check for default environments - const payload = await createSecretImport(importPath, importEnv); + const payload = await createSecretImport({ + authToken: jwtAuthToken, + secretPath: "/", + environmentSlug: seedData1.environment.slug, + workspaceId: seedData1.project.id, + importPath, + importEnv + }); expect(payload).toEqual( expect.objectContaining({ id: expect.any(String), - importPath: expect.any(String), + importPath, importEnv: expect.objectContaining({ name: expect.any(String), - slug: expect.any(String), + slug: importEnv, id: expect.any(String) }) }) ); - await deleteSecretImport(payload.id); + + await deleteSecretImport({ + id: payload.id, + workspaceId: seedData1.project.id, + environmentSlug: seedData1.environment.slug, + secretPath: "/", + authToken: jwtAuthToken + }); }); test("Get secret imports", async () => { - const createdImport1 = await createSecretImport("/", "prod"); - const createdImport2 = await createSecretImport("/", "staging"); + const createdImport1 = await createSecretImport({ + authToken: jwtAuthToken, + secretPath: "/", + environmentSlug: seedData1.environment.slug, + workspaceId: seedData1.project.id, + importPath: "/", + importEnv: "prod" + }); + const createdImport2 = await createSecretImport({ + authToken: jwtAuthToken, + secretPath: "/", + environmentSlug: 
seedData1.environment.slug, + workspaceId: seedData1.project.id, + importPath: "/", + importEnv: "staging" + }); const res = await testServer.inject({ method: "GET", url: `/api/v1/secret-imports`, @@ -89,25 +77,60 @@ describe("Secret Import Router", async () => { expect.arrayContaining([ expect.objectContaining({ id: expect.any(String), - importPath: expect.any(String), + importPath: "/", importEnv: expect.objectContaining({ name: expect.any(String), - slug: expect.any(String), + slug: "prod", + id: expect.any(String) + }) + }), + expect.objectContaining({ + id: expect.any(String), + importPath: "/", + importEnv: expect.objectContaining({ + name: expect.any(String), + slug: "staging", id: expect.any(String) }) }) ]) ); - await deleteSecretImport(createdImport1.id); - await deleteSecretImport(createdImport2.id); + await deleteSecretImport({ + id: createdImport1.id, + workspaceId: seedData1.project.id, + environmentSlug: seedData1.environment.slug, + secretPath: "/", + authToken: jwtAuthToken + }); + await deleteSecretImport({ + id: createdImport2.id, + workspaceId: seedData1.project.id, + environmentSlug: seedData1.environment.slug, + secretPath: "/", + authToken: jwtAuthToken + }); }); test("Update secret import position", async () => { const prodImportDetails = { path: "/", envSlug: "prod" }; const stagingImportDetails = { path: "/", envSlug: "staging" }; - const createdImport1 = await createSecretImport(prodImportDetails.path, prodImportDetails.envSlug); - const createdImport2 = await createSecretImport(stagingImportDetails.path, stagingImportDetails.envSlug); + const createdImport1 = await createSecretImport({ + authToken: jwtAuthToken, + secretPath: "/", + environmentSlug: seedData1.environment.slug, + workspaceId: seedData1.project.id, + importPath: prodImportDetails.path, + importEnv: prodImportDetails.envSlug + }); + const createdImport2 = await createSecretImport({ + authToken: jwtAuthToken, + secretPath: "/", + environmentSlug: seedData1.environment.slug, + workspaceId: seedData1.project.id, + importPath: stagingImportDetails.path, + importEnv: stagingImportDetails.envSlug + }); const updateImportRes = await testServer.inject({ method: "PATCH", @@ -161,22 +184,55 @@ describe("Secret Import Router", async () => { expect(secretImportList.secretImports[1].id).toEqual(createdImport1.id); expect(secretImportList.secretImports[0].id).toEqual(createdImport2.id); - await deleteSecretImport(createdImport1.id); - await deleteSecretImport(createdImport2.id); + await deleteSecretImport({ + id: createdImport1.id, + workspaceId: seedData1.project.id, + environmentSlug: seedData1.environment.slug, + secretPath: "/", + authToken: jwtAuthToken + }); + await deleteSecretImport({ + id: createdImport2.id, + workspaceId: seedData1.project.id, + environmentSlug: seedData1.environment.slug, + secretPath: "/", + authToken: jwtAuthToken + }); }); test("Delete secret import position", async () => { - const createdImport1 = await createSecretImport("/", "prod"); - const createdImport2 = await createSecretImport("/", "staging"); - const deletedImport = await deleteSecretImport(createdImport1.id); + const createdImport1 = await createSecretImport({ + authToken: jwtAuthToken, + secretPath: "/", + environmentSlug: seedData1.environment.slug, + workspaceId: seedData1.project.id, + importPath: "/", + importEnv: "prod" + }); + const createdImport2 = await createSecretImport({ + authToken: jwtAuthToken, + secretPath: "/", + environmentSlug: seedData1.environment.slug, + workspaceId: seedData1.project.id, + 
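+        // second import (from staging) is intentionally left in place; the test later expects it to move up to position 1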
importPath: "/", + importEnv: "staging" + }); + const deletedImport = await deleteSecretImport({ + id: createdImport1.id, + workspaceId: seedData1.project.id, + environmentSlug: seedData1.environment.slug, + secretPath: "/", + authToken: jwtAuthToken + }); + // check for default environments expect(deletedImport).toEqual( expect.objectContaining({ id: expect.any(String), - importPath: expect.any(String), + importPath: "/", importEnv: expect.objectContaining({ name: expect.any(String), - slug: expect.any(String), + slug: "prod", id: expect.any(String) }) }) @@ -201,6 +257,552 @@ describe("Secret Import Router", async () => { expect(secretImportList.secretImports.length).toEqual(1); expect(secretImportList.secretImports[0].position).toEqual(1); - await deleteSecretImport(createdImport2.id); + await deleteSecretImport({ + id: createdImport2.id, + workspaceId: seedData1.project.id, + environmentSlug: seedData1.environment.slug, + secretPath: "/", + authToken: jwtAuthToken + }); }); }); + +// dev <- stage <- prod +describe.each([{ path: "/" }, { path: "/deep" }])( + "Secret import waterfall pattern testing - %path", + ({ path: testSuitePath }) => { + beforeAll(async () => { + let prodFolder: { id: string }; + let stagingFolder: { id: string }; + let devFolder: { id: string }; + + if (testSuitePath !== "/") { + prodFolder = await createFolder({ + authToken: jwtAuthToken, + environmentSlug: "prod", + workspaceId: seedData1.projectV3.id, + secretPath: "/", + name: "deep" + }); + + stagingFolder = await createFolder({ + authToken: jwtAuthToken, + environmentSlug: "staging", + workspaceId: seedData1.projectV3.id, + secretPath: "/", + name: "deep" + }); + + devFolder = await createFolder({ + authToken: jwtAuthToken, + environmentSlug: seedData1.environment.slug, + workspaceId: seedData1.projectV3.id, + secretPath: "/", + name: "deep" + }); + } + + const devImportFromStage = await createSecretImport({ + authToken: jwtAuthToken, + secretPath: testSuitePath, + environmentSlug: seedData1.environment.slug, + workspaceId: seedData1.projectV3.id, + importPath: testSuitePath, + importEnv: "staging" + }); + + const stageImportFromProd = await createSecretImport({ + authToken: jwtAuthToken, + secretPath: testSuitePath, + environmentSlug: "staging", + workspaceId: seedData1.projectV3.id, + importPath: testSuitePath, + importEnv: "prod" + }); + + return async () => { + await deleteSecretImport({ + id: stageImportFromProd.id, + workspaceId: seedData1.projectV3.id, + environmentSlug: "staging", + secretPath: testSuitePath, + authToken: jwtAuthToken + }); + + await deleteSecretImport({ + id: devImportFromStage.id, + workspaceId: seedData1.projectV3.id, + environmentSlug: seedData1.environment.slug, + secretPath: testSuitePath, + authToken: jwtAuthToken + }); + + if (prodFolder) { + await deleteFolder({ + authToken: jwtAuthToken, + secretPath: "/", + id: prodFolder.id, + workspaceId: seedData1.projectV3.id, + environmentSlug: "prod" + }); + } + + if (stagingFolder) { + await deleteFolder({ + authToken: jwtAuthToken, + secretPath: "/", + id: stagingFolder.id, + workspaceId: seedData1.projectV3.id, + environmentSlug: "staging" + }); + } + + if (devFolder) { + await deleteFolder({ + authToken: jwtAuthToken, + secretPath: "/", + id: devFolder.id, + workspaceId: seedData1.projectV3.id, + environmentSlug: seedData1.environment.slug + }); + } + }; + }); + + test("Check one level imported secret exist", async () => { + await createSecretV2({ + environmentSlug: "staging", + workspaceId: seedData1.projectV3.id, + secretPath: 
testSuitePath, + authToken: jwtAuthToken, + key: "STAGING_KEY", + value: "stage-value" + }); + + const secret = await getSecretByNameV2({ + environmentSlug: seedData1.environment.slug, + workspaceId: seedData1.projectV3.id, + secretPath: testSuitePath, + authToken: jwtAuthToken, + key: "STAGING_KEY" + }); + + expect(secret.secretKey).toBe("STAGING_KEY"); + expect(secret.secretValue).toBe("stage-value"); + + const listSecrets = await getSecretsV2({ + environmentSlug: seedData1.environment.slug, + workspaceId: seedData1.projectV3.id, + secretPath: testSuitePath, + authToken: jwtAuthToken + }); + expect(listSecrets.imports).toEqual( + expect.arrayContaining([ + expect.objectContaining({ + secrets: expect.arrayContaining([ + expect.objectContaining({ + secretKey: "STAGING_KEY", + secretValue: "stage-value" + }) + ]) + }) + ]) + ); + + await deleteSecretV2({ + environmentSlug: "staging", + workspaceId: seedData1.projectV3.id, + secretPath: testSuitePath, + authToken: jwtAuthToken, + key: "STAGING_KEY" + }); + }); + + test("Check two level imported secret exist", async () => { + await createSecretV2({ + environmentSlug: "prod", + workspaceId: seedData1.projectV3.id, + secretPath: testSuitePath, + authToken: jwtAuthToken, + key: "PROD_KEY", + value: "prod-value" + }); + + const secret = await getSecretByNameV2({ + environmentSlug: seedData1.environment.slug, + workspaceId: seedData1.projectV3.id, + secretPath: testSuitePath, + authToken: jwtAuthToken, + key: "PROD_KEY" + }); + + expect(secret.secretKey).toBe("PROD_KEY"); + expect(secret.secretValue).toBe("prod-value"); + + const listSecrets = await getSecretsV2({ + environmentSlug: seedData1.environment.slug, + workspaceId: seedData1.projectV3.id, + secretPath: testSuitePath, + authToken: jwtAuthToken + }); + expect(listSecrets.imports).toEqual( + expect.arrayContaining([ + expect.objectContaining({ + secrets: expect.arrayContaining([ + expect.objectContaining({ + secretKey: "PROD_KEY", + secretValue: "prod-value" + }) + ]) + }) + ]) + ); + + await deleteSecretV2({ + environmentSlug: "prod", + workspaceId: seedData1.projectV3.id, + secretPath: testSuitePath, + authToken: jwtAuthToken, + key: "PROD_KEY" + }); + }); + } +); + +// dev <- stage, dev <- prod +describe.each([{ path: "/" }, { path: "/deep" }])( + "Secret import multiple destination to one source pattern testing - %path", + ({ path: testSuitePath }) => { + beforeAll(async () => { + let prodFolder: { id: string }; + let stagingFolder: { id: string }; + let devFolder: { id: string }; + + if (testSuitePath !== "/") { + prodFolder = await createFolder({ + authToken: jwtAuthToken, + environmentSlug: "prod", + workspaceId: seedData1.projectV3.id, + secretPath: "/", + name: "deep" + }); + + stagingFolder = await createFolder({ + authToken: jwtAuthToken, + environmentSlug: "staging", + workspaceId: seedData1.projectV3.id, + secretPath: "/", + name: "deep" + }); + + devFolder = await createFolder({ + authToken: jwtAuthToken, + environmentSlug: seedData1.environment.slug, + workspaceId: seedData1.projectV3.id, + secretPath: "/", + name: "deep" + }); + } + + const devImportFromStage = await createSecretImport({ + authToken: jwtAuthToken, + secretPath: testSuitePath, + environmentSlug: seedData1.environment.slug, + workspaceId: seedData1.projectV3.id, + importPath: testSuitePath, + importEnv: "staging" + }); + + const devImportFromProd = await createSecretImport({ + authToken: jwtAuthToken, + secretPath: testSuitePath, + environmentSlug: seedData1.environment.slug, + workspaceId: 
seedData1.projectV3.id, + importPath: testSuitePath, + importEnv: "prod" + }); + + return async () => { + await deleteSecretImport({ + id: devImportFromProd.id, + workspaceId: seedData1.projectV3.id, + environmentSlug: seedData1.environment.slug, + secretPath: testSuitePath, + authToken: jwtAuthToken + }); + + await deleteSecretImport({ + id: devImportFromStage.id, + workspaceId: seedData1.projectV3.id, + environmentSlug: seedData1.environment.slug, + secretPath: testSuitePath, + authToken: jwtAuthToken + }); + + if (prodFolder) { + await deleteFolder({ + authToken: jwtAuthToken, + secretPath: "/", + id: prodFolder.id, + workspaceId: seedData1.projectV3.id, + environmentSlug: "prod" + }); + } + + if (stagingFolder) { + await deleteFolder({ + authToken: jwtAuthToken, + secretPath: "/", + id: stagingFolder.id, + workspaceId: seedData1.projectV3.id, + environmentSlug: "staging" + }); + } + + if (devFolder) { + await deleteFolder({ + authToken: jwtAuthToken, + secretPath: "/", + id: devFolder.id, + workspaceId: seedData1.projectV3.id, + environmentSlug: seedData1.environment.slug + }); + } + }; + }); + + test("Check imported secret exist", async () => { + await createSecretV2({ + environmentSlug: "staging", + workspaceId: seedData1.projectV3.id, + secretPath: testSuitePath, + authToken: jwtAuthToken, + key: "STAGING_KEY", + value: "stage-value" + }); + + await createSecretV2({ + environmentSlug: "prod", + workspaceId: seedData1.projectV3.id, + secretPath: testSuitePath, + authToken: jwtAuthToken, + key: "PROD_KEY", + value: "prod-value" + }); + + const secret = await getSecretByNameV2({ + environmentSlug: seedData1.environment.slug, + workspaceId: seedData1.projectV3.id, + secretPath: testSuitePath, + authToken: jwtAuthToken, + key: "STAGING_KEY" + }); + + expect(secret.secretKey).toBe("STAGING_KEY"); + expect(secret.secretValue).toBe("stage-value"); + + const listSecrets = await getSecretsV2({ + environmentSlug: seedData1.environment.slug, + workspaceId: seedData1.projectV3.id, + secretPath: testSuitePath, + authToken: jwtAuthToken + }); + expect(listSecrets.imports).toEqual( + expect.arrayContaining([ + expect.objectContaining({ + secrets: expect.arrayContaining([ + expect.objectContaining({ + secretKey: "STAGING_KEY", + secretValue: "stage-value" + }) + ]) + }), + expect.objectContaining({ + secrets: expect.arrayContaining([ + expect.objectContaining({ + secretKey: "PROD_KEY", + secretValue: "prod-value" + }) + ]) + }) + ]) + ); + + await deleteSecretV2({ + environmentSlug: "staging", + workspaceId: seedData1.projectV3.id, + secretPath: testSuitePath, + authToken: jwtAuthToken, + key: "STAGING_KEY" + }); + await deleteSecretV2({ + environmentSlug: "prod", + workspaceId: seedData1.projectV3.id, + secretPath: testSuitePath, + authToken: jwtAuthToken, + key: "PROD_KEY" + }); + }); + } +); + +// dev -> stage, prod +describe.each([{ path: "/" }, { path: "/deep" }])( + "Secret import one source to multiple destination pattern testing - %path", + ({ path: testSuitePath }) => { + beforeAll(async () => { + let prodFolder: { id: string }; + let stagingFolder: { id: string }; + let devFolder: { id: string }; + + if (testSuitePath !== "/") { + prodFolder = await createFolder({ + authToken: jwtAuthToken, + environmentSlug: "prod", + workspaceId: seedData1.projectV3.id, + secretPath: "/", + name: "deep" + }); + + stagingFolder = await createFolder({ + authToken: jwtAuthToken, + environmentSlug: "staging", + workspaceId: seedData1.projectV3.id, + secretPath: "/", + name: "deep" + }); + + devFolder = await 
createFolder({ + authToken: jwtAuthToken, + environmentSlug: seedData1.environment.slug, + workspaceId: seedData1.projectV3.id, + secretPath: "/", + name: "deep" + }); + } + + const stageImportFromDev = await createSecretImport({ + authToken: jwtAuthToken, + secretPath: testSuitePath, + environmentSlug: "staging", + workspaceId: seedData1.projectV3.id, + importPath: testSuitePath, + importEnv: seedData1.environment.slug + }); + + const prodImportFromDev = await createSecretImport({ + authToken: jwtAuthToken, + secretPath: testSuitePath, + environmentSlug: "prod", + workspaceId: seedData1.projectV3.id, + importPath: testSuitePath, + importEnv: seedData1.environment.slug + }); + + return async () => { + await deleteSecretImport({ + id: prodImportFromDev.id, + workspaceId: seedData1.projectV3.id, + environmentSlug: "prod", + secretPath: testSuitePath, + authToken: jwtAuthToken + }); + + await deleteSecretImport({ + id: stageImportFromDev.id, + workspaceId: seedData1.projectV3.id, + environmentSlug: "staging", + secretPath: testSuitePath, + authToken: jwtAuthToken + }); + + if (prodFolder) { + await deleteFolder({ + authToken: jwtAuthToken, + secretPath: "/", + id: prodFolder.id, + workspaceId: seedData1.projectV3.id, + environmentSlug: "prod" + }); + } + + if (stagingFolder) { + await deleteFolder({ + authToken: jwtAuthToken, + secretPath: "/", + id: stagingFolder.id, + workspaceId: seedData1.projectV3.id, + environmentSlug: "staging" + }); + } + + if (devFolder) { + await deleteFolder({ + authToken: jwtAuthToken, + secretPath: "/", + id: devFolder.id, + workspaceId: seedData1.projectV3.id, + environmentSlug: seedData1.environment.slug + }); + } + }; + }); + + test("Check imported secret exist", async () => { + await createSecretV2({ + environmentSlug: seedData1.environment.slug, + workspaceId: seedData1.projectV3.id, + secretPath: testSuitePath, + authToken: jwtAuthToken, + key: "STAGING_KEY", + value: "stage-value" + }); + + await createSecretV2({ + environmentSlug: seedData1.environment.slug, + workspaceId: seedData1.projectV3.id, + secretPath: testSuitePath, + authToken: jwtAuthToken, + key: "PROD_KEY", + value: "prod-value" + }); + + const stagingSecret = await getSecretByNameV2({ + environmentSlug: "staging", + workspaceId: seedData1.projectV3.id, + secretPath: testSuitePath, + authToken: jwtAuthToken, + key: "STAGING_KEY" + }); + + expect(stagingSecret.secretKey).toBe("STAGING_KEY"); + expect(stagingSecret.secretValue).toBe("stage-value"); + + const prodSecret = await getSecretByNameV2({ + environmentSlug: "prod", + workspaceId: seedData1.projectV3.id, + secretPath: testSuitePath, + authToken: jwtAuthToken, + key: "PROD_KEY" + }); + + expect(prodSecret.secretKey).toBe("PROD_KEY"); + expect(prodSecret.secretValue).toBe("prod-value"); + + await deleteSecretV2({ + environmentSlug: seedData1.environment.slug, + workspaceId: seedData1.projectV3.id, + secretPath: testSuitePath, + authToken: jwtAuthToken, + key: "STAGING_KEY" + }); + await deleteSecretV2({ + environmentSlug: seedData1.environment.slug, + workspaceId: seedData1.projectV3.id, + secretPath: testSuitePath, + authToken: jwtAuthToken, + key: "PROD_KEY" + }); + }); + } +); diff --git a/backend/e2e-test/routes/v1/secret-replication.spec.ts b/backend/e2e-test/routes/v1/secret-replication.spec.ts new file mode 100644 index 0000000000..b56a8f9a04 --- /dev/null +++ b/backend/e2e-test/routes/v1/secret-replication.spec.ts @@ -0,0 +1,406 @@ +import { createFolder, deleteFolder } from "e2e-test/testUtils/folders"; +import { 
createSecretImport, deleteSecretImport } from "e2e-test/testUtils/secret-imports"; +import { createSecretV2, deleteSecretV2, getSecretByNameV2, getSecretsV2 } from "e2e-test/testUtils/secrets"; + +import { seedData1 } from "@app/db/seed-data"; + +// dev <- stage <- prod +describe.each([{ secretPath: "/" }, { secretPath: "/deep" }])( + "Secret replication waterfall pattern testing - %secretPath", + ({ secretPath: testSuitePath }) => { + beforeAll(async () => { + let prodFolder: { id: string }; + let stagingFolder: { id: string }; + let devFolder: { id: string }; + + if (testSuitePath !== "/") { + prodFolder = await createFolder({ + authToken: jwtAuthToken, + environmentSlug: "prod", + workspaceId: seedData1.projectV3.id, + secretPath: "/", + name: "deep" + }); + + stagingFolder = await createFolder({ + authToken: jwtAuthToken, + environmentSlug: "staging", + workspaceId: seedData1.projectV3.id, + secretPath: "/", + name: "deep" + }); + + devFolder = await createFolder({ + authToken: jwtAuthToken, + environmentSlug: seedData1.environment.slug, + workspaceId: seedData1.projectV3.id, + secretPath: "/", + name: "deep" + }); + } + + const devImportFromStage = await createSecretImport({ + authToken: jwtAuthToken, + secretPath: testSuitePath, + environmentSlug: seedData1.environment.slug, + workspaceId: seedData1.projectV3.id, + importPath: testSuitePath, + importEnv: "staging", + isReplication: true + }); + + const stageImportFromProd = await createSecretImport({ + authToken: jwtAuthToken, + secretPath: testSuitePath, + environmentSlug: "staging", + workspaceId: seedData1.projectV3.id, + importPath: testSuitePath, + importEnv: "prod", + isReplication: true + }); + + return async () => { + await deleteSecretImport({ + id: stageImportFromProd.id, + workspaceId: seedData1.projectV3.id, + environmentSlug: "staging", + secretPath: testSuitePath, + authToken: jwtAuthToken + }); + + await deleteSecretImport({ + id: devImportFromStage.id, + workspaceId: seedData1.projectV3.id, + environmentSlug: seedData1.environment.slug, + secretPath: testSuitePath, + authToken: jwtAuthToken + }); + + if (prodFolder) { + await deleteFolder({ + authToken: jwtAuthToken, + secretPath: "/", + id: prodFolder.id, + workspaceId: seedData1.projectV3.id, + environmentSlug: "prod" + }); + } + + if (stagingFolder) { + await deleteFolder({ + authToken: jwtAuthToken, + secretPath: "/", + id: stagingFolder.id, + workspaceId: seedData1.projectV3.id, + environmentSlug: "staging" + }); + } + + if (devFolder) { + await deleteFolder({ + authToken: jwtAuthToken, + secretPath: "/", + id: devFolder.id, + workspaceId: seedData1.projectV3.id, + environmentSlug: seedData1.environment.slug + }); + } + }; + }); + + test("Check one level imported secret exist", async () => { + await createSecretV2({ + environmentSlug: "staging", + workspaceId: seedData1.projectV3.id, + secretPath: testSuitePath, + authToken: jwtAuthToken, + key: "STAGING_KEY", + value: "stage-value" + }); + + // wait for 10 second for replication to finish + await new Promise((resolve) => { + setTimeout(resolve, 10000); // time to breathe for db + }); + + const secret = await getSecretByNameV2({ + environmentSlug: seedData1.environment.slug, + workspaceId: seedData1.projectV3.id, + secretPath: testSuitePath, + authToken: jwtAuthToken, + key: "STAGING_KEY" + }); + + expect(secret.secretKey).toBe("STAGING_KEY"); + expect(secret.secretValue).toBe("stage-value"); + + const listSecrets = await getSecretsV2({ + environmentSlug: seedData1.environment.slug, + workspaceId: 
seedData1.projectV3.id, + secretPath: testSuitePath, + authToken: jwtAuthToken + }); + + expect(listSecrets.imports).toEqual( + expect.arrayContaining([ + expect.objectContaining({ + secrets: expect.arrayContaining([ + expect.objectContaining({ + secretKey: "STAGING_KEY", + secretValue: "stage-value" + }) + ]) + }) + ]) + ); + + await deleteSecretV2({ + environmentSlug: "staging", + workspaceId: seedData1.projectV3.id, + secretPath: testSuitePath, + authToken: jwtAuthToken, + key: "STAGING_KEY" + }); + }); + + test("Check two level imported secret exist", async () => { + await createSecretV2({ + environmentSlug: "prod", + workspaceId: seedData1.projectV3.id, + secretPath: testSuitePath, + authToken: jwtAuthToken, + key: "PROD_KEY", + value: "prod-value" + }); + + // wait for 10 second for replication to finish + await new Promise((resolve) => { + setTimeout(resolve, 10000); // time to breathe for db + }); + + const secret = await getSecretByNameV2({ + environmentSlug: seedData1.environment.slug, + workspaceId: seedData1.projectV3.id, + secretPath: testSuitePath, + authToken: jwtAuthToken, + key: "PROD_KEY" + }); + + expect(secret.secretKey).toBe("PROD_KEY"); + expect(secret.secretValue).toBe("prod-value"); + + const listSecrets = await getSecretsV2({ + environmentSlug: seedData1.environment.slug, + workspaceId: seedData1.projectV3.id, + secretPath: testSuitePath, + authToken: jwtAuthToken + }); + expect(listSecrets.imports).toEqual( + expect.arrayContaining([ + expect.objectContaining({ + secrets: expect.arrayContaining([ + expect.objectContaining({ + secretKey: "PROD_KEY", + secretValue: "prod-value" + }) + ]) + }) + ]) + ); + + await deleteSecretV2({ + environmentSlug: "prod", + workspaceId: seedData1.projectV3.id, + secretPath: testSuitePath, + authToken: jwtAuthToken, + key: "PROD_KEY" + }); + }); + }, + { timeout: 30000 } +); + +// dev <- stage, dev <- prod +describe.each([{ path: "/" }, { path: "/deep" }])( + "Secret replication 1-N pattern testing - %path", + ({ path: testSuitePath }) => { + beforeAll(async () => { + let prodFolder: { id: string }; + let stagingFolder: { id: string }; + let devFolder: { id: string }; + + if (testSuitePath !== "/") { + prodFolder = await createFolder({ + authToken: jwtAuthToken, + environmentSlug: "prod", + workspaceId: seedData1.projectV3.id, + secretPath: "/", + name: "deep" + }); + + stagingFolder = await createFolder({ + authToken: jwtAuthToken, + environmentSlug: "staging", + workspaceId: seedData1.projectV3.id, + secretPath: "/", + name: "deep" + }); + + devFolder = await createFolder({ + authToken: jwtAuthToken, + environmentSlug: seedData1.environment.slug, + workspaceId: seedData1.projectV3.id, + secretPath: "/", + name: "deep" + }); + } + + const devImportFromStage = await createSecretImport({ + authToken: jwtAuthToken, + secretPath: testSuitePath, + environmentSlug: seedData1.environment.slug, + workspaceId: seedData1.projectV3.id, + importPath: testSuitePath, + importEnv: "staging", + isReplication: true + }); + + const devImportFromProd = await createSecretImport({ + authToken: jwtAuthToken, + secretPath: testSuitePath, + environmentSlug: seedData1.environment.slug, + workspaceId: seedData1.projectV3.id, + importPath: testSuitePath, + importEnv: "prod", + isReplication: true + }); + + return async () => { + await deleteSecretImport({ + id: devImportFromProd.id, + workspaceId: seedData1.projectV3.id, + environmentSlug: seedData1.environment.slug, + secretPath: testSuitePath, + authToken: jwtAuthToken + }); + + await deleteSecretImport({ + 
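+          // the staging import is removed last, reversing the creation order above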
id: devImportFromStage.id, + workspaceId: seedData1.projectV3.id, + environmentSlug: seedData1.environment.slug, + secretPath: testSuitePath, + authToken: jwtAuthToken + }); + + if (prodFolder) { + await deleteFolder({ + authToken: jwtAuthToken, + secretPath: "/", + id: prodFolder.id, + workspaceId: seedData1.projectV3.id, + environmentSlug: "prod" + }); + } + + if (stagingFolder) { + await deleteFolder({ + authToken: jwtAuthToken, + secretPath: "/", + id: stagingFolder.id, + workspaceId: seedData1.projectV3.id, + environmentSlug: "staging" + }); + } + + if (devFolder) { + await deleteFolder({ + authToken: jwtAuthToken, + secretPath: "/", + id: devFolder.id, + workspaceId: seedData1.projectV3.id, + environmentSlug: seedData1.environment.slug + }); + } + }; + }); + + test("Check imported secret exist", async () => { + await createSecretV2({ + environmentSlug: "staging", + workspaceId: seedData1.projectV3.id, + secretPath: testSuitePath, + authToken: jwtAuthToken, + key: "STAGING_KEY", + value: "stage-value" + }); + + await createSecretV2({ + environmentSlug: "prod", + workspaceId: seedData1.projectV3.id, + secretPath: testSuitePath, + authToken: jwtAuthToken, + key: "PROD_KEY", + value: "prod-value" + }); + + // wait for 10 second for replication to finish + await new Promise((resolve) => { + setTimeout(resolve, 10000); // time to breathe for db + }); + + const secret = await getSecretByNameV2({ + environmentSlug: seedData1.environment.slug, + workspaceId: seedData1.projectV3.id, + secretPath: testSuitePath, + authToken: jwtAuthToken, + key: "STAGING_KEY" + }); + + expect(secret.secretKey).toBe("STAGING_KEY"); + expect(secret.secretValue).toBe("stage-value"); + + const listSecrets = await getSecretsV2({ + environmentSlug: seedData1.environment.slug, + workspaceId: seedData1.projectV3.id, + secretPath: testSuitePath, + authToken: jwtAuthToken + }); + expect(listSecrets.imports).toEqual( + expect.arrayContaining([ + expect.objectContaining({ + secrets: expect.arrayContaining([ + expect.objectContaining({ + secretKey: "STAGING_KEY", + secretValue: "stage-value" + }) + ]) + }), + expect.objectContaining({ + secrets: expect.arrayContaining([ + expect.objectContaining({ + secretKey: "PROD_KEY", + secretValue: "prod-value" + }) + ]) + }) + ]) + ); + + await deleteSecretV2({ + environmentSlug: "staging", + workspaceId: seedData1.projectV3.id, + secretPath: testSuitePath, + authToken: jwtAuthToken, + key: "STAGING_KEY" + }); + await deleteSecretV2({ + environmentSlug: "prod", + workspaceId: seedData1.projectV3.id, + secretPath: testSuitePath, + authToken: jwtAuthToken, + key: "PROD_KEY" + }); + }); + }, + { timeout: 30000 } +); diff --git a/backend/e2e-test/routes/v2/service-token.spec.ts b/backend/e2e-test/routes/v2/service-token.spec.ts index a07eda4b9b..6cc8f6e34f 100644 --- a/backend/e2e-test/routes/v2/service-token.spec.ts +++ b/backend/e2e-test/routes/v2/service-token.spec.ts @@ -510,7 +510,7 @@ describe("Service token fail cases", async () => { authorization: `Bearer ${serviceToken}` } }); - expect(fetchSecrets.statusCode).toBe(401); + expect(fetchSecrets.statusCode).toBe(403); expect(fetchSecrets.json().error).toBe("PermissionDenied"); await deleteServiceToken(); }); @@ -532,7 +532,7 @@ describe("Service token fail cases", async () => { authorization: `Bearer ${serviceToken}` } }); - expect(fetchSecrets.statusCode).toBe(401); + expect(fetchSecrets.statusCode).toBe(403); expect(fetchSecrets.json().error).toBe("PermissionDenied"); await deleteServiceToken(); }); @@ -557,7 +557,7 @@ 
describe("Service token fail cases", async () => { authorization: `Bearer ${serviceToken}` } }); - expect(writeSecrets.statusCode).toBe(401); + expect(writeSecrets.statusCode).toBe(403); expect(writeSecrets.json().error).toBe("PermissionDenied"); // but read access should still work fine diff --git a/backend/e2e-test/routes/v3/secret-reference.spec.ts b/backend/e2e-test/routes/v3/secret-reference.spec.ts new file mode 100644 index 0000000000..560565342e --- /dev/null +++ b/backend/e2e-test/routes/v3/secret-reference.spec.ts @@ -0,0 +1,344 @@ +import { createFolder, deleteFolder } from "e2e-test/testUtils/folders"; +import { createSecretImport, deleteSecretImport } from "e2e-test/testUtils/secret-imports"; +import { createSecretV2, deleteSecretV2, getSecretByNameV2, getSecretsV2 } from "e2e-test/testUtils/secrets"; + +import { seedData1 } from "@app/db/seed-data"; + +describe("Secret expansion", () => { + const projectId = seedData1.projectV3.id; + + beforeAll(async () => { + const prodRootFolder = await createFolder({ + authToken: jwtAuthToken, + environmentSlug: "prod", + workspaceId: projectId, + secretPath: "/", + name: "deep" + }); + + await createFolder({ + authToken: jwtAuthToken, + environmentSlug: "prod", + workspaceId: projectId, + secretPath: "/deep", + name: "nested" + }); + + return async () => { + await deleteFolder({ + authToken: jwtAuthToken, + secretPath: "/", + id: prodRootFolder.id, + workspaceId: projectId, + environmentSlug: "prod" + }); + }; + }); + + test("Local secret reference", async () => { + const secrets = [ + { + environmentSlug: seedData1.environment.slug, + workspaceId: projectId, + secretPath: "/", + authToken: jwtAuthToken, + key: "HELLO", + value: "world" + }, + { + environmentSlug: seedData1.environment.slug, + workspaceId: projectId, + secretPath: "/", + authToken: jwtAuthToken, + key: "TEST", + // eslint-disable-next-line + value: "hello ${HELLO}" + } + ]; + + for (const secret of secrets) { + // eslint-disable-next-line no-await-in-loop + await createSecretV2(secret); + } + + const expandedSecret = await getSecretByNameV2({ + environmentSlug: seedData1.environment.slug, + workspaceId: projectId, + secretPath: "/", + authToken: jwtAuthToken, + key: "TEST" + }); + expect(expandedSecret.secretValue).toBe("hello world"); + + const listSecrets = await getSecretsV2({ + environmentSlug: seedData1.environment.slug, + workspaceId: projectId, + secretPath: "/", + authToken: jwtAuthToken + }); + expect(listSecrets.secrets).toEqual( + expect.arrayContaining([ + expect.objectContaining({ + secretKey: "TEST", + secretValue: "hello world" + }) + ]) + ); + + await Promise.all(secrets.map((el) => deleteSecretV2(el))); + }); + + test("Cross environment secret reference", async () => { + const secrets = [ + { + environmentSlug: "prod", + workspaceId: projectId, + secretPath: "/deep", + authToken: jwtAuthToken, + key: "DEEP_KEY_1", + value: "testing" + }, + { + environmentSlug: "prod", + workspaceId: projectId, + secretPath: "/deep/nested", + authToken: jwtAuthToken, + key: "NESTED_KEY_1", + value: "reference" + }, + { + environmentSlug: "prod", + workspaceId: projectId, + secretPath: "/deep/nested", + authToken: jwtAuthToken, + key: "NESTED_KEY_2", + // eslint-disable-next-line + value: "secret ${NESTED_KEY_1}" + }, + { + environmentSlug: seedData1.environment.slug, + workspaceId: projectId, + secretPath: "/", + authToken: jwtAuthToken, + key: "KEY", + // eslint-disable-next-line + value: "hello ${prod.deep.DEEP_KEY_1} ${prod.deep.nested.NESTED_KEY_2}" + } + ]; + + for 
(const secret of secrets) { + // eslint-disable-next-line no-await-in-loop + await createSecretV2(secret); + } + + const expandedSecret = await getSecretByNameV2({ + environmentSlug: seedData1.environment.slug, + workspaceId: projectId, + secretPath: "/", + authToken: jwtAuthToken, + key: "KEY" + }); + expect(expandedSecret.secretValue).toBe("hello testing secret reference"); + + const listSecrets = await getSecretsV2({ + environmentSlug: seedData1.environment.slug, + workspaceId: projectId, + secretPath: "/", + authToken: jwtAuthToken + }); + expect(listSecrets.secrets).toEqual( + expect.arrayContaining([ + expect.objectContaining({ + secretKey: "KEY", + secretValue: "hello testing secret reference" + }) + ]) + ); + + await Promise.all(secrets.map((el) => deleteSecretV2(el))); + }); + + test("Non replicated secret import secret expansion on local reference and nested reference", async () => { + const secrets = [ + { + environmentSlug: "prod", + workspaceId: projectId, + secretPath: "/deep", + authToken: jwtAuthToken, + key: "DEEP_KEY_1", + value: "testing" + }, + { + environmentSlug: "prod", + workspaceId: projectId, + secretPath: "/deep/nested", + authToken: jwtAuthToken, + key: "NESTED_KEY_1", + value: "reference" + }, + { + environmentSlug: "prod", + workspaceId: projectId, + secretPath: "/deep/nested", + authToken: jwtAuthToken, + key: "NESTED_KEY_2", + // eslint-disable-next-line + value: "secret ${NESTED_KEY_1} ${prod.deep.DEEP_KEY_1}" + }, + { + environmentSlug: seedData1.environment.slug, + workspaceId: projectId, + secretPath: "/", + authToken: jwtAuthToken, + key: "KEY", + // eslint-disable-next-line + value: "hello world" + } + ]; + + for (const secret of secrets) { + // eslint-disable-next-line no-await-in-loop + await createSecretV2(secret); + } + + const secretImportFromProdToDev = await createSecretImport({ + environmentSlug: seedData1.environment.slug, + workspaceId: projectId, + secretPath: "/", + authToken: jwtAuthToken, + importEnv: "prod", + importPath: "/deep/nested" + }); + + const listSecrets = await getSecretsV2({ + environmentSlug: seedData1.environment.slug, + workspaceId: projectId, + secretPath: "/", + authToken: jwtAuthToken + }); + expect(listSecrets.imports).toEqual( + expect.arrayContaining([ + expect.objectContaining({ + secretPath: "/deep/nested", + environment: "prod", + secrets: expect.arrayContaining([ + expect.objectContaining({ + secretKey: "NESTED_KEY_1", + secretValue: "reference" + }), + expect.objectContaining({ + secretKey: "NESTED_KEY_2", + secretValue: "secret reference testing" + }) + ]) + }) + ]) + ); + + await Promise.all(secrets.map((el) => deleteSecretV2(el))); + await deleteSecretImport({ + environmentSlug: seedData1.environment.slug, + workspaceId: projectId, + authToken: jwtAuthToken, + id: secretImportFromProdToDev.id, + secretPath: "/" + }); + }); + + test( + "Replicated secret import secret expansion on local reference and nested reference", + async () => { + const secrets = [ + { + environmentSlug: "prod", + workspaceId: projectId, + secretPath: "/deep", + authToken: jwtAuthToken, + key: "DEEP_KEY_1", + value: "testing" + }, + { + environmentSlug: "prod", + workspaceId: projectId, + secretPath: "/deep/nested", + authToken: jwtAuthToken, + key: "NESTED_KEY_1", + value: "reference" + }, + { + environmentSlug: "prod", + workspaceId: projectId, + secretPath: "/deep/nested", + authToken: jwtAuthToken, + key: "NESTED_KEY_2", + // eslint-disable-next-line + value: "secret ${NESTED_KEY_1} ${prod.deep.DEEP_KEY_1}" + }, + { + 
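+        // plain dev-side secret with no reference syntax, created alongside the referenced prod secrets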
environmentSlug: seedData1.environment.slug, + workspaceId: projectId, + secretPath: "/", + authToken: jwtAuthToken, + key: "KEY", + // eslint-disable-next-line + value: "hello world" + } + ]; + + for (const secret of secrets) { + // eslint-disable-next-line no-await-in-loop + await createSecretV2(secret); + } + + const secretImportFromProdToDev = await createSecretImport({ + environmentSlug: seedData1.environment.slug, + workspaceId: projectId, + secretPath: "/", + authToken: jwtAuthToken, + importEnv: "prod", + importPath: "/deep/nested", + isReplication: true + }); + + // wait for 5 second for replication to finish + await new Promise((resolve) => { + setTimeout(resolve, 5000); // time to breathe for db + }); + + const listSecrets = await getSecretsV2({ + environmentSlug: seedData1.environment.slug, + workspaceId: projectId, + secretPath: "/", + authToken: jwtAuthToken + }); + expect(listSecrets.imports).toEqual( + expect.arrayContaining([ + expect.objectContaining({ + secretPath: `/__reserve_replication_${secretImportFromProdToDev.id}`, + environment: seedData1.environment.slug, + secrets: expect.arrayContaining([ + expect.objectContaining({ + secretKey: "NESTED_KEY_1", + secretValue: "reference" + }), + expect.objectContaining({ + secretKey: "NESTED_KEY_2", + secretValue: "secret reference testing" + }) + ]) + }) + ]) + ); + + await Promise.all(secrets.map((el) => deleteSecretV2(el))); + await deleteSecretImport({ + environmentSlug: seedData1.environment.slug, + workspaceId: projectId, + authToken: jwtAuthToken, + id: secretImportFromProdToDev.id, + secretPath: "/" + }); + }, + { timeout: 10000 } + ); +}); diff --git a/backend/e2e-test/routes/v3/secrets-v2.spec.ts b/backend/e2e-test/routes/v3/secrets-v2.spec.ts new file mode 100644 index 0000000000..dc02587cdc --- /dev/null +++ b/backend/e2e-test/routes/v3/secrets-v2.spec.ts @@ -0,0 +1,577 @@ +import { SecretType } from "@app/db/schemas"; +import { seedData1 } from "@app/db/seed-data"; +import { AuthMode } from "@app/services/auth/auth-type"; + +type TRawSecret = { + secretKey: string; + secretValue: string; + secretComment?: string; + version: number; +}; + +const createSecret = async (dto: { path: string; key: string; value: string; comment: string; type?: SecretType }) => { + const createSecretReqBody = { + workspaceId: seedData1.projectV3.id, + environment: seedData1.environment.slug, + type: dto.type || SecretType.Shared, + secretPath: dto.path, + secretKey: dto.key, + secretValue: dto.value, + secretComment: dto.comment + }; + const createSecRes = await testServer.inject({ + method: "POST", + url: `/api/v3/secrets/raw/${dto.key}`, + headers: { + authorization: `Bearer ${jwtAuthToken}` + }, + body: createSecretReqBody + }); + expect(createSecRes.statusCode).toBe(200); + const createdSecretPayload = JSON.parse(createSecRes.payload); + expect(createdSecretPayload).toHaveProperty("secret"); + return createdSecretPayload.secret as TRawSecret; +}; + +const deleteSecret = async (dto: { path: string; key: string }) => { + const deleteSecRes = await testServer.inject({ + method: "DELETE", + url: `/api/v3/secrets/raw/${dto.key}`, + headers: { + authorization: `Bearer ${jwtAuthToken}` + }, + body: { + workspaceId: seedData1.projectV3.id, + environment: seedData1.environment.slug, + secretPath: dto.path + } + }); + expect(deleteSecRes.statusCode).toBe(200); + const updatedSecretPayload = JSON.parse(deleteSecRes.payload); + expect(updatedSecretPayload).toHaveProperty("secret"); + return updatedSecretPayload.secret as TRawSecret; +}; + 
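+
+// Illustrative sketch only (not exercised by the suite): the two raw-secret helpers
+// above compose into a create -> delete round trip, and both assert a 200 status
+// internally, so failures surface at the call site. The helper name and its body
+// here are hypothetical.
+const roundTripSecretSketch = async (path: string, key: string, value: string) => {
+  const created = await createSecret({ path, key, value, comment: "" });
+  expect(created.version).toBe(1); // raw secrets start at version 1
+  const deleted = await deleteSecret({ path, key });
+  expect(deleted.secretKey).toBe(key);
+};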
+describe.each([{ auth: AuthMode.JWT }, { auth: AuthMode.IDENTITY_ACCESS_TOKEN }])( + "Secret V2 Architecture - $auth mode", + async ({ auth }) => { + let folderId = ""; + let authToken = ""; + const secretTestCases = [ + { + path: "/", + secret: { + key: "SEC1", + value: "something-secret", + comment: "some comment" + } + }, + { + path: "/nested1/nested2/folder", + secret: { + key: "NESTED-SEC1", + value: "something-secret", + comment: "some comment" + } + }, + { + path: "/", + secret: { + key: "secret-key-2", + value: `-----BEGIN PRIVATE KEY----- + MIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQCa6eeFk+cMVqFn + hoVQDYgn2Ptp5Azysr2UPq6P73pCL9BzUtOXKZROqDyGehzzfg3wE2KdYU1Jk5Uq + fP0ZOWDIlM2SaVCSI3FW32o5+ZiggjpqcVdLFc/PS0S/ZdSmpPd8h11iO2brtIAI + ugTW8fcKlGSNUwx9aFmE7A6JnTRliTxB1l6QaC+YAwTK39VgeVH2gDSWC407aS15 + QobAkaBKKmFkzB5D7i2ZJwt+uXJV/rbLmyDmtnw0lubciGn7NX9wbYef180fisqT + aPNAz0nPKk0fFH2Wd5MZixNGbrrpDA+FCYvI5doThZyT2hpj08qWP07oXXCAqw46 + IEupNSILAgMBAAECggEBAIJb5KzeaiZS3B3O8G4OBQ5rJB3WfyLYUHnoSWLsBbie + nc392/ovThLmtZAAQE6SO85Tsb93+t64Z2TKqv1H8G658UeMgfWIB78v4CcLJ2mi + TN/3opqXrzjkQOTDHzBgT7al/mpETHZ6fOdbCemK0fVALGFUioUZg4M8VXtuI4Jw + q28jAyoRKrCrzda4BeQ553NZ4G5RvwhX3O2I8B8upTbt5hLcisBKy8MPLYY5LUFj + YKAP+raf6QLliP6KYHuVxUlgzxjLTxVG41etcyqqZF+foyiKBO3PU3n8oh++tgQP + ExOxiR0JSkBG5b+oOBD0zxcvo3/SjBHn0dJOZCSU2SkCgYEAyCe676XnNyBZMRD7 + 6trsaoiCWBpA6M8H44+x3w4cQFtqV38RyLy60D+iMKjIaLqeBbnay61VMzo24Bz3 + EuF2n4+9k/MetLJ0NCw8HmN5k0WSMD2BFsJWG8glVbzaqzehP4tIclwDTYc1jQVt + IoV2/iL7HGT+x2daUwbU5kN5hK0CgYEAxiLB+fmjxJW7VY4SHDLqPdpIW0q/kv4K + d/yZBrCX799vjmFb9vLh7PkQUfJhMJ/ttJOd7EtT3xh4mfkBeLfHwVU0d/ahbmSH + UJu/E9ZGxAW3PP0kxHZtPrLKQwBnfq8AxBauIhR3rPSorQTIOKtwz1jMlHFSUpuL + 3KeK2YfDYJcCgYEAkQnJOlNcAuRb/WQzSHIvktssqK8NjiZHryy3Vc0hx7j2jES2 + HGI2dSVHYD9OSiXA0KFm3OTTsnViwm/60iGzFdjRJV6tR39xGUVcoyCuPnvRfUd0 + PYvBXgxgkYpyYlPDcwp5CvWGJy3tLi1acgOIwIuUr3S38sL//t4adGk8q1kCgYB8 + Jbs1Tl53BvrimKpwUNbE+sjrquJu0A7vL68SqgQJoQ7dP9PH4Ff/i+/V6PFM7mib + BQOm02wyFbs7fvKVGVJoqWK+6CIucX732x7W5yRgHtS5ukQXdbzt1Ek3wkEW98Cb + HTruz7RNAt/NyXlLSODeit1lBbx3Vk9EaxZtRsv88QKBgGn7JwXgez9NOyobsNIo + QVO80rpUeenSjuFi+R0VmbLKe/wgAQbYJ0xTAsQ0btqViMzB27D6mJyC+KUIwWNX + MN8a+m46v4kqvZkKL2c4gmDibyURNe/vCtCHFuanJS/1mo2tr4XDyEeiuK52eTd9 + omQDpP86RX/hIIQ+JyLSaWYa + -----END PRIVATE KEY-----`, + comment: + "Lorem ipsum dolor sit amet, consectetur adipiscing elit. Sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. 
Ut enim ad minim veniam, quis nostrud exercitation" + } + }, + { + path: "/nested1/nested2/folder", + secret: { + key: "secret-key-3", + value: `-----BEGIN PRIVATE KEY----- + MIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQCa6eeFk+cMVqFn + hoVQDYgn2Ptp5Azysr2UPq6P73pCL9BzUtOXKZROqDyGehzzfg3wE2KdYU1Jk5Uq + fP0ZOWDIlM2SaVCSI3FW32o5+ZiggjpqcVdLFc/PS0S/ZdSmpPd8h11iO2brtIAI + ugTW8fcKlGSNUwx9aFmE7A6JnTRliTxB1l6QaC+YAwTK39VgeVH2gDSWC407aS15 + QobAkaBKKmFkzB5D7i2ZJwt+uXJV/rbLmyDmtnw0lubciGn7NX9wbYef180fisqT + aPNAz0nPKk0fFH2Wd5MZixNGbrrpDA+FCYvI5doThZyT2hpj08qWP07oXXCAqw46 + IEupNSILAgMBAAECggEBAIJb5KzeaiZS3B3O8G4OBQ5rJB3WfyLYUHnoSWLsBbie + nc392/ovThLmtZAAQE6SO85Tsb93+t64Z2TKqv1H8G658UeMgfWIB78v4CcLJ2mi + TN/3opqXrzjkQOTDHzBgT7al/mpETHZ6fOdbCemK0fVALGFUioUZg4M8VXtuI4Jw + q28jAyoRKrCrzda4BeQ553NZ4G5RvwhX3O2I8B8upTbt5hLcisBKy8MPLYY5LUFj + YKAP+raf6QLliP6KYHuVxUlgzxjLTxVG41etcyqqZF+foyiKBO3PU3n8oh++tgQP + ExOxiR0JSkBG5b+oOBD0zxcvo3/SjBHn0dJOZCSU2SkCgYEAyCe676XnNyBZMRD7 + 6trsaoiCWBpA6M8H44+x3w4cQFtqV38RyLy60D+iMKjIaLqeBbnay61VMzo24Bz3 + EuF2n4+9k/MetLJ0NCw8HmN5k0WSMD2BFsJWG8glVbzaqzehP4tIclwDTYc1jQVt + IoV2/iL7HGT+x2daUwbU5kN5hK0CgYEAxiLB+fmjxJW7VY4SHDLqPdpIW0q/kv4K + d/yZBrCX799vjmFb9vLh7PkQUfJhMJ/ttJOd7EtT3xh4mfkBeLfHwVU0d/ahbmSH + UJu/E9ZGxAW3PP0kxHZtPrLKQwBnfq8AxBauIhR3rPSorQTIOKtwz1jMlHFSUpuL + 3KeK2YfDYJcCgYEAkQnJOlNcAuRb/WQzSHIvktssqK8NjiZHryy3Vc0hx7j2jES2 + HGI2dSVHYD9OSiXA0KFm3OTTsnViwm/60iGzFdjRJV6tR39xGUVcoyCuPnvRfUd0 + PYvBXgxgkYpyYlPDcwp5CvWGJy3tLi1acgOIwIuUr3S38sL//t4adGk8q1kCgYB8 + Jbs1Tl53BvrimKpwUNbE+sjrquJu0A7vL68SqgQJoQ7dP9PH4Ff/i+/V6PFM7mib + BQOm02wyFbs7fvKVGVJoqWK+6CIucX732x7W5yRgHtS5ukQXdbzt1Ek3wkEW98Cb + HTruz7RNAt/NyXlLSODeit1lBbx3Vk9EaxZtRsv88QKBgGn7JwXgez9NOyobsNIo + QVO80rpUeenSjuFi+R0VmbLKe/wgAQbYJ0xTAsQ0btqViMzB27D6mJyC+KUIwWNX + MN8a+m46v4kqvZkKL2c4gmDibyURNe/vCtCHFuanJS/1mo2tr4XDyEeiuK52eTd9 + omQDpP86RX/hIIQ+JyLSaWYa + -----END PRIVATE KEY-----`, + comment: + "Lorem ipsum dolor sit amet, consectetur adipiscing elit. Sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. 
Ut enim ad minim veniam, quis nostrud exercitation" + } + }, + { + path: "/nested1/nested2/folder", + secret: { + key: "secret-key-3", + value: + "TG9yZW0gaXBzdW0gZG9sb3Igc2l0IGFtZXQsIGNvbnNlY3RldHVyIGFkaXBpc2NpbmcgZWxpdC4gU2VkIGRvIGVpdXNtb2QgdGVtcG9yIGluY2lkaWR1bnQgdXQgbGFib3JlIGV0IGRvbG9yZSBtYWduYSBhbGlxdWEuIFV0IGVuaW0gYWQgbWluaW0gdmVuaWFtLCBxdWlzIG5vc3RydWQgZXhlcmNpdGF0aW9uCg==", + comment: "" + } + } + ]; + + beforeAll(async () => { + if (auth === AuthMode.JWT) { + authToken = jwtAuthToken; + } else if (auth === AuthMode.IDENTITY_ACCESS_TOKEN) { + const identityLogin = await testServer.inject({ + method: "POST", + url: "/api/v1/auth/universal-auth/login", + body: { + clientSecret: seedData1.machineIdentity.clientCredentials.secret, + clientId: seedData1.machineIdentity.clientCredentials.id + } + }); + expect(identityLogin.statusCode).toBe(200); + authToken = identityLogin.json().accessToken; + } + // create a deep folder + const folderCreate = await testServer.inject({ + method: "POST", + url: `/api/v1/folders`, + headers: { + authorization: `Bearer ${jwtAuthToken}` + }, + body: { + workspaceId: seedData1.projectV3.id, + environment: seedData1.environment.slug, + name: "folder", + path: "/nested1/nested2" + } + }); + expect(folderCreate.statusCode).toBe(200); + folderId = folderCreate.json().folder.id; + }); + + afterAll(async () => { + const deleteFolder = await testServer.inject({ + method: "DELETE", + url: `/api/v1/folders/${folderId}`, + headers: { + authorization: `Bearer ${authToken}` + }, + body: { + workspaceId: seedData1.projectV3.id, + environment: seedData1.environment.slug, + path: "/nested1/nested2" + } + }); + expect(deleteFolder.statusCode).toBe(200); + }); + + const getSecrets = async (environment: string, secretPath = "/") => { + const res = await testServer.inject({ + method: "GET", + url: `/api/v3/secrets/raw`, + headers: { + authorization: `Bearer ${authToken}` + }, + query: { + secretPath, + environment, + workspaceId: seedData1.projectV3.id + } + }); + const secrets: TRawSecret[] = JSON.parse(res.payload).secrets || []; + return secrets; + }; + + test.each(secretTestCases)("Create secret in path $path", async ({ secret, path }) => { + const createdSecret = await createSecret({ path, ...secret }); + expect(createdSecret.secretKey).toEqual(secret.key); + expect(createdSecret.secretValue).toEqual(secret.value); + expect(createdSecret.secretComment || "").toEqual(secret.comment); + expect(createdSecret.version).toEqual(1); + + const secrets = await getSecrets(seedData1.environment.slug, path); + expect(secrets).toEqual( + expect.arrayContaining([ + expect.objectContaining({ + secretKey: secret.key, + secretValue: secret.value, + type: SecretType.Shared + }) + ]) + ); + await deleteSecret({ path, key: secret.key }); + }); + + test.each(secretTestCases)("Get secret by name in path $path", async ({ secret, path }) => { + await createSecret({ path, ...secret }); + + const getSecByNameRes = await testServer.inject({ + method: "GET", + url: `/api/v3/secrets/raw/${secret.key}`, + headers: { + authorization: `Bearer ${authToken}` + }, + query: { + secretPath: path, + workspaceId: seedData1.projectV3.id, + environment: seedData1.environment.slug + } + }); + expect(getSecByNameRes.statusCode).toBe(200); + const getSecretByNamePayload = JSON.parse(getSecByNameRes.payload); + expect(getSecretByNamePayload).toHaveProperty("secret"); + const decryptedSecret = getSecretByNamePayload.secret as TRawSecret; + expect(decryptedSecret.secretKey).toEqual(secret.key); + 
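+      // the raw v3 endpoint returns the decrypted plaintext, so the value can be compared directly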
expect(decryptedSecret.secretValue).toEqual(secret.value); + expect(decryptedSecret.secretComment || "").toEqual(secret.comment); + + await deleteSecret({ path, key: secret.key }); + }); + + if (auth === AuthMode.JWT) { + test.each(secretTestCases)( + "Creating personal secret without shared throw error in path $path", + async ({ secret }) => { + const createSecretReqBody = { + workspaceId: seedData1.projectV3.id, + environment: seedData1.environment.slug, + type: SecretType.Personal, + secretKey: secret.key, + secretValue: secret.value, + secretComment: secret.comment + }; + const createSecRes = await testServer.inject({ + method: "POST", + url: `/api/v3/secrets/raw/SEC2`, + headers: { + authorization: `Bearer ${authToken}` + }, + body: createSecretReqBody + }); + const payload = JSON.parse(createSecRes.payload); + expect(createSecRes.statusCode).toBe(400); + expect(payload.error).toEqual("BadRequest"); + } + ); + + test.each(secretTestCases)("Creating personal secret in path $path", async ({ secret, path }) => { + await createSecret({ path, ...secret }); + + const createSecretReqBody = { + workspaceId: seedData1.projectV3.id, + environment: seedData1.environment.slug, + type: SecretType.Personal, + secretPath: path, + secretKey: secret.key, + secretValue: "personal-value", + secretComment: secret.comment + }; + const createSecRes = await testServer.inject({ + method: "POST", + url: `/api/v3/secrets/raw/${secret.key}`, + headers: { + authorization: `Bearer ${authToken}` + }, + body: createSecretReqBody + }); + expect(createSecRes.statusCode).toBe(200); + + // list secrets should contain personal one and shared one + const secrets = await getSecrets(seedData1.environment.slug, path); + expect(secrets).toEqual( + expect.arrayContaining([ + expect.objectContaining({ + secretKey: secret.key, + secretValue: secret.value, + type: SecretType.Shared + }), + expect.objectContaining({ + secretKey: secret.key, + secretValue: "personal-value", + type: SecretType.Personal + }) + ]) + ); + + await deleteSecret({ path, key: secret.key }); + }); + + test.each(secretTestCases)( + "Deleting personal one should not delete shared secret in path $path", + async ({ secret, path }) => { + await createSecret({ path, ...secret }); // shared one + await createSecret({ path, ...secret, type: SecretType.Personal }); + + // shared secret deletion should delete personal ones also + const secrets = await getSecrets(seedData1.environment.slug, path); + expect(secrets).toEqual( + expect.arrayContaining([ + expect.objectContaining({ + secretKey: secret.key, + type: SecretType.Shared + }), + expect.not.objectContaining({ + secretKey: secret.key, + type: SecretType.Personal + }) + ]) + ); + await deleteSecret({ path, key: secret.key }); + } + ); + } + + test.each(secretTestCases)("Update secret in path $path", async ({ path, secret }) => { + await createSecret({ path, ...secret }); + const updateSecretReqBody = { + workspaceId: seedData1.projectV3.id, + environment: seedData1.environment.slug, + type: SecretType.Shared, + secretPath: path, + secretKey: secret.key, + secretValue: "new-value", + secretComment: secret.comment + }; + const updateSecRes = await testServer.inject({ + method: "PATCH", + url: `/api/v3/secrets/raw/${secret.key}`, + headers: { + authorization: `Bearer ${authToken}` + }, + body: updateSecretReqBody + }); + expect(updateSecRes.statusCode).toBe(200); + const updatedSecretPayload = JSON.parse(updateSecRes.payload); + expect(updatedSecretPayload).toHaveProperty("secret"); + const decryptedSecret = 
updatedSecretPayload.secret; + expect(decryptedSecret.secretKey).toEqual(secret.key); + expect(decryptedSecret.secretValue).toEqual("new-value"); + expect(decryptedSecret.secretComment || "").toEqual(secret.comment); + + // list secret should have updated value + const secrets = await getSecrets(seedData1.environment.slug, path); + expect(secrets).toEqual( + expect.arrayContaining([ + expect.objectContaining({ + secretKey: secret.key, + secretValue: "new-value", + type: SecretType.Shared + }) + ]) + ); + + await deleteSecret({ path, key: secret.key }); + }); + + test.each(secretTestCases)("Delete secret in path $path", async ({ secret, path }) => { + await createSecret({ path, ...secret }); + const deletedSecret = await deleteSecret({ path, key: secret.key }); + expect(deletedSecret.secretKey).toEqual(secret.key); + + // shared secret deletion should delete personal ones also + const secrets = await getSecrets(seedData1.environment.slug, path); + expect(secrets).toEqual( + expect.not.arrayContaining([ + expect.objectContaining({ + secretKey: secret.key, + type: SecretType.Shared + }), + expect.objectContaining({ + secretKey: secret.key, + type: SecretType.Personal + }) + ]) + ); + }); + + test.each(secretTestCases)("Bulk create secrets in path $path", async ({ secret, path }) => { + const createSharedSecRes = await testServer.inject({ + method: "POST", + url: `/api/v3/secrets/batch/raw`, + headers: { + authorization: `Bearer ${authToken}` + }, + body: { + workspaceId: seedData1.projectV3.id, + environment: seedData1.environment.slug, + secretPath: path, + secrets: Array.from(Array(5)).map((_e, i) => ({ + secretKey: `BULK-${secret.key}-${i + 1}`, + secretValue: secret.value, + secretComment: secret.comment + })) + } + }); + expect(createSharedSecRes.statusCode).toBe(200); + const createSharedSecPayload = JSON.parse(createSharedSecRes.payload); + expect(createSharedSecPayload).toHaveProperty("secrets"); + + // bulk ones should exist + const secrets = await getSecrets(seedData1.environment.slug, path); + expect(secrets).toEqual( + expect.arrayContaining( + Array.from(Array(5)).map((_e, i) => + expect.objectContaining({ + secretKey: `BULK-${secret.key}-${i + 1}`, + secretValue: secret.value, + type: SecretType.Shared + }) + ) + ) + ); + + await Promise.all( + Array.from(Array(5)).map((_e, i) => deleteSecret({ path, key: `BULK-${secret.key}-${i + 1}` })) + ); + }); + + test.each(secretTestCases)("Bulk create fail on existing secret in path $path", async ({ secret, path }) => { + await createSecret({ ...secret, key: `BULK-${secret.key}-1`, path }); + + const createSharedSecRes = await testServer.inject({ + method: "POST", + url: `/api/v3/secrets/batch/raw`, + headers: { + authorization: `Bearer ${authToken}` + }, + body: { + workspaceId: seedData1.projectV3.id, + environment: seedData1.environment.slug, + secretPath: path, + secrets: Array.from(Array(5)).map((_e, i) => ({ + secretKey: `BULK-${secret.key}-${i + 1}`, + secretValue: secret.value, + secretComment: secret.comment + })) + } + }); + expect(createSharedSecRes.statusCode).toBe(400); + + await deleteSecret({ path, key: `BULK-${secret.key}-1` }); + }); + + test.each(secretTestCases)("Bulk update secrets in path $path", async ({ secret, path }) => { + await Promise.all( + Array.from(Array(5)).map((_e, i) => createSecret({ ...secret, key: `BULK-${secret.key}-${i + 1}`, path })) + ); + + const updateSharedSecRes = await testServer.inject({ + method: "PATCH", + url: `/api/v3/secrets/batch/raw`, + headers: { + authorization: `Bearer 
${authToken}` + }, + body: { + workspaceId: seedData1.projectV3.id, + environment: seedData1.environment.slug, + secretPath: path, + secrets: Array.from(Array(5)).map((_e, i) => ({ + secretKey: `BULK-${secret.key}-${i + 1}`, + secretValue: "update-value", + secretComment: secret.comment + })) + } + }); + expect(updateSharedSecRes.statusCode).toBe(200); + const updateSharedSecPayload = JSON.parse(updateSharedSecRes.payload); + expect(updateSharedSecPayload).toHaveProperty("secrets"); + + // bulk ones should exist + const secrets = await getSecrets(seedData1.environment.slug, path); + expect(secrets).toEqual( + expect.arrayContaining( + Array.from(Array(5)).map((_e, i) => + expect.objectContaining({ + secretKey: `BULK-${secret.key}-${i + 1}`, + secretValue: "update-value", + type: SecretType.Shared + }) + ) + ) + ); + await Promise.all( + Array.from(Array(5)).map((_e, i) => deleteSecret({ path, key: `BULK-${secret.key}-${i + 1}` })) + ); + }); + + test.each(secretTestCases)("Bulk delete secrets in path $path", async ({ secret, path }) => { + await Promise.all( + Array.from(Array(5)).map((_e, i) => createSecret({ ...secret, key: `BULK-${secret.key}-${i + 1}`, path })) + ); + + const deletedSharedSecRes = await testServer.inject({ + method: "DELETE", + url: `/api/v3/secrets/batch/raw`, + headers: { + authorization: `Bearer ${authToken}` + }, + body: { + workspaceId: seedData1.projectV3.id, + environment: seedData1.environment.slug, + secretPath: path, + secrets: Array.from(Array(5)).map((_e, i) => ({ + secretKey: `BULK-${secret.key}-${i + 1}` + })) + } + }); + + expect(deletedSharedSecRes.statusCode).toBe(200); + const deletedSecretPayload = JSON.parse(deletedSharedSecRes.payload); + expect(deletedSecretPayload).toHaveProperty("secrets"); + + // bulk ones should exist + const secrets = await getSecrets(seedData1.environment.slug, path); + expect(secrets).toEqual( + expect.not.arrayContaining( + Array.from(Array(5)).map((_e, i) => + expect.objectContaining({ + secretKey: `BULK-${secret.value}-${i + 1}`, + type: SecretType.Shared + }) + ) + ) + ); + }); + } +); diff --git a/backend/e2e-test/routes/v3/secrets.spec.ts b/backend/e2e-test/routes/v3/secrets.spec.ts index e7e271279e..c035692ed3 100644 --- a/backend/e2e-test/routes/v3/secrets.spec.ts +++ b/backend/e2e-test/routes/v3/secrets.spec.ts @@ -1075,7 +1075,7 @@ describe("Secret V3 Raw Router Without E2EE enabled", async () => { }, body: createSecretReqBody }); - expect(createSecRes.statusCode).toBe(400); + expect(createSecRes.statusCode).toBe(404); }); test("Update secret raw", async () => { @@ -1093,7 +1093,7 @@ describe("Secret V3 Raw Router Without E2EE enabled", async () => { }, body: updateSecretReqBody }); - expect(updateSecRes.statusCode).toBe(400); + expect(updateSecRes.statusCode).toBe(404); }); test("Delete secret raw", async () => { @@ -1110,6 +1110,6 @@ describe("Secret V3 Raw Router Without E2EE enabled", async () => { }, body: deletedSecretReqBody }); - expect(deletedSecRes.statusCode).toBe(400); + expect(deletedSecRes.statusCode).toBe(404); }); }); diff --git a/backend/e2e-test/testUtils/folders.ts b/backend/e2e-test/testUtils/folders.ts new file mode 100644 index 0000000000..a5484a22e6 --- /dev/null +++ b/backend/e2e-test/testUtils/folders.ts @@ -0,0 +1,73 @@ +type TFolder = { + id: string; + name: string; +}; + +export const createFolder = async (dto: { + workspaceId: string; + environmentSlug: string; + secretPath: string; + name: string; + authToken: string; +}) => { + const res = await testServer.inject({ + method: "POST", 
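+    // folders share a single collection endpoint; the folder id appears only in the DELETE path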
+
+export const deleteFolder = async (dto: {
+  workspaceId: string;
+  environmentSlug: string;
+  secretPath: string;
+  id: string;
+  authToken: string;
+}) => {
+  const res = await testServer.inject({
+    method: "DELETE",
+    url: `/api/v1/folders/${dto.id}`,
+    headers: {
+      authorization: `Bearer ${dto.authToken}`
+    },
+    body: {
+      workspaceId: dto.workspaceId,
+      environment: dto.environmentSlug,
+      path: dto.secretPath
+    }
+  });
+  expect(res.statusCode).toBe(200);
+  return res.json().folder as TFolder;
+};
+
+export const listFolders = async (dto: {
+  workspaceId: string;
+  environmentSlug: string;
+  secretPath: string;
+  authToken: string;
+}) => {
+  const res = await testServer.inject({
+    method: "GET",
+    url: `/api/v1/folders`,
+    headers: {
+      authorization: `Bearer ${dto.authToken}`
+    },
+    // The GET route reads these filters from the querystring, so they are sent as
+    // query params (a request body on GET would never reach the route's validator).
+    query: {
+      workspaceId: dto.workspaceId,
+      environment: dto.environmentSlug,
+      path: dto.secretPath
+    }
+  });
+  expect(res.statusCode).toBe(200);
+  return res.json().folders as TFolder[];
+};
diff --git a/backend/e2e-test/testUtils/secret-imports.ts b/backend/e2e-test/testUtils/secret-imports.ts
new file mode 100644
index 0000000000..370a8dfb87
--- /dev/null
+++ b/backend/e2e-test/testUtils/secret-imports.ts
@@ -0,0 +1,93 @@
+type TSecretImport = {
+  id: string;
+  importEnv: {
+    name: string;
+    slug: string;
+    id: string;
+  };
+  importPath: string;
+};
+
+export const createSecretImport = async (dto: {
+  workspaceId: string;
+  environmentSlug: string;
+  isReplication?: boolean;
+  secretPath: string;
+  importPath: string;
+  importEnv: string;
+  authToken: string;
+}) => {
+  const res = await testServer.inject({
+    method: "POST",
+    url: `/api/v1/secret-imports`,
+    headers: {
+      authorization: `Bearer ${dto.authToken}`
+    },
+    body: {
+      workspaceId: dto.workspaceId,
+      environment: dto.environmentSlug,
+      isReplication: dto.isReplication,
+      path: dto.secretPath,
+      import: {
+        environment: dto.importEnv,
+        path: dto.importPath
+      }
+    }
+  });
+
+  expect(res.statusCode).toBe(200);
+  const payload = JSON.parse(res.payload);
+  expect(payload).toHaveProperty("secretImport");
+  return payload.secretImport as TSecretImport;
+};
+
+export const deleteSecretImport = async (dto: {
+  workspaceId: string;
+  environmentSlug: string;
+  secretPath: string;
+  authToken: string;
+  id: string;
+}) => {
+  const res = await testServer.inject({
+    method: "DELETE",
+    url: `/api/v1/secret-imports/${dto.id}`,
+    headers: {
+      authorization: `Bearer ${dto.authToken}`
+    },
+    body: {
+      workspaceId: dto.workspaceId,
+      environment: dto.environmentSlug,
+      path: dto.secretPath
+    }
+  });
+
+  expect(res.statusCode).toBe(200);
+  const payload = JSON.parse(res.payload);
+  expect(payload).toHaveProperty("secretImport");
+  return payload.secretImport as TSecretImport;
+};
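+
+// Example round-trip (hypothetical spec usage; none of these identifiers come from
+// this diff): create an import, list what resolved, then clean up by id.
+//   const imp = await createSecretImport({ workspaceId, environmentSlug, secretPath: "/", importEnv: "prod", importPath: "/db", authToken });
+//   const imports = await listSecretImport({ workspaceId, environmentSlug, secretPath: "/", authToken });
+//   await deleteSecretImport({ workspaceId, environmentSlug, secretPath: "/", id: imp.id, authToken });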
+
+export const listSecretImport = async (dto: {
+  workspaceId: string;
+  environmentSlug: string;
+  secretPath: string;
+  authToken: string;
+}) => {
+  const res = await testServer.inject({
+    method: "GET",
+    url: `/api/v1/secret-imports`,
+    headers: {
+      authorization: `Bearer ${dto.authToken}`
+    },
+    query: {
+      workspaceId: dto.workspaceId,
+      environment: dto.environmentSlug,
+      path: dto.secretPath
+    }
+  });
+
+  expect(res.statusCode).toBe(200);
+  const payload = JSON.parse(res.payload);
+  expect(payload).toHaveProperty("secretImports");
+  return payload.secretImports as TSecretImport[];
+};
diff --git a/backend/e2e-test/testUtils/secrets.ts b/backend/e2e-test/testUtils/secrets.ts
new file mode 100644
index 0000000000..96ecc91c6c
--- /dev/null
+++ b/backend/e2e-test/testUtils/secrets.ts
@@ -0,0 +1,128 @@
+import { SecretType } from "@app/db/schemas";
+
+type TRawSecret = {
+  secretKey: string;
+  secretValue: string;
+  secretComment?: string;
+  version: number;
+};
+
+export const createSecretV2 = async (dto: {
+  workspaceId: string;
+  environmentSlug: string;
+  secretPath: string;
+  key: string;
+  value: string;
+  comment?: string;
+  authToken: string;
+  type?: SecretType;
+}) => {
+  const createSecretReqBody = {
+    workspaceId: dto.workspaceId,
+    environment: dto.environmentSlug,
+    type: dto.type || SecretType.Shared,
+    secretPath: dto.secretPath,
+    secretKey: dto.key,
+    secretValue: dto.value,
+    secretComment: dto.comment
+  };
+  const createSecRes = await testServer.inject({
+    method: "POST",
+    url: `/api/v3/secrets/raw/${dto.key}`,
+    headers: {
+      authorization: `Bearer ${dto.authToken}`
+    },
+    body: createSecretReqBody
+  });
+  expect(createSecRes.statusCode).toBe(200);
+  const createdSecretPayload = JSON.parse(createSecRes.payload);
+  expect(createdSecretPayload).toHaveProperty("secret");
+  return createdSecretPayload.secret as TRawSecret;
+};
+
+export const deleteSecretV2 = async (dto: {
+  workspaceId: string;
+  environmentSlug: string;
+  secretPath: string;
+  key: string;
+  authToken: string;
+}) => {
+  const deleteSecRes = await testServer.inject({
+    method: "DELETE",
+    url: `/api/v3/secrets/raw/${dto.key}`,
+    headers: {
+      authorization: `Bearer ${dto.authToken}`
+    },
+    body: {
+      workspaceId: dto.workspaceId,
+      environment: dto.environmentSlug,
+      secretPath: dto.secretPath
+    }
+  });
+  expect(deleteSecRes.statusCode).toBe(200);
+  const updatedSecretPayload = JSON.parse(deleteSecRes.payload);
+  expect(updatedSecretPayload).toHaveProperty("secret");
+  return updatedSecretPayload.secret as TRawSecret;
+};
+
+export const getSecretByNameV2 = async (dto: {
+  workspaceId: string;
+  environmentSlug: string;
+  secretPath: string;
+  key: string;
+  authToken: string;
+}) => {
+  const response = await testServer.inject({
+    method: "GET",
+    url: `/api/v3/secrets/raw/${dto.key}`,
+    headers: {
+      authorization: `Bearer ${dto.authToken}`
+    },
+    query: {
+      workspaceId: dto.workspaceId,
+      environment: dto.environmentSlug,
+      secretPath: dto.secretPath,
+      expandSecretReferences: "true",
+      include_imports: "true"
+    }
+  });
+  expect(response.statusCode).toBe(200);
+  const payload = JSON.parse(response.payload);
+  expect(payload).toHaveProperty("secret");
+  return payload.secret as TRawSecret;
+};
+
+export const getSecretsV2 = async (dto: {
+  workspaceId: string;
+  environmentSlug: string;
+  secretPath: string;
+  authToken: string;
+}) => {
+  const getSecretsResponse = await testServer.inject({
+    method: "GET",
+    url: `/api/v3/secrets/raw`,
+    headers: {
+      authorization: `Bearer ${dto.authToken}`
+    },
+    query: {
+      workspaceId: dto.workspaceId,
+      environment: dto.environmentSlug,
+      secretPath: dto.secretPath,
+      expandSecretReferences: "true",
+      include_imports: "true"
+    }
+  });
+  expect(getSecretsResponse.statusCode).toBe(200);
+  const getSecretsPayload = JSON.parse(getSecretsResponse.payload);
+  expect(getSecretsPayload).toHaveProperty("secrets");
+  expect(getSecretsPayload).toHaveProperty("imports");
+  return getSecretsPayload as {
+    secrets: TRawSecret[];
+    imports: {
+      secretPath: string;
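+      // Source location the import was resolved from.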
+ environment: string; + folderId: string; + secrets: TRawSecret[]; + }[]; + }; +}; diff --git a/backend/e2e-test/vitest-environment-knex.ts b/backend/e2e-test/vitest-environment-knex.ts index 09ab054436..866b0f45ff 100644 --- a/backend/e2e-test/vitest-environment-knex.ts +++ b/backend/e2e-test/vitest-environment-knex.ts @@ -3,7 +3,6 @@ import "ts-node/register"; import dotenv from "dotenv"; import jwt from "jsonwebtoken"; -import knex from "knex"; import path from "path"; import { seedData1 } from "@app/db/seed-data"; @@ -12,9 +11,12 @@ import { initLogger } from "@app/lib/logger"; import { main } from "@app/server/app"; import { AuthMethod, AuthTokenType } from "@app/services/auth/auth-type"; -import { mockQueue } from "./mocks/queue"; import { mockSmtpServer } from "./mocks/smtp"; -import { mockKeyStore } from "./mocks/keystore"; +import { initDbConnection } from "@app/db"; +import { queueServiceFactory } from "@app/queue"; +import { keyStoreFactory } from "@app/keystore/keystore"; +import { Redis } from "ioredis"; +import { initializeHsmModule } from "@app/ee/services/hsm/hsm-fns"; dotenv.config({ path: path.join(__dirname, "../../.env.test"), debug: true }); export default { @@ -23,27 +25,42 @@ export default { async setup() { const logger = await initLogger(); const cfg = initEnvConfig(logger); - const db = knex({ - client: "pg", - connection: cfg.DB_CONNECTION_URI, - migrations: { + const db = initDbConnection({ + dbConnectionUri: cfg.DB_CONNECTION_URI, + dbRootCert: cfg.DB_ROOT_CERT + }); + + const redis = new Redis(cfg.REDIS_URL); + await redis.flushdb("SYNC"); + + try { + await db.migrate.rollback( + { + directory: path.join(__dirname, "../src/db/migrations"), + extension: "ts", + tableName: "infisical_migrations" + }, + true + ); + await db.migrate.latest({ directory: path.join(__dirname, "../src/db/migrations"), extension: "ts", tableName: "infisical_migrations" - }, - seeds: { + }); + + await db.seed.run({ directory: path.join(__dirname, "../src/db/seeds"), extension: "ts" - } - }); - - try { - await db.migrate.latest(); - await db.seed.run(); + }); const smtp = mockSmtpServer(); - const queue = mockQueue(); - const keyStore = mockKeyStore(); - const server = await main({ db, smtp, logger, queue, keyStore }); + const queue = queueServiceFactory(cfg.REDIS_URL); + const keyStore = keyStoreFactory(cfg.REDIS_URL); + + const hsmModule = initializeHsmModule(); + hsmModule.initialize(); + + const server = await main({ db, smtp, logger, queue, keyStore, hsmModule: hsmModule.getModule() }); + // @ts-expect-error type globalThis.testServer = server; // @ts-expect-error type @@ -60,10 +77,12 @@ export default { { expiresIn: cfg.JWT_AUTH_LIFETIME } ); } catch (error) { + // eslint-disable-next-line console.log("[TEST] Error setting up environment", error); await db.destroy(); throw error; } + // custom setup return { async teardown() { @@ -74,7 +93,17 @@ export default { // @ts-expect-error type delete globalThis.jwtToken; // called after all tests with this env have been run - await db.migrate.rollback({}, true); + await db.migrate.rollback( + { + directory: path.join(__dirname, "../src/db/migrations"), + extension: "ts", + tableName: "infisical_migrations" + }, + true + ); + + await redis.flushdb("ASYNC"); + redis.disconnect(); await db.destroy(); } }; diff --git a/backend/package.json b/backend/package.json index b7945356cf..9a3663b1d7 100644 --- a/backend/package.json +++ b/backend/package.json @@ -3,26 +3,62 @@ "version": "1.0.0", "description": "", "main": "./dist/main.mjs", + 
"bin": "dist/main.js", + "pkg": { + "scripts": [ + "dist/**/*.js", + "../frontend/node_modules/next/**/*.js", + "../frontend/.next/*/**/*.js", + "../frontend/node_modules/next/dist/server/**/*.js", + "../frontend/node_modules/@fortawesome/fontawesome-svg-core/**/*.js" + ], + "assets": [ + "dist/**", + "!dist/**/*.js", + "node_modules/**", + "../frontend/node_modules/**", + "../frontend/.next/**", + "!../frontend/node_modules/next/dist/server/**/*.js", + "../frontend/node_modules/@fortawesome/fontawesome-svg-core/**/*", + "../frontend/public/**" + ], + "outputPath": "binary" + }, "scripts": { + "binary:build": "npm run binary:clean && npm run build:frontend && npm run build && npm run binary:babel-frontend && npm run binary:babel-backend && npm run binary:rename-imports", + "binary:package": "pkg --no-bytecode --public-packages \"*\" --public --target host .", + "binary:babel-backend": " babel ./dist -d ./dist", + "binary:babel-frontend": "babel --copy-files ../frontend/.next/server -d ../frontend/.next/server", + "binary:clean": "rm -rf ./dist && rm -rf ./binary", + "binary:rename-imports": "ts-node ./scripts/rename-mjs.ts", "test": "echo \"Error: no test specified\" && exit 1", "dev": "tsx watch --clear-screen=false ./src/main.ts | pino-pretty --colorize --colorizeObjects --singleLine", "dev:docker": "nodemon", - "build": "tsup", - "start": "node dist/main.mjs", + "build": "tsup --sourcemap", + "build:frontend": "npm run build --prefix ../frontend", + "start": "node --enable-source-maps dist/main.mjs", "type:check": "tsc --noEmit", "lint:fix": "eslint --fix --ext js,ts ./src", "lint": "eslint 'src/**/*.ts'", - "test:e2e": "vitest run -c vitest.e2e.config.ts", - "test:e2e-watch": "vitest -c vitest.e2e.config.ts", + "test:e2e": "vitest run -c vitest.e2e.config.ts --bail=1", + "test:e2e-watch": "vitest -c vitest.e2e.config.ts --bail=1", "test:e2e-coverage": "vitest run --coverage -c vitest.e2e.config.ts", "generate:component": "tsx ./scripts/create-backend-file.ts", - "generate:schema": "tsx ./scripts/generate-schema-types.ts", + "generate:schema": "tsx ./scripts/generate-schema-types.ts && eslint --fix --ext ts ./src/db/schemas", + "auditlog-migration:latest": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:latest", + "auditlog-migration:up": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:up", + "auditlog-migration:down": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:down", + "auditlog-migration:list": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:list", + "auditlog-migration:status": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:status", + "auditlog-migration:rollback": "knex --knexfile ./src/db/auditlog-knexfile.ts migrate:rollback", "migration:new": "tsx ./scripts/create-migration.ts", - "migration:up": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:up", - "migration:down": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:down", - "migration:list": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:list", - "migration:latest": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:latest", - "migration:rollback": "knex --knexfile ./src/db/knexfile.ts migrate:rollback", + "migration:up": "npm run auditlog-migration:up && knex --knexfile ./src/db/knexfile.ts --client pg migrate:up", + "migration:down": "npm run auditlog-migration:down && knex --knexfile ./src/db/knexfile.ts --client pg migrate:down", + "migration:list": "npm run auditlog-migration:list && knex 
--knexfile ./src/db/knexfile.ts --client pg migrate:list", + "migration:latest": "npm run auditlog-migration:latest && knex --knexfile ./src/db/knexfile.ts --client pg migrate:latest", + "migration:status": "npm run auditlog-migration:status && knex --knexfile ./src/db/knexfile.ts --client pg migrate:status", + "migration:rollback": "npm run auditlog-migration:rollback && knex --knexfile ./src/db/knexfile.ts migrate:rollback", + "migrate:org": "tsx ./scripts/migrate-organization.ts", "seed:new": "tsx ./scripts/create-seed-file.ts", "seed": "knex --knexfile ./src/db/knexfile.ts --client pg seed:run", "db:reset": "npm run migration:rollback -- --all && npm run migration:latest" @@ -31,6 +67,11 @@ "author": "", "license": "ISC", "devDependencies": { + "@babel/cli": "^7.18.10", + "@babel/core": "^7.18.10", + "@babel/plugin-syntax-import-attributes": "^7.24.7", + "@babel/preset-env": "^7.18.10", + "@babel/preset-react": "^7.24.7", "@types/bcrypt": "^5.0.2", "@types/jmespath": "^0.15.2", "@types/jsonwebtoken": "^9.0.5", @@ -43,11 +84,16 @@ "@types/passport-google-oauth20": "^2.0.14", "@types/pg": "^8.10.9", "@types/picomatch": "^2.3.3", + "@types/pkcs11js": "^1.0.4", "@types/prompt-sync": "^4.2.3", "@types/resolve": "^1.20.6", + "@types/safe-regex": "^1.1.6", + "@types/sjcl": "^1.0.34", "@types/uuid": "^9.0.7", "@typescript-eslint/eslint-plugin": "^6.20.0", "@typescript-eslint/parser": "^6.20.0", + "@yao-pkg/pkg": "^5.12.0", + "babel-plugin-transform-import-meta": "^2.2.1", "eslint": "^8.56.0", "eslint-config-airbnb-base": "^15.0.0", "eslint-config-airbnb-typescript": "^17.1.0", @@ -60,30 +106,36 @@ "pino-pretty": "^10.2.3", "prompt-sync": "^4.2.0", "rimraf": "^5.0.5", - "ts-node": "^10.9.1", + "ts-node": "^10.9.2", "tsc-alias": "^1.8.8", "tsconfig-paths": "^4.2.0", "tsup": "^8.0.1", "tsx": "^4.4.0", "typescript": "^5.3.2", - "vite-tsconfig-paths": "^4.2.2", "vitest": "^1.2.2" }, "dependencies": { + "@aws-sdk/client-elasticache": "^3.637.0", "@aws-sdk/client-iam": "^3.525.0", + "@aws-sdk/client-kms": "^3.609.0", "@aws-sdk/client-secrets-manager": "^3.504.0", + "@aws-sdk/client-sts": "^3.600.0", "@casl/ability": "^6.5.0", + "@elastic/elasticsearch": "^8.15.0", "@fastify/cookie": "^9.3.1", "@fastify/cors": "^8.5.0", "@fastify/etag": "^5.1.0", "@fastify/formbody": "^7.4.0", "@fastify/helmet": "^11.1.1", + "@fastify/multipart": "8.3.0", "@fastify/passport": "^2.4.0", "@fastify/rate-limit": "^9.0.0", "@fastify/session": "^10.7.0", "@fastify/swagger": "^8.14.0", "@fastify/swagger-ui": "^2.1.0", "@node-saml/passport-saml": "^4.0.4", + "@octokit/auth-app": "^7.1.1", + "@octokit/plugin-retry": "^5.0.5", "@octokit/rest": "^20.0.2", "@octokit/webhooks-types": "^7.3.1", "@opentelemetry/api": "^1.8.0", @@ -94,8 +146,13 @@ "@opentelemetry/resources": "^1.24.1", "@opentelemetry/sdk-metrics": "^1.24.1", "@opentelemetry/semantic-conventions": "^1.24.1", + "@peculiar/asn1-schema": "^2.3.8", + "@peculiar/x509": "^1.12.1", "@serdnam/pino-cloudwatch-transport": "^1.0.4", - "@sindresorhus/slugify": "^2.2.1", + "@sindresorhus/slugify": "1.1.0", + "@slack/oauth": "^3.0.1", + "@slack/web-api": "^7.3.4", + "@team-plain/typescript-sdk": "^4.6.1", "@ucast/mongo2js": "^1.3.4", "ajv": "^8.12.0", "argon2": "^0.31.2", @@ -105,26 +162,34 @@ "bcrypt": "^5.1.1", "bullmq": "^5.4.2", "cassandra-driver": "^4.7.2", + "connect-redis": "^7.1.1", + "cron": "^3.1.7", "dotenv": "^16.4.1", - "fastify": "^4.26.0", + "fastify": "^4.28.1", "fastify-plugin": "^4.5.1", "google-auth-library": "^9.9.0", "googleapis": "^137.1.0", "handlebars": 
"^4.7.8", + "hdb": "^0.19.10", "ioredis": "^5.3.2", "jmespath": "^0.16.0", "jsonwebtoken": "^9.0.2", "jsrp": "^0.2.4", + "jwks-rsa": "^3.1.0", "knex": "^3.0.1", "ldapjs": "^3.0.7", + "ldif": "0.5.1", "libsodium-wrappers": "^0.7.13", "lodash.isequal": "^4.5.0", + "mongodb": "^6.8.1", "ms": "^2.1.3", "mysql2": "^3.9.8", - "nanoid": "^5.0.4", + "nanoid": "^3.3.4", "nodemailer": "^6.9.9", + "openid-client": "^5.6.5", "ora": "^7.0.1", "oracledb": "^6.4.0", + "otplib": "^12.0.1", "passport-github": "^1.1.0", "passport-gitlab2": "^5.0.0", "passport-google-oauth20": "^2.0.0", @@ -133,9 +198,17 @@ "pg-query-stream": "^4.5.3", "picomatch": "^3.0.1", "pino": "^8.16.2", + "pkcs11js": "^2.1.6", + "pkijs": "^3.2.4", "posthog-node": "^3.6.2", - "probot": "^13.0.0", + "probot": "^13.3.8", + "safe-regex": "^2.1.1", + "scim-patch": "^0.8.3", + "scim2-parse-filter": "^0.2.10", + "sjcl": "^1.0.8", "smee-client": "^2.0.0", + "snowflake-sdk": "^1.14.0", + "tedious": "^18.2.1", "tweetnacl": "^1.0.3", "tweetnacl-util": "^0.15.1", "uuid": "^9.0.1", diff --git a/backend/scripts/create-backend-file.ts b/backend/scripts/create-backend-file.ts index fb71994cef..39e9c816c3 100644 --- a/backend/scripts/create-backend-file.ts +++ b/backend/scripts/create-backend-file.ts @@ -7,14 +7,33 @@ const prompt = promptSync({ sigint: true }); +type ComponentType = 1 | 2 | 3; + console.log(` Component List -------------- +0. Exit 1. Service component 2. DAL component 3. Router component `); -const componentType = parseInt(prompt("Select a component: "), 10); + +function getComponentType(): ComponentType { + while (true) { + const input = prompt("Select a component (0-3): "); + const componentType = parseInt(input, 10); + + if (componentType === 0) { + console.log("Exiting the program. Goodbye!"); + process.exit(0); + } else if (componentType === 1 || componentType === 2 || componentType === 3) { + return componentType; + } else { + console.log("Invalid input. 
Please enter 0, 1, 2, or 3.");
+    }
+  }
+}
+const componentType = getComponentType();
 
 if (componentType === 1) {
   const componentName = prompt("Enter service name: ");
diff --git a/backend/scripts/create-migration.ts b/backend/scripts/create-migration.ts
index 59040a37ac..34f4aca419 100644
--- a/backend/scripts/create-migration.ts
+++ b/backend/scripts/create-migration.ts
@@ -2,13 +2,14 @@ import { execSync } from "child_process";
 import path from "path";
 import promptSync from "prompt-sync";
+import slugify from "@sindresorhus/slugify";
 
 const prompt = promptSync({
   sigint: true
 });
 
 const migrationName = prompt("Enter name for migration: ");
 
 // Remove spaces from migration name and replace with hyphens
-const formattedMigrationName = migrationName.replace(/\s+/g, "-");
+const formattedMigrationName = slugify(migrationName);
 
 execSync(
   `npx knex migrate:make --knexfile ${path.join(__dirname, "../src/db/knexfile.ts")} -x ts ${formattedMigrationName}`,
diff --git a/backend/scripts/generate-schema-types.ts b/backend/scripts/generate-schema-types.ts
index 43984ecfac..fc398c2acf 100644
--- a/backend/scripts/generate-schema-types.ts
+++ b/backend/scripts/generate-schema-types.ts
@@ -90,7 +90,12 @@ const main = async () => {
       .whereRaw("table_schema = current_schema()")
       .select<{ tableName: string }[]>("table_name as tableName")
       .orderBy("table_name")
-  ).filter((el) => !el.tableName.includes("_migrations"));
+  ).filter(
+    (el) =>
+      !el.tableName.includes("_migrations") &&
+      !el.tableName.includes("audit_logs_") &&
+      el.tableName !== "intermediate_audit_logs"
+  );
 
   for (let i = 0; i < tables.length; i += 1) {
     const { tableName } = tables[i];
diff --git a/backend/scripts/migrate-organization.ts b/backend/scripts/migrate-organization.ts
new file mode 100644
index 0000000000..ca6aa904db
--- /dev/null
+++ b/backend/scripts/migrate-organization.ts
@@ -0,0 +1,84 @@
+/* eslint-disable */
+import promptSync from "prompt-sync";
+import { execSync } from "child_process";
+import path from "path";
+import { existsSync } from "fs";
+
+const prompt = promptSync({
+  sigint: true
+});
+
+const exportDb = () => {
+  const exportHost = prompt("Enter your Postgres Host to migrate from: ");
+  // prompt-sync returns an empty string (not null/undefined) when the user just
+  // presses Enter, so || is needed for the bracketed defaults to take effect.
+  const exportPort = prompt("Enter your Postgres Port to migrate from [Default = 5432]: ") || "5432";
+  const exportUser = prompt("Enter your Postgres User to migrate from: [Default = infisical]: ") || "infisical";
+  const exportPassword = prompt("Enter your Postgres Password to migrate from: ");
+  const exportDatabase = prompt("Enter your Postgres Database to migrate from [Default = infisical]: ") || "infisical";
+
+  // we do not include the audit_log and secret_sharing entries
+  execSync(
+    `PGDATABASE="${exportDatabase}" PGPASSWORD="${exportPassword}" PGHOST="${exportHost}" PGPORT=${exportPort} PGUSER=${exportUser} pg_dump infisical --exclude-table-data="secret_sharing" --exclude-table-data="audit_log*" > ${path.join(
+      __dirname,
+      "../src/db/dump.sql"
+    )}`,
+    { stdio: "inherit" }
+  );
+};
+
+const importDbForOrg = () => {
+  const importHost = prompt("Enter your Postgres Host to migrate to: ");
+  const importPort = prompt("Enter your Postgres Port to migrate to [Default = 5432]: ") || "5432";
+  const importUser = prompt("Enter your Postgres User to migrate to: [Default = infisical]: ") || "infisical";
+  const importPassword = prompt("Enter your Postgres Password to migrate to: ");
+  const importDatabase = prompt("Enter your Postgres Database to migrate to [Default = infisical]: ") ||
"infisical"; + const orgId = prompt("Enter the organization ID to migrate: "); + + if (!existsSync(path.join(__dirname, "../src/db/dump.sql"))) { + console.log("File not found, please export the database first."); + return; + } + + execSync( + `PGDATABASE="${importDatabase}" PGPASSWORD="${importPassword}" PGHOST="${importHost}" PGPORT=${importPort} PGUSER=${importUser} psql -f ${path.join( + __dirname, + "../src/db/dump.sql" + )}` + ); + + execSync( + `PGDATABASE="${importDatabase}" PGPASSWORD="${importPassword}" PGHOST="${importHost}" PGPORT=${importPort} PGUSER=${importUser} psql -c "DELETE FROM public.organizations WHERE id != '${orgId}'"` + ); + + // delete global/instance-level resources not relevant to the organization to migrate + // users + execSync( + `PGDATABASE="${importDatabase}" PGPASSWORD="${importPassword}" PGHOST="${importHost}" PGPORT=${importPort} PGUSER=${importUser} psql -c 'DELETE FROM users WHERE users.id NOT IN (SELECT org_memberships."userId" FROM org_memberships)'` + ); + + // identities + execSync( + `PGDATABASE="${importDatabase}" PGPASSWORD="${importPassword}" PGHOST="${importHost}" PGPORT=${importPort} PGUSER=${importUser} psql -c 'DELETE FROM identities WHERE id NOT IN (SELECT "identityId" FROM identity_org_memberships)'` + ); + + // reset slack configuration in superAdmin + execSync( + `PGDATABASE="${importDatabase}" PGPASSWORD="${importPassword}" PGHOST="${importHost}" PGPORT=${importPort} PGUSER=${importUser} psql -c 'UPDATE super_admin SET "encryptedSlackClientId" = null, "encryptedSlackClientSecret" = null'` + ); + + console.log("Organization migrated successfully."); +}; + +const main = () => { + const action = prompt( + "Enter the action to perform\n 1. Export from existing instance.\n 2. Import org to instance.\n \n Action: " + ); + if (action === "1") { + exportDb(); + } else if (action === "2") { + importDbForOrg(); + } else { + console.log("Invalid action"); + } +}; + +main(); diff --git a/backend/scripts/rename-mjs.ts b/backend/scripts/rename-mjs.ts new file mode 100644 index 0000000000..793cb98917 --- /dev/null +++ b/backend/scripts/rename-mjs.ts @@ -0,0 +1,27 @@ +/* eslint-disable @typescript-eslint/no-shadow */ +import fs from "node:fs"; +import path from "node:path"; + +function replaceMjsOccurrences(directory: string) { + fs.readdir(directory, (err, files) => { + if (err) throw err; + files.forEach((file) => { + const filePath = path.join(directory, file); + if (fs.statSync(filePath).isDirectory()) { + replaceMjsOccurrences(filePath); + } else { + fs.readFile(filePath, "utf8", (err, data) => { + if (err) throw err; + const result = data.replace(/\.mjs/g, ".js"); + fs.writeFile(filePath, result, "utf8", (err) => { + if (err) throw err; + // eslint-disable-next-line no-console + console.log(`Updated: ${filePath}`); + }); + }); + } + }); + }); +} + +replaceMjsOccurrences("dist"); diff --git a/backend/src/@types/fastify.d.ts b/backend/src/@types/fastify.d.ts index 3f1ca94e9e..2843648da7 100644 --- a/backend/src/@types/fastify.d.ts +++ b/backend/src/@types/fastify.d.ts @@ -6,14 +6,22 @@ import { TAccessApprovalRequestServiceFactory } from "@app/ee/services/access-ap import { TAuditLogServiceFactory } from "@app/ee/services/audit-log/audit-log-service"; import { TCreateAuditLogDTO } from "@app/ee/services/audit-log/audit-log-types"; import { TAuditLogStreamServiceFactory } from "@app/ee/services/audit-log-stream/audit-log-stream-service"; +import { TCertificateAuthorityCrlServiceFactory } from 
"@app/ee/services/certificate-authority-crl/certificate-authority-crl-service"; +import { TCertificateEstServiceFactory } from "@app/ee/services/certificate-est/certificate-est-service"; import { TDynamicSecretServiceFactory } from "@app/ee/services/dynamic-secret/dynamic-secret-service"; import { TDynamicSecretLeaseServiceFactory } from "@app/ee/services/dynamic-secret-lease/dynamic-secret-lease-service"; +import { TExternalKmsServiceFactory } from "@app/ee/services/external-kms/external-kms-service"; import { TGroupServiceFactory } from "@app/ee/services/group/group-service"; import { TIdentityProjectAdditionalPrivilegeServiceFactory } from "@app/ee/services/identity-project-additional-privilege/identity-project-additional-privilege-service"; +import { TIdentityProjectAdditionalPrivilegeV2ServiceFactory } from "@app/ee/services/identity-project-additional-privilege-v2/identity-project-additional-privilege-v2-service"; import { TLdapConfigServiceFactory } from "@app/ee/services/ldap-config/ldap-config-service"; import { TLicenseServiceFactory } from "@app/ee/services/license/license-service"; +import { TOidcConfigServiceFactory } from "@app/ee/services/oidc/oidc-config-service"; import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service"; +import { TProjectTemplateServiceFactory } from "@app/ee/services/project-template/project-template-service"; import { TProjectUserAdditionalPrivilegeServiceFactory } from "@app/ee/services/project-user-additional-privilege/project-user-additional-privilege-service"; +import { TRateLimitServiceFactory } from "@app/ee/services/rate-limit/rate-limit-service"; +import { RateLimitConfiguration } from "@app/ee/services/rate-limit/rate-limit-types"; import { TSamlConfigServiceFactory } from "@app/ee/services/saml-config/saml-config-service"; import { TScimServiceFactory } from "@app/ee/services/scim/scim-service"; import { TSecretApprovalPolicyServiceFactory } from "@app/ee/services/secret-approval-policy/secret-approval-policy-service"; @@ -29,19 +37,31 @@ import { TAuthPasswordFactory } from "@app/services/auth/auth-password-service"; import { TAuthSignupFactory } from "@app/services/auth/auth-signup-service"; import { ActorAuthMethod, ActorType } from "@app/services/auth/auth-type"; import { TAuthTokenServiceFactory } from "@app/services/auth-token/auth-token-service"; +import { TCertificateServiceFactory } from "@app/services/certificate/certificate-service"; +import { TCertificateAuthorityServiceFactory } from "@app/services/certificate-authority/certificate-authority-service"; +import { TCertificateTemplateServiceFactory } from "@app/services/certificate-template/certificate-template-service"; +import { TCmekServiceFactory } from "@app/services/cmek/cmek-service"; +import { TExternalGroupOrgRoleMappingServiceFactory } from "@app/services/external-group-org-role-mapping/external-group-org-role-mapping-service"; +import { TExternalMigrationServiceFactory } from "@app/services/external-migration/external-migration-service"; import { TGroupProjectServiceFactory } from "@app/services/group-project/group-project-service"; +import { THsmServiceFactory } from "@app/services/hsm/hsm-service"; import { TIdentityServiceFactory } from "@app/services/identity/identity-service"; import { TIdentityAccessTokenServiceFactory } from "@app/services/identity-access-token/identity-access-token-service"; import { TIdentityAwsAuthServiceFactory } from "@app/services/identity-aws-auth/identity-aws-auth-service"; import { 
TIdentityAzureAuthServiceFactory } from "@app/services/identity-azure-auth/identity-azure-auth-service"; import { TIdentityGcpAuthServiceFactory } from "@app/services/identity-gcp-auth/identity-gcp-auth-service"; import { TIdentityKubernetesAuthServiceFactory } from "@app/services/identity-kubernetes-auth/identity-kubernetes-auth-service"; +import { TIdentityOidcAuthServiceFactory } from "@app/services/identity-oidc-auth/identity-oidc-auth-service"; import { TIdentityProjectServiceFactory } from "@app/services/identity-project/identity-project-service"; +import { TIdentityTokenAuthServiceFactory } from "@app/services/identity-token-auth/identity-token-auth-service"; import { TIdentityUaServiceFactory } from "@app/services/identity-ua/identity-ua-service"; import { TIntegrationServiceFactory } from "@app/services/integration/integration-service"; import { TIntegrationAuthServiceFactory } from "@app/services/integration-auth/integration-auth-service"; import { TOrgRoleServiceFactory } from "@app/services/org/org-role-service"; import { TOrgServiceFactory } from "@app/services/org/org-service"; +import { TOrgAdminServiceFactory } from "@app/services/org-admin/org-admin-service"; +import { TPkiAlertServiceFactory } from "@app/services/pki-alert/pki-alert-service"; +import { TPkiCollectionServiceFactory } from "@app/services/pki-collection/pki-collection-service"; import { TProjectServiceFactory } from "@app/services/project/project-service"; import { TProjectBotServiceFactory } from "@app/services/project-bot/project-bot-service"; import { TProjectEnvServiceFactory } from "@app/services/project-env/project-env-service"; @@ -56,11 +76,15 @@ import { TSecretReplicationServiceFactory } from "@app/services/secret-replicati import { TSecretSharingServiceFactory } from "@app/services/secret-sharing/secret-sharing-service"; import { TSecretTagServiceFactory } from "@app/services/secret-tag/secret-tag-service"; import { TServiceTokenServiceFactory } from "@app/services/service-token/service-token-service"; +import { TSlackServiceFactory } from "@app/services/slack/slack-service"; import { TSuperAdminServiceFactory } from "@app/services/super-admin/super-admin-service"; import { TTelemetryServiceFactory } from "@app/services/telemetry/telemetry-service"; +import { TTotpServiceFactory } from "@app/services/totp/totp-service"; import { TUserDALFactory } from "@app/services/user/user-dal"; import { TUserServiceFactory } from "@app/services/user/user-service"; +import { TUserEngagementServiceFactory } from "@app/services/user-engagement/user-engagement-service"; import { TWebhookServiceFactory } from "@app/services/webhook/webhook-service"; +import { TWorkflowIntegrationServiceFactory } from "@app/services/workflow-integration/workflow-integration-service"; declare module "fastify" { interface FastifyRequest { @@ -79,6 +103,7 @@ declare module "fastify" { id: string; orgId: string; }; + rateLimits: RateLimitConfiguration; // passport data passportUser: { isUserCompleted: string; @@ -98,11 +123,13 @@ declare module "fastify" { permission: TPermissionServiceFactory; org: TOrgServiceFactory; orgRole: TOrgRoleServiceFactory; + oidc: TOidcConfigServiceFactory; superAdmin: TSuperAdminServiceFactory; user: TUserServiceFactory; group: TGroupServiceFactory; groupProject: TGroupProjectServiceFactory; apiKey: TApiKeyServiceFactory; + pkiAlert: TPkiAlertServiceFactory; project: TProjectServiceFactory; projectMembership: TProjectMembershipServiceFactory; projectEnv: TProjectEnvServiceFactory; @@ -121,11 +148,13 @@ 
declare module "fastify" { identity: TIdentityServiceFactory; identityAccessToken: TIdentityAccessTokenServiceFactory; identityProject: TIdentityProjectServiceFactory; + identityTokenAuth: TIdentityTokenAuthServiceFactory; identityUa: TIdentityUaServiceFactory; identityKubernetesAuth: TIdentityKubernetesAuthServiceFactory; identityGcpAuth: TIdentityGcpAuthServiceFactory; identityAwsAuth: TIdentityAwsAuthServiceFactory; identityAzureAuth: TIdentityAzureAuthServiceFactory; + identityOidcAuth: TIdentityOidcAuthServiceFactory; accessApprovalPolicy: TAccessApprovalPolicyServiceFactory; accessApprovalRequest: TAccessApprovalRequestServiceFactory; secretApprovalPolicy: TSecretApprovalPolicyServiceFactory; @@ -137,6 +166,12 @@ declare module "fastify" { ldap: TLdapConfigServiceFactory; auditLog: TAuditLogServiceFactory; auditLogStream: TAuditLogStreamServiceFactory; + certificate: TCertificateServiceFactory; + certificateTemplate: TCertificateTemplateServiceFactory; + certificateAuthority: TCertificateAuthorityServiceFactory; + certificateAuthorityCrl: TCertificateAuthorityCrlServiceFactory; + certificateEst: TCertificateEstServiceFactory; + pkiCollection: TPkiCollectionServiceFactory; secretScanning: TSecretScanningServiceFactory; license: TLicenseServiceFactory; trustedIp: TTrustedIpServiceFactory; @@ -146,7 +181,20 @@ declare module "fastify" { dynamicSecretLease: TDynamicSecretLeaseServiceFactory; projectUserAdditionalPrivilege: TProjectUserAdditionalPrivilegeServiceFactory; identityProjectAdditionalPrivilege: TIdentityProjectAdditionalPrivilegeServiceFactory; + identityProjectAdditionalPrivilegeV2: TIdentityProjectAdditionalPrivilegeV2ServiceFactory; secretSharing: TSecretSharingServiceFactory; + rateLimit: TRateLimitServiceFactory; + userEngagement: TUserEngagementServiceFactory; + externalKms: TExternalKmsServiceFactory; + hsm: THsmServiceFactory; + orgAdmin: TOrgAdminServiceFactory; + slack: TSlackServiceFactory; + workflowIntegration: TWorkflowIntegrationServiceFactory; + cmek: TCmekServiceFactory; + migration: TExternalMigrationServiceFactory; + externalGroupOrgRoleMapping: TExternalGroupOrgRoleMappingServiceFactory; + projectTemplate: TProjectTemplateServiceFactory; + totp: TTotpServiceFactory; }; // this is exclusive use for middlewares in which we need to inject data // everywhere else access using service layer diff --git a/backend/src/@types/hdb.d.ts b/backend/src/@types/hdb.d.ts new file mode 100644 index 0000000000..4d8f785675 --- /dev/null +++ b/backend/src/@types/hdb.d.ts @@ -0,0 +1,4 @@ +declare module "hdb" { + // eslint-disable-next-line @typescript-eslint/no-explicit-any -- Untyped, the function returns `any`. 
+  function createClient(options: unknown): any;
+}
diff --git a/backend/src/@types/knex.d.ts b/backend/src/@types/knex.d.ts
index 117a74e765..f5c44ff794 100644
--- a/backend/src/@types/knex.d.ts
+++ b/backend/src/@types/knex.d.ts
@@ -1,4 +1,4 @@
-import { Knex } from "knex";
+import { Knex as KnexOriginal } from "knex";
 
 import {
   TableName,
@@ -32,12 +32,42 @@ import {
   TBackupPrivateKey,
   TBackupPrivateKeyInsert,
   TBackupPrivateKeyUpdate,
+  TCertificateAuthorities,
+  TCertificateAuthoritiesInsert,
+  TCertificateAuthoritiesUpdate,
+  TCertificateAuthorityCerts,
+  TCertificateAuthorityCertsInsert,
+  TCertificateAuthorityCertsUpdate,
+  TCertificateAuthorityCrl,
+  TCertificateAuthorityCrlInsert,
+  TCertificateAuthorityCrlUpdate,
+  TCertificateAuthoritySecret,
+  TCertificateAuthoritySecretInsert,
+  TCertificateAuthoritySecretUpdate,
+  TCertificateBodies,
+  TCertificateBodiesInsert,
+  TCertificateBodiesUpdate,
+  TCertificates,
+  TCertificateSecrets,
+  TCertificateSecretsInsert,
+  TCertificateSecretsUpdate,
+  TCertificatesInsert,
+  TCertificatesUpdate,
+  TCertificateTemplateEstConfigs,
+  TCertificateTemplateEstConfigsInsert,
+  TCertificateTemplateEstConfigsUpdate,
+  TCertificateTemplates,
+  TCertificateTemplatesInsert,
+  TCertificateTemplatesUpdate,
   TDynamicSecretLeases,
   TDynamicSecretLeasesInsert,
   TDynamicSecretLeasesUpdate,
   TDynamicSecrets,
   TDynamicSecretsInsert,
   TDynamicSecretsUpdate,
+  TExternalKms,
+  TExternalKmsInsert,
+  TExternalKmsUpdate,
   TGitAppInstallSessions,
   TGitAppInstallSessionsInsert,
   TGitAppInstallSessionsUpdate,
@@ -71,6 +101,12 @@ import {
   TIdentityKubernetesAuths,
   TIdentityKubernetesAuthsInsert,
   TIdentityKubernetesAuthsUpdate,
+  TIdentityMetadata,
+  TIdentityMetadataInsert,
+  TIdentityMetadataUpdate,
+  TIdentityOidcAuths,
+  TIdentityOidcAuthsInsert,
+  TIdentityOidcAuthsUpdate,
   TIdentityOrgMemberships,
   TIdentityOrgMembershipsInsert,
   TIdentityOrgMembershipsUpdate,
@@ -83,6 +119,9 @@ import {
   TIdentityProjectMemberships,
   TIdentityProjectMembershipsInsert,
   TIdentityProjectMembershipsUpdate,
+  TIdentityTokenAuths,
+  TIdentityTokenAuthsInsert,
+  TIdentityTokenAuthsUpdate,
   TIdentityUaClientSecrets,
   TIdentityUaClientSecretsInsert,
   TIdentityUaClientSecretsUpdate,
@@ -98,6 +137,9 @@ import {
   TIntegrations,
   TIntegrationsInsert,
   TIntegrationsUpdate,
+  TInternalKms,
+  TInternalKmsInsert,
+  TInternalKmsUpdate,
   TKmsKeys,
   TKmsKeysInsert,
   TKmsKeysUpdate,
@@ -113,6 +155,9 @@ import {
   TLdapGroupMaps,
   TLdapGroupMapsInsert,
   TLdapGroupMapsUpdate,
+  TOidcConfigs,
+  TOidcConfigsInsert,
+  TOidcConfigsUpdate,
   TOrganizations,
   TOrganizationsInsert,
   TOrganizationsUpdate,
@@ -125,6 +170,15 @@ import {
   TOrgRoles,
   TOrgRolesInsert,
   TOrgRolesUpdate,
+  TPkiAlerts,
+  TPkiAlertsInsert,
+  TPkiAlertsUpdate,
+  TPkiCollectionItems,
+  TPkiCollectionItemsInsert,
+  TPkiCollectionItemsUpdate,
+  TPkiCollections,
+  TPkiCollectionsInsert,
+  TPkiCollectionsUpdate,
   TProjectBots,
   TProjectBotsInsert,
   TProjectBotsUpdate,
@@ -142,13 +196,22 @@ import {
   TProjectRolesUpdate,
   TProjects,
   TProjectsInsert,
+  TProjectSlackConfigs,
+  TProjectSlackConfigsInsert,
+  TProjectSlackConfigsUpdate,
   TProjectsUpdate,
+  TProjectTemplates,
+  TProjectTemplatesInsert,
+  TProjectTemplatesUpdate,
   TProjectUserAdditionalPrivilege,
   TProjectUserAdditionalPrivilegeInsert,
   TProjectUserAdditionalPrivilegeUpdate,
   TProjectUserMembershipRoles,
   TProjectUserMembershipRolesInsert,
   TProjectUserMembershipRolesUpdate,
+  TRateLimit,
+  TRateLimitInsert,
+  TRateLimitUpdate,
   TSamlConfigs,
   TSamlConfigsInsert,
   TSamlConfigsUpdate,
@@ -165,6 +228,9 @@
TSecretApprovalRequestSecretTags, TSecretApprovalRequestSecretTagsInsert, TSecretApprovalRequestSecretTagsUpdate, + TSecretApprovalRequestSecretTagsV2, + TSecretApprovalRequestSecretTagsV2Insert, + TSecretApprovalRequestSecretTagsV2Update, TSecretApprovalRequestsInsert, TSecretApprovalRequestsReviewers, TSecretApprovalRequestsReviewersInsert, @@ -172,6 +238,9 @@ import { TSecretApprovalRequestsSecrets, TSecretApprovalRequestsSecretsInsert, TSecretApprovalRequestsSecretsUpdate, + TSecretApprovalRequestsSecretsV2, + TSecretApprovalRequestsSecretsV2Insert, + TSecretApprovalRequestsSecretsV2Update, TSecretApprovalRequestsUpdate, TSecretBlindIndexes, TSecretBlindIndexesInsert, @@ -188,9 +257,15 @@ import { TSecretReferences, TSecretReferencesInsert, TSecretReferencesUpdate, + TSecretReferencesV2, + TSecretReferencesV2Insert, + TSecretReferencesV2Update, TSecretRotationOutputs, TSecretRotationOutputsInsert, TSecretRotationOutputsUpdate, + TSecretRotationOutputV2, + TSecretRotationOutputV2Insert, + TSecretRotationOutputV2Update, TSecretRotations, TSecretRotationsInsert, TSecretRotationsUpdate, @@ -209,6 +284,9 @@ import { TSecretSnapshotSecrets, TSecretSnapshotSecretsInsert, TSecretSnapshotSecretsUpdate, + TSecretSnapshotSecretsV2, + TSecretSnapshotSecretsV2Insert, + TSecretSnapshotSecretsV2Update, TSecretSnapshotsInsert, TSecretSnapshotsUpdate, TSecretsUpdate, @@ -224,12 +302,21 @@ import { TSecretVersionTagJunction, TSecretVersionTagJunctionInsert, TSecretVersionTagJunctionUpdate, + TSecretVersionV2TagJunction, + TSecretVersionV2TagJunctionInsert, + TSecretVersionV2TagJunctionUpdate, TServiceTokens, TServiceTokensInsert, TServiceTokensUpdate, + TSlackIntegrations, + TSlackIntegrationsInsert, + TSlackIntegrationsUpdate, TSuperAdmin, TSuperAdminInsert, TSuperAdminUpdate, + TTotpConfigs, + TTotpConfigsInsert, + TTotpConfigsUpdate, TTrustedIps, TTrustedIpsInsert, TTrustedIpsUpdate, @@ -250,288 +337,498 @@ import { TUsersUpdate, TWebhooks, TWebhooksInsert, - TWebhooksUpdate + TWebhooksUpdate, + TWorkflowIntegrations, + TWorkflowIntegrationsInsert, + TWorkflowIntegrationsUpdate } from "@app/db/schemas"; +import { + TExternalGroupOrgRoleMappings, + TExternalGroupOrgRoleMappingsInsert, + TExternalGroupOrgRoleMappingsUpdate +} from "@app/db/schemas/external-group-org-role-mappings"; +import { + TSecretV2TagJunction, + TSecretV2TagJunctionInsert, + TSecretV2TagJunctionUpdate +} from "@app/db/schemas/secret-v2-tag-junction"; +import { + TSecretVersionsV2, + TSecretVersionsV2Insert, + TSecretVersionsV2Update +} from "@app/db/schemas/secret-versions-v2"; +import { TSecretsV2, TSecretsV2Insert, TSecretsV2Update } from "@app/db/schemas/secrets-v2"; + +declare module "knex" { + namespace Knex { + interface QueryInterface { + primaryNode(): KnexOriginal; + replicaNode(): KnexOriginal; + } + } +} declare module "knex/types/tables" { interface Tables { - [TableName.Users]: Knex.CompositeTableType; - [TableName.Groups]: Knex.CompositeTableType; - [TableName.UserGroupMembership]: Knex.CompositeTableType< + [TableName.Users]: KnexOriginal.CompositeTableType; + [TableName.Groups]: KnexOriginal.CompositeTableType; + [TableName.CertificateAuthority]: KnexOriginal.CompositeTableType< + TCertificateAuthorities, + TCertificateAuthoritiesInsert, + TCertificateAuthoritiesUpdate + >; + [TableName.CertificateAuthorityCert]: KnexOriginal.CompositeTableType< + TCertificateAuthorityCerts, + TCertificateAuthorityCertsInsert, + TCertificateAuthorityCertsUpdate + >; + [TableName.CertificateAuthoritySecret]: 
KnexOriginal.CompositeTableType< + TCertificateAuthoritySecret, + TCertificateAuthoritySecretInsert, + TCertificateAuthoritySecretUpdate + >; + [TableName.CertificateAuthorityCrl]: KnexOriginal.CompositeTableType< + TCertificateAuthorityCrl, + TCertificateAuthorityCrlInsert, + TCertificateAuthorityCrlUpdate + >; + [TableName.Certificate]: KnexOriginal.CompositeTableType; + [TableName.CertificateTemplate]: KnexOriginal.CompositeTableType< + TCertificateTemplates, + TCertificateTemplatesInsert, + TCertificateTemplatesUpdate + >; + [TableName.CertificateTemplateEstConfig]: KnexOriginal.CompositeTableType< + TCertificateTemplateEstConfigs, + TCertificateTemplateEstConfigsInsert, + TCertificateTemplateEstConfigsUpdate + >; + [TableName.CertificateBody]: KnexOriginal.CompositeTableType< + TCertificateBodies, + TCertificateBodiesInsert, + TCertificateBodiesUpdate + >; + [TableName.CertificateSecret]: KnexOriginal.CompositeTableType< + TCertificateSecrets, + TCertificateSecretsInsert, + TCertificateSecretsUpdate + >; + [TableName.PkiAlert]: KnexOriginal.CompositeTableType; + [TableName.PkiCollection]: KnexOriginal.CompositeTableType< + TPkiCollections, + TPkiCollectionsInsert, + TPkiCollectionsUpdate + >; + [TableName.PkiCollectionItem]: KnexOriginal.CompositeTableType< + TPkiCollectionItems, + TPkiCollectionItemsInsert, + TPkiCollectionItemsUpdate + >; + [TableName.UserGroupMembership]: KnexOriginal.CompositeTableType< TUserGroupMembership, TUserGroupMembershipInsert, TUserGroupMembershipUpdate >; - [TableName.GroupProjectMembership]: Knex.CompositeTableType< + [TableName.GroupProjectMembership]: KnexOriginal.CompositeTableType< TGroupProjectMemberships, TGroupProjectMembershipsInsert, TGroupProjectMembershipsUpdate >; - [TableName.GroupProjectMembershipRole]: Knex.CompositeTableType< + [TableName.GroupProjectMembershipRole]: KnexOriginal.CompositeTableType< TGroupProjectMembershipRoles, TGroupProjectMembershipRolesInsert, TGroupProjectMembershipRolesUpdate >; - [TableName.UserAliases]: Knex.CompositeTableType; - [TableName.UserEncryptionKey]: Knex.CompositeTableType< + [TableName.UserAliases]: KnexOriginal.CompositeTableType; + [TableName.UserEncryptionKey]: KnexOriginal.CompositeTableType< TUserEncryptionKeys, TUserEncryptionKeysInsert, TUserEncryptionKeysUpdate >; - [TableName.AuthTokens]: Knex.CompositeTableType; - [TableName.AuthTokenSession]: Knex.CompositeTableType< + [TableName.AuthTokens]: KnexOriginal.CompositeTableType; + [TableName.AuthTokenSession]: KnexOriginal.CompositeTableType< TAuthTokenSessions, TAuthTokenSessionsInsert, TAuthTokenSessionsUpdate >; - [TableName.BackupPrivateKey]: Knex.CompositeTableType< + [TableName.BackupPrivateKey]: KnexOriginal.CompositeTableType< TBackupPrivateKey, TBackupPrivateKeyInsert, TBackupPrivateKeyUpdate >; - [TableName.Organization]: Knex.CompositeTableType; - [TableName.OrgMembership]: Knex.CompositeTableType; - [TableName.OrgRoles]: Knex.CompositeTableType; - [TableName.IncidentContact]: Knex.CompositeTableType< + [TableName.Organization]: KnexOriginal.CompositeTableType< + TOrganizations, + TOrganizationsInsert, + TOrganizationsUpdate + >; + [TableName.OrgMembership]: KnexOriginal.CompositeTableType< + TOrgMemberships, + TOrgMembershipsInsert, + TOrgMembershipsUpdate + >; + [TableName.OrgRoles]: KnexOriginal.CompositeTableType; + [TableName.IncidentContact]: KnexOriginal.CompositeTableType< TIncidentContacts, TIncidentContactsInsert, TIncidentContactsUpdate >; - [TableName.UserAction]: Knex.CompositeTableType; - [TableName.SuperAdmin]: 
Knex.CompositeTableType; - [TableName.ApiKey]: Knex.CompositeTableType; - [TableName.Project]: Knex.CompositeTableType; - [TableName.ProjectMembership]: Knex.CompositeTableType< + [TableName.UserAction]: KnexOriginal.CompositeTableType; + [TableName.SuperAdmin]: KnexOriginal.CompositeTableType; + [TableName.ApiKey]: KnexOriginal.CompositeTableType; + [TableName.Project]: KnexOriginal.CompositeTableType; + [TableName.ProjectMembership]: KnexOriginal.CompositeTableType< TProjectMemberships, TProjectMembershipsInsert, TProjectMembershipsUpdate >; - [TableName.Environment]: Knex.CompositeTableType< + [TableName.Environment]: KnexOriginal.CompositeTableType< TProjectEnvironments, TProjectEnvironmentsInsert, TProjectEnvironmentsUpdate >; - [TableName.ProjectBot]: Knex.CompositeTableType; - [TableName.ProjectUserMembershipRole]: Knex.CompositeTableType< + [TableName.ProjectBot]: KnexOriginal.CompositeTableType; + [TableName.ProjectUserMembershipRole]: KnexOriginal.CompositeTableType< TProjectUserMembershipRoles, TProjectUserMembershipRolesInsert, TProjectUserMembershipRolesUpdate >; - [TableName.ProjectRoles]: Knex.CompositeTableType; - [TableName.ProjectUserAdditionalPrivilege]: Knex.CompositeTableType< + [TableName.ProjectRoles]: KnexOriginal.CompositeTableType; + [TableName.ProjectUserAdditionalPrivilege]: KnexOriginal.CompositeTableType< TProjectUserAdditionalPrivilege, TProjectUserAdditionalPrivilegeInsert, TProjectUserAdditionalPrivilegeUpdate >; - [TableName.ProjectKeys]: Knex.CompositeTableType; - [TableName.Secret]: Knex.CompositeTableType; - [TableName.SecretReference]: Knex.CompositeTableType< + [TableName.ProjectKeys]: KnexOriginal.CompositeTableType; + [TableName.Secret]: KnexOriginal.CompositeTableType; + [TableName.SecretReference]: KnexOriginal.CompositeTableType< TSecretReferences, TSecretReferencesInsert, TSecretReferencesUpdate >; - [TableName.SecretBlindIndex]: Knex.CompositeTableType< + [TableName.SecretBlindIndex]: KnexOriginal.CompositeTableType< TSecretBlindIndexes, TSecretBlindIndexesInsert, TSecretBlindIndexesUpdate >; - [TableName.SecretVersion]: Knex.CompositeTableType; - [TableName.SecretFolder]: Knex.CompositeTableType; - [TableName.SecretFolderVersion]: Knex.CompositeTableType< + [TableName.SecretVersion]: KnexOriginal.CompositeTableType< + TSecretVersions, + TSecretVersionsInsert, + TSecretVersionsUpdate + >; + [TableName.SecretFolder]: KnexOriginal.CompositeTableType< + TSecretFolders, + TSecretFoldersInsert, + TSecretFoldersUpdate + >; + [TableName.SecretFolderVersion]: KnexOriginal.CompositeTableType< TSecretFolderVersions, TSecretFolderVersionsInsert, TSecretFolderVersionsUpdate >; - [TableName.SecretSharing]: Knex.CompositeTableType; - [TableName.SecretTag]: Knex.CompositeTableType; - [TableName.SecretImport]: Knex.CompositeTableType; - [TableName.Integration]: Knex.CompositeTableType; - [TableName.Webhook]: Knex.CompositeTableType; - [TableName.ServiceToken]: Knex.CompositeTableType; - [TableName.IntegrationAuth]: Knex.CompositeTableType< + [TableName.SecretSharing]: KnexOriginal.CompositeTableType< + TSecretSharing, + TSecretSharingInsert, + TSecretSharingUpdate + >; + [TableName.RateLimit]: KnexOriginal.CompositeTableType; + [TableName.SecretTag]: KnexOriginal.CompositeTableType; + [TableName.SecretImport]: KnexOriginal.CompositeTableType< + TSecretImports, + TSecretImportsInsert, + TSecretImportsUpdate + >; + [TableName.Integration]: KnexOriginal.CompositeTableType; + [TableName.Webhook]: KnexOriginal.CompositeTableType; + [TableName.ServiceToken]: 
KnexOriginal.CompositeTableType< + TServiceTokens, + TServiceTokensInsert, + TServiceTokensUpdate + >; + [TableName.IntegrationAuth]: KnexOriginal.CompositeTableType< TIntegrationAuths, TIntegrationAuthsInsert, TIntegrationAuthsUpdate >; - [TableName.Identity]: Knex.CompositeTableType; - [TableName.IdentityUniversalAuth]: Knex.CompositeTableType< + [TableName.Identity]: KnexOriginal.CompositeTableType; + [TableName.IdentityTokenAuth]: KnexOriginal.CompositeTableType< + TIdentityTokenAuths, + TIdentityTokenAuthsInsert, + TIdentityTokenAuthsUpdate + >; + [TableName.IdentityUniversalAuth]: KnexOriginal.CompositeTableType< TIdentityUniversalAuths, TIdentityUniversalAuthsInsert, TIdentityUniversalAuthsUpdate >; - [TableName.IdentityKubernetesAuth]: Knex.CompositeTableType< + [TableName.IdentityMetadata]: KnexOriginal.CompositeTableType< + TIdentityMetadata, + TIdentityMetadataInsert, + TIdentityMetadataUpdate + >; + [TableName.IdentityKubernetesAuth]: KnexOriginal.CompositeTableType< TIdentityKubernetesAuths, TIdentityKubernetesAuthsInsert, TIdentityKubernetesAuthsUpdate >; - [TableName.IdentityGcpAuth]: Knex.CompositeTableType< + [TableName.IdentityGcpAuth]: KnexOriginal.CompositeTableType< TIdentityGcpAuths, TIdentityGcpAuthsInsert, TIdentityGcpAuthsUpdate >; - [TableName.IdentityAwsAuth]: Knex.CompositeTableType< + [TableName.IdentityAwsAuth]: KnexOriginal.CompositeTableType< TIdentityAwsAuths, TIdentityAwsAuthsInsert, TIdentityAwsAuthsUpdate >; - [TableName.IdentityAzureAuth]: Knex.CompositeTableType< + [TableName.IdentityAzureAuth]: KnexOriginal.CompositeTableType< TIdentityAzureAuths, TIdentityAzureAuthsInsert, TIdentityAzureAuthsUpdate >; - [TableName.IdentityUaClientSecret]: Knex.CompositeTableType< + [TableName.IdentityOidcAuth]: KnexOriginal.CompositeTableType< + TIdentityOidcAuths, + TIdentityOidcAuthsInsert, + TIdentityOidcAuthsUpdate + >; + [TableName.IdentityUaClientSecret]: KnexOriginal.CompositeTableType< TIdentityUaClientSecrets, TIdentityUaClientSecretsInsert, TIdentityUaClientSecretsUpdate >; - [TableName.IdentityAccessToken]: Knex.CompositeTableType< + [TableName.IdentityAccessToken]: KnexOriginal.CompositeTableType< TIdentityAccessTokens, TIdentityAccessTokensInsert, TIdentityAccessTokensUpdate >; - [TableName.IdentityOrgMembership]: Knex.CompositeTableType< + [TableName.IdentityOrgMembership]: KnexOriginal.CompositeTableType< TIdentityOrgMemberships, TIdentityOrgMembershipsInsert, TIdentityOrgMembershipsUpdate >; - [TableName.IdentityProjectMembership]: Knex.CompositeTableType< + [TableName.IdentityProjectMembership]: KnexOriginal.CompositeTableType< TIdentityProjectMemberships, TIdentityProjectMembershipsInsert, TIdentityProjectMembershipsUpdate >; - [TableName.IdentityProjectMembershipRole]: Knex.CompositeTableType< + [TableName.IdentityProjectMembershipRole]: KnexOriginal.CompositeTableType< TIdentityProjectMembershipRole, TIdentityProjectMembershipRoleInsert, TIdentityProjectMembershipRoleUpdate >; - [TableName.IdentityProjectAdditionalPrivilege]: Knex.CompositeTableType< + [TableName.IdentityProjectAdditionalPrivilege]: KnexOriginal.CompositeTableType< TIdentityProjectAdditionalPrivilege, TIdentityProjectAdditionalPrivilegeInsert, TIdentityProjectAdditionalPrivilegeUpdate >; - [TableName.AccessApprovalPolicy]: Knex.CompositeTableType< + [TableName.AccessApprovalPolicy]: KnexOriginal.CompositeTableType< TAccessApprovalPolicies, TAccessApprovalPoliciesInsert, TAccessApprovalPoliciesUpdate >; - [TableName.AccessApprovalPolicyApprover]: Knex.CompositeTableType< + 
[TableName.AccessApprovalPolicyApprover]: KnexOriginal.CompositeTableType< TAccessApprovalPoliciesApprovers, TAccessApprovalPoliciesApproversInsert, TAccessApprovalPoliciesApproversUpdate >; - [TableName.AccessApprovalRequest]: Knex.CompositeTableType< + [TableName.AccessApprovalRequest]: KnexOriginal.CompositeTableType< TAccessApprovalRequests, TAccessApprovalRequestsInsert, TAccessApprovalRequestsUpdate >; - [TableName.AccessApprovalRequestReviewer]: Knex.CompositeTableType< + [TableName.AccessApprovalRequestReviewer]: KnexOriginal.CompositeTableType< TAccessApprovalRequestsReviewers, TAccessApprovalRequestsReviewersInsert, TAccessApprovalRequestsReviewersUpdate >; - [TableName.ScimToken]: Knex.CompositeTableType; - [TableName.SecretApprovalPolicy]: Knex.CompositeTableType< + [TableName.ScimToken]: KnexOriginal.CompositeTableType; + [TableName.SecretApprovalPolicy]: KnexOriginal.CompositeTableType< TSecretApprovalPolicies, TSecretApprovalPoliciesInsert, TSecretApprovalPoliciesUpdate >; - [TableName.SecretApprovalPolicyApprover]: Knex.CompositeTableType< + [TableName.SecretApprovalPolicyApprover]: KnexOriginal.CompositeTableType< TSecretApprovalPoliciesApprovers, TSecretApprovalPoliciesApproversInsert, TSecretApprovalPoliciesApproversUpdate >; - [TableName.SecretApprovalRequest]: Knex.CompositeTableType< + [TableName.SecretApprovalRequest]: KnexOriginal.CompositeTableType< TSecretApprovalRequests, TSecretApprovalRequestsInsert, TSecretApprovalRequestsUpdate >; - [TableName.SecretApprovalRequestReviewer]: Knex.CompositeTableType< + [TableName.SecretApprovalRequestReviewer]: KnexOriginal.CompositeTableType< TSecretApprovalRequestsReviewers, TSecretApprovalRequestsReviewersInsert, TSecretApprovalRequestsReviewersUpdate >; - [TableName.SecretApprovalRequestSecret]: Knex.CompositeTableType< + [TableName.SecretApprovalRequestSecret]: KnexOriginal.CompositeTableType< TSecretApprovalRequestsSecrets, TSecretApprovalRequestsSecretsInsert, TSecretApprovalRequestsSecretsUpdate >; - [TableName.SecretApprovalRequestSecretTag]: Knex.CompositeTableType< + [TableName.SecretApprovalRequestSecretTag]: KnexOriginal.CompositeTableType< TSecretApprovalRequestSecretTags, TSecretApprovalRequestSecretTagsInsert, TSecretApprovalRequestSecretTagsUpdate >; - [TableName.SecretRotation]: Knex.CompositeTableType< + [TableName.SecretRotation]: KnexOriginal.CompositeTableType< TSecretRotations, TSecretRotationsInsert, TSecretRotationsUpdate >; - [TableName.SecretRotationOutput]: Knex.CompositeTableType< + [TableName.SecretRotationOutput]: KnexOriginal.CompositeTableType< TSecretRotationOutputs, TSecretRotationOutputsInsert, TSecretRotationOutputsUpdate >; - [TableName.Snapshot]: Knex.CompositeTableType; - [TableName.SnapshotSecret]: Knex.CompositeTableType< + [TableName.Snapshot]: KnexOriginal.CompositeTableType< + TSecretSnapshots, + TSecretSnapshotsInsert, + TSecretSnapshotsUpdate + >; + [TableName.SnapshotSecret]: KnexOriginal.CompositeTableType< TSecretSnapshotSecrets, TSecretSnapshotSecretsInsert, TSecretSnapshotSecretsUpdate >; - [TableName.SnapshotFolder]: Knex.CompositeTableType< + [TableName.SnapshotFolder]: KnexOriginal.CompositeTableType< TSecretSnapshotFolders, TSecretSnapshotFoldersInsert, TSecretSnapshotFoldersUpdate >; - [TableName.DynamicSecret]: Knex.CompositeTableType; - [TableName.DynamicSecretLease]: Knex.CompositeTableType< + [TableName.DynamicSecret]: KnexOriginal.CompositeTableType< + TDynamicSecrets, + TDynamicSecretsInsert, + TDynamicSecretsUpdate + >; + [TableName.DynamicSecretLease]: 
KnexOriginal.CompositeTableType< TDynamicSecretLeases, TDynamicSecretLeasesInsert, TDynamicSecretLeasesUpdate >; - [TableName.SamlConfig]: Knex.CompositeTableType; - [TableName.LdapConfig]: Knex.CompositeTableType; - [TableName.LdapGroupMap]: Knex.CompositeTableType; - [TableName.OrgBot]: Knex.CompositeTableType; - [TableName.AuditLog]: Knex.CompositeTableType; - [TableName.AuditLogStream]: Knex.CompositeTableType< + [TableName.SamlConfig]: KnexOriginal.CompositeTableType; + [TableName.OidcConfig]: KnexOriginal.CompositeTableType; + [TableName.LdapConfig]: KnexOriginal.CompositeTableType; + [TableName.LdapGroupMap]: KnexOriginal.CompositeTableType< + TLdapGroupMaps, + TLdapGroupMapsInsert, + TLdapGroupMapsUpdate + >; + [TableName.OrgBot]: KnexOriginal.CompositeTableType; + [TableName.AuditLog]: KnexOriginal.CompositeTableType; + [TableName.AuditLogStream]: KnexOriginal.CompositeTableType< TAuditLogStreams, TAuditLogStreamsInsert, TAuditLogStreamsUpdate >; - [TableName.GitAppInstallSession]: Knex.CompositeTableType< + [TableName.GitAppInstallSession]: KnexOriginal.CompositeTableType< TGitAppInstallSessions, TGitAppInstallSessionsInsert, TGitAppInstallSessionsUpdate >; - [TableName.GitAppOrg]: Knex.CompositeTableType; - [TableName.SecretScanningGitRisk]: Knex.CompositeTableType< + [TableName.GitAppOrg]: KnexOriginal.CompositeTableType; + [TableName.SecretScanningGitRisk]: KnexOriginal.CompositeTableType< TSecretScanningGitRisks, TSecretScanningGitRisksInsert, TSecretScanningGitRisksUpdate >; - [TableName.TrustedIps]: Knex.CompositeTableType; + [TableName.TrustedIps]: KnexOriginal.CompositeTableType; + [TableName.SecretV2]: KnexOriginal.CompositeTableType; + [TableName.SecretVersionV2]: KnexOriginal.CompositeTableType< + TSecretVersionsV2, + TSecretVersionsV2Insert, + TSecretVersionsV2Update + >; + [TableName.SecretReferenceV2]: KnexOriginal.CompositeTableType< + TSecretReferencesV2, + TSecretReferencesV2Insert, + TSecretReferencesV2Update + >; // Junction tables - [TableName.JnSecretTag]: Knex.CompositeTableType< + [TableName.SecretV2JnTag]: KnexOriginal.CompositeTableType< + TSecretV2TagJunction, + TSecretV2TagJunctionInsert, + TSecretV2TagJunctionUpdate + >; + [TableName.JnSecretTag]: KnexOriginal.CompositeTableType< TSecretTagJunction, TSecretTagJunctionInsert, TSecretTagJunctionUpdate >; - [TableName.SecretVersionTag]: Knex.CompositeTableType< + [TableName.SecretVersionTag]: KnexOriginal.CompositeTableType< TSecretVersionTagJunction, TSecretVersionTagJunctionInsert, TSecretVersionTagJunctionUpdate >; + [TableName.SecretVersionV2Tag]: KnexOriginal.CompositeTableType< + TSecretVersionV2TagJunction, + TSecretVersionV2TagJunctionInsert, + TSecretVersionV2TagJunctionUpdate + >; + [TableName.SnapshotSecretV2]: KnexOriginal.CompositeTableType< + TSecretSnapshotSecretsV2, + TSecretSnapshotSecretsV2Insert, + TSecretSnapshotSecretsV2Update + >; + [TableName.SecretApprovalRequestSecretV2]: KnexOriginal.CompositeTableType< + TSecretApprovalRequestsSecretsV2, + TSecretApprovalRequestsSecretsV2Insert, + TSecretApprovalRequestsSecretsV2Update + >; + [TableName.SecretApprovalRequestSecretTagV2]: KnexOriginal.CompositeTableType< + TSecretApprovalRequestSecretTagsV2, + TSecretApprovalRequestSecretTagsV2Insert, + TSecretApprovalRequestSecretTagsV2Update + >; + [TableName.SecretRotationOutputV2]: KnexOriginal.CompositeTableType< + TSecretRotationOutputV2, + TSecretRotationOutputV2Insert, + TSecretRotationOutputV2Update + >; // KMS service - [TableName.KmsServerRootConfig]: Knex.CompositeTableType< + 
[TableName.KmsServerRootConfig]: KnexOriginal.CompositeTableType< TKmsRootConfig, TKmsRootConfigInsert, TKmsRootConfigUpdate >; - [TableName.KmsKey]: Knex.CompositeTableType<TKmsKeys, TKmsKeysInsert, TKmsKeysUpdate>; - [TableName.KmsKeyVersion]: Knex.CompositeTableType<TKmsKeyVersions, TKmsKeyVersionsInsert, TKmsKeyVersionsUpdate>; + [TableName.InternalKms]: KnexOriginal.CompositeTableType<TInternalKms, TInternalKmsInsert, TInternalKmsUpdate>; + [TableName.ExternalKms]: KnexOriginal.CompositeTableType<TExternalKms, TExternalKmsInsert, TExternalKmsUpdate>; + [TableName.KmsKey]: KnexOriginal.CompositeTableType<TKmsKeys, TKmsKeysInsert, TKmsKeysUpdate>; + [TableName.KmsKeyVersion]: KnexOriginal.CompositeTableType< + TKmsKeyVersions, + TKmsKeyVersionsInsert, + TKmsKeyVersionsUpdate + >; + [TableName.SlackIntegrations]: KnexOriginal.CompositeTableType< + TSlackIntegrations, + TSlackIntegrationsInsert, + TSlackIntegrationsUpdate + >; + [TableName.ProjectSlackConfigs]: KnexOriginal.CompositeTableType< + TProjectSlackConfigs, + TProjectSlackConfigsInsert, + TProjectSlackConfigsUpdate + >; + [TableName.WorkflowIntegrations]: KnexOriginal.CompositeTableType< + TWorkflowIntegrations, + TWorkflowIntegrationsInsert, + TWorkflowIntegrationsUpdate + >; + [TableName.ExternalGroupOrgRoleMapping]: KnexOriginal.CompositeTableType< + TExternalGroupOrgRoleMappings, + TExternalGroupOrgRoleMappingsInsert, + TExternalGroupOrgRoleMappingsUpdate + >; + [TableName.ProjectTemplates]: KnexOriginal.CompositeTableType< + TProjectTemplates, + TProjectTemplatesInsert, + TProjectTemplatesUpdate + >; + [TableName.TotpConfig]: KnexOriginal.CompositeTableType<TTotpConfigs, TTotpConfigsInsert, TTotpConfigsUpdate>; } } diff --git a/backend/src/@types/ldif.d.ts b/backend/src/@types/ldif.d.ts new file mode 100644 index 0000000000..7ba40fba55 --- /dev/null +++ b/backend/src/@types/ldif.d.ts @@ -0,0 +1,4 @@ +declare module "ldif" { + // eslint-disable-next-line @typescript-eslint/no-explicit-any -- Untyped, the function returns `any`. + function parse(input: string, ...args: any[]): any; +}
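Aside on the `Tables` augmentation in the hunks above: `CompositeTableType` takes three type parameters — the row shape selects return, the shape insert() accepts, and the shape update() accepts — and importing `Knex as KnexOriginal` keeps the bare `Knex` name free inside the augmented module. A minimal sketch with a hypothetical `widget` table (names here are illustrative, not part of this diff):

import { Knex as KnexOriginal } from "knex";

type TWidget = { id: string; name: string; createdAt: Date };
type TWidgetInsert = { name: string };
type TWidgetUpdate = Partial<TWidgetInsert>;

declare module "knex/types/tables" {
  interface Tables {
    // select() rows are TWidget; insert() takes TWidgetInsert; update() takes TWidgetUpdate
    widget: KnexOriginal.CompositeTableType<TWidget, TWidgetInsert, TWidgetUpdate>;
  }
}

With that in place, knex("widget").insert({ name: "a" }) type-checks while knex("widget").insert({ id: "x" }) does not.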
diff --git a/backend/src/db/auditlog-knexfile.ts b/backend/src/db/auditlog-knexfile.ts new file mode 100644 index 0000000000..3ceef65a0d --- /dev/null +++ b/backend/src/db/auditlog-knexfile.ts @@ -0,0 +1,75 @@ +// eslint-disable-next-line +import "ts-node/register"; + +import dotenv from "dotenv"; +import type { Knex } from "knex"; +import path from "path"; + +// Update with your config settings. +dotenv.config({ + path: path.join(__dirname, "../../../.env.migration") +}); +dotenv.config({ + path: path.join(__dirname, "../../../.env") +}); + +if (!process.env.AUDIT_LOGS_DB_CONNECTION_URI && !process.env.AUDIT_LOGS_DB_HOST) { + console.info("Dedicated audit log database not found. No further migrations necessary"); + process.exit(0); +} + +console.info("Executing migration on audit log database..."); + +export default { + development: { + client: "postgres", + connection: { + connectionString: process.env.AUDIT_LOGS_DB_CONNECTION_URI, + host: process.env.AUDIT_LOGS_DB_HOST, + port: process.env.AUDIT_LOGS_DB_PORT, + user: process.env.AUDIT_LOGS_DB_USER, + database: process.env.AUDIT_LOGS_DB_NAME, + password: process.env.AUDIT_LOGS_DB_PASSWORD, + ssl: process.env.AUDIT_LOGS_DB_ROOT_CERT + ? { + rejectUnauthorized: true, + ca: Buffer.from(process.env.AUDIT_LOGS_DB_ROOT_CERT, "base64").toString("ascii") + } + : false + }, + pool: { + min: 2, + max: 10 + }, + seeds: { + directory: "./seeds" + }, + migrations: { + tableName: "infisical_migrations" + } + }, + production: { + client: "postgres", + connection: { + connectionString: process.env.AUDIT_LOGS_DB_CONNECTION_URI, + host: process.env.AUDIT_LOGS_DB_HOST, + port: process.env.AUDIT_LOGS_DB_PORT, + user: process.env.AUDIT_LOGS_DB_USER, + database: process.env.AUDIT_LOGS_DB_NAME, + password: process.env.AUDIT_LOGS_DB_PASSWORD, + ssl: process.env.AUDIT_LOGS_DB_ROOT_CERT + ? { + rejectUnauthorized: true, + ca: Buffer.from(process.env.AUDIT_LOGS_DB_ROOT_CERT, "base64").toString("ascii") + } + : false + }, + pool: { + min: 2, + max: 10 + }, + migrations: { + tableName: "infisical_migrations" + } + } +} as Knex.Config; diff --git a/backend/src/db/index.ts b/backend/src/db/index.ts index 75992e2c69..abebdf65a8 100644 --- a/backend/src/db/index.ts +++ b/backend/src/db/index.ts @@ -1,2 +1,2 @@ export type { TDbClient } from "./instance"; -export { initDbConnection } from "./instance"; +export { initAuditLogDbConnection, initDbConnection } from "./instance"; diff --git a/backend/src/db/instance.ts b/backend/src/db/instance.ts index bd4ce99c16..d4a2a5b2ca 100644 --- a/backend/src/db/instance.ts +++ b/backend/src/db/instance.ts @@ -1,8 +1,38 @@ -import knex from "knex"; +import knex, { Knex } from "knex"; export type TDbClient = ReturnType<typeof initDbConnection>; -export const initDbConnection = ({ dbConnectionUri, dbRootCert }: { dbConnectionUri: string; dbRootCert?: string }) => { - const db = knex({ +export const initDbConnection = ({ + dbConnectionUri, + dbRootCert, + readReplicas = [] +}: { + dbConnectionUri: string; + dbRootCert?: string; + readReplicas?: { + dbConnectionUri: string; + dbRootCert?: string; + }[]; +}) => { + // akhilmhdh: the default type is Knex<any, unknown[]>, but the value returned by knex({}) is typed Knex<any, any[]>; + // this was causing issues in files like `snapshot-dal` (`findRecursivelySnapshots`), so the any and unknown[] are spelled out explicitly + // eslint-disable-next-line + let db: Knex<any, unknown[]>; + // eslint-disable-next-line + let readReplicaDbs: Knex<any, unknown[]>[]; + // @ts-expect-error the querybuilder type is expected but our intention is to return a knex instance + knex.QueryBuilder.extend("primaryNode", () => { + return db; + }); + + // @ts-expect-error the querybuilder type is expected but our intention is to return a knex instance + knex.QueryBuilder.extend("replicaNode", () => { + if (!readReplicaDbs.length) return db; + + const selectedReplica = readReplicaDbs[Math.floor(Math.random() * readReplicaDbs.length)]; + return selectedReplica; + }); + + db = knex({ client: "pg", connection: { connectionString: dbConnectionUri, @@ -22,5 +52,63 @@ export const initDbConnection = ({ dbConnectionUri, dbRootCert }: { dbConnection } }); + readReplicaDbs = readReplicas.map((el) => { + const replicaDbCertificate = el.dbRootCert || dbRootCert; + return knex({ + client: "pg", + connection: { + connectionString: el.dbConnectionUri, + ssl: replicaDbCertificate + ? { + rejectUnauthorized: true, + ca: Buffer.from(replicaDbCertificate, "base64").toString("ascii") + } + : false + } + }); + }); + + return db; +};
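The two QueryBuilder.extend calls above are what data-access code consumes; because knex exposes query-interface methods on the instance itself, `db.replicaNode()` yields a full knex instance — a randomly chosen read replica, or the primary when none are configured. A usage sketch (the table and variables are illustrative):

// Reads that can tolerate replica lag are routed explicitly:
const recentLogs = await db.replicaNode()(TableName.AuditLog).where("orgId", orgId).limit(50);

// Writes, and reads that must observe them immediately, stay on the primary:
await db.primaryNode()(TableName.AuditLog).insert(newLogRow);

The audit-log connection defined next overrides both methods to return itself, so callers can hold either kind of connection without branching.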
+export const initAuditLogDbConnection = ({ + dbConnectionUri, + dbRootCert +}: { + dbConnectionUri: string; + dbRootCert?: string; +}) => { + // akhilmhdh: the default type is Knex<any, unknown[]>, but the value returned by knex({}) is typed Knex<any, any[]>; + // this was causing issues in files like `snapshot-dal` (`findRecursivelySnapshots`), so the any and unknown[] are spelled out explicitly + // eslint-disable-next-line + const db: Knex<any, unknown[]> = knex({ + client: "pg", + connection: { + connectionString: dbConnectionUri, + host: process.env.AUDIT_LOGS_DB_HOST, + // @ts-expect-error for some reason only the port field raises a type error here + // eslint-disable-next-line + port: process.env.AUDIT_LOGS_DB_PORT, + user: process.env.AUDIT_LOGS_DB_USER, + database: process.env.AUDIT_LOGS_DB_NAME, + password: process.env.AUDIT_LOGS_DB_PASSWORD, + ssl: dbRootCert + ? { + rejectUnauthorized: true, + ca: Buffer.from(dbRootCert, "base64").toString("ascii") + } + : false + } + }); + + // we add these overrides so that auditLogDb and the primary DB are interchangeable + db.primaryNode = () => { + return db; + }; + + db.replicaNode = () => { + return db; + }; + return db; }; diff --git a/backend/src/db/manual-migrations/partition-audit-logs.ts b/backend/src/db/manual-migrations/partition-audit-logs.ts new file mode 100644 index 0000000000..382ef0dbff --- /dev/null +++ b/backend/src/db/manual-migrations/partition-audit-logs.ts @@ -0,0 +1,161 @@ +import kx, { Knex } from "knex"; + +import { TableName } from "../schemas"; + +const INTERMEDIATE_AUDIT_LOG_TABLE = "intermediate_audit_logs"; + +const formatPartitionDate = (date: Date) => { + const year = date.getFullYear(); + const month = String(date.getMonth() + 1).padStart(2, "0"); + const day = String(date.getDate()).padStart(2, "0"); + + return `${year}-${month}-${day}`; +}; + +const createAuditLogPartition = async (knex: Knex, startDate: Date, endDate: Date) => { + const startDateStr = formatPartitionDate(startDate); + const endDateStr = formatPartitionDate(endDate); + + const partitionName = `${TableName.AuditLog}_${startDateStr.replace(/-/g, "")}_${endDateStr.replace(/-/g, "")}`; + + await knex.schema.raw( + `CREATE TABLE ${partitionName} PARTITION OF ${TableName.AuditLog} FOR VALUES FROM ('${startDateStr}') TO ('${endDateStr}')` + ); +}; + +const up = async (knex: Knex): Promise<void> => { + console.info("Dropping primary key of audit log table..."); + await knex.schema.alterTable(TableName.AuditLog, (t) => { + // remove existing keys + t.dropPrimary(); + }); + + // Get all indices of the audit log table and drop them + const indexNames: { rows: { indexname: string }[] } = await knex.raw( + ` + SELECT indexname + FROM pg_indexes + WHERE tablename = '${TableName.AuditLog}' + ` + ); + + console.log( + "Deleting existing audit log indices:", + indexNames.rows.map((e) => e.indexname) + ); + + for await (const row of indexNames.rows) { + await knex.raw(`DROP INDEX IF EXISTS ${row.indexname}`); + } + + // renaming audit log to intermediate table + console.log("Renaming audit log table to the intermediate name"); + await knex.schema.renameTable(TableName.AuditLog, INTERMEDIATE_AUDIT_LOG_TABLE); + + if (!(await knex.schema.hasTable(TableName.AuditLog))) { + const createTableSql = knex.schema + .createTable(TableName.AuditLog, (t) => { + t.uuid("id").defaultTo(knex.fn.uuid()); + t.string("actor").notNullable(); + t.jsonb("actorMetadata").notNullable(); + t.string("ipAddress"); + t.string("eventType").notNullable(); + t.jsonb("eventMetadata"); + t.string("userAgent"); + t.string("userAgentType"); + t.datetime("expiresAt"); + t.timestamps(true, true, true); + t.uuid("orgId"); + t.string("projectId"); + t.string("projectName"); + t.primary(["id", "createdAt"]); + })
.toString(); + + console.info("Creating partition table..."); + await knex.schema.raw(` + ${createTableSql} PARTITION BY RANGE ("createdAt"); + `); + + console.log("Adding indices..."); + await knex.schema.alterTable(TableName.AuditLog, (t) => { + t.index(["projectId", "createdAt"]); + t.index(["orgId", "createdAt"]); + t.index("expiresAt"); + t.index("orgId"); + t.index("projectId"); + }); + + console.log("Adding GIN indices..."); + + await knex.raw( + `CREATE INDEX IF NOT EXISTS "audit_logs_actorMetadata_idx" ON ${TableName.AuditLog} USING gin("actorMetadata" jsonb_path_ops)` + ); + console.log("GIN index for actorMetadata done"); + + await knex.raw( + `CREATE INDEX IF NOT EXISTS "audit_logs_eventMetadata_idx" ON ${TableName.AuditLog} USING gin("eventMetadata" jsonb_path_ops)` + ); + console.log("GIN index for eventMetadata done"); + + // create default partition + console.log("Creating default partition..."); + await knex.schema.raw(`CREATE TABLE ${TableName.AuditLog}_default PARTITION OF ${TableName.AuditLog} DEFAULT`); + + const nextDate = new Date(); + nextDate.setDate(nextDate.getDate() + 1); + const nextDateStr = formatPartitionDate(nextDate); + + console.log("Attaching existing audit log table as a partition..."); + await knex.schema.raw(` + ALTER TABLE ${INTERMEDIATE_AUDIT_LOG_TABLE} ADD CONSTRAINT audit_log_old + CHECK ( "createdAt" < DATE '${nextDateStr}' ); + + ALTER TABLE ${TableName.AuditLog} ATTACH PARTITION ${INTERMEDIATE_AUDIT_LOG_TABLE} + FOR VALUES FROM (MINVALUE) TO ('${nextDateStr}' ); + `); + + // create partition from now until end of month + console.log("Creating audit log partitions ahead of time... next date:", nextDateStr); + await createAuditLogPartition(knex, nextDate, new Date(nextDate.getFullYear(), nextDate.getMonth() + 1)); + + // create partitions 4 years ahead + const partitionMonths = 4 * 12; + const partitionPromises: Promise[] = []; + for (let x = 1; x <= partitionMonths; x += 1) { + partitionPromises.push( + createAuditLogPartition( + knex, + new Date(nextDate.getFullYear(), nextDate.getMonth() + x, 1), + new Date(nextDate.getFullYear(), nextDate.getMonth() + (x + 1), 1) + ) + ); + } + + await Promise.all(partitionPromises); + console.log("Partition migration complete"); + } +}; + +export const executeMigration = async (url: string) => { + console.log("Executing migration..."); + const knex = kx({ + client: "pg", + connection: url + }); + + await knex.transaction(async (tx) => { + await up(tx); + }); +}; + +const dbUrl = process.env.AUDIT_LOGS_DB_CONNECTION_URI; +if (!dbUrl) { + console.error("Please provide a DB connection URL to the AUDIT_LOGS_DB_CONNECTION_URI env"); + process.exit(1); +} + +void executeMigration(dbUrl).then(() => { + console.log("Migration: partition-audit-logs DONE"); + process.exit(0); +}); diff --git a/backend/src/db/migrations/20231222172455_integration.ts b/backend/src/db/migrations/20231222172455_integration.ts index 43fd9986ac..ad0cec254d 100644 --- a/backend/src/db/migrations/20231222172455_integration.ts +++ b/backend/src/db/migrations/20231222172455_integration.ts @@ -9,7 +9,7 @@ export async function up(knex: Knex): Promise { t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid()); t.string("integration").notNullable(); t.string("teamId"); // vercel-specific - t.string("url"); // for self hosted + t.string("url"); // for self-hosted t.string("namespace"); // hashicorp specific t.string("accountId"); // netlify t.text("refreshCiphertext"); @@ -36,7 +36,7 @@ export async function up(knex: Knex): Promise { await 
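For reference, the layout the partition script above produces, assuming `TableName.AuditLog` resolves to `audit_logs`: each createAuditLogPartition call creates one bounded monthly range partition, e.g.

// Emits roughly:
//   CREATE TABLE audit_logs_20240701_20240801 PARTITION OF audit_logs
//   FOR VALUES FROM ('2024-07-01') TO ('2024-08-01')
await createAuditLogPartition(knex, new Date(2024, 6, 1), new Date(2024, 7, 1)); // JS months are 0-based

Rows outside every declared range land in audit_logs_default, and the pre-partitioning table is attached as the (MINVALUE, tomorrow) partition, so historical rows stay queryable without being rewritten.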
knex.schema.createTable(TableName.Integration, (t) => { t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid()); t.boolean("isActive").notNullable(); - t.string("url"); // self hosted + t.string("url"); // self-hosted t.string("app"); // name of app in provider t.string("appId"); t.string("targetEnvironment"); diff --git a/backend/src/db/migrations/20240609133400_private-key-handoff.ts b/backend/src/db/migrations/20240609133400_private-key-handoff.ts new file mode 100644 index 0000000000..9741c60292 --- /dev/null +++ b/backend/src/db/migrations/20240609133400_private-key-handoff.ts @@ -0,0 +1,61 @@ +import { Knex } from "knex"; + +import { TableName } from "../schemas"; + +export async function up(knex: Knex): Promise { + const doesPasswordFieldExist = await knex.schema.hasColumn(TableName.UserEncryptionKey, "hashedPassword"); + const doesPrivateKeyFieldExist = await knex.schema.hasColumn( + TableName.UserEncryptionKey, + "serverEncryptedPrivateKey" + ); + const doesPrivateKeyIVFieldExist = await knex.schema.hasColumn( + TableName.UserEncryptionKey, + "serverEncryptedPrivateKeyIV" + ); + const doesPrivateKeyTagFieldExist = await knex.schema.hasColumn( + TableName.UserEncryptionKey, + "serverEncryptedPrivateKeyTag" + ); + const doesPrivateKeyEncodingFieldExist = await knex.schema.hasColumn( + TableName.UserEncryptionKey, + "serverEncryptedPrivateKeyEncoding" + ); + if (await knex.schema.hasTable(TableName.UserEncryptionKey)) { + await knex.schema.alterTable(TableName.UserEncryptionKey, (t) => { + if (!doesPasswordFieldExist) t.string("hashedPassword"); + if (!doesPrivateKeyFieldExist) t.text("serverEncryptedPrivateKey"); + if (!doesPrivateKeyIVFieldExist) t.text("serverEncryptedPrivateKeyIV"); + if (!doesPrivateKeyTagFieldExist) t.text("serverEncryptedPrivateKeyTag"); + if (!doesPrivateKeyEncodingFieldExist) t.text("serverEncryptedPrivateKeyEncoding"); + }); + } +} + +export async function down(knex: Knex): Promise { + const doesPasswordFieldExist = await knex.schema.hasColumn(TableName.UserEncryptionKey, "hashedPassword"); + const doesPrivateKeyFieldExist = await knex.schema.hasColumn( + TableName.UserEncryptionKey, + "serverEncryptedPrivateKey" + ); + const doesPrivateKeyIVFieldExist = await knex.schema.hasColumn( + TableName.UserEncryptionKey, + "serverEncryptedPrivateKeyIV" + ); + const doesPrivateKeyTagFieldExist = await knex.schema.hasColumn( + TableName.UserEncryptionKey, + "serverEncryptedPrivateKeyTag" + ); + const doesPrivateKeyEncodingFieldExist = await knex.schema.hasColumn( + TableName.UserEncryptionKey, + "serverEncryptedPrivateKeyEncoding" + ); + if (await knex.schema.hasTable(TableName.UserEncryptionKey)) { + await knex.schema.alterTable(TableName.UserEncryptionKey, (t) => { + if (doesPasswordFieldExist) t.dropColumn("hashedPassword"); + if (doesPrivateKeyFieldExist) t.dropColumn("serverEncryptedPrivateKey"); + if (doesPrivateKeyIVFieldExist) t.dropColumn("serverEncryptedPrivateKeyIV"); + if (doesPrivateKeyTagFieldExist) t.dropColumn("serverEncryptedPrivateKeyTag"); + if (doesPrivateKeyEncodingFieldExist) t.dropColumn("serverEncryptedPrivateKeyEncoding"); + }); + } +} diff --git a/backend/src/db/migrations/20240612200518_add-pit-version-limit.ts b/backend/src/db/migrations/20240612200518_add-pit-version-limit.ts new file mode 100644 index 0000000000..e37c24e2c8 --- /dev/null +++ b/backend/src/db/migrations/20240612200518_add-pit-version-limit.ts @@ -0,0 +1,21 @@ +import { Knex } from "knex"; + +import { TableName } from "../schemas"; + +export async function up(knex: 
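The private-key handoff migration above establishes the guard idiom most of the following migrations reuse: hasColumn/hasTable return promises, so every probe is awaited before alterTable, and the synchronous schema callback only branches on the precomputed booleans. That makes up and down safe to re-run against a partially migrated database. In minimal form (the column name here is hypothetical):

export async function up(knex: Knex): Promise<void> {
  // probe first: hasColumn cannot be awaited inside the synchronous callback
  const hasCol = await knex.schema.hasColumn(TableName.UserEncryptionKey, "someColumn");
  await knex.schema.alterTable(TableName.UserEncryptionKey, (t) => {
    if (!hasCol) t.string("someColumn"); // no-op when already applied
  });
}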
Knex): Promise { + const hasPitVersionLimitColumn = await knex.schema.hasColumn(TableName.Project, "pitVersionLimit"); + await knex.schema.alterTable(TableName.Project, (tb) => { + if (!hasPitVersionLimitColumn) { + tb.integer("pitVersionLimit").notNullable().defaultTo(10); + } + }); +} + +export async function down(knex: Knex): Promise { + const hasPitVersionLimitColumn = await knex.schema.hasColumn(TableName.Project, "pitVersionLimit"); + await knex.schema.alterTable(TableName.Project, (tb) => { + if (hasPitVersionLimitColumn) { + tb.dropColumn("pitVersionLimit"); + } + }); +} diff --git a/backend/src/db/migrations/20240614010847_custom-rate-limits-for-self-hosting.ts b/backend/src/db/migrations/20240614010847_custom-rate-limits-for-self-hosting.ts new file mode 100644 index 0000000000..c34b2d1968 --- /dev/null +++ b/backend/src/db/migrations/20240614010847_custom-rate-limits-for-self-hosting.ts @@ -0,0 +1,31 @@ +import { Knex } from "knex"; + +import { TableName } from "../schemas"; +import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils"; + +export async function up(knex: Knex): Promise { + if (!(await knex.schema.hasTable(TableName.RateLimit))) { + await knex.schema.createTable(TableName.RateLimit, (t) => { + t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid()); + t.integer("readRateLimit").defaultTo(600).notNullable(); + t.integer("writeRateLimit").defaultTo(200).notNullable(); + t.integer("secretsRateLimit").defaultTo(60).notNullable(); + t.integer("authRateLimit").defaultTo(60).notNullable(); + t.integer("inviteUserRateLimit").defaultTo(30).notNullable(); + t.integer("mfaRateLimit").defaultTo(20).notNullable(); + t.integer("creationLimit").defaultTo(30).notNullable(); + t.integer("publicEndpointLimit").defaultTo(30).notNullable(); + t.timestamps(true, true, true); + }); + + await createOnUpdateTrigger(knex, TableName.RateLimit); + + // create init rate limit entry with defaults + await knex(TableName.RateLimit).insert({}); + } +} + +export async function down(knex: Knex): Promise { + await knex.schema.dropTableIfExists(TableName.RateLimit); + await dropOnUpdateTrigger(knex, TableName.RateLimit); +} diff --git a/backend/src/db/migrations/20240614115952_tag-machine-identity.ts b/backend/src/db/migrations/20240614115952_tag-machine-identity.ts new file mode 100644 index 0000000000..fd11928b61 --- /dev/null +++ b/backend/src/db/migrations/20240614115952_tag-machine-identity.ts @@ -0,0 +1,25 @@ +import { Knex } from "knex"; + +import { ActorType } from "@app/services/auth/auth-type"; + +import { TableName } from "../schemas"; + +export async function up(knex: Knex): Promise { + const hasCreatedByActorType = await knex.schema.hasColumn(TableName.SecretTag, "createdByActorType"); + await knex.schema.alterTable(TableName.SecretTag, (tb) => { + if (!hasCreatedByActorType) { + tb.string("createdByActorType").notNullable().defaultTo(ActorType.USER); + tb.dropForeign("createdBy"); + } + }); +} + +export async function down(knex: Knex): Promise { + const hasCreatedByActorType = await knex.schema.hasColumn(TableName.SecretTag, "createdByActorType"); + await knex.schema.alterTable(TableName.SecretTag, (tb) => { + if (hasCreatedByActorType) { + tb.dropColumn("createdByActorType"); + tb.foreign("createdBy").references("id").inTable(TableName.Users).onDelete("SET NULL"); + } + }); +} diff --git a/backend/src/db/migrations/20240614154212_certificate-mgmt.ts b/backend/src/db/migrations/20240614154212_certificate-mgmt.ts new file mode 100644 index 0000000000..a738a6b645 --- 
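The tag migration above drops the foreign key on createdBy precisely so the column can hold either a user id or a machine identity id, with createdByActorType recording which; consumers then pick the join side themselves. A sketch of that read path (tagId is illustrative, and the identity branch is an assumption based on the actor types this diff introduces):

const tag = await knex(TableName.SecretTag).where({ id: tagId }).first();
if (tag) {
  // createdBy is no longer FK-constrained, so branch on the recorded actor type
  const creator =
    tag.createdByActorType === ActorType.USER
      ? await knex(TableName.Users).where({ id: tag.createdBy }).first()
      : await knex(TableName.Identity).where({ id: tag.createdBy }).first();
}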
/dev/null +++ b/backend/src/db/migrations/20240614154212_certificate-mgmt.ts @@ -0,0 +1,137 @@ +import { Knex } from "knex"; + +import { TableName } from "../schemas"; +import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils"; + +export async function up(knex: Knex): Promise { + if (await knex.schema.hasTable(TableName.Project)) { + const doesProjectCertificateKeyIdExist = await knex.schema.hasColumn(TableName.Project, "kmsCertificateKeyId"); + await knex.schema.alterTable(TableName.Project, (t) => { + if (!doesProjectCertificateKeyIdExist) { + t.uuid("kmsCertificateKeyId").nullable(); + t.foreign("kmsCertificateKeyId").references("id").inTable(TableName.KmsKey); + } + }); + } + + if (!(await knex.schema.hasTable(TableName.CertificateAuthority))) { + await knex.schema.createTable(TableName.CertificateAuthority, (t) => { + t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid()); + t.timestamps(true, true, true); + t.uuid("parentCaId").nullable(); + t.foreign("parentCaId").references("id").inTable(TableName.CertificateAuthority).onDelete("CASCADE"); + t.string("projectId").notNullable(); + t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE"); + t.string("type").notNullable(); // root / intermediate + t.string("status").notNullable(); // active / pending-certificate + t.string("friendlyName").notNullable(); + t.string("organization").notNullable(); + t.string("ou").notNullable(); + t.string("country").notNullable(); + t.string("province").notNullable(); + t.string("locality").notNullable(); + t.string("commonName").notNullable(); + t.string("dn").notNullable(); + t.string("serialNumber").nullable().unique(); + t.integer("maxPathLength").nullable(); + t.string("keyAlgorithm").notNullable(); + t.datetime("notBefore").nullable(); + t.datetime("notAfter").nullable(); + }); + } + + if (!(await knex.schema.hasTable(TableName.CertificateAuthorityCert))) { + // table to keep track of certificates belonging to CA + await knex.schema.createTable(TableName.CertificateAuthorityCert, (t) => { + t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid()); + t.timestamps(true, true, true); + t.uuid("caId").notNullable().unique(); + t.foreign("caId").references("id").inTable(TableName.CertificateAuthority).onDelete("CASCADE"); + t.binary("encryptedCertificate").notNullable(); + t.binary("encryptedCertificateChain").notNullable(); + }); + } + + if (!(await knex.schema.hasTable(TableName.CertificateAuthoritySecret))) { + await knex.schema.createTable(TableName.CertificateAuthoritySecret, (t) => { + t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid()); + t.timestamps(true, true, true); + t.uuid("caId").notNullable().unique(); + t.foreign("caId").references("id").inTable(TableName.CertificateAuthority).onDelete("CASCADE"); + t.binary("encryptedPrivateKey").notNullable(); + }); + } + + if (!(await knex.schema.hasTable(TableName.CertificateAuthorityCrl))) { + await knex.schema.createTable(TableName.CertificateAuthorityCrl, (t) => { + t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid()); + t.timestamps(true, true, true); + t.uuid("caId").notNullable().unique(); + t.foreign("caId").references("id").inTable(TableName.CertificateAuthority).onDelete("CASCADE"); + t.binary("encryptedCrl").notNullable(); + }); + } + + if (!(await knex.schema.hasTable(TableName.Certificate))) { + await knex.schema.createTable(TableName.Certificate, (t) => { + t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid()); + t.timestamps(true, true, true); + 
t.uuid("caId").notNullable(); + t.foreign("caId").references("id").inTable(TableName.CertificateAuthority).onDelete("CASCADE"); + t.string("status").notNullable(); // active / pending-certificate + t.string("serialNumber").notNullable().unique(); + t.string("friendlyName").notNullable(); + t.string("commonName").notNullable(); + t.datetime("notBefore").notNullable(); + t.datetime("notAfter").notNullable(); + t.datetime("revokedAt").nullable(); + t.integer("revocationReason").nullable(); // integer based on crl reason in RFC 5280 + }); + } + + if (!(await knex.schema.hasTable(TableName.CertificateBody))) { + await knex.schema.createTable(TableName.CertificateBody, (t) => { + t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid()); + t.timestamps(true, true, true); + t.uuid("certId").notNullable().unique(); + t.foreign("certId").references("id").inTable(TableName.Certificate).onDelete("CASCADE"); + t.binary("encryptedCertificate").notNullable(); + }); + } + + await createOnUpdateTrigger(knex, TableName.CertificateAuthority); + await createOnUpdateTrigger(knex, TableName.CertificateAuthorityCert); + await createOnUpdateTrigger(knex, TableName.CertificateAuthoritySecret); + await createOnUpdateTrigger(knex, TableName.Certificate); + await createOnUpdateTrigger(knex, TableName.CertificateBody); +} + +export async function down(knex: Knex): Promise { + // project + if (await knex.schema.hasTable(TableName.Project)) { + const doesProjectCertificateKeyIdExist = await knex.schema.hasColumn(TableName.Project, "kmsCertificateKeyId"); + await knex.schema.alterTable(TableName.Project, (t) => { + if (doesProjectCertificateKeyIdExist) t.dropColumn("kmsCertificateKeyId"); + }); + } + + // certificates + await knex.schema.dropTableIfExists(TableName.CertificateBody); + await dropOnUpdateTrigger(knex, TableName.CertificateBody); + + await knex.schema.dropTableIfExists(TableName.Certificate); + await dropOnUpdateTrigger(knex, TableName.Certificate); + + // certificate authorities + await knex.schema.dropTableIfExists(TableName.CertificateAuthoritySecret); + await dropOnUpdateTrigger(knex, TableName.CertificateAuthoritySecret); + + await knex.schema.dropTableIfExists(TableName.CertificateAuthorityCrl); + await dropOnUpdateTrigger(knex, TableName.CertificateAuthorityCrl); + + await knex.schema.dropTableIfExists(TableName.CertificateAuthorityCert); + await dropOnUpdateTrigger(knex, TableName.CertificateAuthorityCert); + + await knex.schema.dropTableIfExists(TableName.CertificateAuthority); + await dropOnUpdateTrigger(knex, TableName.CertificateAuthority); +} diff --git a/backend/src/db/migrations/20240614184133_make-secret-sharing-public.ts b/backend/src/db/migrations/20240614184133_make-secret-sharing-public.ts new file mode 100644 index 0000000000..dc2756b744 --- /dev/null +++ b/backend/src/db/migrations/20240614184133_make-secret-sharing-public.ts @@ -0,0 +1,27 @@ +import { Knex } from "knex"; + +import { TableName } from "../schemas"; + +export async function up(knex: Knex): Promise { + const hasOrgIdColumn = await knex.schema.hasColumn(TableName.SecretSharing, "orgId"); + const hasUserIdColumn = await knex.schema.hasColumn(TableName.SecretSharing, "userId"); + + if (await knex.schema.hasTable(TableName.SecretSharing)) { + await knex.schema.alterTable(TableName.SecretSharing, (t) => { + if (hasOrgIdColumn) t.uuid("orgId").nullable().alter(); + if (hasUserIdColumn) t.uuid("userId").nullable().alter(); + }); + } +} + +export async function down(knex: Knex): Promise { + const hasOrgIdColumn = await 
knex.schema.hasColumn(TableName.SecretSharing, "orgId"); + const hasUserIdColumn = await knex.schema.hasColumn(TableName.SecretSharing, "userId"); + + if (await knex.schema.hasTable(TableName.SecretSharing)) { + await knex.schema.alterTable(TableName.SecretSharing, (t) => { + if (hasOrgIdColumn) t.uuid("orgId").notNullable().alter(); + if (hasUserIdColumn) t.uuid("userId").notNullable().alter(); + }); + } +} diff --git a/backend/src/db/migrations/20240624161942_add-oidc-auth.ts b/backend/src/db/migrations/20240624161942_add-oidc-auth.ts new file mode 100644 index 0000000000..3f4b0636d2 --- /dev/null +++ b/backend/src/db/migrations/20240624161942_add-oidc-auth.ts @@ -0,0 +1,49 @@ +import { Knex } from "knex"; + +import { TableName } from "../schemas"; + +export async function up(knex: Knex): Promise { + if (!(await knex.schema.hasTable(TableName.OidcConfig))) { + await knex.schema.createTable(TableName.OidcConfig, (tb) => { + tb.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid()); + tb.string("discoveryURL"); + tb.string("issuer"); + tb.string("authorizationEndpoint"); + tb.string("jwksUri"); + tb.string("tokenEndpoint"); + tb.string("userinfoEndpoint"); + tb.text("encryptedClientId").notNullable(); + tb.string("configurationType").notNullable(); + tb.string("clientIdIV").notNullable(); + tb.string("clientIdTag").notNullable(); + tb.text("encryptedClientSecret").notNullable(); + tb.string("clientSecretIV").notNullable(); + tb.string("clientSecretTag").notNullable(); + tb.string("allowedEmailDomains").nullable(); + tb.boolean("isActive").notNullable(); + tb.timestamps(true, true, true); + tb.uuid("orgId").notNullable().unique(); + tb.foreign("orgId").references("id").inTable(TableName.Organization); + }); + } + + if (await knex.schema.hasTable(TableName.SuperAdmin)) { + if (!(await knex.schema.hasColumn(TableName.SuperAdmin, "trustOidcEmails"))) { + await knex.schema.alterTable(TableName.SuperAdmin, (tb) => { + tb.boolean("trustOidcEmails").defaultTo(false); + }); + } + } +} + +export async function down(knex: Knex): Promise { + await knex.schema.dropTableIfExists(TableName.OidcConfig); + + if (await knex.schema.hasTable(TableName.SuperAdmin)) { + if (await knex.schema.hasColumn(TableName.SuperAdmin, "trustOidcEmails")) { + await knex.schema.alterTable(TableName.SuperAdmin, (t) => { + t.dropColumn("trustOidcEmails"); + }); + } + } +} diff --git a/backend/src/db/migrations/20240624172027_default-saml-ldap-org.ts b/backend/src/db/migrations/20240624172027_default-saml-ldap-org.ts new file mode 100644 index 0000000000..fec132df42 --- /dev/null +++ b/backend/src/db/migrations/20240624172027_default-saml-ldap-org.ts @@ -0,0 +1,27 @@ +import { Knex } from "knex"; + +import { TableName } from "../schemas"; + +const DEFAULT_AUTH_ORG_ID_FIELD = "defaultAuthOrgId"; + +export async function up(knex: Knex): Promise { + const hasDefaultOrgColumn = await knex.schema.hasColumn(TableName.SuperAdmin, DEFAULT_AUTH_ORG_ID_FIELD); + + await knex.schema.alterTable(TableName.SuperAdmin, (t) => { + if (!hasDefaultOrgColumn) { + t.uuid(DEFAULT_AUTH_ORG_ID_FIELD).nullable(); + t.foreign(DEFAULT_AUTH_ORG_ID_FIELD).references("id").inTable(TableName.Organization).onDelete("SET NULL"); + } + }); +} + +export async function down(knex: Knex): Promise { + const hasDefaultOrgColumn = await knex.schema.hasColumn(TableName.SuperAdmin, DEFAULT_AUTH_ORG_ID_FIELD); + + await knex.schema.alterTable(TableName.SuperAdmin, (t) => { + if (hasDefaultOrgColumn) { + t.dropForeign([DEFAULT_AUTH_ORG_ID_FIELD]); + 
t.dropColumn(DEFAULT_AUTH_ORG_ID_FIELD); + } + }); +} diff --git a/backend/src/db/migrations/20240624221840_certificate-alt-names.ts b/backend/src/db/migrations/20240624221840_certificate-alt-names.ts new file mode 100644 index 0000000000..fa076b7b4b --- /dev/null +++ b/backend/src/db/migrations/20240624221840_certificate-alt-names.ts @@ -0,0 +1,24 @@ +import { Knex } from "knex"; + +import { TableName } from "../schemas"; + +export async function up(knex: Knex): Promise { + if (await knex.schema.hasTable(TableName.Certificate)) { + const hasAltNamesColumn = await knex.schema.hasColumn(TableName.Certificate, "altNames"); + if (!hasAltNamesColumn) { + await knex.schema.alterTable(TableName.Certificate, (t) => { + t.string("altNames").defaultTo(""); + }); + } + } +} + +export async function down(knex: Knex): Promise { + if (await knex.schema.hasTable(TableName.Certificate)) { + if (await knex.schema.hasColumn(TableName.Certificate, "altNames")) { + await knex.schema.alterTable(TableName.Certificate, (t) => { + t.dropColumn("altNames"); + }); + } + } +} diff --git a/backend/src/db/migrations/20240626111536_integration-auth-aws-assume-role.ts b/backend/src/db/migrations/20240626111536_integration-auth-aws-assume-role.ts new file mode 100644 index 0000000000..0d556a1c08 --- /dev/null +++ b/backend/src/db/migrations/20240626111536_integration-auth-aws-assume-role.ts @@ -0,0 +1,35 @@ +import { Knex } from "knex"; + +import { TableName } from "../schemas"; + +export async function up(knex: Knex): Promise { + const hasAwsAssumeRoleCipherText = await knex.schema.hasColumn( + TableName.IntegrationAuth, + "awsAssumeIamRoleArnCipherText" + ); + const hasAwsAssumeRoleIV = await knex.schema.hasColumn(TableName.IntegrationAuth, "awsAssumeIamRoleArnIV"); + const hasAwsAssumeRoleTag = await knex.schema.hasColumn(TableName.IntegrationAuth, "awsAssumeIamRoleArnTag"); + if (await knex.schema.hasTable(TableName.IntegrationAuth)) { + await knex.schema.alterTable(TableName.IntegrationAuth, (t) => { + if (!hasAwsAssumeRoleCipherText) t.text("awsAssumeIamRoleArnCipherText"); + if (!hasAwsAssumeRoleIV) t.text("awsAssumeIamRoleArnIV"); + if (!hasAwsAssumeRoleTag) t.text("awsAssumeIamRoleArnTag"); + }); + } +} + +export async function down(knex: Knex): Promise { + const hasAwsAssumeRoleCipherText = await knex.schema.hasColumn( + TableName.IntegrationAuth, + "awsAssumeIamRoleArnCipherText" + ); + const hasAwsAssumeRoleIV = await knex.schema.hasColumn(TableName.IntegrationAuth, "awsAssumeIamRoleArnIV"); + const hasAwsAssumeRoleTag = await knex.schema.hasColumn(TableName.IntegrationAuth, "awsAssumeIamRoleArnTag"); + if (await knex.schema.hasTable(TableName.IntegrationAuth)) { + await knex.schema.alterTable(TableName.IntegrationAuth, (t) => { + if (hasAwsAssumeRoleCipherText) t.dropColumn("awsAssumeIamRoleArnCipherText"); + if (hasAwsAssumeRoleIV) t.dropColumn("awsAssumeIamRoleArnIV"); + if (hasAwsAssumeRoleTag) t.dropColumn("awsAssumeIamRoleArnTag"); + }); + } +} diff --git a/backend/src/db/migrations/20240626115035_admin-login-method-config.ts b/backend/src/db/migrations/20240626115035_admin-login-method-config.ts new file mode 100644 index 0000000000..8748fe753e --- /dev/null +++ b/backend/src/db/migrations/20240626115035_admin-login-method-config.ts @@ -0,0 +1,19 @@ +import { Knex } from "knex"; + +import { TableName } from "../schemas"; + +export async function up(knex: Knex): Promise { + if (!(await knex.schema.hasColumn(TableName.SuperAdmin, "enabledLoginMethods"))) { + await 
knex.schema.alterTable(TableName.SuperAdmin, (tb) => { + tb.specificType("enabledLoginMethods", "text[]"); + }); + } +} + +export async function down(knex: Knex): Promise { + if (await knex.schema.hasColumn(TableName.SuperAdmin, "enabledLoginMethods")) { + await knex.schema.alterTable(TableName.SuperAdmin, (t) => { + t.dropColumn("enabledLoginMethods"); + }); + } +} diff --git a/backend/src/db/migrations/20240626171758_add-ldap-unique-user-attribute.ts b/backend/src/db/migrations/20240626171758_add-ldap-unique-user-attribute.ts new file mode 100644 index 0000000000..dc87ff5153 --- /dev/null +++ b/backend/src/db/migrations/20240626171758_add-ldap-unique-user-attribute.ts @@ -0,0 +1,19 @@ +import { Knex } from "knex"; + +import { TableName } from "../schemas"; + +export async function up(knex: Knex): Promise { + if (!(await knex.schema.hasColumn(TableName.LdapConfig, "uniqueUserAttribute"))) { + await knex.schema.alterTable(TableName.LdapConfig, (tb) => { + tb.string("uniqueUserAttribute").notNullable().defaultTo(""); + }); + } +} + +export async function down(knex: Knex): Promise { + if (await knex.schema.hasColumn(TableName.LdapConfig, "uniqueUserAttribute")) { + await knex.schema.alterTable(TableName.LdapConfig, (t) => { + t.dropColumn("uniqueUserAttribute"); + }); + } +} diff --git a/backend/src/db/migrations/20240626171943_configurable-audit-log-retention.ts b/backend/src/db/migrations/20240626171943_configurable-audit-log-retention.ts new file mode 100644 index 0000000000..6ac4b6fe1c --- /dev/null +++ b/backend/src/db/migrations/20240626171943_configurable-audit-log-retention.ts @@ -0,0 +1,19 @@ +import { Knex } from "knex"; + +import { TableName } from "../schemas"; + +export async function up(knex: Knex): Promise { + if (!(await knex.schema.hasColumn(TableName.Project, "auditLogsRetentionDays"))) { + await knex.schema.alterTable(TableName.Project, (tb) => { + tb.integer("auditLogsRetentionDays").nullable(); + }); + } +} + +export async function down(knex: Knex): Promise { + if (await knex.schema.hasColumn(TableName.Project, "auditLogsRetentionDays")) { + await knex.schema.alterTable(TableName.Project, (t) => { + t.dropColumn("auditLogsRetentionDays"); + }); + } +} diff --git a/backend/src/db/migrations/20240627173239_add-oidc-updated-at-trigger.ts b/backend/src/db/migrations/20240627173239_add-oidc-updated-at-trigger.ts new file mode 100644 index 0000000000..e0d93d8a29 --- /dev/null +++ b/backend/src/db/migrations/20240627173239_add-oidc-updated-at-trigger.ts @@ -0,0 +1,12 @@ +import { Knex } from "knex"; + +import { TableName } from "../schemas"; +import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils"; + +export async function up(knex: Knex): Promise { + await createOnUpdateTrigger(knex, TableName.OidcConfig); +} + +export async function down(knex: Knex): Promise { + await dropOnUpdateTrigger(knex, TableName.OidcConfig); +} diff --git a/backend/src/db/migrations/20240701143900_member-project-favorite.ts b/backend/src/db/migrations/20240701143900_member-project-favorite.ts new file mode 100644 index 0000000000..0021cca4cb --- /dev/null +++ b/backend/src/db/migrations/20240701143900_member-project-favorite.ts @@ -0,0 +1,19 @@ +import { Knex } from "knex"; + +import { TableName } from "../schemas"; + +export async function up(knex: Knex): Promise { + if (!(await knex.schema.hasColumn(TableName.OrgMembership, "projectFavorites"))) { + await knex.schema.alterTable(TableName.OrgMembership, (tb) => { + tb.specificType("projectFavorites", "text[]"); + }); + } +} + +export 
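Both enabledLoginMethods and projectFavorites above use a native Postgres text[] column rather than a join table; knex has no array-column builder, so specificType passes the type string through verbatim, and array mutations typically go through knex.raw. A sketch against the projectFavorites column (the ids are illustrative):

// Append one project id to a membership's favorites list in place.
await knex(TableName.OrgMembership)
  .where({ id: membershipId })
  .update({
    projectFavorites: knex.raw(`array_append("projectFavorites", ?)`, [projectId])
  });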
async function down(knex: Knex): Promise { + if (await knex.schema.hasColumn(TableName.OrgMembership, "projectFavorites")) { + await knex.schema.alterTable(TableName.OrgMembership, (t) => { + t.dropColumn("projectFavorites"); + }); + } +} diff --git a/backend/src/db/migrations/20240702055253_add-encrypted-webhook-url.ts b/backend/src/db/migrations/20240702055253_add-encrypted-webhook-url.ts new file mode 100644 index 0000000000..8762dde10a --- /dev/null +++ b/backend/src/db/migrations/20240702055253_add-encrypted-webhook-url.ts @@ -0,0 +1,53 @@ +import { Knex } from "knex"; + +import { WebhookType } from "@app/services/webhook/webhook-types"; + +import { TableName } from "../schemas"; + +export async function up(knex: Knex): Promise { + const hasUrlCipherText = await knex.schema.hasColumn(TableName.Webhook, "urlCipherText"); + const hasUrlIV = await knex.schema.hasColumn(TableName.Webhook, "urlIV"); + const hasUrlTag = await knex.schema.hasColumn(TableName.Webhook, "urlTag"); + const hasType = await knex.schema.hasColumn(TableName.Webhook, "type"); + + if (await knex.schema.hasTable(TableName.Webhook)) { + await knex.schema.alterTable(TableName.Webhook, (tb) => { + if (!hasUrlCipherText) { + tb.text("urlCipherText"); + } + if (!hasUrlIV) { + tb.string("urlIV"); + } + if (!hasUrlTag) { + tb.string("urlTag"); + } + if (!hasType) { + tb.string("type").defaultTo(WebhookType.GENERAL); + } + }); + } +} + +export async function down(knex: Knex): Promise { + const hasUrlCipherText = await knex.schema.hasColumn(TableName.Webhook, "urlCipherText"); + const hasUrlIV = await knex.schema.hasColumn(TableName.Webhook, "urlIV"); + const hasUrlTag = await knex.schema.hasColumn(TableName.Webhook, "urlTag"); + const hasType = await knex.schema.hasColumn(TableName.Webhook, "type"); + + if (await knex.schema.hasTable(TableName.Webhook)) { + await knex.schema.alterTable(TableName.Webhook, (t) => { + if (hasUrlCipherText) { + t.dropColumn("urlCipherText"); + } + if (hasUrlIV) { + t.dropColumn("urlIV"); + } + if (hasUrlTag) { + t.dropColumn("urlTag"); + } + if (hasType) { + t.dropColumn("type"); + } + }); + } +} diff --git a/backend/src/db/migrations/20240702131735_secret-approval-groups.ts b/backend/src/db/migrations/20240702131735_secret-approval-groups.ts new file mode 100644 index 0000000000..84824ac652 --- /dev/null +++ b/backend/src/db/migrations/20240702131735_secret-approval-groups.ts @@ -0,0 +1,221 @@ +import { Knex } from "knex"; + +import { TableName } from "../schemas"; + +export async function up(knex: Knex): Promise { + // migrate secret approval policy approvers to user id + const hasApproverUserId = await knex.schema.hasColumn(TableName.SecretApprovalPolicyApprover, "approverUserId"); + const hasApproverId = await knex.schema.hasColumn(TableName.SecretApprovalPolicyApprover, "approverId"); + if (!hasApproverUserId) { + // add the new fields + await knex.schema.alterTable(TableName.SecretApprovalPolicyApprover, (tb) => { + // if (hasApproverId) tb.setNullable("approverId"); + tb.uuid("approverUserId"); + tb.foreign("approverUserId").references("id").inTable(TableName.Users).onDelete("CASCADE"); + }); + + // convert project membership id => user id + await knex(TableName.SecretApprovalPolicyApprover).update({ + // eslint-disable-next-line + // @ts-ignore because generate schema happens after this + approverUserId: knex(TableName.ProjectMembership) + .select("userId") + .where("id", knex.raw("??", [`${TableName.SecretApprovalPolicyApprover}.approverId`])) + }); + // drop the old field + await 
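The urlCipherText / urlIV / urlTag triplet added to the webhook table above is the classic AES-GCM shape: ciphertext, nonce, and authentication tag stored side by side. The project's crypto helpers are outside this diff, so purely as an illustration of consuming such a triplet with node:crypto — the base64 encodings and the 32-byte key here are assumptions:

import crypto from "node:crypto";

const decryptWebhookUrl = (key: Buffer, urlCipherText: string, urlIV: string, urlTag: string): string => {
  const decipher = crypto.createDecipheriv("aes-256-gcm", key, Buffer.from(urlIV, "base64"));
  decipher.setAuthTag(Buffer.from(urlTag, "base64")); // final() throws if the stored URL was tampered with
  return decipher.update(urlCipherText, "base64", "utf8") + decipher.final("utf8");
};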
knex.schema.alterTable(TableName.SecretApprovalPolicyApprover, (tb) => { + if (hasApproverId) tb.dropColumn("approverId"); + tb.uuid("approverUserId").notNullable().alter(); + }); + } + + // migrate secret approval request committer and statusChangeBy to user id + const hasSecretApprovalRequestTable = await knex.schema.hasTable(TableName.SecretApprovalRequest); + const hasCommitterUserId = await knex.schema.hasColumn(TableName.SecretApprovalRequest, "committerUserId"); + const hasCommitterId = await knex.schema.hasColumn(TableName.SecretApprovalRequest, "committerId"); + const hasStatusChangeBy = await knex.schema.hasColumn(TableName.SecretApprovalRequest, "statusChangeBy"); + const hasStatusChangedByUserId = await knex.schema.hasColumn( + TableName.SecretApprovalRequest, + "statusChangedByUserId" + ); + if (hasSecretApprovalRequestTable) { + // new fields + await knex.schema.alterTable(TableName.SecretApprovalRequest, (tb) => { + // if (hasCommitterId) tb.setNullable("committerId"); + if (!hasCommitterUserId) { + tb.uuid("committerUserId"); + tb.foreign("committerUserId").references("id").inTable(TableName.Users).onDelete("SET NULL"); + } + if (!hasStatusChangedByUserId) { + tb.uuid("statusChangedByUserId"); + tb.foreign("statusChangedByUserId").references("id").inTable(TableName.Users).onDelete("SET NULL"); + } + }); + + // copy the assigned project membership => user id to new fields + await knex(TableName.SecretApprovalRequest).update({ + // eslint-disable-next-line + // @ts-ignore because generate schema happens after this + committerUserId: knex(TableName.ProjectMembership) + .select("userId") + .where("id", knex.raw("??", [`${TableName.SecretApprovalRequest}.committerId`])), + // eslint-disable-next-line + // @ts-ignore because generate schema happens after this + statusChangedByUserId: knex(TableName.ProjectMembership) + .select("userId") + .where("id", knex.raw("??", [`${TableName.SecretApprovalRequest}.statusChangeBy`])) + }); + // drop old fields + await knex.schema.alterTable(TableName.SecretApprovalRequest, (tb) => { + if (hasStatusChangeBy) tb.dropColumn("statusChangeBy"); + if (hasCommitterId) tb.dropColumn("committerId"); + tb.uuid("committerUserId").notNullable().alter(); + }); + } + + // migrate secret approval request reviewer to user id + const hasMemberId = await knex.schema.hasColumn(TableName.SecretApprovalRequestReviewer, "member"); + const hasReviewerUserId = await knex.schema.hasColumn(TableName.SecretApprovalRequestReviewer, "reviewerUserId"); + if (!hasReviewerUserId) { + // new fields + await knex.schema.alterTable(TableName.SecretApprovalRequestReviewer, (tb) => { + // if (hasMemberId) tb.setNullable("member"); + tb.uuid("reviewerUserId"); + tb.foreign("reviewerUserId").references("id").inTable(TableName.Users).onDelete("SET NULL"); + }); + // copy project membership => user id to new fields + await knex(TableName.SecretApprovalRequestReviewer).update({ + // eslint-disable-next-line + // @ts-ignore because generate schema happens after this + reviewerUserId: knex(TableName.ProjectMembership) + .select("userId") + .where("id", knex.raw("??", [`${TableName.SecretApprovalRequestReviewer}.member`])) + }); + // drop the old column and enforce the new one + await knex.schema.alterTable(TableName.SecretApprovalRequestReviewer, (tb) => { + if (hasMemberId) tb.dropColumn("member"); + tb.uuid("reviewerUserId").notNullable().alter(); + }); + } +} + +export async function down(knex: Knex): Promise<void> { + const hasApproverUserId = await knex.schema.hasColumn(TableName.SecretApprovalPolicyApprover,
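One knex detail the backfills above lean on: in knex.raw, ?? binds an identifier rather than a value, so the inner where clause references a column of the outer row and each backfill executes as a single correlated UPDATE ... SET col = (SELECT ...), not one round trip per row. Reduced to its core:

await knex(TableName.SecretApprovalPolicyApprover).update({
  approverUserId: knex(TableName.ProjectMembership)
    .select("userId")
    // ?? interpolates `<approver table>.approverId` as an identifier,
    // correlating the subquery with the row being updated
    .where("id", knex.raw("??", [`${TableName.SecretApprovalPolicyApprover}.approverId`]))
});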
"approverUserId"); + const hasApproverId = await knex.schema.hasColumn(TableName.SecretApprovalPolicyApprover, "approverId"); + if (hasApproverUserId) { + await knex.schema.alterTable(TableName.SecretApprovalPolicyApprover, (tb) => { + if (!hasApproverId) { + tb.uuid("approverId"); + tb.foreign("approverId").references("id").inTable(TableName.ProjectMembership).onDelete("CASCADE"); + } + }); + + if (!hasApproverId) { + await knex(TableName.SecretApprovalPolicyApprover).update({ + // eslint-disable-next-line + // @ts-ignore because generate schema happens after this + approverId: knex(TableName.ProjectMembership) + .join( + TableName.SecretApprovalPolicy, + `${TableName.SecretApprovalPolicy}.id`, + `${TableName.SecretApprovalPolicyApprover}.policyId` + ) + .join(TableName.Environment, `${TableName.Environment}.id`, `${TableName.SecretApprovalPolicy}.envId`) + .select(knex.ref("id").withSchema(TableName.ProjectMembership)) + .where(`${TableName.ProjectMembership}.projectId`, knex.raw("??", [`${TableName.Environment}.projectId`])) + .where("userId", knex.raw("??", [`${TableName.SecretApprovalPolicyApprover}.approverUserId`])) + }); + await knex.schema.alterTable(TableName.SecretApprovalPolicyApprover, (tb) => { + tb.dropColumn("approverUserId"); + tb.uuid("approverId").notNullable().alter(); + }); + } + } + + const hasSecretApprovalRequestTable = await knex.schema.hasTable(TableName.SecretApprovalRequest); + const hasCommitterUserId = await knex.schema.hasColumn(TableName.SecretApprovalRequest, "committerUserId"); + const hasCommitterId = await knex.schema.hasColumn(TableName.SecretApprovalRequest, "committerId"); + const hasStatusChangeBy = await knex.schema.hasColumn(TableName.SecretApprovalRequest, "statusChangeBy"); + const hasStatusChangedByUser = await knex.schema.hasColumn(TableName.SecretApprovalRequest, "statusChangedByUserId"); + if (hasSecretApprovalRequestTable) { + await knex.schema.alterTable(TableName.SecretApprovalRequest, (tb) => { + // if (hasCommitterId) tb.uuid("committerId").notNullable().alter(); + if (!hasCommitterId) { + tb.uuid("committerId"); + tb.foreign("committerId").references("id").inTable(TableName.ProjectMembership).onDelete("CASCADE"); + } + if (!hasStatusChangeBy) { + tb.uuid("statusChangeBy"); + tb.foreign("statusChangeBy").references("id").inTable(TableName.ProjectMembership).onDelete("SET NULL"); + } + }); + + await knex(TableName.SecretApprovalRequest).update({ + // eslint-disable-next-line + // @ts-ignore because generate schema happens after this + committerId: knex(TableName.ProjectMembership) + .join( + TableName.SecretApprovalPolicy, + `${TableName.SecretApprovalPolicy}.id`, + `${TableName.SecretApprovalRequest}.policyId` + ) + .join(TableName.Environment, `${TableName.Environment}.id`, `${TableName.SecretApprovalPolicy}.envId`) + .where(`${TableName.ProjectMembership}.projectId`, knex.raw("??", [`${TableName.Environment}.projectId`])) + .where("userId", knex.raw("??", [`${TableName.SecretApprovalRequest}.committerUserId`])) + .select(knex.ref("id").withSchema(TableName.ProjectMembership)), + // eslint-disable-next-line + // @ts-ignore because generate schema happens after this + statusChangeBy: knex(TableName.ProjectMembership) + .join( + TableName.SecretApprovalPolicy, + `${TableName.SecretApprovalPolicy}.id`, + `${TableName.SecretApprovalRequest}.policyId` + ) + .join(TableName.Environment, `${TableName.Environment}.id`, `${TableName.SecretApprovalPolicy}.envId`) + .where(`${TableName.ProjectMembership}.projectId`, knex.raw("??", 
[`${TableName.Environment}.projectId`])) + .where("userId", knex.raw("??", [`${TableName.SecretApprovalRequest}.statusChangedByUserId`])) + .select(knex.ref("id").withSchema(TableName.ProjectMembership)) + }); + + await knex.schema.alterTable(TableName.SecretApprovalRequest, (tb) => { + if (hasCommitterUserId) tb.dropColumn("committerUserId"); + if (hasStatusChangedByUser) tb.dropColumn("statusChangedByUserId"); + if (hasCommitterId) tb.uuid("committerId").notNullable().alter(); + }); + } + + const hasMemberId = await knex.schema.hasColumn(TableName.SecretApprovalRequestReviewer, "member"); + const hasReviewerUserId = await knex.schema.hasColumn(TableName.SecretApprovalRequestReviewer, "reviewerUserId"); + if (hasReviewerUserId) { + if (!hasMemberId) { + await knex.schema.alterTable(TableName.SecretApprovalRequestReviewer, (tb) => { + // if (hasMemberId) tb.uuid("member").notNullable().alter(); + tb.uuid("member"); + tb.foreign("member").references("id").inTable(TableName.ProjectMembership).onDelete("CASCADE"); + }); + } + await knex(TableName.SecretApprovalRequestReviewer).update({ + // eslint-disable-next-line + // @ts-ignore because generate schema happens after this + member: knex(TableName.ProjectMembership) + .join( + TableName.SecretApprovalRequest, + `${TableName.SecretApprovalRequest}.id`, + `${TableName.SecretApprovalRequestReviewer}.requestId` + ) + .join( + TableName.SecretApprovalPolicy, + `${TableName.SecretApprovalPolicy}.id`, + `${TableName.SecretApprovalRequest}.policyId` + ) + .join(TableName.Environment, `${TableName.Environment}.id`, `${TableName.SecretApprovalPolicy}.envId`) + .where(`${TableName.ProjectMembership}.projectId`, knex.raw("??", [`${TableName.Environment}.projectId`])) + .where("userId", knex.raw("??", [`${TableName.SecretApprovalRequestReviewer}.reviewerUserId`])) + .select(knex.ref("id").withSchema(TableName.ProjectMembership)) + }); + await knex.schema.alterTable(TableName.SecretApprovalRequestReviewer, (tb) => { + tb.uuid("member").notNullable().alter(); + tb.dropColumn("reviewerUserId"); + }); + } +} diff --git a/backend/src/db/migrations/20240702175124_identity-token-auth.ts b/backend/src/db/migrations/20240702175124_identity-token-auth.ts new file mode 100644 index 0000000000..66ca55b491 --- /dev/null +++ b/backend/src/db/migrations/20240702175124_identity-token-auth.ts @@ -0,0 +1,24 @@ +import { Knex } from "knex"; + +import { TableName } from "../schemas"; +import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils"; + +export async function up(knex: Knex): Promise { + await knex.schema.createTable(TableName.IdentityTokenAuth, (t) => { + t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid()); + t.bigInteger("accessTokenTTL").defaultTo(7200).notNullable(); + t.bigInteger("accessTokenMaxTTL").defaultTo(7200).notNullable(); + t.bigInteger("accessTokenNumUsesLimit").defaultTo(0).notNullable(); + t.jsonb("accessTokenTrustedIps").notNullable(); + t.timestamps(true, true, true); + t.uuid("identityId").notNullable().unique(); + t.foreign("identityId").references("id").inTable(TableName.Identity).onDelete("CASCADE"); + }); + + await createOnUpdateTrigger(knex, TableName.IdentityTokenAuth); +} + +export async function down(knex: Knex): Promise { + await knex.schema.dropTableIfExists(TableName.IdentityTokenAuth); + await dropOnUpdateTrigger(knex, TableName.IdentityTokenAuth); +} diff --git a/backend/src/db/migrations/20240704161322_identity-access-token-name.ts b/backend/src/db/migrations/20240704161322_identity-access-token-name.ts new file 
mode 100644 index 0000000000..8e84dfc4bd --- /dev/null +++ b/backend/src/db/migrations/20240704161322_identity-access-token-name.ts @@ -0,0 +1,24 @@ +import { Knex } from "knex"; + +import { TableName } from "../schemas"; + +export async function up(knex: Knex): Promise { + if (await knex.schema.hasTable(TableName.IdentityAccessToken)) { + const hasNameColumn = await knex.schema.hasColumn(TableName.IdentityAccessToken, "name"); + if (!hasNameColumn) { + await knex.schema.alterTable(TableName.IdentityAccessToken, (t) => { + t.string("name").nullable(); + }); + } + } +} + +export async function down(knex: Knex): Promise { + if (await knex.schema.hasTable(TableName.IdentityAccessToken)) { + if (await knex.schema.hasColumn(TableName.IdentityAccessToken, "name")) { + await knex.schema.alterTable(TableName.IdentityAccessToken, (t) => { + t.dropColumn("name"); + }); + } + } +} diff --git a/backend/src/db/migrations/20240708100026_external-kms.ts b/backend/src/db/migrations/20240708100026_external-kms.ts new file mode 100644 index 0000000000..63390a635b --- /dev/null +++ b/backend/src/db/migrations/20240708100026_external-kms.ts @@ -0,0 +1,256 @@ +import slugify from "@sindresorhus/slugify"; +import { Knex } from "knex"; + +import { alphaNumericNanoId } from "@app/lib/nanoid"; + +import { TableName } from "../schemas"; + +const createInternalKmsTableAndBackfillData = async (knex: Knex) => { + const doesOldKmsKeyTableExist = await knex.schema.hasTable(TableName.KmsKey); + const doesInternalKmsTableExist = await knex.schema.hasTable(TableName.InternalKms); + + // building the internal kms table by filling from old kms table + if (doesOldKmsKeyTableExist && !doesInternalKmsTableExist) { + await knex.schema.createTable(TableName.InternalKms, (tb) => { + tb.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid()); + tb.binary("encryptedKey").notNullable(); + tb.string("encryptionAlgorithm").notNullable(); + tb.integer("version").defaultTo(1).notNullable(); + tb.uuid("kmsKeyId").unique().notNullable(); + tb.foreign("kmsKeyId").references("id").inTable(TableName.KmsKey).onDelete("CASCADE"); + }); + + // copy the old kms and backfill + const oldKmsKey = await knex(TableName.KmsKey).select("version", "encryptedKey", "encryptionAlgorithm", "id"); + if (oldKmsKey.length) { + await knex(TableName.InternalKms).insert( + oldKmsKey.map((el) => ({ + encryptionAlgorithm: el.encryptionAlgorithm, + encryptedKey: el.encryptedKey, + kmsKeyId: el.id, + version: el.version + })) + ); + } + } +}; + +const renameKmsKeyVersionTableAsInternalKmsKeyVersion = async (knex: Knex) => { + const doesOldKmsKeyVersionTableExist = await knex.schema.hasTable(TableName.KmsKeyVersion); + const doesNewKmsKeyVersionTableExist = await knex.schema.hasTable(TableName.InternalKmsKeyVersion); + + if (doesOldKmsKeyVersionTableExist && !doesNewKmsKeyVersionTableExist) { + // because we haven't started using versioning for kms thus no data exist + await knex.schema.renameTable(TableName.KmsKeyVersion, TableName.InternalKmsKeyVersion); + const hasKmsKeyIdColumn = await knex.schema.hasColumn(TableName.InternalKmsKeyVersion, "kmsKeyId"); + const hasInternalKmsIdColumn = await knex.schema.hasColumn(TableName.InternalKmsKeyVersion, "internalKmsId"); + + await knex.schema.alterTable(TableName.InternalKmsKeyVersion, (tb) => { + if (hasKmsKeyIdColumn) tb.dropColumn("kmsKeyId"); + if (!hasInternalKmsIdColumn) { + tb.uuid("internalKmsId").notNullable(); + tb.foreign("internalKmsId").references("id").inTable(TableName.InternalKms).onDelete("CASCADE"); + } 
+ }); + } +}; + +const createExternalKmsKeyTable = async (knex: Knex) => { + const doesExternalKmsServiceExist = await knex.schema.hasTable(TableName.ExternalKms); + if (!doesExternalKmsServiceExist) { + await knex.schema.createTable(TableName.ExternalKms, (tb) => { + tb.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid()); + tb.string("provider").notNullable(); + tb.binary("encryptedProviderInputs").notNullable(); + tb.string("status"); + tb.string("statusDetails"); + tb.uuid("kmsKeyId").unique().notNullable(); + tb.foreign("kmsKeyId").references("id").inTable(TableName.KmsKey).onDelete("CASCADE"); + }); + } +}; + +const removeNonRequiredFieldsFromKmsKeyTableAndBackfillRequiredData = async (knex: Knex) => { + const doesOldKmsKeyTableExist = await knex.schema.hasTable(TableName.KmsKey); + + // strip the key material off the kms-key table (it now lives in the internal kms table) and backfill the fields the new shape requires + if (doesOldKmsKeyTableExist) { + const hasSlugColumn = await knex.schema.hasColumn(TableName.KmsKey, "slug"); + const hasEncryptedKeyColumn = await knex.schema.hasColumn(TableName.KmsKey, "encryptedKey"); + const hasEncryptionAlgorithmColumn = await knex.schema.hasColumn(TableName.KmsKey, "encryptionAlgorithm"); + const hasVersionColumn = await knex.schema.hasColumn(TableName.KmsKey, "version"); + const hasTimestamps = await knex.schema.hasColumn(TableName.KmsKey, "createdAt"); + const hasProjectId = await knex.schema.hasColumn(TableName.KmsKey, "projectId"); + const hasOrgId = await knex.schema.hasColumn(TableName.KmsKey, "orgId"); + + await knex.schema.alterTable(TableName.KmsKey, (tb) => { + if (!hasSlugColumn) tb.string("slug", 32); + if (hasEncryptedKeyColumn) tb.dropColumn("encryptedKey"); + if (hasEncryptionAlgorithmColumn) tb.dropColumn("encryptionAlgorithm"); + if (hasVersionColumn) tb.dropColumn("version"); + if (!hasTimestamps) tb.timestamps(true, true, true); + }); + + // backfill orgId on every kms key because the column is about to become non-nullable + if (hasProjectId && hasOrgId) { + await knex(TableName.KmsKey) + .whereNull("orgId") + .update({ + // eslint-disable-next-line + // @ts-ignore because generate schema happens after this + orgId: knex(TableName.Project) + .select("orgId") + .where("id", knex.raw("??", [`${TableName.KmsKey}.projectId`])) + }); + } + + // backfill missing slugs in kms keys + const missingSlugs = await knex(TableName.KmsKey).whereNull("slug").select("id"); + if (missingSlugs.length) { + await knex(TableName.KmsKey) + // eslint-disable-next-line + // @ts-ignore because generate schema happens after this + .insert(missingSlugs.map(({ id }) => ({ id, slug: slugify(alphaNumericNanoId(8).toLowerCase()) }))) + .onConflict("id") + .merge(); + } + + await knex.schema.alterTable(TableName.KmsKey, (tb) => { + if (hasOrgId) tb.uuid("orgId").notNullable().alter(); + tb.string("slug", 32).notNullable().alter(); + if (hasProjectId) tb.dropColumn("projectId"); + if (hasOrgId) tb.unique(["orgId", "slug"]); + }); + } +}; + +/* + * The goal of this migration is to split the existing kms key into three tables: + * the kms-key table becomes a container table that points to + * either the internal kms key table or the external kms table + */ +export async function up(knex: Knex): Promise { + await createInternalKmsTableAndBackfillData(knex); + await renameKmsKeyVersionTableAsInternalKmsKeyVersion(knex); + await removeNonRequiredFieldsFromKmsKeyTableAndBackfillRequiredData(knex); + await createExternalKmsKeyTable(knex); + + const doesOrgKmsKeyExist = await knex.schema.hasColumn(TableName.Organization, "kmsDefaultKeyId"); + if
(!doesOrgKmsKeyExist) { + await knex.schema.alterTable(TableName.Organization, (tb) => { + tb.uuid("kmsDefaultKeyId").nullable(); + tb.foreign("kmsDefaultKeyId").references("id").inTable(TableName.KmsKey); + }); + } + + const doesProjectKmsSecretManagerKeyExist = await knex.schema.hasColumn(TableName.Project, "kmsSecretManagerKeyId"); + if (!doesProjectKmsSecretManagerKeyExist) { + await knex.schema.alterTable(TableName.Project, (tb) => { + tb.uuid("kmsSecretManagerKeyId").nullable(); + tb.foreign("kmsSecretManagerKeyId").references("id").inTable(TableName.KmsKey); + }); + } +} + +const renameInternalKmsKeyVersionBackToKmsKeyVersion = async (knex: Knex) => { + const doesInternalKmsKeyVersionTableExist = await knex.schema.hasTable(TableName.InternalKmsKeyVersion); + const doesKmsKeyVersionTableExist = await knex.schema.hasTable(TableName.KmsKeyVersion); + if (doesInternalKmsKeyVersionTableExist && !doesKmsKeyVersionTableExist) { + // safe to rename back: kms key versioning hasn't been used yet, so the table holds no data + await knex.schema.renameTable(TableName.InternalKmsKeyVersion, TableName.KmsKeyVersion); + const hasInternalKmsIdColumn = await knex.schema.hasColumn(TableName.KmsKeyVersion, "internalKmsId"); + const hasKmsKeyIdColumn = await knex.schema.hasColumn(TableName.KmsKeyVersion, "kmsKeyId"); + + await knex.schema.alterTable(TableName.KmsKeyVersion, (tb) => { + if (hasInternalKmsIdColumn) tb.dropColumn("internalKmsId"); + if (!hasKmsKeyIdColumn) { + tb.uuid("kmsKeyId").notNullable(); + tb.foreign("kmsKeyId").references("id").inTable(TableName.KmsKey).onDelete("CASCADE"); + } + }); + } +}; + +const bringBackKmsKeyFields = async (knex: Knex) => { + const doesOldKmsKeyTableExist = await knex.schema.hasTable(TableName.KmsKey); + const doesInternalKmsTableExist = await knex.schema.hasTable(TableName.InternalKms); + if (doesOldKmsKeyTableExist && doesInternalKmsTableExist) { + const hasSlug = await knex.schema.hasColumn(TableName.KmsKey, "slug"); + const hasEncryptedKeyColumn = await knex.schema.hasColumn(TableName.KmsKey, "encryptedKey"); + const hasEncryptionAlgorithmColumn = await knex.schema.hasColumn(TableName.KmsKey, "encryptionAlgorithm"); + const hasVersionColumn = await knex.schema.hasColumn(TableName.KmsKey, "version"); + const hasNullableOrgId = await knex.schema.hasColumn(TableName.KmsKey, "orgId"); + const hasProjectIdColumn = await knex.schema.hasColumn(TableName.KmsKey, "projectId"); + + await knex.schema.alterTable(TableName.KmsKey, (tb) => { + if (!hasEncryptedKeyColumn) tb.binary("encryptedKey"); + if (!hasEncryptionAlgorithmColumn) tb.string("encryptionAlgorithm"); + if (!hasVersionColumn) tb.integer("version").defaultTo(1); + if (hasNullableOrgId) tb.uuid("orgId").nullable().alter(); + if (!hasProjectIdColumn) { + tb.string("projectId"); + tb.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE"); + } + if (hasSlug) tb.dropColumn("slug"); + }); + } +}; + +const backfillKmsKeyFromInternalKmsTable = async (knex: Knex) => { + const doesOldKmsKeyTableExist = await knex.schema.hasTable(TableName.KmsKey); + const doesInternalKmsTableExist = await knex.schema.hasTable(TableName.InternalKms); + if (doesInternalKmsTableExist && doesOldKmsKeyTableExist) { + // backfill the kms key table with the internal kms data + await knex(TableName.KmsKey).update({ + // eslint-disable-next-line + // @ts-ignore because generate schema happens after this + encryptedKey: knex(TableName.InternalKms) + .select("encryptedKey") + .where("kmsKeyId", knex.raw("??", [`${TableName.KmsKey}.id`])), + 
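// each assignment in this update compiles to a correlated scalar subquery; as a rough illustrative sketch (identifiers are + // schematic placeholders, not Knex's exact output): SET "encryptedKey" = (SELECT "encryptedKey" FROM <internal kms table> WHERE "kmsKeyId" = <kms key table>.id) + 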
// eslint-disable-next-line + // @ts-ignore because generate schema happens after this + encryptionAlgorithm: knex(TableName.InternalKms) + .select("encryptionAlgorithm") + .where("kmsKeyId", knex.raw("??", [`${TableName.KmsKey}.id`])), + // eslint-disable-next-line + // @ts-ignore because generate schema happens after this + projectId: knex(TableName.Project) + .select("id") + .where("kmsCertificateKeyId", knex.raw("??", [`${TableName.KmsKey}.id`])) + }); + } +}; + +export async function down(knex: Knex): Promise { + const doesOrgKmsKeyExist = await knex.schema.hasColumn(TableName.Organization, "kmsDefaultKeyId"); + if (doesOrgKmsKeyExist) { + await knex.schema.alterTable(TableName.Organization, (tb) => { + tb.dropColumn("kmsDefaultKeyId"); + }); + } + + const doesProjectKmsSecretManagerKeyExist = await knex.schema.hasColumn(TableName.Project, "kmsSecretManagerKeyId"); + if (doesProjectKmsSecretManagerKeyExist) { + await knex.schema.alterTable(TableName.Project, (tb) => { + tb.dropColumn("kmsSecretManagerKeyId"); + }); + } + + await renameInternalKmsKeyVersionBackToKmsKeyVersion(knex); + await bringBackKmsKeyFields(knex); + await backfillKmsKeyFromInternalKmsTable(knex); + + const doesOldKmsKeyTableExist = await knex.schema.hasTable(TableName.KmsKey); + if (doesOldKmsKeyTableExist) { + await knex.schema.alterTable(TableName.KmsKey, (tb) => { + tb.binary("encryptedKey").notNullable().alter(); + tb.string("encryptionAlgorithm").notNullable().alter(); + }); + } + + const doesInternalKmsTableExist = await knex.schema.hasTable(TableName.InternalKms); + if (doesInternalKmsTableExist) await knex.schema.dropTable(TableName.InternalKms); + + const doesExternalKmsServiceExist = await knex.schema.hasTable(TableName.ExternalKms); + if (doesExternalKmsServiceExist) await knex.schema.dropTable(TableName.ExternalKms); +} diff --git a/backend/src/db/migrations/20240710045107_identity-oidc-auth.ts b/backend/src/db/migrations/20240710045107_identity-oidc-auth.ts new file mode 100644 index 0000000000..fbf5db2a09 --- /dev/null +++ b/backend/src/db/migrations/20240710045107_identity-oidc-auth.ts @@ -0,0 +1,34 @@ +import { Knex } from "knex"; + +import { TableName } from "../schemas"; +import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils"; + +export async function up(knex: Knex): Promise { + if (!(await knex.schema.hasTable(TableName.IdentityOidcAuth))) { + await knex.schema.createTable(TableName.IdentityOidcAuth, (t) => { + t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid()); + t.bigInteger("accessTokenTTL").defaultTo(7200).notNullable(); + t.bigInteger("accessTokenMaxTTL").defaultTo(7200).notNullable(); + t.bigInteger("accessTokenNumUsesLimit").defaultTo(0).notNullable(); + t.jsonb("accessTokenTrustedIps").notNullable(); + t.uuid("identityId").notNullable().unique(); + t.foreign("identityId").references("id").inTable(TableName.Identity).onDelete("CASCADE"); + t.string("oidcDiscoveryUrl").notNullable(); + t.text("encryptedCaCert").notNullable(); + t.string("caCertIV").notNullable(); + t.string("caCertTag").notNullable(); + t.string("boundIssuer").notNullable(); + t.string("boundAudiences").notNullable(); + t.jsonb("boundClaims").notNullable(); + t.string("boundSubject"); + t.timestamps(true, true, true); + }); + + await createOnUpdateTrigger(knex, TableName.IdentityOidcAuth); + } +} + +export async function down(knex: Knex): Promise { + await knex.schema.dropTableIfExists(TableName.IdentityOidcAuth); + await dropOnUpdateTrigger(knex, TableName.IdentityOidcAuth); +} diff --git 
a/backend/src/db/migrations/20240715113110_org-membership-active-status.ts b/backend/src/db/migrations/20240715113110_org-membership-active-status.ts new file mode 100644 index 0000000000..ddb8794dbd --- /dev/null +++ b/backend/src/db/migrations/20240715113110_org-membership-active-status.ts @@ -0,0 +1,25 @@ +import { Knex } from "knex"; + +import { TableName } from "../schemas"; + +export async function up(knex: Knex): Promise { + if (await knex.schema.hasTable(TableName.OrgMembership)) { + const doesUserIdExist = await knex.schema.hasColumn(TableName.OrgMembership, "userId"); + const doesOrgIdExist = await knex.schema.hasColumn(TableName.OrgMembership, "orgId"); + await knex.schema.alterTable(TableName.OrgMembership, (t) => { + t.boolean("isActive").notNullable().defaultTo(true); + if (doesUserIdExist && doesOrgIdExist) t.index(["userId", "orgId"]); + }); + } +} + +export async function down(knex: Knex): Promise { + if (await knex.schema.hasTable(TableName.OrgMembership)) { + const doesUserIdExist = await knex.schema.hasColumn(TableName.OrgMembership, "userId"); + const doesOrgIdExist = await knex.schema.hasColumn(TableName.OrgMembership, "orgId"); + await knex.schema.alterTable(TableName.OrgMembership, (t) => { + t.dropColumn("isActive"); + if (doesUserIdExist && doesOrgIdExist) t.dropIndex(["userId", "orgId"]); + }); + } +} diff --git a/backend/src/db/migrations/20240717184929_add-enforcement-level-secrets-policies.ts b/backend/src/db/migrations/20240717184929_add-enforcement-level-secrets-policies.ts new file mode 100644 index 0000000000..ee668d1454 --- /dev/null +++ b/backend/src/db/migrations/20240717184929_add-enforcement-level-secrets-policies.ts @@ -0,0 +1,23 @@ +import { Knex } from "knex"; + +import { EnforcementLevel } from "@app/lib/types"; + +import { TableName } from "../schemas"; + +export async function up(knex: Knex): Promise { + const hasColumn = await knex.schema.hasColumn(TableName.SecretApprovalPolicy, "enforcementLevel"); + if (!hasColumn) { + await knex.schema.table(TableName.SecretApprovalPolicy, (table) => { + table.string("enforcementLevel", 10).notNullable().defaultTo(EnforcementLevel.Hard); + }); + } +} + +export async function down(knex: Knex): Promise { + const hasColumn = await knex.schema.hasColumn(TableName.SecretApprovalPolicy, "enforcementLevel"); + if (hasColumn) { + await knex.schema.table(TableName.SecretApprovalPolicy, (table) => { + table.dropColumn("enforcementLevel"); + }); + } +} diff --git a/backend/src/db/migrations/20240717194958_add-enforcement-level-access-policies.ts b/backend/src/db/migrations/20240717194958_add-enforcement-level-access-policies.ts new file mode 100644 index 0000000000..83ef072010 --- /dev/null +++ b/backend/src/db/migrations/20240717194958_add-enforcement-level-access-policies.ts @@ -0,0 +1,23 @@ +import { Knex } from "knex"; + +import { EnforcementLevel } from "@app/lib/types"; + +import { TableName } from "../schemas"; + +export async function up(knex: Knex): Promise { + const hasColumn = await knex.schema.hasColumn(TableName.AccessApprovalPolicy, "enforcementLevel"); + if (!hasColumn) { + await knex.schema.table(TableName.AccessApprovalPolicy, (table) => { + table.string("enforcementLevel", 10).notNullable().defaultTo(EnforcementLevel.Hard); + }); + } +} + +export async function down(knex: Knex): Promise { + const hasColumn = await knex.schema.hasColumn(TableName.AccessApprovalPolicy, "enforcementLevel"); + if (hasColumn) { + await knex.schema.table(TableName.AccessApprovalPolicy, (table) => { + 
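// mirrors up(): the column is dropped only when present, so the rollback can be re-run safely + 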
table.dropColumn("enforcementLevel"); + }); + } +} diff --git a/backend/src/db/migrations/20240718170955_add-access-secret-sharing.ts b/backend/src/db/migrations/20240718170955_add-access-secret-sharing.ts new file mode 100644 index 0000000000..705c8d986a --- /dev/null +++ b/backend/src/db/migrations/20240718170955_add-access-secret-sharing.ts @@ -0,0 +1,23 @@ +import { Knex } from "knex"; + +import { SecretSharingAccessType } from "@app/lib/types"; + +import { TableName } from "../schemas"; + +export async function up(knex: Knex): Promise { + const hasColumn = await knex.schema.hasColumn(TableName.SecretSharing, "accessType"); + if (!hasColumn) { + await knex.schema.table(TableName.SecretSharing, (table) => { + table.string("accessType").notNullable().defaultTo(SecretSharingAccessType.Anyone); + }); + } +} + +export async function down(knex: Knex): Promise { + const hasColumn = await knex.schema.hasColumn(TableName.SecretSharing, "accessType"); + if (hasColumn) { + await knex.schema.table(TableName.SecretSharing, (table) => { + table.dropColumn("accessType"); + }); + } +} diff --git a/backend/src/db/migrations/20240719182539_add-bypass-reason-secret-approval-requets.ts b/backend/src/db/migrations/20240719182539_add-bypass-reason-secret-approval-requets.ts new file mode 100644 index 0000000000..6b688dbc39 --- /dev/null +++ b/backend/src/db/migrations/20240719182539_add-bypass-reason-secret-approval-requets.ts @@ -0,0 +1,21 @@ +import { Knex } from "knex"; + +import { TableName } from "../schemas"; + +export async function up(knex: Knex): Promise { + const hasColumn = await knex.schema.hasColumn(TableName.SecretApprovalRequest, "bypassReason"); + if (!hasColumn) { + await knex.schema.table(TableName.SecretApprovalRequest, (table) => { + table.string("bypassReason").nullable(); + }); + } +} + +export async function down(knex: Knex): Promise { + const hasColumn = await knex.schema.hasColumn(TableName.SecretApprovalRequest, "bypassReason"); + if (hasColumn) { + await knex.schema.table(TableName.SecretApprovalRequest, (table) => { + table.dropColumn("bypassReason"); + }); + } +} diff --git a/backend/src/db/migrations/20240724101056_access-request-groups.ts b/backend/src/db/migrations/20240724101056_access-request-groups.ts new file mode 100644 index 0000000000..1576c201c5 --- /dev/null +++ b/backend/src/db/migrations/20240724101056_access-request-groups.ts @@ -0,0 +1,294 @@ +import { Knex } from "knex"; + +import { TableName } from "../schemas"; + +export async function up(knex: Knex): Promise { + // ---------- ACCESS APPROVAL POLICY APPROVER ------------ + const hasApproverUserId = await knex.schema.hasColumn(TableName.AccessApprovalPolicyApprover, "approverUserId"); + const hasApproverId = await knex.schema.hasColumn(TableName.AccessApprovalPolicyApprover, "approverId"); + + if (!hasApproverUserId) { + // add the new fields + await knex.schema.alterTable(TableName.AccessApprovalPolicyApprover, (tb) => { + // if (hasApproverId) tb.setNullable("approverId"); + tb.uuid("approverUserId"); + tb.foreign("approverUserId").references("id").inTable(TableName.Users).onDelete("CASCADE"); + }); + + // convert project membership id => user id + await knex(TableName.AccessApprovalPolicyApprover).update({ + // eslint-disable-next-line + // @ts-ignore because generate schema happens after this + approverUserId: knex(TableName.ProjectMembership) + .select("userId") + .where("id", knex.raw("??", [`${TableName.AccessApprovalPolicyApprover}.approverId`])) + }); + // drop the old field + await 
knex.schema.alterTable(TableName.AccessApprovalPolicyApprover, (tb) => { + if (hasApproverId) tb.dropColumn("approverId"); + tb.uuid("approverUserId").notNullable().alter(); + }); + } + + // ---------- ACCESS APPROVAL REQUEST ------------ + const hasAccessApprovalRequestTable = await knex.schema.hasTable(TableName.AccessApprovalRequest); + const hasRequestedByUserId = await knex.schema.hasColumn(TableName.AccessApprovalRequest, "requestedByUserId"); + const hasRequestedBy = await knex.schema.hasColumn(TableName.AccessApprovalRequest, "requestedBy"); + + if (hasAccessApprovalRequestTable) { + // new fields + await knex.schema.alterTable(TableName.AccessApprovalRequest, (tb) => { + if (!hasRequestedByUserId) { + tb.uuid("requestedByUserId"); + tb.foreign("requestedByUserId").references("id").inTable(TableName.Users).onDelete("SET NULL"); + } + }); + + // copy the assigned project membership => user id to the new fields + await knex(TableName.AccessApprovalRequest).update({ + // eslint-disable-next-line + // @ts-ignore because generate schema happens after this + requestedByUserId: knex(TableName.ProjectMembership) + .select("userId") + .where("id", knex.raw("??", [`${TableName.AccessApprovalRequest}.requestedBy`])) + }); + // drop old fields + await knex.schema.alterTable(TableName.AccessApprovalRequest, (tb) => { + if (hasRequestedBy) { + // DROP AT A LATER TIME + // tb.dropColumn("requestedBy"); + + // ADD ALLOW NULLABLE FOR NOW + tb.uuid("requestedBy").nullable().alter(); + } + tb.uuid("requestedByUserId").notNullable().alter(); + }); + } + + // ---------- ACCESS APPROVAL REQUEST REVIEWER ------------ + const hasMemberId = await knex.schema.hasColumn(TableName.AccessApprovalRequestReviewer, "member"); + const hasReviewerUserId = await knex.schema.hasColumn(TableName.AccessApprovalRequestReviewer, "reviewerUserId"); + if (!hasReviewerUserId) { + // new fields + await knex.schema.alterTable(TableName.AccessApprovalRequestReviewer, (tb) => { + // if (hasMemberId) tb.setNullable("member"); + tb.uuid("reviewerUserId"); + tb.foreign("reviewerUserId").references("id").inTable(TableName.Users).onDelete("SET NULL"); + }); + // copy project membership => user id to the new fields + await knex(TableName.AccessApprovalRequestReviewer).update({ + // eslint-disable-next-line + // @ts-ignore because generate schema happens after this + reviewerUserId: knex(TableName.ProjectMembership) + .select("userId") + .where("id", knex.raw("??", [`${TableName.AccessApprovalRequestReviewer}.member`])) + }); + // retire the old member column (kept nullable for now; dropped at a later time) + await knex.schema.alterTable(TableName.AccessApprovalRequestReviewer, (tb) => { + if (hasMemberId) { + // DROP AT A LATER TIME + // tb.dropColumn("member"); + + // ADD ALLOW NULLABLE FOR NOW + tb.uuid("member").nullable().alter(); + } + tb.uuid("reviewerUserId").notNullable().alter(); + }); + } + + // ---------- PROJECT USER ADDITIONAL PRIVILEGE ------------ + const projectUserAdditionalPrivilegeHasProjectMembershipId = await knex.schema.hasColumn( + TableName.ProjectUserAdditionalPrivilege, + "projectMembershipId" + ); + + const projectUserAdditionalPrivilegeHasUserId = await knex.schema.hasColumn( + TableName.ProjectUserAdditionalPrivilege, + "userId" + ); + + if (!projectUserAdditionalPrivilegeHasUserId) { + await knex.schema.alterTable(TableName.ProjectUserAdditionalPrivilege, (tb) => { + tb.uuid("userId"); + tb.foreign("userId").references("id").inTable(TableName.Users).onDelete("CASCADE"); + + tb.string("projectId"); + 
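// projectId is stored on the privilege itself so privileges created through group access + // can exist without a ProjectMembership row (see the note at the top of down() below) + 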
tb.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE"); + }); + + await knex(TableName.ProjectUserAdditionalPrivilege) + .update({ + // eslint-disable-next-line + // @ts-ignore because generate schema happens after this + userId: knex(TableName.ProjectMembership) + .select("userId") + .where("id", knex.raw("??", [`${TableName.ProjectUserAdditionalPrivilege}.projectMembershipId`])), + + // eslint-disable-next-line + // @ts-ignore because generate schema happens after this + projectId: knex(TableName.ProjectMembership) + .select("projectId") + .where("id", knex.raw("??", [`${TableName.ProjectUserAdditionalPrivilege}.projectMembershipId`])) + }) + .whereNotNull("projectMembershipId"); + + await knex.schema.alterTable(TableName.ProjectUserAdditionalPrivilege, (tb) => { + tb.uuid("userId").notNullable().alter(); + tb.string("projectId").notNullable().alter(); + }); + } + + if (projectUserAdditionalPrivilegeHasProjectMembershipId) { + await knex.schema.alterTable(TableName.ProjectUserAdditionalPrivilege, (tb) => { + // DROP AT A LATER TIME + // tb.dropColumn("projectMembershipId"); + + // ADD ALLOW NULLABLE FOR NOW + tb.uuid("projectMembershipId").nullable().alter(); + }); + } +} + +export async function down(knex: Knex): Promise { + // We remove project user additional privileges first, because it may delete records in the database where the project membership is not found. + // The project membership won't be found on records created by group members. In those cades we just delete the record and continue. + // When the additionl privilege record is deleted, it will cascade delete the access request created by the group member. + + // ---------- PROJECT USER ADDITIONAL PRIVILEGE ------------ + const hasUserId = await knex.schema.hasColumn(TableName.ProjectUserAdditionalPrivilege, "userId"); + const hasProjectMembershipId = await knex.schema.hasColumn( + TableName.ProjectUserAdditionalPrivilege, + "projectMembershipId" + ); + + // If it doesn't have the userId field, then the up migration has not run + if (!hasUserId) { + return; + } + + await knex.schema.alterTable(TableName.ProjectUserAdditionalPrivilege, (tb) => { + if (!hasProjectMembershipId) { + tb.uuid("projectMembershipId"); + tb.foreign("projectMembershipId").references("id").inTable(TableName.ProjectMembership).onDelete("CASCADE"); + } + }); + + if (!hasProjectMembershipId) { + // First, update records where a matching project membership exists + await knex(TableName.ProjectUserAdditionalPrivilege).update({ + // eslint-disable-next-line + // @ts-ignore because generate schema happens after this + projectMembershipId: knex(TableName.ProjectMembership) + .select("id") + .where("userId", knex.raw("??", [`${TableName.ProjectUserAdditionalPrivilege}.userId`])) + }); + + await knex(TableName.AccessApprovalRequest).update({ + // eslint-disable-next-line + // @ts-ignore because generate schema happens after this + projectMembershipId: knex(TableName.ProjectMembership) + .select("id") + .where("userId", knex.raw("??", [`${TableName.SecretApprovalRequest}.userId`])) + }); + + await knex.schema.alterTable(TableName.ProjectUserAdditionalPrivilege, (tb) => { + tb.dropColumn("userId"); + tb.dropColumn("projectId"); + + tb.uuid("projectMembershipId").notNullable().alter(); + }); + } + + // Then, delete records where no matching project membership was found + await knex(TableName.ProjectUserAdditionalPrivilege).whereNull("projectMembershipId").delete(); + await 
knex(TableName.AccessApprovalRequest).whereNull("requestedBy").delete(); + + // ---------- ACCESS APPROVAL POLICY APPROVER ------------ + const hasApproverUserId = await knex.schema.hasColumn(TableName.AccessApprovalPolicyApprover, "approverUserId"); + const hasApproverId = await knex.schema.hasColumn(TableName.AccessApprovalPolicyApprover, "approverId"); + + if (hasApproverUserId) { + await knex.schema.alterTable(TableName.AccessApprovalPolicyApprover, (tb) => { + if (!hasApproverId) { + tb.uuid("approverId"); + tb.foreign("approverId").references("id").inTable(TableName.ProjectMembership).onDelete("CASCADE"); + } + }); + + if (!hasApproverId) { + await knex(TableName.AccessApprovalPolicyApprover).update({ + // eslint-disable-next-line + // @ts-ignore because generate schema happens after this + approverId: knex(TableName.ProjectMembership) + .select("id") + .where("userId", knex.raw("??", [`${TableName.AccessApprovalPolicyApprover}.approverUserId`])) + }); + await knex.schema.alterTable(TableName.AccessApprovalPolicyApprover, (tb) => { + tb.dropColumn("approverUserId"); + + tb.uuid("approverId").notNullable().alter(); + }); + } + + // ---------- ACCESS APPROVAL REQUEST ------------ + const hasAccessApprovalRequestTable = await knex.schema.hasTable(TableName.AccessApprovalRequest); + const hasRequestedByUserId = await knex.schema.hasColumn(TableName.AccessApprovalRequest, "requestedByUserId"); + const hasRequestedBy = await knex.schema.hasColumn(TableName.AccessApprovalRequest, "requestedBy"); + + if (hasAccessApprovalRequestTable) { + await knex.schema.alterTable(TableName.AccessApprovalRequest, (tb) => { + if (!hasRequestedBy) { + tb.uuid("requestedBy"); + tb.foreign("requestedBy").references("id").inTable(TableName.ProjectMembership).onDelete("CASCADE"); + } + }); + + // Try to find a project membership for AccessApprovalRequest.requestedByUserId in the request's project, resolved via + // AccessApprovalRequest.policyId (reference to AccessApprovalRequestPolicy) -> envId (reference to Environment) -> projectId (reference to Project). + // If a project membership is found, set AccessApprovalRequest.requestedBy to the project membership id. + // If a project membership is not found, remove the AccessApprovalRequest record. + + await knex(TableName.AccessApprovalRequest).update({ + // eslint-disable-next-line + // @ts-ignore because generate schema happens after this + requestedBy: knex(TableName.ProjectMembership) + .select("id") + .where("userId", knex.raw("??", [`${TableName.AccessApprovalRequest}.requestedByUserId`])) + }); + + // Then, delete records where no matching project membership was found + await knex(TableName.AccessApprovalRequest).whereNull("requestedBy").delete(); + + await knex.schema.alterTable(TableName.AccessApprovalRequest, (tb) => { + if (hasRequestedByUserId) { + tb.dropColumn("requestedByUserId"); + } + if (hasRequestedBy) tb.uuid("requestedBy").notNullable().alter(); + }); + } + + // ---------- ACCESS APPROVAL REQUEST REVIEWER ------------ + const hasMemberId = await knex.schema.hasColumn(TableName.AccessApprovalRequestReviewer, "member"); + const hasReviewerUserId = await knex.schema.hasColumn(TableName.AccessApprovalRequestReviewer, "reviewerUserId"); + + if (hasReviewerUserId) { + if (!hasMemberId) { + await knex.schema.alterTable(TableName.AccessApprovalRequestReviewer, (tb) => { + tb.uuid("member"); + tb.foreign("member").references("id").inTable(TableName.ProjectMembership).onDelete("CASCADE"); + }); + } + await knex(TableName.AccessApprovalRequestReviewer).update({ + // eslint-disable-next-line 
+ // @ts-ignore because generate schema happens after this + member: knex(TableName.ProjectMembership) + .select("id") + .where("userId", knex.raw("??", [`${TableName.AccessApprovalRequestReviewer}.reviewerUserId`])) + }); + await knex.schema.alterTable(TableName.AccessApprovalRequestReviewer, (tb) => { + tb.dropColumn("reviewerUserId"); + + tb.uuid("member").notNullable().alter(); + }); + } + } +} diff --git a/backend/src/db/migrations/20240728010334_secret-sharing-name.ts b/backend/src/db/migrations/20240728010334_secret-sharing-name.ts new file mode 100644 index 0000000000..5bf43065fa --- /dev/null +++ b/backend/src/db/migrations/20240728010334_secret-sharing-name.ts @@ -0,0 +1,39 @@ +import { Knex } from "knex"; + +import { TableName } from "../schemas"; + +export async function up(knex: Knex): Promise { + if (await knex.schema.hasTable(TableName.SecretSharing)) { + const doesNameExist = await knex.schema.hasColumn(TableName.SecretSharing, "name"); + if (!doesNameExist) { + await knex.schema.alterTable(TableName.SecretSharing, (t) => { + t.string("name").nullable(); + }); + } + + const doesLastViewedAtExist = await knex.schema.hasColumn(TableName.SecretSharing, "lastViewedAt"); + if (!doesLastViewedAtExist) { + await knex.schema.alterTable(TableName.SecretSharing, (t) => { + t.timestamp("lastViewedAt").nullable(); + }); + } + } +} + +export async function down(knex: Knex): Promise { + if (await knex.schema.hasTable(TableName.SecretSharing)) { + const doesNameExist = await knex.schema.hasColumn(TableName.SecretSharing, "name"); + if (doesNameExist) { + await knex.schema.alterTable(TableName.SecretSharing, (t) => { + t.dropColumn("name"); + }); + } + + const doesLastViewedAtExist = await knex.schema.hasColumn(TableName.SecretSharing, "lastViewedAt"); + if (doesLastViewedAtExist) { + await knex.schema.alterTable(TableName.SecretSharing, (t) => { + t.dropColumn("lastViewedAt"); + }); + } + } +} diff --git a/backend/src/db/migrations/20240730181830_add-org-kms-data-key.ts b/backend/src/db/migrations/20240730181830_add-org-kms-data-key.ts new file mode 100644 index 0000000000..13e6a1ef62 --- /dev/null +++ b/backend/src/db/migrations/20240730181830_add-org-kms-data-key.ts @@ -0,0 +1,21 @@ +import { Knex } from "knex"; + +import { TableName } from "../schemas"; + +export async function up(knex: Knex): Promise { + const hasKmsDataKeyCol = await knex.schema.hasColumn(TableName.Organization, "kmsEncryptedDataKey"); + await knex.schema.alterTable(TableName.Organization, (tb) => { + if (!hasKmsDataKeyCol) { + tb.binary("kmsEncryptedDataKey"); + } + }); +} + +export async function down(knex: Knex): Promise { + const hasKmsDataKeyCol = await knex.schema.hasColumn(TableName.Organization, "kmsEncryptedDataKey"); + await knex.schema.alterTable(TableName.Organization, (t) => { + if (hasKmsDataKeyCol) { + t.dropColumn("kmsEncryptedDataKey"); + } + }); +} diff --git a/backend/src/db/migrations/20240730181840_add-project-data-key.ts b/backend/src/db/migrations/20240730181840_add-project-data-key.ts new file mode 100644 index 0000000000..e8ef083a37 --- /dev/null +++ b/backend/src/db/migrations/20240730181840_add-project-data-key.ts @@ -0,0 +1,29 @@ +import { Knex } from "knex"; + +import { TableName } from "../schemas"; + +export async function up(knex: Knex): Promise { + const hasKmsSecretManagerEncryptedDataKey = await knex.schema.hasColumn( + TableName.Project, + "kmsSecretManagerEncryptedDataKey" + ); + + await knex.schema.alterTable(TableName.Project, (tb) => { + if 
(!hasKmsSecretManagerEncryptedDataKey) { + tb.binary("kmsSecretManagerEncryptedDataKey"); + } + }); +} + +export async function down(knex: Knex): Promise { + const hasKmsSecretManagerEncryptedDataKey = await knex.schema.hasColumn( + TableName.Project, + "kmsSecretManagerEncryptedDataKey" + ); + + await knex.schema.alterTable(TableName.Project, (t) => { + if (hasKmsSecretManagerEncryptedDataKey) { + t.dropColumn("kmsSecretManagerEncryptedDataKey"); + } + }); +} diff --git a/backend/src/db/migrations/20240730181850_secret-v2.ts b/backend/src/db/migrations/20240730181850_secret-v2.ts new file mode 100644 index 0000000000..d44c67cf1a --- /dev/null +++ b/backend/src/db/migrations/20240730181850_secret-v2.ts @@ -0,0 +1,181 @@ +/* eslint-disable @typescript-eslint/ban-ts-comment */ +import { Knex } from "knex"; + +import { SecretType, TableName } from "../schemas"; +import { createJunctionTable, createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils"; + +export async function up(knex: Knex): Promise { + const doesSecretV2TableExist = await knex.schema.hasTable(TableName.SecretV2); + if (!doesSecretV2TableExist) { + await knex.schema.createTable(TableName.SecretV2, (t) => { + t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid()); + t.integer("version").defaultTo(1).notNullable(); + t.string("type").notNullable().defaultTo(SecretType.Shared); + t.string("key", 500).notNullable(); + t.binary("encryptedValue"); + t.binary("encryptedComment"); + t.string("reminderNote"); + t.integer("reminderRepeatDays"); + t.boolean("skipMultilineEncoding").defaultTo(false); + t.jsonb("metadata"); + t.uuid("userId"); + t.foreign("userId").references("id").inTable(TableName.Users).onDelete("CASCADE"); + t.uuid("folderId").notNullable(); + t.foreign("folderId").references("id").inTable(TableName.SecretFolder).onDelete("CASCADE"); + t.timestamps(true, true, true); + t.index(["folderId", "userId"]); + }); + } + await createOnUpdateTrigger(knex, TableName.SecretV2); + + // many to many relation between tags + await createJunctionTable(knex, TableName.SecretV2JnTag, TableName.SecretV2, TableName.SecretTag); + + const doesSecretV2VersionTableExist = await knex.schema.hasTable(TableName.SecretVersionV2); + if (!doesSecretV2VersionTableExist) { + await knex.schema.createTable(TableName.SecretVersionV2, (t) => { + t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid()); + t.integer("version").defaultTo(1).notNullable(); + t.string("type").notNullable().defaultTo(SecretType.Shared); + t.string("key", 500).notNullable(); + t.binary("encryptedValue"); + t.binary("encryptedComment"); + t.string("reminderNote"); + t.integer("reminderRepeatDays"); + t.boolean("skipMultilineEncoding").defaultTo(false); + t.jsonb("metadata"); + // to avoid orphan rows + t.uuid("envId"); + t.foreign("envId").references("id").inTable(TableName.Environment).onDelete("CASCADE"); + t.uuid("secretId").notNullable(); + t.uuid("folderId").notNullable(); + t.uuid("userId"); + t.foreign("userId").references("id").inTable(TableName.Users).onDelete("CASCADE"); + t.timestamps(true, true, true); + }); + } + await createOnUpdateTrigger(knex, TableName.SecretVersionV2); + + if (!(await knex.schema.hasTable(TableName.SecretReferenceV2))) { + await knex.schema.createTable(TableName.SecretReferenceV2, (t) => { + t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid()); + t.string("environment").notNullable(); + t.string("secretPath").notNullable(); + t.string("secretKey", 500).notNullable(); + t.uuid("secretId").notNullable(); + 
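// secretId points back at the owning secret; environment, secretPath and secretKey identify the secret it references + 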
t.foreign("secretId").references("id").inTable(TableName.SecretV2).onDelete("CASCADE"); + }); + } + + await createJunctionTable(knex, TableName.SecretVersionV2Tag, TableName.SecretVersionV2, TableName.SecretTag); + + if (!(await knex.schema.hasTable(TableName.SecretApprovalRequestSecretV2))) { + await knex.schema.createTable(TableName.SecretApprovalRequestSecretV2, (t) => { + // everything related to secret + t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid()); + t.integer("version").defaultTo(1); + t.string("key", 500).notNullable(); + t.binary("encryptedValue"); + t.binary("encryptedComment"); + t.string("reminderNote"); + t.integer("reminderRepeatDays"); + t.boolean("skipMultilineEncoding").defaultTo(false); + t.jsonb("metadata"); + t.timestamps(true, true, true); + // commit details + t.uuid("requestId").notNullable(); + t.foreign("requestId").references("id").inTable(TableName.SecretApprovalRequest).onDelete("CASCADE"); + t.string("op").notNullable(); + t.uuid("secretId"); + t.foreign("secretId").references("id").inTable(TableName.SecretV2).onDelete("SET NULL"); + t.uuid("secretVersion"); + t.foreign("secretVersion").references("id").inTable(TableName.SecretVersionV2).onDelete("SET NULL"); + }); + } + + if (!(await knex.schema.hasTable(TableName.SecretApprovalRequestSecretTagV2))) { + await knex.schema.createTable(TableName.SecretApprovalRequestSecretTagV2, (t) => { + t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid()); + t.uuid("secretId").notNullable(); + t.foreign("secretId").references("id").inTable(TableName.SecretApprovalRequestSecretV2).onDelete("CASCADE"); + t.uuid("tagId").notNullable(); + t.foreign("tagId").references("id").inTable(TableName.SecretTag).onDelete("CASCADE"); + t.timestamps(true, true, true); + }); + } + + if (!(await knex.schema.hasTable(TableName.SnapshotSecretV2))) { + await knex.schema.createTable(TableName.SnapshotSecretV2, (t) => { + t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid()); + t.uuid("envId").index().notNullable(); + t.foreign("envId").references("id").inTable(TableName.Environment).onDelete("CASCADE"); + // not a relation kept like that to keep it when rolled back + t.uuid("secretVersionId").index().notNullable(); + t.foreign("secretVersionId").references("id").inTable(TableName.SecretVersionV2).onDelete("CASCADE"); + t.uuid("snapshotId").index().notNullable(); + t.foreign("snapshotId").references("id").inTable(TableName.Snapshot).onDelete("CASCADE"); + t.timestamps(true, true, true); + }); + } + + if (await knex.schema.hasTable(TableName.IntegrationAuth)) { + const hasEncryptedAccess = await knex.schema.hasColumn(TableName.IntegrationAuth, "encryptedAccess"); + const hasEncryptedAccessId = await knex.schema.hasColumn(TableName.IntegrationAuth, "encryptedAccessId"); + const hasEncryptedRefresh = await knex.schema.hasColumn(TableName.IntegrationAuth, "encryptedRefresh"); + const hasEncryptedAwsIamAssumRole = await knex.schema.hasColumn( + TableName.IntegrationAuth, + "encryptedAwsAssumeIamRoleArn" + ); + await knex.schema.alterTable(TableName.IntegrationAuth, (t) => { + if (!hasEncryptedAccess) t.binary("encryptedAccess"); + if (!hasEncryptedAccessId) t.binary("encryptedAccessId"); + if (!hasEncryptedRefresh) t.binary("encryptedRefresh"); + if (!hasEncryptedAwsIamAssumRole) t.binary("encryptedAwsAssumeIamRoleArn"); + }); + } + + if (!(await knex.schema.hasTable(TableName.SecretRotationOutputV2))) { + await knex.schema.createTable(TableName.SecretRotationOutputV2, (t) => { + t.uuid("id", { primaryKey: true 
}).defaultTo(knex.fn.uuid()); + t.string("key").notNullable(); + t.uuid("secretId").notNullable(); + t.foreign("secretId").references("id").inTable(TableName.SecretV2).onDelete("CASCADE"); + t.uuid("rotationId").notNullable(); + t.foreign("rotationId").references("id").inTable(TableName.SecretRotation).onDelete("CASCADE"); + }); + } +} + +export async function down(knex: Knex): Promise { + await knex.schema.dropTableIfExists(TableName.SnapshotSecretV2); + await knex.schema.dropTableIfExists(TableName.SecretApprovalRequestSecretTagV2); + await knex.schema.dropTableIfExists(TableName.SecretApprovalRequestSecretV2); + + await knex.schema.dropTableIfExists(TableName.SecretV2JnTag); + await knex.schema.dropTableIfExists(TableName.SecretReferenceV2); + + await knex.schema.dropTableIfExists(TableName.SecretRotationOutputV2); + + await dropOnUpdateTrigger(knex, TableName.SecretVersionV2); + await knex.schema.dropTableIfExists(TableName.SecretVersionV2Tag); + await knex.schema.dropTableIfExists(TableName.SecretVersionV2); + + await dropOnUpdateTrigger(knex, TableName.SecretV2); + await knex.schema.dropTableIfExists(TableName.SecretV2); + + if (await knex.schema.hasTable(TableName.IntegrationAuth)) { + const hasEncryptedAccess = await knex.schema.hasColumn(TableName.IntegrationAuth, "encryptedAccess"); + const hasEncryptedAccessId = await knex.schema.hasColumn(TableName.IntegrationAuth, "encryptedAccessId"); + const hasEncryptedRefresh = await knex.schema.hasColumn(TableName.IntegrationAuth, "encryptedRefresh"); + const hasEncryptedAwsIamAssumRole = await knex.schema.hasColumn( + TableName.IntegrationAuth, + "encryptedAwsAssumeIamRoleArn" + ); + await knex.schema.alterTable(TableName.IntegrationAuth, (t) => { + if (hasEncryptedAccess) t.dropColumn("encryptedAccess"); + if (hasEncryptedAccessId) t.dropColumn("encryptedAccessId"); + if (hasEncryptedRefresh) t.dropColumn("encryptedRefresh"); + if (hasEncryptedAwsIamAssumRole) t.dropColumn("encryptedAwsAssumeIamRoleArn"); + }); + } +} diff --git a/backend/src/db/migrations/20240802181855_ca-cert-version.ts b/backend/src/db/migrations/20240802181855_ca-cert-version.ts new file mode 100644 index 0000000000..c38a2d42ad --- /dev/null +++ b/backend/src/db/migrations/20240802181855_ca-cert-version.ts @@ -0,0 +1,119 @@ +import { Knex } from "knex"; + +import { TableName } from "../schemas"; + +export async function up(knex: Knex): Promise { + if (await knex.schema.hasTable(TableName.CertificateAuthority)) { + const hasActiveCaCertIdColumn = await knex.schema.hasColumn(TableName.CertificateAuthority, "activeCaCertId"); + if (!hasActiveCaCertIdColumn) { + await knex.schema.alterTable(TableName.CertificateAuthority, (t) => { + t.uuid("activeCaCertId").nullable(); + t.foreign("activeCaCertId").references("id").inTable(TableName.CertificateAuthorityCert); + }); + + await knex.raw(` + UPDATE "${TableName.CertificateAuthority}" ca + SET "activeCaCertId" = cac.id + FROM "${TableName.CertificateAuthorityCert}" cac + WHERE ca.id = cac."caId" + `); + } + } + + if (await knex.schema.hasTable(TableName.CertificateAuthorityCert)) { + const hasVersionColumn = await knex.schema.hasColumn(TableName.CertificateAuthorityCert, "version"); + if (!hasVersionColumn) { + await knex.schema.alterTable(TableName.CertificateAuthorityCert, (t) => { + t.integer("version").nullable(); + t.dropUnique(["caId"]); + }); + + await knex(TableName.CertificateAuthorityCert).update({ version: 1 }).whereNull("version"); + + await knex.schema.alterTable(TableName.CertificateAuthorityCert, (t) => { + 
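// every existing row was backfilled with version = 1 above, so the column can now be tightened to NOT NULL + 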
t.integer("version").notNullable().alter(); + }); + } + + const hasCaSecretIdColumn = await knex.schema.hasColumn(TableName.CertificateAuthorityCert, "caSecretId"); + if (!hasCaSecretIdColumn) { + await knex.schema.alterTable(TableName.CertificateAuthorityCert, (t) => { + t.uuid("caSecretId").nullable(); + t.foreign("caSecretId").references("id").inTable(TableName.CertificateAuthoritySecret).onDelete("CASCADE"); + }); + + await knex.raw(` + UPDATE "${TableName.CertificateAuthorityCert}" cert + SET "caSecretId" = ( + SELECT sec.id + FROM "${TableName.CertificateAuthoritySecret}" sec + WHERE sec."caId" = cert."caId" + ) + `); + + await knex.schema.alterTable(TableName.CertificateAuthorityCert, (t) => { + t.uuid("caSecretId").notNullable().alter(); + }); + } + } + + if (await knex.schema.hasTable(TableName.CertificateAuthoritySecret)) { + await knex.schema.alterTable(TableName.CertificateAuthoritySecret, (t) => { + t.dropUnique(["caId"]); + }); + } + + if (await knex.schema.hasTable(TableName.Certificate)) { + const hasCaCertIdColumn = await knex.schema.hasColumn(TableName.Certificate, "caCertId"); + if (!hasCaCertIdColumn) { + await knex.schema.alterTable(TableName.Certificate, (t) => { + t.uuid("caCertId").nullable(); + t.foreign("caCertId").references("id").inTable(TableName.CertificateAuthorityCert); + }); + + await knex.raw(` + UPDATE "${TableName.Certificate}" cert + SET "caCertId" = ( + SELECT caCert.id + FROM "${TableName.CertificateAuthorityCert}" caCert + WHERE caCert."caId" = cert."caId" + )`); + + await knex.schema.alterTable(TableName.Certificate, (t) => { + t.uuid("caCertId").notNullable().alter(); + }); + } + } +} + +export async function down(knex: Knex): Promise { + if (await knex.schema.hasTable(TableName.CertificateAuthority)) { + if (await knex.schema.hasColumn(TableName.CertificateAuthority, "activeCaCertId")) { + await knex.schema.alterTable(TableName.CertificateAuthority, (t) => { + t.dropColumn("activeCaCertId"); + }); + } + } + + if (await knex.schema.hasTable(TableName.CertificateAuthorityCert)) { + if (await knex.schema.hasColumn(TableName.CertificateAuthorityCert, "version")) { + await knex.schema.alterTable(TableName.CertificateAuthorityCert, (t) => { + t.dropColumn("version"); + }); + } + + if (await knex.schema.hasColumn(TableName.CertificateAuthorityCert, "caSecretId")) { + await knex.schema.alterTable(TableName.CertificateAuthorityCert, (t) => { + t.dropColumn("caSecretId"); + }); + } + } + + if (await knex.schema.hasTable(TableName.Certificate)) { + if (await knex.schema.hasColumn(TableName.Certificate, "caCertId")) { + await knex.schema.alterTable(TableName.Certificate, (t) => { + t.dropColumn("caCertId"); + }); + } + } +} diff --git a/backend/src/db/migrations/20240806083221_secret-sharing-password.ts b/backend/src/db/migrations/20240806083221_secret-sharing-password.ts new file mode 100644 index 0000000000..7e0f5f30f7 --- /dev/null +++ b/backend/src/db/migrations/20240806083221_secret-sharing-password.ts @@ -0,0 +1,25 @@ +import { Knex } from "knex"; + +import { TableName } from "../schemas"; + +export async function up(knex: Knex): Promise { + if (await knex.schema.hasTable(TableName.SecretSharing)) { + const doesPasswordExist = await knex.schema.hasColumn(TableName.SecretSharing, "password"); + if (!doesPasswordExist) { + await knex.schema.alterTable(TableName.SecretSharing, (t) => { + t.string("password").nullable(); + }); + } + } +} + +export async function down(knex: Knex): Promise { + if (await knex.schema.hasTable(TableName.SecretSharing)) { + const 
doesPasswordExist = await knex.schema.hasColumn(TableName.SecretSharing, "password"); + if (doesPasswordExist) { + await knex.schema.alterTable(TableName.SecretSharing, (t) => { + t.dropColumn("password"); + }); + } + } +} diff --git a/backend/src/db/migrations/20240806113425_remove-creation-limit-rate-limit.ts b/backend/src/db/migrations/20240806113425_remove-creation-limit-rate-limit.ts new file mode 100644 index 0000000000..d82e4d65d1 --- /dev/null +++ b/backend/src/db/migrations/20240806113425_remove-creation-limit-rate-limit.ts @@ -0,0 +1,21 @@ +import { Knex } from "knex"; + +import { TableName } from "../schemas"; + +export async function up(knex: Knex): Promise { + const hasCreationLimitCol = await knex.schema.hasColumn(TableName.RateLimit, "creationLimit"); + await knex.schema.alterTable(TableName.RateLimit, (t) => { + if (hasCreationLimitCol) { + t.dropColumn("creationLimit"); + } + }); +} + +export async function down(knex: Knex): Promise { + const hasCreationLimitCol = await knex.schema.hasColumn(TableName.RateLimit, "creationLimit"); + await knex.schema.alterTable(TableName.RateLimit, (t) => { + if (!hasCreationLimitCol) { + t.integer("creationLimit").defaultTo(30).notNullable(); + } + }); +} diff --git a/backend/src/db/migrations/20240806185442_drop-tag-name.ts b/backend/src/db/migrations/20240806185442_drop-tag-name.ts new file mode 100644 index 0000000000..db85ab2165 --- /dev/null +++ b/backend/src/db/migrations/20240806185442_drop-tag-name.ts @@ -0,0 +1,21 @@ +import { Knex } from "knex"; + +import { TableName } from "../schemas"; + +export async function up(knex: Knex): Promise { + const hasNameField = await knex.schema.hasColumn(TableName.SecretTag, "name"); + if (hasNameField) { + await knex.schema.alterTable(TableName.SecretTag, (t) => { + t.dropColumn("name"); + }); + } +} + +export async function down(knex: Knex): Promise { + const hasNameField = await knex.schema.hasColumn(TableName.SecretTag, "name"); + if (!hasNameField) { + await knex.schema.alterTable(TableName.SecretTag, (t) => { + t.string("name"); + }); + } +} diff --git a/backend/src/db/migrations/20240818024923_cert-alerting.ts b/backend/src/db/migrations/20240818024923_cert-alerting.ts new file mode 100644 index 0000000000..f60ce8c02b --- /dev/null +++ b/backend/src/db/migrations/20240818024923_cert-alerting.ts @@ -0,0 +1,62 @@ +import { Knex } from "knex"; + +import { TableName } from "../schemas"; +import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils"; + +export async function up(knex: Knex): Promise { + if (!(await knex.schema.hasTable(TableName.PkiCollection))) { + await knex.schema.createTable(TableName.PkiCollection, (t) => { + t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid()); + t.timestamps(true, true, true); + t.string("projectId").notNullable(); + t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE"); + t.string("name").notNullable(); + t.string("description").notNullable(); + }); + } + + await createOnUpdateTrigger(knex, TableName.PkiCollection); + + if (!(await knex.schema.hasTable(TableName.PkiCollectionItem))) { + await knex.schema.createTable(TableName.PkiCollectionItem, (t) => { + t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid()); + t.timestamps(true, true, true); + t.uuid("pkiCollectionId").notNullable(); + t.foreign("pkiCollectionId").references("id").inTable(TableName.PkiCollection).onDelete("CASCADE"); + t.uuid("caId").nullable(); + 
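// a collection item references either a CA (caId) or a certificate (certId), which is why both foreign keys are nullable + 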
t.foreign("caId").references("id").inTable(TableName.CertificateAuthority).onDelete("CASCADE"); + t.uuid("certId").nullable(); + t.foreign("certId").references("id").inTable(TableName.Certificate).onDelete("CASCADE"); + }); + } + + await createOnUpdateTrigger(knex, TableName.PkiCollectionItem); + + if (!(await knex.schema.hasTable(TableName.PkiAlert))) { + await knex.schema.createTable(TableName.PkiAlert, (t) => { + t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid()); + t.timestamps(true, true, true); + t.string("projectId").notNullable(); + t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE"); + t.uuid("pkiCollectionId").notNullable(); + t.foreign("pkiCollectionId").references("id").inTable(TableName.PkiCollection).onDelete("CASCADE"); + t.string("name").notNullable(); + t.integer("alertBeforeDays").notNullable(); + t.string("recipientEmails").notNullable(); + t.unique(["name", "projectId"]); + }); + } + + await createOnUpdateTrigger(knex, TableName.PkiAlert); +} + +export async function down(knex: Knex): Promise { + await knex.schema.dropTableIfExists(TableName.PkiAlert); + await dropOnUpdateTrigger(knex, TableName.PkiAlert); + + await knex.schema.dropTableIfExists(TableName.PkiCollectionItem); + await dropOnUpdateTrigger(knex, TableName.PkiCollectionItem); + + await knex.schema.dropTableIfExists(TableName.PkiCollection); + await dropOnUpdateTrigger(knex, TableName.PkiCollection); +} diff --git a/backend/src/db/migrations/20240818184238_add-certificate-template.ts b/backend/src/db/migrations/20240818184238_add-certificate-template.ts new file mode 100644 index 0000000000..7135c24e08 --- /dev/null +++ b/backend/src/db/migrations/20240818184238_add-certificate-template.ts @@ -0,0 +1,55 @@ +import { Knex } from "knex"; + +import { TableName } from "../schemas"; +import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils"; + +export async function up(knex: Knex): Promise { + const hasCertificateTemplateTable = await knex.schema.hasTable(TableName.CertificateTemplate); + if (!hasCertificateTemplateTable) { + await knex.schema.createTable(TableName.CertificateTemplate, (tb) => { + tb.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid()); + tb.uuid("caId").notNullable(); + tb.foreign("caId").references("id").inTable(TableName.CertificateAuthority).onDelete("CASCADE"); + tb.uuid("pkiCollectionId"); + tb.foreign("pkiCollectionId").references("id").inTable(TableName.PkiCollection).onDelete("SET NULL"); + tb.string("name").notNullable(); + tb.string("commonName").notNullable(); + tb.string("subjectAlternativeName").notNullable(); + tb.string("ttl").notNullable(); + tb.timestamps(true, true, true); + }); + + await createOnUpdateTrigger(knex, TableName.CertificateTemplate); + } + + const doesCertificateTableHaveTemplateId = await knex.schema.hasColumn( + TableName.Certificate, + "certificateTemplateId" + ); + + if (!doesCertificateTableHaveTemplateId) { + await knex.schema.alterTable(TableName.Certificate, (tb) => { + tb.uuid("certificateTemplateId"); + tb.foreign("certificateTemplateId").references("id").inTable(TableName.CertificateTemplate).onDelete("SET NULL"); + }); + } +} + +export async function down(knex: Knex): Promise { + const doesCertificateTableHaveTemplateId = await knex.schema.hasColumn( + TableName.Certificate, + "certificateTemplateId" + ); + + if (doesCertificateTableHaveTemplateId) { + await knex.schema.alterTable(TableName.Certificate, (t) => { + t.dropColumn("certificateTemplateId"); + }); + } + + const 
hasCertificateTemplateTable = await knex.schema.hasTable(TableName.CertificateTemplate);
+  if (hasCertificateTemplateTable) {
+    await knex.schema.dropTable(TableName.CertificateTemplate);
+    await dropOnUpdateTrigger(knex, TableName.CertificateTemplate);
+  }
+}
diff --git a/backend/src/db/migrations/20240819092916_certificate-template-est-configuration.ts b/backend/src/db/migrations/20240819092916_certificate-template-est-configuration.ts
new file mode 100644
index 0000000000..38dae17100
--- /dev/null
+++ b/backend/src/db/migrations/20240819092916_certificate-template-est-configuration.ts
@@ -0,0 +1,26 @@
+import { Knex } from "knex";
+
+import { TableName } from "../schemas";
+import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
+
+export async function up(knex: Knex): Promise<void> {
+  const hasEstConfigTable = await knex.schema.hasTable(TableName.CertificateTemplateEstConfig);
+  if (!hasEstConfigTable) {
+    await knex.schema.createTable(TableName.CertificateTemplateEstConfig, (tb) => {
+      tb.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
+      tb.uuid("certificateTemplateId").notNullable().unique();
+      tb.foreign("certificateTemplateId").references("id").inTable(TableName.CertificateTemplate).onDelete("CASCADE");
+      tb.binary("encryptedCaChain").notNullable();
+      tb.string("hashedPassphrase").notNullable();
+      tb.boolean("isEnabled").notNullable();
+      tb.timestamps(true, true, true);
+    });
+
+    await createOnUpdateTrigger(knex, TableName.CertificateTemplateEstConfig);
+  }
+}
+
+export async function down(knex: Knex): Promise<void> {
+  await knex.schema.dropTableIfExists(TableName.CertificateTemplateEstConfig);
+  await dropOnUpdateTrigger(knex, TableName.CertificateTemplateEstConfig);
+}
diff --git a/backend/src/db/migrations/20240821212643_crl-ca-secret-binding.ts b/backend/src/db/migrations/20240821212643_crl-ca-secret-binding.ts
new file mode 100644
index 0000000000..eee2437147
--- /dev/null
+++ b/backend/src/db/migrations/20240821212643_crl-ca-secret-binding.ts
@@ -0,0 +1,36 @@
+import { Knex } from "knex";
+
+import { TableName } from "../schemas";
+
+export async function up(knex: Knex): Promise<void> {
+  if (await knex.schema.hasTable(TableName.CertificateAuthorityCrl)) {
+    const hasCaSecretIdColumn = await knex.schema.hasColumn(TableName.CertificateAuthorityCrl, "caSecretId");
+    if (!hasCaSecretIdColumn) {
+      await knex.schema.alterTable(TableName.CertificateAuthorityCrl, (t) => {
+        t.uuid("caSecretId").nullable();
+        t.foreign("caSecretId").references("id").inTable(TableName.CertificateAuthoritySecret).onDelete("CASCADE");
+      });
+
+      await knex.raw(`
+        UPDATE "${TableName.CertificateAuthorityCrl}" crl
+        SET "caSecretId" = (
+          SELECT sec.id
+          FROM "${TableName.CertificateAuthoritySecret}" sec
+          WHERE sec."caId" = crl."caId"
+        )
+      `);
+
+      await knex.schema.alterTable(TableName.CertificateAuthorityCrl, (t) => {
+        t.uuid("caSecretId").notNullable().alter();
+      });
+    }
+  }
+}
+
+export async function down(knex: Knex): Promise<void> {
+  if (await knex.schema.hasTable(TableName.CertificateAuthorityCrl)) {
+    await knex.schema.alterTable(TableName.CertificateAuthorityCrl, (t) => {
+      t.dropColumn("caSecretId");
+    });
+  }
+}
diff --git a/backend/src/db/migrations/20240830142938_native-slack-integration.ts b/backend/src/db/migrations/20240830142938_native-slack-integration.ts
new file mode 100644
index 0000000000..e06c061055
--- /dev/null
+++ b/backend/src/db/migrations/20240830142938_native-slack-integration.ts
@@ -0,0 +1,96 @@
+import { Knex } from "knex";
+
+import { TableName } from "../schemas";
+import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
+
+export async function up(knex: Knex): Promise<void> {
+  if (!(await knex.schema.hasTable(TableName.WorkflowIntegrations))) {
+    await knex.schema.createTable(TableName.WorkflowIntegrations, (tb) => {
+      tb.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
+      tb.string("integration").notNullable();
+      tb.string("slug").notNullable();
+      tb.uuid("orgId").notNullable();
+      tb.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
+      tb.string("description");
+      tb.unique(["orgId", "slug"]);
+      tb.timestamps(true, true, true);
+    });
+
+    await createOnUpdateTrigger(knex, TableName.WorkflowIntegrations);
+  }
+
+  if (!(await knex.schema.hasTable(TableName.SlackIntegrations))) {
+    await knex.schema.createTable(TableName.SlackIntegrations, (tb) => {
+      tb.uuid("id", { primaryKey: true }).notNullable();
+      tb.foreign("id").references("id").inTable(TableName.WorkflowIntegrations).onDelete("CASCADE");
+      tb.string("teamId").notNullable();
+      tb.string("teamName").notNullable();
+      tb.string("slackUserId").notNullable();
+      tb.string("slackAppId").notNullable();
+      tb.binary("encryptedBotAccessToken").notNullable();
+      tb.string("slackBotId").notNullable();
+      tb.string("slackBotUserId").notNullable();
+      tb.timestamps(true, true, true);
+    });
+
+    await createOnUpdateTrigger(knex, TableName.SlackIntegrations);
+  }
+
+  if (!(await knex.schema.hasTable(TableName.ProjectSlackConfigs))) {
+    await knex.schema.createTable(TableName.ProjectSlackConfigs, (tb) => {
+      tb.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
+      tb.string("projectId").notNullable().unique();
+      tb.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
+      tb.uuid("slackIntegrationId").notNullable();
+      tb.foreign("slackIntegrationId").references("id").inTable(TableName.SlackIntegrations).onDelete("CASCADE");
+      tb.boolean("isAccessRequestNotificationEnabled").notNullable().defaultTo(false);
+      tb.string("accessRequestChannels").notNullable().defaultTo("");
+      tb.boolean("isSecretRequestNotificationEnabled").notNullable().defaultTo(false);
+      tb.string("secretRequestChannels").notNullable().defaultTo("");
+      tb.timestamps(true, true, true);
+    });
+
+    await createOnUpdateTrigger(knex, TableName.ProjectSlackConfigs);
+  }
+
+  const doesSuperAdminHaveSlackClientId = await knex.schema.hasColumn(TableName.SuperAdmin, "encryptedSlackClientId");
+  const doesSuperAdminHaveSlackClientSecret = await knex.schema.hasColumn(
+    TableName.SuperAdmin,
+    "encryptedSlackClientSecret"
+  );
+
+  await knex.schema.alterTable(TableName.SuperAdmin, (tb) => {
+    if (!doesSuperAdminHaveSlackClientId) {
+      tb.binary("encryptedSlackClientId");
+    }
+    if (!doesSuperAdminHaveSlackClientSecret) {
+      tb.binary("encryptedSlackClientSecret");
+    }
+  });
+}
+
+export async function down(knex: Knex): Promise<void> {
+  await knex.schema.dropTableIfExists(TableName.ProjectSlackConfigs);
+  await dropOnUpdateTrigger(knex, TableName.ProjectSlackConfigs);
+
+  await knex.schema.dropTableIfExists(TableName.SlackIntegrations);
+  await dropOnUpdateTrigger(knex, TableName.SlackIntegrations);
+
+  await knex.schema.dropTableIfExists(TableName.WorkflowIntegrations);
+  await dropOnUpdateTrigger(knex, TableName.WorkflowIntegrations);
+
+  const doesSuperAdminHaveSlackClientId = await knex.schema.hasColumn(TableName.SuperAdmin, "encryptedSlackClientId");
+  const doesSuperAdminHaveSlackClientSecret = await knex.schema.hasColumn(
+    TableName.SuperAdmin,
+    "encryptedSlackClientSecret"
+  );
+
+  await knex.schema.alterTable(TableName.SuperAdmin, (tb) => {
+    if (doesSuperAdminHaveSlackClientId) {
+      tb.dropColumn("encryptedSlackClientId");
+    }
+    if (doesSuperAdminHaveSlackClientSecret) {
+      tb.dropColumn("encryptedSlackClientSecret");
+    }
+  });
+}
diff --git a/backend/src/db/migrations/20240909145938_cert-template-enforcement.ts b/backend/src/db/migrations/20240909145938_cert-template-enforcement.ts
new file mode 100644
index 0000000000..fa359ab441
--- /dev/null
+++ b/backend/src/db/migrations/20240909145938_cert-template-enforcement.ts
@@ -0,0 +1,25 @@
+import { Knex } from "knex";
+
+import { TableName } from "../schemas";
+
+export async function up(knex: Knex): Promise<void> {
+  if (await knex.schema.hasTable(TableName.CertificateAuthority)) {
+    const hasRequireTemplateForIssuanceColumn = await knex.schema.hasColumn(
+      TableName.CertificateAuthority,
+      "requireTemplateForIssuance"
+    );
+    if (!hasRequireTemplateForIssuanceColumn) {
+      await knex.schema.alterTable(TableName.CertificateAuthority, (t) => {
+        t.boolean("requireTemplateForIssuance").notNullable().defaultTo(false);
+      });
+    }
+  }
+}
+
+export async function down(knex: Knex): Promise<void> {
+  if (await knex.schema.hasTable(TableName.CertificateAuthority)) {
+    await knex.schema.alterTable(TableName.CertificateAuthority, (t) => {
+      t.dropColumn("requireTemplateForIssuance");
+    });
+  }
+}
diff --git a/backend/src/db/migrations/20240910070128_add-pki-key-usages.ts b/backend/src/db/migrations/20240910070128_add-pki-key-usages.ts
new file mode 100644
index 0000000000..93bfa59db6
--- /dev/null
+++ b/backend/src/db/migrations/20240910070128_add-pki-key-usages.ts
@@ -0,0 +1,85 @@
+import { Knex } from "knex";
+
+import { CertKeyUsage } from "@app/services/certificate/certificate-types";
+
+import { TableName } from "../schemas";
+
+export async function up(knex: Knex): Promise<void> {
+  // Certificate template
+  const hasKeyUsagesCol = await knex.schema.hasColumn(TableName.CertificateTemplate, "keyUsages");
+  const hasExtendedKeyUsagesCol = await knex.schema.hasColumn(TableName.CertificateTemplate, "extendedKeyUsages");
+
+  await knex.schema.alterTable(TableName.CertificateTemplate, (tb) => {
+    if (!hasKeyUsagesCol) {
+      tb.specificType("keyUsages", "text[]");
+    }
+
+    if (!hasExtendedKeyUsagesCol) {
+      tb.specificType("extendedKeyUsages", "text[]");
+    }
+  });
+
+  if (!hasKeyUsagesCol) {
+    await knex(TableName.CertificateTemplate).update({
+      keyUsages: [CertKeyUsage.DIGITAL_SIGNATURE, CertKeyUsage.KEY_ENCIPHERMENT]
+    });
+  }
+
+  if (!hasExtendedKeyUsagesCol) {
+    await knex(TableName.CertificateTemplate).update({
+      extendedKeyUsages: []
+    });
+  }
+
+  // Certificate
+  const doesCertTableHaveKeyUsages = await knex.schema.hasColumn(TableName.Certificate, "keyUsages");
+  const doesCertTableHaveExtendedKeyUsages = await knex.schema.hasColumn(TableName.Certificate, "extendedKeyUsages");
+  await knex.schema.alterTable(TableName.Certificate, (tb) => {
+    if (!doesCertTableHaveKeyUsages) {
+      tb.specificType("keyUsages", "text[]");
+    }
+
+    if (!doesCertTableHaveExtendedKeyUsages) {
+      tb.specificType("extendedKeyUsages", "text[]");
+    }
+  });
+
+  if (!doesCertTableHaveKeyUsages) {
+    await knex(TableName.Certificate).update({
+      keyUsages: [CertKeyUsage.DIGITAL_SIGNATURE, CertKeyUsage.KEY_ENCIPHERMENT]
+    });
+  }
+
+  if (!doesCertTableHaveExtendedKeyUsages) {
+    await knex(TableName.Certificate).update({
+      extendedKeyUsages: []
+    });
+  }
+}
+
+export async function down(knex: Knex): Promise<void> {
+  // Certificate Template
+  const hasKeyUsagesCol = await knex.schema.hasColumn(TableName.CertificateTemplate, "keyUsages");
+  const hasExtendedKeyUsagesCol = await knex.schema.hasColumn(TableName.CertificateTemplate, "extendedKeyUsages");
+
+  await knex.schema.alterTable(TableName.CertificateTemplate, (t) => {
+    if (hasKeyUsagesCol) {
+      t.dropColumn("keyUsages");
+    }
+    if (hasExtendedKeyUsagesCol) {
+      t.dropColumn("extendedKeyUsages");
+    }
+  });
+
+  // Certificate
+  const doesCertTableHaveKeyUsages = await knex.schema.hasColumn(TableName.Certificate, "keyUsages");
+  const doesCertTableHaveExtendedKeyUsages = await knex.schema.hasColumn(TableName.Certificate, "extendedKeyUsages");
+  await knex.schema.alterTable(TableName.Certificate, (t) => {
+    if (doesCertTableHaveKeyUsages) {
+      t.dropColumn("keyUsages");
+    }
+    if (doesCertTableHaveExtendedKeyUsages) {
+      t.dropColumn("extendedKeyUsages");
+    }
+  });
+}
diff --git a/backend/src/db/migrations/20240918005344_add-group-approvals.ts b/backend/src/db/migrations/20240918005344_add-group-approvals.ts
new file mode 100644
index 0000000000..9a33f53aa5
--- /dev/null
+++ b/backend/src/db/migrations/20240918005344_add-group-approvals.ts
@@ -0,0 +1,76 @@
+import { Knex } from "knex";
+
+import { TableName } from "../schemas";
+
+export async function up(knex: Knex): Promise<void> {
+  const hasAccessApproverGroupId = await knex.schema.hasColumn(
+    TableName.AccessApprovalPolicyApprover,
+    "approverGroupId"
+  );
+  const hasAccessApproverUserId = await knex.schema.hasColumn(TableName.AccessApprovalPolicyApprover, "approverUserId");
+  const hasSecretApproverGroupId = await knex.schema.hasColumn(
+    TableName.SecretApprovalPolicyApprover,
+    "approverGroupId"
+  );
+  const hasSecretApproverUserId = await knex.schema.hasColumn(TableName.SecretApprovalPolicyApprover, "approverUserId");
+  if (await knex.schema.hasTable(TableName.AccessApprovalPolicyApprover)) {
+    await knex.schema.alterTable(TableName.AccessApprovalPolicyApprover, (table) => {
+      // add column approverGroupId to AccessApprovalPolicyApprover
+      if (!hasAccessApproverGroupId) {
+        table.uuid("approverGroupId").nullable().references("id").inTable(TableName.Groups).onDelete("CASCADE");
+      }
+
+      // make approverUserId nullable
+      if (hasAccessApproverUserId) {
+        table.uuid("approverUserId").nullable().alter();
+      }
+    });
+    await knex.schema.alterTable(TableName.SecretApprovalPolicyApprover, (table) => {
+      // add column approverGroupId to SecretApprovalPolicyApprover
+      if (!hasSecretApproverGroupId) {
+        table.uuid("approverGroupId").nullable().references("id").inTable(TableName.Groups).onDelete("CASCADE");
+      }
+
+      // make approverUserId nullable
+      if (hasSecretApproverUserId) {
+        table.uuid("approverUserId").nullable().alter();
+      }
+    });
+  }
+}
+
+export async function down(knex: Knex): Promise<void> {
+  const hasAccessApproverGroupId = await knex.schema.hasColumn(
+    TableName.AccessApprovalPolicyApprover,
+    "approverGroupId"
+  );
+  const hasAccessApproverUserId = await knex.schema.hasColumn(TableName.AccessApprovalPolicyApprover, "approverUserId");
+  const hasSecretApproverGroupId = await knex.schema.hasColumn(
+    TableName.SecretApprovalPolicyApprover,
+    "approverGroupId"
+  );
+  const hasSecretApproverUserId = await knex.schema.hasColumn(TableName.SecretApprovalPolicyApprover, "approverUserId");
+
+  if (await knex.schema.hasTable(TableName.AccessApprovalPolicyApprover)) {
+    await knex.schema.alterTable(TableName.AccessApprovalPolicyApprover, (table) => {
+      if (hasAccessApproverGroupId) {
+        table.dropColumn("approverGroupId");
+      }
+      // make approverUserId not nullable
+      if (hasAccessApproverUserId) {
+        table.uuid("approverUserId").notNullable().alter();
+      }
+    });
+
+    // remove the group approver column from the secret approval policy table as well
+    await knex.schema.alterTable(TableName.SecretApprovalPolicyApprover, (table) => {
+      if (hasSecretApproverGroupId) {
+        table.dropColumn("approverGroupId");
+      }
+      // make approverUserId not nullable
+      if (hasSecretApproverUserId) {
+        table.uuid("approverUserId").notNullable().alter();
+      }
+    });
+  }
+}
diff --git a/backend/src/db/migrations/20240924100329_identity-metadata.ts b/backend/src/db/migrations/20240924100329_identity-metadata.ts
new file mode 100644
index 0000000000..962c1d3507
--- /dev/null
+++ b/backend/src/db/migrations/20240924100329_identity-metadata.ts
@@ -0,0 +1,24 @@
+import { Knex } from "knex";
+
+import { TableName } from "../schemas";
+
+export async function up(knex: Knex): Promise<void> {
+  if (!(await knex.schema.hasTable(TableName.IdentityMetadata))) {
+    await knex.schema.createTable(TableName.IdentityMetadata, (tb) => {
+      tb.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
+      tb.string("key").notNullable();
+      tb.string("value").notNullable();
+      tb.uuid("orgId").notNullable();
+      tb.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
+      tb.uuid("userId");
+      tb.foreign("userId").references("id").inTable(TableName.Users).onDelete("CASCADE");
+      tb.uuid("identityId");
+      tb.foreign("identityId").references("id").inTable(TableName.Identity).onDelete("CASCADE");
+      tb.timestamps(true, true, true);
+    });
+  }
+}
+
+export async function down(knex: Knex): Promise<void> {
+  await knex.schema.dropTableIfExists(TableName.IdentityMetadata);
+}
diff --git a/backend/src/db/migrations/20240925100349_managed-secret-sharing.ts b/backend/src/db/migrations/20240925100349_managed-secret-sharing.ts
new file mode 100644
index 0000000000..f64d7f858b
--- /dev/null
+++ b/backend/src/db/migrations/20240925100349_managed-secret-sharing.ts
@@ -0,0 +1,43 @@
+import { Knex } from "knex";
+
+import { TableName } from "../schemas";
+
+export async function up(knex: Knex): Promise<void> {
+  if (await knex.schema.hasTable(TableName.SecretSharing)) {
+    const hasEncryptedSecret = await knex.schema.hasColumn(TableName.SecretSharing, "encryptedSecret");
+    const hasIdentifier = await knex.schema.hasColumn(TableName.SecretSharing, "identifier");
+
+    await knex.schema.alterTable(TableName.SecretSharing, (t) => {
+      t.string("iv").nullable().alter();
+      t.string("tag").nullable().alter();
+      t.string("encryptedValue").nullable().alter();
+
+      if (!hasEncryptedSecret) {
+        t.binary("encryptedSecret").nullable();
+      }
+      t.string("hashedHex").nullable().alter();
+
+      if (!hasIdentifier) {
+        t.string("identifier", 64).nullable();
+        t.unique("identifier");
+        t.index("identifier");
+      }
+    });
+  }
+}
+
+export async function down(knex: Knex): Promise<void> {
+  const hasEncryptedSecret = await knex.schema.hasColumn(TableName.SecretSharing, "encryptedSecret");
+  const hasIdentifier = await knex.schema.hasColumn(TableName.SecretSharing, "identifier");
+  if (await knex.schema.hasTable(TableName.SecretSharing)) {
+    await knex.schema.alterTable(TableName.SecretSharing, (t) => {
+      if (hasEncryptedSecret) {
+        t.dropColumn("encryptedSecret");
+      }
+
+      if (hasIdentifier) {
+        t.dropColumn("identifier");
+      }
+    });
+  }
+}
diff --git a/backend/src/db/migrations/20240930072738_add-oidc-auth-enforced-to-org.ts b/backend/src/db/migrations/20240930072738_add-oidc-auth-enforced-to-org.ts
new file mode 100644
index 0000000000..284ac70503
--- /dev/null
+++ 
b/backend/src/db/migrations/20240930072738_add-oidc-auth-enforced-to-org.ts @@ -0,0 +1,19 @@ +import { Knex } from "knex"; + +import { TableName } from "../schemas"; + +export async function up(knex: Knex): Promise { + if (!(await knex.schema.hasColumn(TableName.OidcConfig, "lastUsed"))) { + await knex.schema.alterTable(TableName.OidcConfig, (tb) => { + tb.datetime("lastUsed"); + }); + } +} + +export async function down(knex: Knex): Promise { + if (await knex.schema.hasColumn(TableName.OidcConfig, "lastUsed")) { + await knex.schema.alterTable(TableName.OidcConfig, (tb) => { + tb.dropColumn("lastUsed"); + }); + } +} diff --git a/backend/src/db/migrations/20241003220151_kms-key-cmek-alterations.ts b/backend/src/db/migrations/20241003220151_kms-key-cmek-alterations.ts new file mode 100644 index 0000000000..bdad443c9e --- /dev/null +++ b/backend/src/db/migrations/20241003220151_kms-key-cmek-alterations.ts @@ -0,0 +1,52 @@ +import { Knex } from "knex"; + +import { dropConstraintIfExists } from "@app/db/migrations/utils/dropConstraintIfExists"; +import { TableName } from "@app/db/schemas"; + +export async function up(knex: Knex): Promise { + if (await knex.schema.hasTable(TableName.KmsKey)) { + const hasOrgId = await knex.schema.hasColumn(TableName.KmsKey, "orgId"); + const hasSlug = await knex.schema.hasColumn(TableName.KmsKey, "slug"); + const hasProjectId = await knex.schema.hasColumn(TableName.KmsKey, "projectId"); + + // drop constraint if exists (won't exist if rolled back, see below) + await dropConstraintIfExists(TableName.KmsKey, "kms_keys_orgid_slug_unique", knex); + + // projectId for CMEK functionality + await knex.schema.alterTable(TableName.KmsKey, (table) => { + if (!hasProjectId) { + table.string("projectId").nullable().references("id").inTable(TableName.Project).onDelete("CASCADE"); + } + + if (hasOrgId && hasSlug) { + table.unique(["orgId", "projectId", "slug"]); + } + + if (hasSlug) { + table.renameColumn("slug", "name"); + } + }); + } +} + +export async function down(knex: Knex): Promise { + if (await knex.schema.hasTable(TableName.KmsKey)) { + const hasOrgId = await knex.schema.hasColumn(TableName.KmsKey, "orgId"); + const hasName = await knex.schema.hasColumn(TableName.KmsKey, "name"); + const hasProjectId = await knex.schema.hasColumn(TableName.KmsKey, "projectId"); + + // remove projectId for CMEK functionality + await knex.schema.alterTable(TableName.KmsKey, (table) => { + if (hasName) { + table.renameColumn("name", "slug"); + } + + if (hasOrgId) { + table.dropUnique(["orgId", "projectId", "slug"]); + } + if (hasProjectId) { + table.dropColumn("projectId"); + } + }); + } +} diff --git a/backend/src/db/migrations/20241005170802_kms-keys-temp-slug-col.ts b/backend/src/db/migrations/20241005170802_kms-keys-temp-slug-col.ts new file mode 100644 index 0000000000..45af3e4b83 --- /dev/null +++ b/backend/src/db/migrations/20241005170802_kms-keys-temp-slug-col.ts @@ -0,0 +1,30 @@ +import { Knex } from "knex"; + +import { TableName } from "@app/db/schemas"; + +export async function up(knex: Knex): Promise { + if (await knex.schema.hasTable(TableName.KmsKey)) { + const hasSlug = await knex.schema.hasColumn(TableName.KmsKey, "slug"); + + if (!hasSlug) { + // add slug back temporarily and set value equal to name + await knex.schema + .alterTable(TableName.KmsKey, (table) => { + table.string("slug", 32); + }) + .then(() => knex(TableName.KmsKey).update("slug", knex.ref("name"))); + } + } +} + +export async function down(knex: Knex): Promise { + if (await 
knex.schema.hasTable(TableName.KmsKey)) { + const hasSlug = await knex.schema.hasColumn(TableName.KmsKey, "slug"); + + if (hasSlug) { + await knex.schema.alterTable(TableName.KmsKey, (table) => { + table.dropColumn("slug"); + }); + } + } +} diff --git a/backend/src/db/migrations/20241007052025_make-audit-log-independent.ts b/backend/src/db/migrations/20241007052025_make-audit-log-independent.ts new file mode 100644 index 0000000000..b6b98b9bc0 --- /dev/null +++ b/backend/src/db/migrations/20241007052025_make-audit-log-independent.ts @@ -0,0 +1,48 @@ +import { Knex } from "knex"; + +import { TableName } from "../schemas"; + +export async function up(knex: Knex): Promise { + if (await knex.schema.hasTable(TableName.AuditLog)) { + const doesProjectIdExist = await knex.schema.hasColumn(TableName.AuditLog, "projectId"); + const doesOrgIdExist = await knex.schema.hasColumn(TableName.AuditLog, "orgId"); + const doesProjectNameExist = await knex.schema.hasColumn(TableName.AuditLog, "projectName"); + + await knex.schema.alterTable(TableName.AuditLog, (t) => { + if (doesOrgIdExist) { + t.dropForeign("orgId"); + } + + if (doesProjectIdExist) { + t.dropForeign("projectId"); + } + + // add normalized field + if (!doesProjectNameExist) { + t.string("projectName"); + } + }); + } +} + +export async function down(knex: Knex): Promise { + const doesProjectIdExist = await knex.schema.hasColumn(TableName.AuditLog, "projectId"); + const doesOrgIdExist = await knex.schema.hasColumn(TableName.AuditLog, "orgId"); + const doesProjectNameExist = await knex.schema.hasColumn(TableName.AuditLog, "projectName"); + + if (await knex.schema.hasTable(TableName.AuditLog)) { + await knex.schema.alterTable(TableName.AuditLog, (t) => { + if (doesOrgIdExist) { + t.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE"); + } + if (doesProjectIdExist) { + t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE"); + } + + // remove normalized field + if (doesProjectNameExist) { + t.dropColumn("projectName"); + } + }); + } +} diff --git a/backend/src/db/migrations/20241007202149_default-org-membership-roles.ts b/backend/src/db/migrations/20241007202149_default-org-membership-roles.ts new file mode 100644 index 0000000000..d80853a8fd --- /dev/null +++ b/backend/src/db/migrations/20241007202149_default-org-membership-roles.ts @@ -0,0 +1,29 @@ +import { Knex } from "knex"; + +import { TableName } from "@app/db/schemas"; + +export async function up(knex: Knex): Promise { + // org default role + if (await knex.schema.hasTable(TableName.Organization)) { + const hasDefaultRoleCol = await knex.schema.hasColumn(TableName.Organization, "defaultMembershipRole"); + + if (!hasDefaultRoleCol) { + await knex.schema.alterTable(TableName.Organization, (tb) => { + tb.string("defaultMembershipRole").notNullable().defaultTo("member"); + }); + } + } +} + +export async function down(knex: Knex): Promise { + // org default role + if (await knex.schema.hasTable(TableName.Organization)) { + const hasDefaultRoleCol = await knex.schema.hasColumn(TableName.Organization, "defaultMembershipRole"); + + if (hasDefaultRoleCol) { + await knex.schema.alterTable(TableName.Organization, (tb) => { + tb.dropColumn("defaultMembershipRole"); + }); + } + } +} diff --git a/backend/src/db/migrations/20241008172622_project-permission-split.ts b/backend/src/db/migrations/20241008172622_project-permission-split.ts new file mode 100644 index 0000000000..9227104708 --- /dev/null +++ 
b/backend/src/db/migrations/20241008172622_project-permission-split.ts
@@ -0,0 +1,101 @@
+/* eslint-disable no-await-in-loop */
+import { packRules, unpackRules } from "@casl/ability/extra";
+import { Knex } from "knex";
+
+import {
+  backfillPermissionV1SchemaToV2Schema,
+  ProjectPermissionSub
+} from "@app/ee/services/permission/project-permission";
+
+import { TableName } from "../schemas";
+
+const CHUNK_SIZE = 1000;
+export async function up(knex: Knex): Promise<void> {
+  const hasVersion = await knex.schema.hasColumn(TableName.ProjectRoles, "version");
+  if (!hasVersion) {
+    await knex.schema.alterTable(TableName.ProjectRoles, (t) => {
+      t.integer("version").defaultTo(1).notNullable();
+    });
+
+    const docs = await knex(TableName.ProjectRoles).select("*");
+    const updatedDocs = docs
+      .filter((i) => {
+        const permissionString = JSON.stringify(i.permissions || []);
+        return (
+          !permissionString.includes(ProjectPermissionSub.SecretImports) &&
+          !permissionString.includes(ProjectPermissionSub.DynamicSecrets)
+        );
+      })
+      .map((el) => ({
+        ...el,
+        // eslint-disable-next-line @typescript-eslint/ban-ts-comment
+        // @ts-ignore-error this is valid ts
+        permissions: JSON.stringify(packRules(backfillPermissionV1SchemaToV2Schema(unpackRules(el.permissions), true)))
+      }));
+    if (updatedDocs.length) {
+      for (let i = 0; i < updatedDocs.length; i += CHUNK_SIZE) {
+        const chunk = updatedDocs.slice(i, i + CHUNK_SIZE);
+        await knex(TableName.ProjectRoles).insert(chunk).onConflict("id").merge();
+      }
+    }
+
+    // The secret permission is split into multiple subjects (secrets, folders, imports and dynamic-secrets),
+    // so we find all privileges that still use the old shape and remap them as needed
+    const identityPrivileges = await knex(TableName.IdentityProjectAdditionalPrivilege).select("*");
+    const updatedIdentityPrivilegesDocs = identityPrivileges
+      .filter((i) => {
+        const permissionString = JSON.stringify(i.permissions || []);
+        return (
+          !permissionString.includes(ProjectPermissionSub.SecretImports) &&
+          !permissionString.includes(ProjectPermissionSub.DynamicSecrets) &&
+          !permissionString.includes(ProjectPermissionSub.SecretFolders)
+        );
+      })
+      .map((el) => ({
+        ...el,
+        // eslint-disable-next-line @typescript-eslint/ban-ts-comment
+        // @ts-ignore-error this is valid ts
+        permissions: JSON.stringify(packRules(backfillPermissionV1SchemaToV2Schema(unpackRules(el.permissions))))
+      }));
+    if (updatedIdentityPrivilegesDocs.length) {
+      for (let i = 0; i < updatedIdentityPrivilegesDocs.length; i += CHUNK_SIZE) {
+        const chunk = updatedIdentityPrivilegesDocs.slice(i, i + CHUNK_SIZE);
+        await knex(TableName.IdentityProjectAdditionalPrivilege).insert(chunk).onConflict("id").merge();
+      }
+    }
+
+    const userPrivileges = await knex(TableName.ProjectUserAdditionalPrivilege).select("*");
+    const updatedUserPrivilegeDocs = userPrivileges
+      .filter((i) => {
+        const permissionString = JSON.stringify(i.permissions || []);
+        return (
+          !permissionString.includes(ProjectPermissionSub.SecretImports) &&
+          !permissionString.includes(ProjectPermissionSub.DynamicSecrets) &&
+          !permissionString.includes(ProjectPermissionSub.SecretFolders)
+        );
+      })
+      .map((el) => ({
+        ...el,
+        // eslint-disable-next-line @typescript-eslint/ban-ts-comment
+        // @ts-ignore-error this is valid ts
+        permissions: JSON.stringify(packRules(backfillPermissionV1SchemaToV2Schema(unpackRules(el.permissions))))
+      }));
+    if (updatedUserPrivilegeDocs.length) {
+      for (let i = 0; i < updatedUserPrivilegeDocs.length; i += CHUNK_SIZE) {
+        const chunk = updatedUserPrivilegeDocs.slice(i, i + CHUNK_SIZE);
+        await knex(TableName.ProjectUserAdditionalPrivilege).insert(chunk).onConflict("id").merge();
+      }
+    }
+  }
+}
+
+export async function down(knex: Knex): Promise<void> {
+  const hasVersion = await knex.schema.hasColumn(TableName.ProjectRoles, "version");
+  if (hasVersion) {
+    await knex.schema.alterTable(TableName.ProjectRoles, (t) => {
+      t.dropColumn("version");
+    });
+
+    // the permission change can be ignored on rollback
+  }
+}
diff --git a/backend/src/db/migrations/20241014084900_identity-multiple-auth-methods.ts b/backend/src/db/migrations/20241014084900_identity-multiple-auth-methods.ts
new file mode 100644
index 0000000000..821132e1e8
--- /dev/null
+++ b/backend/src/db/migrations/20241014084900_identity-multiple-auth-methods.ts
@@ -0,0 +1,76 @@
+import { Knex } from "knex";
+
+import { TableName } from "../schemas";
+
+const BATCH_SIZE = 30_000;
+
+export async function up(knex: Knex): Promise<void> {
+  const hasAuthMethodColumnAccessToken = await knex.schema.hasColumn(TableName.IdentityAccessToken, "authMethod");
+
+  if (!hasAuthMethodColumnAccessToken) {
+    await knex.schema.alterTable(TableName.IdentityAccessToken, (t) => {
+      t.string("authMethod").nullable();
+    });
+
+    let nullableAccessTokens = await knex(TableName.IdentityAccessToken).whereNull("authMethod").limit(BATCH_SIZE);
+    let totalUpdated = 0;
+
+    do {
+      const batchIds = nullableAccessTokens.map((token) => token.id);
+
+      // ! Update the authMethod column for the current batch of tokens
+      // eslint-disable-next-line no-await-in-loop
+      await knex(TableName.IdentityAccessToken)
+        .whereIn("id", batchIds)
+        .update({
+          // eslint-disable-next-line @typescript-eslint/ban-ts-comment
+          // @ts-ignore because generate schema happens after this
+          authMethod: knex(TableName.Identity)
+            .select("authMethod")
+            .whereRaw(`${TableName.IdentityAccessToken}."identityId" = ${TableName.Identity}.id`)
+            .whereNotNull("authMethod")
+            .first()
+        });
+
+      // eslint-disable-next-line no-await-in-loop
+      nullableAccessTokens = await knex(TableName.IdentityAccessToken).whereNull("authMethod").limit(BATCH_SIZE);
+
+      totalUpdated += batchIds.length;
+      console.log(`Updated ${batchIds.length} access tokens in batch <> Total updated: ${totalUpdated}`);
+    } while (nullableAccessTokens.length > 0);
+
+    // ! We delete all access tokens where the identity has no auth method set,
+    // ! which means un-configured identities that for some reason have access tokens will have those tokens deleted.
+    await knex(TableName.IdentityAccessToken)
+      .whereNotExists((queryBuilder) => {
+        void queryBuilder
+          .select("id")
+          .from(TableName.Identity)
+          .whereRaw(`${TableName.IdentityAccessToken}."identityId" = ${TableName.Identity}.id`)
+          .whereNotNull("authMethod");
+      })
+      .delete();
+
+    // Finally, we set authMethod to notNullable after populating the column.
+    // This will fail if the data is not populated correctly, so it's safe.
+    await knex.schema.alterTable(TableName.IdentityAccessToken, (t) => {
+      t.string("authMethod").notNullable().alter();
+    });
+  }
+
+  // ! We aren't dropping the authMethod column from the Identity itself, because we want to be able to easily roll back for the time being.
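+  // The batched backfill above runs without a wrapping migration transaction (see the
+  // `config = { transaction: false }` export below), so each 30k batch commits on its
+  // own; re-running after a crash is safe because only rows with a NULL authMethod are
+  // selected.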
+} + +// eslint-disable-next-line @typescript-eslint/no-unused-vars +export async function down(knex: Knex): Promise { + const hasAuthMethodColumnAccessToken = await knex.schema.hasColumn(TableName.IdentityAccessToken, "authMethod"); + + if (hasAuthMethodColumnAccessToken) { + await knex.schema.alterTable(TableName.IdentityAccessToken, (t) => { + t.dropColumn("authMethod"); + }); + } +} + +const config = { transaction: false }; +export { config }; diff --git a/backend/src/db/migrations/20241015084434_increase-identity-metadata-col-length.ts b/backend/src/db/migrations/20241015084434_increase-identity-metadata-col-length.ts new file mode 100644 index 0000000000..e7cdf31cfb --- /dev/null +++ b/backend/src/db/migrations/20241015084434_increase-identity-metadata-col-length.ts @@ -0,0 +1,19 @@ +import { Knex } from "knex"; + +import { TableName } from "../schemas"; + +export async function up(knex: Knex): Promise { + if (await knex.schema.hasColumn(TableName.IdentityMetadata, "value")) { + await knex.schema.alterTable(TableName.IdentityMetadata, (t) => { + t.string("value", 1020).alter(); + }); + } +} + +export async function down(knex: Knex): Promise { + if (await knex.schema.hasColumn(TableName.IdentityMetadata, "value")) { + await knex.schema.alterTable(TableName.IdentityMetadata, (t) => { + t.string("value", 255).alter(); + }); + } +} diff --git a/backend/src/db/migrations/20241015145450_external-group-org-role-mapping.ts b/backend/src/db/migrations/20241015145450_external-group-org-role-mapping.ts new file mode 100644 index 0000000000..728d49c258 --- /dev/null +++ b/backend/src/db/migrations/20241015145450_external-group-org-role-mapping.ts @@ -0,0 +1,32 @@ +import { Knex } from "knex"; + +import { TableName } from "@app/db/schemas"; +import { createOnUpdateTrigger, dropOnUpdateTrigger } from "@app/db/utils"; + +export async function up(knex: Knex): Promise { + // add external group to org role mapping table + if (!(await knex.schema.hasTable(TableName.ExternalGroupOrgRoleMapping))) { + await knex.schema.createTable(TableName.ExternalGroupOrgRoleMapping, (t) => { + t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid()); + t.string("groupName").notNullable(); + t.index("groupName"); + t.string("role").notNullable(); + t.uuid("roleId"); + t.foreign("roleId").references("id").inTable(TableName.OrgRoles); + t.uuid("orgId").notNullable(); + t.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE"); + t.timestamps(true, true, true); + t.unique(["orgId", "groupName"]); + }); + + await createOnUpdateTrigger(knex, TableName.ExternalGroupOrgRoleMapping); + } +} + +export async function down(knex: Knex): Promise { + if (await knex.schema.hasTable(TableName.ExternalGroupOrgRoleMapping)) { + await dropOnUpdateTrigger(knex, TableName.ExternalGroupOrgRoleMapping); + + await knex.schema.dropTable(TableName.ExternalGroupOrgRoleMapping); + } +} diff --git a/backend/src/db/migrations/20241016183616_add-org-enforce-mfa.ts b/backend/src/db/migrations/20241016183616_add-org-enforce-mfa.ts new file mode 100644 index 0000000000..d01f1698e5 --- /dev/null +++ b/backend/src/db/migrations/20241016183616_add-org-enforce-mfa.ts @@ -0,0 +1,19 @@ +import { Knex } from "knex"; + +import { TableName } from "../schemas"; + +export async function up(knex: Knex): Promise { + if (!(await knex.schema.hasColumn(TableName.Organization, "enforceMfa"))) { + await knex.schema.alterTable(TableName.Organization, (tb) => { + tb.boolean("enforceMfa").defaultTo(false).notNullable(); + }); + } +} + 
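+// The hasColumn guard in `up` makes this migration idempotent: re-running it after a
+// partial deploy is a no-op rather than failing with "column already exists". The same
+// guard pattern is used by most other migrations in this PR.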
+export async function down(knex: Knex): Promise { + if (await knex.schema.hasColumn(TableName.Organization, "enforceMfa")) { + await knex.schema.alterTable(TableName.Organization, (t) => { + t.dropColumn("enforceMfa"); + }); + } +} diff --git a/backend/src/db/migrations/20241021114650_add-missing-org-cascade-references.ts b/backend/src/db/migrations/20241021114650_add-missing-org-cascade-references.ts new file mode 100644 index 0000000000..7a2cf688bb --- /dev/null +++ b/backend/src/db/migrations/20241021114650_add-missing-org-cascade-references.ts @@ -0,0 +1,21 @@ +import { Knex } from "knex"; + +import { TableName } from "../schemas"; + +export async function up(knex: Knex): Promise { + if (await knex.schema.hasColumn(TableName.SamlConfig, "orgId")) { + await knex.schema.alterTable(TableName.SamlConfig, (t) => { + t.dropForeign("orgId"); + t.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE"); + }); + } +} + +export async function down(knex: Knex): Promise { + if (await knex.schema.hasColumn(TableName.SamlConfig, "orgId")) { + await knex.schema.alterTable(TableName.SamlConfig, (t) => { + t.dropForeign("orgId"); + t.foreign("orgId").references("id").inTable(TableName.Organization); + }); + } +} diff --git a/backend/src/db/migrations/20241101174939_project-templates.ts b/backend/src/db/migrations/20241101174939_project-templates.ts new file mode 100644 index 0000000000..ba78adb478 --- /dev/null +++ b/backend/src/db/migrations/20241101174939_project-templates.ts @@ -0,0 +1,28 @@ +import { Knex } from "knex"; + +import { TableName } from "@app/db/schemas"; +import { createOnUpdateTrigger, dropOnUpdateTrigger } from "@app/db/utils"; + +export async function up(knex: Knex): Promise { + if (!(await knex.schema.hasTable(TableName.ProjectTemplates))) { + await knex.schema.createTable(TableName.ProjectTemplates, (t) => { + t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid()); + t.string("name", 32).notNullable(); + t.string("description").nullable(); + t.jsonb("roles").notNullable(); + t.jsonb("environments").notNullable(); + t.uuid("orgId").notNullable().references("id").inTable(TableName.Organization).onDelete("CASCADE"); + t.timestamps(true, true, true); + }); + + await createOnUpdateTrigger(knex, TableName.ProjectTemplates); + } +} + +export async function down(knex: Knex): Promise { + if (await knex.schema.hasTable(TableName.ProjectTemplates)) { + await dropOnUpdateTrigger(knex, TableName.ProjectTemplates); + + await knex.schema.dropTable(TableName.ProjectTemplates); + } +} diff --git a/backend/src/db/migrations/20241107112632_skip-bootstrap-cert-validation-est.ts b/backend/src/db/migrations/20241107112632_skip-bootstrap-cert-validation-est.ts new file mode 100644 index 0000000000..fbee179b38 --- /dev/null +++ b/backend/src/db/migrations/20241107112632_skip-bootstrap-cert-validation-est.ts @@ -0,0 +1,35 @@ +import { Knex } from "knex"; + +import { TableName } from "../schemas"; + +export async function up(knex: Knex): Promise { + const hasDisableBootstrapCertValidationCol = await knex.schema.hasColumn( + TableName.CertificateTemplateEstConfig, + "disableBootstrapCertValidation" + ); + + const hasCaChainCol = await knex.schema.hasColumn(TableName.CertificateTemplateEstConfig, "encryptedCaChain"); + + await knex.schema.alterTable(TableName.CertificateTemplateEstConfig, (t) => { + if (!hasDisableBootstrapCertValidationCol) { + t.boolean("disableBootstrapCertValidation").defaultTo(false).notNullable(); + } + + if (hasCaChainCol) { + 
t.binary("encryptedCaChain").nullable().alter(); + } + }); +} + +export async function down(knex: Knex): Promise { + const hasDisableBootstrapCertValidationCol = await knex.schema.hasColumn( + TableName.CertificateTemplateEstConfig, + "disableBootstrapCertValidation" + ); + + await knex.schema.alterTable(TableName.CertificateTemplateEstConfig, (t) => { + if (hasDisableBootstrapCertValidationCol) { + t.dropColumn("disableBootstrapCertValidation"); + } + }); +} diff --git a/backend/src/db/migrations/20241111175154_kms-root-cfg-hsm.ts b/backend/src/db/migrations/20241111175154_kms-root-cfg-hsm.ts new file mode 100644 index 0000000000..501eccb8b8 --- /dev/null +++ b/backend/src/db/migrations/20241111175154_kms-root-cfg-hsm.ts @@ -0,0 +1,23 @@ +import { Knex } from "knex"; + +import { TableName } from "../schemas"; + +export async function up(knex: Knex): Promise { + const hasEncryptionStrategy = await knex.schema.hasColumn(TableName.KmsServerRootConfig, "encryptionStrategy"); + const hasTimestampsCol = await knex.schema.hasColumn(TableName.KmsServerRootConfig, "createdAt"); + + await knex.schema.alterTable(TableName.KmsServerRootConfig, (t) => { + if (!hasEncryptionStrategy) t.string("encryptionStrategy").defaultTo("SOFTWARE"); + if (!hasTimestampsCol) t.timestamps(true, true, true); + }); +} + +export async function down(knex: Knex): Promise { + const hasEncryptionStrategy = await knex.schema.hasColumn(TableName.KmsServerRootConfig, "encryptionStrategy"); + const hasTimestampsCol = await knex.schema.hasColumn(TableName.KmsServerRootConfig, "createdAt"); + + await knex.schema.alterTable(TableName.KmsServerRootConfig, (t) => { + if (hasEncryptionStrategy) t.dropColumn("encryptionStrategy"); + if (hasTimestampsCol) t.dropTimestamps(true); + }); +} diff --git a/backend/src/db/migrations/20241112082701_add-totp-support.ts b/backend/src/db/migrations/20241112082701_add-totp-support.ts new file mode 100644 index 0000000000..9aefc444c0 --- /dev/null +++ b/backend/src/db/migrations/20241112082701_add-totp-support.ts @@ -0,0 +1,54 @@ +import { Knex } from "knex"; + +import { TableName } from "../schemas"; +import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils"; + +export async function up(knex: Knex): Promise { + if (!(await knex.schema.hasTable(TableName.TotpConfig))) { + await knex.schema.createTable(TableName.TotpConfig, (t) => { + t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid()); + t.uuid("userId").notNullable(); + t.foreign("userId").references("id").inTable(TableName.Users).onDelete("CASCADE"); + t.boolean("isVerified").defaultTo(false).notNullable(); + t.binary("encryptedRecoveryCodes").notNullable(); + t.binary("encryptedSecret").notNullable(); + t.timestamps(true, true, true); + t.unique("userId"); + }); + + await createOnUpdateTrigger(knex, TableName.TotpConfig); + } + + const doesOrgMfaMethodColExist = await knex.schema.hasColumn(TableName.Organization, "selectedMfaMethod"); + await knex.schema.alterTable(TableName.Organization, (t) => { + if (!doesOrgMfaMethodColExist) { + t.string("selectedMfaMethod"); + } + }); + + const doesUserSelectedMfaMethodColExist = await knex.schema.hasColumn(TableName.Users, "selectedMfaMethod"); + await knex.schema.alterTable(TableName.Users, (t) => { + if (!doesUserSelectedMfaMethodColExist) { + t.string("selectedMfaMethod"); + } + }); +} + +export async function down(knex: Knex): Promise { + await dropOnUpdateTrigger(knex, TableName.TotpConfig); + await knex.schema.dropTableIfExists(TableName.TotpConfig); + + const 
doesOrgMfaMethodColExist = await knex.schema.hasColumn(TableName.Organization, "selectedMfaMethod");
+  await knex.schema.alterTable(TableName.Organization, (t) => {
+    if (doesOrgMfaMethodColExist) {
+      t.dropColumn("selectedMfaMethod");
+    }
+  });
+
+  const doesUserSelectedMfaMethodColExist = await knex.schema.hasColumn(TableName.Users, "selectedMfaMethod");
+  await knex.schema.alterTable(TableName.Users, (t) => {
+    if (doesUserSelectedMfaMethodColExist) {
+      t.dropColumn("selectedMfaMethod");
+    }
+  });
+}
diff --git a/backend/src/db/migrations/utils/dropConstraintIfExists.ts b/backend/src/db/migrations/utils/dropConstraintIfExists.ts
new file mode 100644
index 0000000000..bfe487d496
--- /dev/null
+++ b/backend/src/db/migrations/utils/dropConstraintIfExists.ts
@@ -0,0 +1,6 @@
+import { Knex } from "knex";
+
+import { TableName } from "@app/db/schemas";
+
+export const dropConstraintIfExists = (tableName: TableName, constraintName: string, knex: Knex) =>
+  knex.raw(`ALTER TABLE ${tableName} DROP CONSTRAINT IF EXISTS ${constraintName};`);
diff --git a/backend/src/db/migrations/utils/kms.ts b/backend/src/db/migrations/utils/kms.ts
new file mode 100644
index 0000000000..9ed0909783
--- /dev/null
+++ b/backend/src/db/migrations/utils/kms.ts
@@ -0,0 +1,105 @@
+import slugify from "@sindresorhus/slugify";
+import { Knex } from "knex";
+
+import { TableName } from "@app/db/schemas";
+import { randomSecureBytes } from "@app/lib/crypto";
+import { symmetricCipherService, SymmetricEncryption } from "@app/lib/crypto/cipher";
+import { alphaNumericNanoId } from "@app/lib/nanoid";
+
+const getInstanceRootKey = async (knex: Knex) => {
+  const encryptionKey = process.env.ENCRYPTION_KEY || process.env.ROOT_ENCRYPTION_KEY;
+  // when the key comes from ROOT_ENCRYPTION_KEY it is base64 encoded; ENCRYPTION_KEY is plain utf8
+  const isBase64 = !process.env.ENCRYPTION_KEY;
+  if (!encryptionKey) throw new Error("ENCRYPTION_KEY variable needed for migration");
+  const encryptionKeyBuffer = Buffer.from(encryptionKey, isBase64 ? "base64" : "utf8");
+
+  const KMS_ROOT_CONFIG_UUID = "00000000-0000-0000-0000-000000000000";
+  const kmsRootConfig = await knex(TableName.KmsServerRootConfig).where({ id: KMS_ROOT_CONFIG_UUID }).first();
+  const cipher = symmetricCipherService(SymmetricEncryption.AES_GCM_256);
+  if (kmsRootConfig) {
+    // the root key already exists; decrypt and reuse it so all instance nodes work with the same key
+    const decryptedRootKey = cipher.decrypt(kmsRootConfig.encryptedRootKey, encryptionKeyBuffer);
+    return decryptedRootKey;
+  }
+
+  const newRootKey = randomSecureBytes(32);
+  const encryptedRootKey = cipher.encrypt(newRootKey, encryptionKeyBuffer);
+  await knex(TableName.KmsServerRootConfig).insert({
+    encryptedRootKey,
+    // eslint-disable-next-line
+    // @ts-ignore id is kept as fixed for idempotence and to avoid race condition
+    id: KMS_ROOT_CONFIG_UUID
+  });
+  // return the plaintext root key to match the decrypt branch above; the encrypted copy is what gets persisted
+  return newRootKey;
+};
+
+export const getSecretManagerDataKey = async (knex: Knex, projectId: string) => {
+  const KMS_VERSION = "v01";
+  const KMS_VERSION_BLOB_LENGTH = 3;
+  const cipher = symmetricCipherService(SymmetricEncryption.AES_GCM_256);
+  const project = await knex(TableName.Project).where({ id: projectId }).first();
+  if (!project) throw new Error("Missing project id");
+
+  const ROOT_ENCRYPTION_KEY = await getInstanceRootKey(knex);
+
+  let secretManagerKmsKey;
+  const projectSecretManagerKmsId = project?.kmsSecretManagerKeyId;
+  if (projectSecretManagerKmsId) {
+    const kmsDoc = await knex(TableName.KmsKey)
+      .leftJoin(TableName.InternalKms, `${TableName.KmsKey}.id`, `${TableName.InternalKms}.kmsKeyId`)
+      .where({ [`${TableName.KmsKey}.id` as "id"]: projectSecretManagerKmsId })
+      .first();
+    if (!kmsDoc) throw new Error("missing kms");
+    secretManagerKmsKey = cipher.decrypt(kmsDoc.encryptedKey, ROOT_ENCRYPTION_KEY);
+  } else {
+    const [kmsDoc] = await knex(TableName.KmsKey)
+      .insert({
+        name: slugify(alphaNumericNanoId(8).toLowerCase()),
+        orgId: project.orgId,
+        isReserved: false
+      })
+      .returning("*");
+
+    secretManagerKmsKey = randomSecureBytes(32);
+    const encryptedKeyMaterial = cipher.encrypt(secretManagerKmsKey, ROOT_ENCRYPTION_KEY);
+    await knex(TableName.InternalKms).insert({
+      version: 1,
+      encryptedKey: encryptedKeyMaterial,
+      encryptionAlgorithm: SymmetricEncryption.AES_GCM_256,
+      kmsKeyId: kmsDoc.id
+    });
+  }
+
+  const encryptedSecretManagerDataKey = project?.kmsSecretManagerEncryptedDataKey;
+  let dataKey: Buffer;
+  if (!encryptedSecretManagerDataKey) {
+    dataKey = randomSecureBytes();
+    // in the KMS service this versioning is handled automatically
+    const unversionedDataKey = cipher.encrypt(dataKey, secretManagerKmsKey);
+    const versionBlob = Buffer.from(KMS_VERSION, "utf8"); // length is 3
+    await knex(TableName.Project)
+      .where({ id: projectId })
+      .update({
+        kmsSecretManagerEncryptedDataKey: Buffer.concat([unversionedDataKey, versionBlob])
+      });
+  } else {
+    const cipherTextBlob = encryptedSecretManagerDataKey.subarray(0, -KMS_VERSION_BLOB_LENGTH);
+    dataKey = cipher.decrypt(cipherTextBlob, secretManagerKmsKey);
+  }
+
+  return {
+    encryptor: ({ plainText }: { plainText: Buffer }) => {
+      const encryptedPlainTextBlob = cipher.encrypt(plainText, dataKey);
+
+      // Buffer#1 encrypted text + Buffer#2 version number
+      const versionBlob = Buffer.from(KMS_VERSION, "utf8"); // length is 3
+      const cipherTextBlob = Buffer.concat([encryptedPlainTextBlob, versionBlob]);
+      return { cipherTextBlob };
+    },
+    decryptor: ({ cipherTextBlob: versionedCipherTextBlob }: { cipherTextBlob: Buffer }) => {
+      const cipherTextBlob = versionedCipherTextBlob.subarray(0, -KMS_VERSION_BLOB_LENGTH);
+      const decryptedBlob = cipher.decrypt(cipherTextBlob, dataKey);
+      return decryptedBlob;
+    }
+  };
+};
diff --git a/backend/src/db/schemas/access-approval-policies-approvers.ts b/backend/src/db/schemas/access-approval-policies-approvers.ts
index 4ebbfa9aec..1ecd805138 100644
--- a/backend/src/db/schemas/access-approval-policies-approvers.ts
+++ b/backend/src/db/schemas/access-approval-policies-approvers.ts
@@ -9,10 +9,11 @@ import { TImmutableDBKeys } from "./models";
 
 export const AccessApprovalPoliciesApproversSchema = z.object({
   id: z.string().uuid(),
-  approverId: z.string().uuid(),
   policyId: z.string().uuid(),
   createdAt: z.date(),
-  updatedAt: z.date()
+  updatedAt: z.date(),
+  approverUserId: z.string().uuid().nullable().optional(),
+  approverGroupId: z.string().uuid().nullable().optional()
 });
 
 export type TAccessApprovalPoliciesApprovers = z.infer<typeof AccessApprovalPoliciesApproversSchema>;
diff --git a/backend/src/db/schemas/access-approval-policies.ts b/backend/src/db/schemas/access-approval-policies.ts
index 69068d23b2..f4c525a4f6 100644
--- a/backend/src/db/schemas/access-approval-policies.ts
+++ b/backend/src/db/schemas/access-approval-policies.ts
@@ -14,7 +14,8 @@ export const AccessApprovalPoliciesSchema = z.object({
   secretPath: z.string().nullable().optional(),
   envId: z.string().uuid(),
   createdAt: z.date(),
-  updatedAt: z.date()
+  updatedAt: z.date(),
+  enforcementLevel: z.string().default("hard")
 });
 
 export type TAccessApprovalPolicies = z.infer<typeof AccessApprovalPoliciesSchema>;
diff --git a/backend/src/db/schemas/access-approval-requests-reviewers.ts b/backend/src/db/schemas/access-approval-requests-reviewers.ts
index 509fd74259..a209df2061 100644
--- a/backend/src/db/schemas/access-approval-requests-reviewers.ts
+++ b/backend/src/db/schemas/access-approval-requests-reviewers.ts
@@ -9,11 +9,12 @@ import { TImmutableDBKeys } from "./models";
 
 export const AccessApprovalRequestsReviewersSchema = z.object({
   id: z.string().uuid(),
-  member: z.string().uuid(),
+  member: z.string().uuid().nullable().optional(),
   status: z.string(),
   requestId: z.string().uuid(),
   createdAt: z.date(),
-  updatedAt: z.date()
+  updatedAt: z.date(),
+  reviewerUserId: z.string().uuid()
 });
 
 export type TAccessApprovalRequestsReviewers = z.infer<typeof AccessApprovalRequestsReviewersSchema>;
diff --git a/backend/src/db/schemas/access-approval-requests.ts b/backend/src/db/schemas/access-approval-requests.ts
index bd598bac6e..0b20202f5a 100644
--- a/backend/src/db/schemas/access-approval-requests.ts
+++ b/backend/src/db/schemas/access-approval-requests.ts
@@ -11,12 +11,13 @@ export const AccessApprovalRequestsSchema = z.object({
   id: z.string().uuid(),
   policyId: z.string().uuid(),
   privilegeId: z.string().uuid().nullable().optional(),
-  requestedBy: z.string().uuid(),
+  requestedBy: z.string().uuid().nullable().optional(),
   isTemporary: z.boolean(),
   temporaryRange: z.string().nullable().optional(),
   permissions: z.unknown(),
   createdAt: z.date(),
-  updatedAt: z.date()
+  updatedAt: z.date(),
+  requestedByUserId: z.string().uuid()
 });
 
 export type TAccessApprovalRequests = z.infer<typeof AccessApprovalRequestsSchema>;
diff --git a/backend/src/db/schemas/audit-logs.ts b/backend/src/db/schemas/audit-logs.ts
index b8906698b6..d1c239724c 100644
--- a/backend/src/db/schemas/audit-logs.ts
+++ b/backend/src/db/schemas/audit-logs.ts
@@ -20,7 +20,8 @@ export const AuditLogsSchema = z.object({
   createdAt: z.date(),
   updatedAt: z.date(),
   orgId: z.string().uuid().nullable().optional(),
-  projectId: z.string().nullable().optional()
+  projectId: z.string().nullable().optional(),
+  projectName: z.string().nullable().optional()
 });
 
 export type TAuditLogs = z.infer<typeof AuditLogsSchema>;
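The generated schema files that follow all share one pattern: a zod object mirroring the table, plus insert/update helper types that strip the columns the database manages itself. A minimal sketch of that pattern, assuming TImmutableDBKeys (from ./models) is the union of those DB-managed column names:

import { z } from "zod";

// Assumption: TImmutableDBKeys names the columns the database manages itself
// (set by defaults/triggers), so application code may never write them.
type TImmutableDBKeys = "id" | "createdAt" | "updatedAt";

// Shape of any generated schema, e.g. AuditLogsSchema above.
const ExampleSchema = z.object({
  id: z.string().uuid(),
  createdAt: z.date(),
  updatedAt: z.date(),
  projectName: z.string().nullable().optional()
});

// Row type as read back, and write payload types with immutable keys stripped;
// z.input (rather than z.infer) keeps defaulted fields optional when writing.
type TExample = z.infer<typeof ExampleSchema>;
type TExampleInsert = Omit<z.input<typeof ExampleSchema>, TImmutableDBKeys>;
type TExampleUpdate = Partial<Omit<z.input<typeof ExampleSchema>, TImmutableDBKeys>>;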
diff --git a/backend/src/db/schemas/certificate-authorities.ts b/backend/src/db/schemas/certificate-authorities.ts new file mode 100644 index 0000000000..ffe0f7c44c --- /dev/null +++ b/backend/src/db/schemas/certificate-authorities.ts @@ -0,0 +1,39 @@ +// Code generated by automation script, DO NOT EDIT. +// Automated by pulling database and generating zod schema +// To update. Just run npm run generate:schema +// Written by akhilmhdh. + +import { z } from "zod"; + +import { TImmutableDBKeys } from "./models"; + +export const CertificateAuthoritiesSchema = z.object({ + id: z.string().uuid(), + createdAt: z.date(), + updatedAt: z.date(), + parentCaId: z.string().uuid().nullable().optional(), + projectId: z.string(), + type: z.string(), + status: z.string(), + friendlyName: z.string(), + organization: z.string(), + ou: z.string(), + country: z.string(), + province: z.string(), + locality: z.string(), + commonName: z.string(), + dn: z.string(), + serialNumber: z.string().nullable().optional(), + maxPathLength: z.number().nullable().optional(), + keyAlgorithm: z.string(), + notBefore: z.date().nullable().optional(), + notAfter: z.date().nullable().optional(), + activeCaCertId: z.string().uuid().nullable().optional(), + requireTemplateForIssuance: z.boolean().default(false) +}); + +export type TCertificateAuthorities = z.infer; +export type TCertificateAuthoritiesInsert = Omit, TImmutableDBKeys>; +export type TCertificateAuthoritiesUpdate = Partial< + Omit, TImmutableDBKeys> +>; diff --git a/backend/src/db/schemas/certificate-authority-certs.ts b/backend/src/db/schemas/certificate-authority-certs.ts new file mode 100644 index 0000000000..7074ce409b --- /dev/null +++ b/backend/src/db/schemas/certificate-authority-certs.ts @@ -0,0 +1,27 @@ +// Code generated by automation script, DO NOT EDIT. +// Automated by pulling database and generating zod schema +// To update. Just run npm run generate:schema +// Written by akhilmhdh. + +import { z } from "zod"; + +import { zodBuffer } from "@app/lib/zod"; + +import { TImmutableDBKeys } from "./models"; + +export const CertificateAuthorityCertsSchema = z.object({ + id: z.string().uuid(), + createdAt: z.date(), + updatedAt: z.date(), + caId: z.string().uuid(), + encryptedCertificate: zodBuffer, + encryptedCertificateChain: zodBuffer, + version: z.number(), + caSecretId: z.string().uuid() +}); + +export type TCertificateAuthorityCerts = z.infer; +export type TCertificateAuthorityCertsInsert = Omit, TImmutableDBKeys>; +export type TCertificateAuthorityCertsUpdate = Partial< + Omit, TImmutableDBKeys> +>; diff --git a/backend/src/db/schemas/certificate-authority-crl.ts b/backend/src/db/schemas/certificate-authority-crl.ts new file mode 100644 index 0000000000..3d63be5d8e --- /dev/null +++ b/backend/src/db/schemas/certificate-authority-crl.ts @@ -0,0 +1,25 @@ +// Code generated by automation script, DO NOT EDIT. +// Automated by pulling database and generating zod schema +// To update. Just run npm run generate:schema +// Written by akhilmhdh. 
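+// Note: caSecretId below is the column introduced by the 20240821212643_crl-ca-secret-binding
+// migration earlier in this PR; it cascades when the referenced CA secret is deleted.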
+ +import { z } from "zod"; + +import { zodBuffer } from "@app/lib/zod"; + +import { TImmutableDBKeys } from "./models"; + +export const CertificateAuthorityCrlSchema = z.object({ + id: z.string().uuid(), + createdAt: z.date(), + updatedAt: z.date(), + caId: z.string().uuid(), + encryptedCrl: zodBuffer, + caSecretId: z.string().uuid() +}); + +export type TCertificateAuthorityCrl = z.infer; +export type TCertificateAuthorityCrlInsert = Omit, TImmutableDBKeys>; +export type TCertificateAuthorityCrlUpdate = Partial< + Omit, TImmutableDBKeys> +>; diff --git a/backend/src/db/schemas/certificate-authority-secret.ts b/backend/src/db/schemas/certificate-authority-secret.ts new file mode 100644 index 0000000000..36ab1c5060 --- /dev/null +++ b/backend/src/db/schemas/certificate-authority-secret.ts @@ -0,0 +1,27 @@ +// Code generated by automation script, DO NOT EDIT. +// Automated by pulling database and generating zod schema +// To update. Just run npm run generate:schema +// Written by akhilmhdh. + +import { z } from "zod"; + +import { zodBuffer } from "@app/lib/zod"; + +import { TImmutableDBKeys } from "./models"; + +export const CertificateAuthoritySecretSchema = z.object({ + id: z.string().uuid(), + createdAt: z.date(), + updatedAt: z.date(), + caId: z.string().uuid(), + encryptedPrivateKey: zodBuffer +}); + +export type TCertificateAuthoritySecret = z.infer; +export type TCertificateAuthoritySecretInsert = Omit< + z.input, + TImmutableDBKeys +>; +export type TCertificateAuthoritySecretUpdate = Partial< + Omit, TImmutableDBKeys> +>; diff --git a/backend/src/db/schemas/certificate-bodies.ts b/backend/src/db/schemas/certificate-bodies.ts new file mode 100644 index 0000000000..75afbddbdb --- /dev/null +++ b/backend/src/db/schemas/certificate-bodies.ts @@ -0,0 +1,22 @@ +// Code generated by automation script, DO NOT EDIT. +// Automated by pulling database and generating zod schema +// To update. Just run npm run generate:schema +// Written by akhilmhdh. + +import { z } from "zod"; + +import { zodBuffer } from "@app/lib/zod"; + +import { TImmutableDBKeys } from "./models"; + +export const CertificateBodiesSchema = z.object({ + id: z.string().uuid(), + createdAt: z.date(), + updatedAt: z.date(), + certId: z.string().uuid(), + encryptedCertificate: zodBuffer +}); + +export type TCertificateBodies = z.infer; +export type TCertificateBodiesInsert = Omit, TImmutableDBKeys>; +export type TCertificateBodiesUpdate = Partial, TImmutableDBKeys>>; diff --git a/backend/src/db/schemas/certificate-secrets.ts b/backend/src/db/schemas/certificate-secrets.ts new file mode 100644 index 0000000000..f8cad74f14 --- /dev/null +++ b/backend/src/db/schemas/certificate-secrets.ts @@ -0,0 +1,21 @@ +// Code generated by automation script, DO NOT EDIT. +// Automated by pulling database and generating zod schema +// To update. Just run npm run generate:schema +// Written by akhilmhdh. 
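+// Note: pk/sk hold the certificate key-pair material as strings (presumably the
+// legacy symmetric-encryption format, in contrast to the newer binary encrypted* columns).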
+ +import { z } from "zod"; + +import { TImmutableDBKeys } from "./models"; + +export const CertificateSecretsSchema = z.object({ + id: z.string().uuid(), + createdAt: z.date(), + updatedAt: z.date(), + certId: z.string().uuid(), + pk: z.string(), + sk: z.string() +}); + +export type TCertificateSecrets = z.infer; +export type TCertificateSecretsInsert = Omit, TImmutableDBKeys>; +export type TCertificateSecretsUpdate = Partial, TImmutableDBKeys>>; diff --git a/backend/src/db/schemas/certificate-template-est-configs.ts b/backend/src/db/schemas/certificate-template-est-configs.ts new file mode 100644 index 0000000000..262f22b069 --- /dev/null +++ b/backend/src/db/schemas/certificate-template-est-configs.ts @@ -0,0 +1,30 @@ +// Code generated by automation script, DO NOT EDIT. +// Automated by pulling database and generating zod schema +// To update. Just run npm run generate:schema +// Written by akhilmhdh. + +import { z } from "zod"; + +import { zodBuffer } from "@app/lib/zod"; + +import { TImmutableDBKeys } from "./models"; + +export const CertificateTemplateEstConfigsSchema = z.object({ + id: z.string().uuid(), + certificateTemplateId: z.string().uuid(), + encryptedCaChain: zodBuffer.nullable().optional(), + hashedPassphrase: z.string(), + isEnabled: z.boolean(), + createdAt: z.date(), + updatedAt: z.date(), + disableBootstrapCertValidation: z.boolean().default(false) +}); + +export type TCertificateTemplateEstConfigs = z.infer; +export type TCertificateTemplateEstConfigsInsert = Omit< + z.input, + TImmutableDBKeys +>; +export type TCertificateTemplateEstConfigsUpdate = Partial< + Omit, TImmutableDBKeys> +>; diff --git a/backend/src/db/schemas/certificate-templates.ts b/backend/src/db/schemas/certificate-templates.ts new file mode 100644 index 0000000000..c332d7cf7b --- /dev/null +++ b/backend/src/db/schemas/certificate-templates.ts @@ -0,0 +1,26 @@ +// Code generated by automation script, DO NOT EDIT. +// Automated by pulling database and generating zod schema +// To update. Just run npm run generate:schema +// Written by akhilmhdh. + +import { z } from "zod"; + +import { TImmutableDBKeys } from "./models"; + +export const CertificateTemplatesSchema = z.object({ + id: z.string().uuid(), + caId: z.string().uuid(), + pkiCollectionId: z.string().uuid().nullable().optional(), + name: z.string(), + commonName: z.string(), + subjectAlternativeName: z.string(), + ttl: z.string(), + createdAt: z.date(), + updatedAt: z.date(), + keyUsages: z.string().array().nullable().optional(), + extendedKeyUsages: z.string().array().nullable().optional() +}); + +export type TCertificateTemplates = z.infer; +export type TCertificateTemplatesInsert = Omit, TImmutableDBKeys>; +export type TCertificateTemplatesUpdate = Partial, TImmutableDBKeys>>; diff --git a/backend/src/db/schemas/certificates.ts b/backend/src/db/schemas/certificates.ts new file mode 100644 index 0000000000..bde35002f8 --- /dev/null +++ b/backend/src/db/schemas/certificates.ts @@ -0,0 +1,32 @@ +// Code generated by automation script, DO NOT EDIT. +// Automated by pulling database and generating zod schema +// To update. Just run npm run generate:schema +// Written by akhilmhdh. 
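+// Note: keyUsages/extendedKeyUsages below are the nullable text[] columns added by the
+// 20240910070128_add-pki-key-usages migration earlier in this PR.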
+ +import { z } from "zod"; + +import { TImmutableDBKeys } from "./models"; + +export const CertificatesSchema = z.object({ + id: z.string().uuid(), + createdAt: z.date(), + updatedAt: z.date(), + caId: z.string().uuid(), + status: z.string(), + serialNumber: z.string(), + friendlyName: z.string(), + commonName: z.string(), + notBefore: z.date(), + notAfter: z.date(), + revokedAt: z.date().nullable().optional(), + revocationReason: z.number().nullable().optional(), + altNames: z.string().default("").nullable().optional(), + caCertId: z.string().uuid(), + certificateTemplateId: z.string().uuid().nullable().optional(), + keyUsages: z.string().array().nullable().optional(), + extendedKeyUsages: z.string().array().nullable().optional() +}); + +export type TCertificates = z.infer; +export type TCertificatesInsert = Omit, TImmutableDBKeys>; +export type TCertificatesUpdate = Partial, TImmutableDBKeys>>; diff --git a/backend/src/db/schemas/external-group-org-role-mappings.ts b/backend/src/db/schemas/external-group-org-role-mappings.ts new file mode 100644 index 0000000000..f7e6eab25d --- /dev/null +++ b/backend/src/db/schemas/external-group-org-role-mappings.ts @@ -0,0 +1,27 @@ +// Code generated by automation script, DO NOT EDIT. +// Automated by pulling database and generating zod schema +// To update. Just run npm run generate:schema +// Written by akhilmhdh. + +import { z } from "zod"; + +import { TImmutableDBKeys } from "./models"; + +export const ExternalGroupOrgRoleMappingsSchema = z.object({ + id: z.string().uuid(), + groupName: z.string(), + role: z.string(), + roleId: z.string().uuid().nullable().optional(), + orgId: z.string().uuid(), + createdAt: z.date(), + updatedAt: z.date() +}); + +export type TExternalGroupOrgRoleMappings = z.infer; +export type TExternalGroupOrgRoleMappingsInsert = Omit< + z.input, + TImmutableDBKeys +>; +export type TExternalGroupOrgRoleMappingsUpdate = Partial< + Omit, TImmutableDBKeys> +>; diff --git a/backend/src/db/schemas/external-kms.ts b/backend/src/db/schemas/external-kms.ts new file mode 100644 index 0000000000..810c3f70f7 --- /dev/null +++ b/backend/src/db/schemas/external-kms.ts @@ -0,0 +1,23 @@ +// Code generated by automation script, DO NOT EDIT. +// Automated by pulling database and generating zod schema +// To update. Just run npm run generate:schema +// Written by akhilmhdh. 
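+// Note: encryptedProviderInputs is a binary (bytea) column, hence zodBuffer below
+// rather than z.string().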
+ +import { z } from "zod"; + +import { zodBuffer } from "@app/lib/zod"; + +import { TImmutableDBKeys } from "./models"; + +export const ExternalKmsSchema = z.object({ + id: z.string().uuid(), + provider: z.string(), + encryptedProviderInputs: zodBuffer, + status: z.string().nullable().optional(), + statusDetails: z.string().nullable().optional(), + kmsKeyId: z.string().uuid() +}); + +export type TExternalKms = z.infer; +export type TExternalKmsInsert = Omit, TImmutableDBKeys>; +export type TExternalKmsUpdate = Partial, TImmutableDBKeys>>; diff --git a/backend/src/db/schemas/identity-access-tokens.ts b/backend/src/db/schemas/identity-access-tokens.ts index 18dbb81930..bbff1b88cf 100644 --- a/backend/src/db/schemas/identity-access-tokens.ts +++ b/backend/src/db/schemas/identity-access-tokens.ts @@ -19,7 +19,9 @@ export const IdentityAccessTokensSchema = z.object({ identityUAClientSecretId: z.string().nullable().optional(), identityId: z.string().uuid(), createdAt: z.date(), - updatedAt: z.date() + updatedAt: z.date(), + name: z.string().nullable().optional(), + authMethod: z.string() }); export type TIdentityAccessTokens = z.infer; diff --git a/backend/src/db/schemas/identity-metadata.ts b/backend/src/db/schemas/identity-metadata.ts new file mode 100644 index 0000000000..de1cec104e --- /dev/null +++ b/backend/src/db/schemas/identity-metadata.ts @@ -0,0 +1,23 @@ +// Code generated by automation script, DO NOT EDIT. +// Automated by pulling database and generating zod schema +// To update. Just run npm run generate:schema +// Written by akhilmhdh. + +import { z } from "zod"; + +import { TImmutableDBKeys } from "./models"; + +export const IdentityMetadataSchema = z.object({ + id: z.string().uuid(), + key: z.string(), + value: z.string(), + orgId: z.string().uuid(), + userId: z.string().uuid().nullable().optional(), + identityId: z.string().uuid().nullable().optional(), + createdAt: z.date(), + updatedAt: z.date() +}); + +export type TIdentityMetadata = z.infer; +export type TIdentityMetadataInsert = Omit, TImmutableDBKeys>; +export type TIdentityMetadataUpdate = Partial, TImmutableDBKeys>>; diff --git a/backend/src/db/schemas/identity-oidc-auths.ts b/backend/src/db/schemas/identity-oidc-auths.ts new file mode 100644 index 0000000000..3d7d38c41a --- /dev/null +++ b/backend/src/db/schemas/identity-oidc-auths.ts @@ -0,0 +1,31 @@ +// Code generated by automation script, DO NOT EDIT. +// Automated by pulling database and generating zod schema +// To update. Just run npm run generate:schema +// Written by akhilmhdh. 
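+// Note: the encryptedCaCert/caCertIV/caCertTag trio below looks like the legacy
+// AES-GCM string-encryption format (ciphertext, IV and auth tag stored separately),
+// unlike the newer single-buffer encrypted* columns.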
+
+import { z } from "zod";
+
+import { TImmutableDBKeys } from "./models";
+
+export const IdentityOidcAuthsSchema = z.object({
+  id: z.string().uuid(),
+  accessTokenTTL: z.coerce.number().default(7200),
+  accessTokenMaxTTL: z.coerce.number().default(7200),
+  accessTokenNumUsesLimit: z.coerce.number().default(0),
+  accessTokenTrustedIps: z.unknown(),
+  identityId: z.string().uuid(),
+  oidcDiscoveryUrl: z.string(),
+  encryptedCaCert: z.string(),
+  caCertIV: z.string(),
+  caCertTag: z.string(),
+  boundIssuer: z.string(),
+  boundAudiences: z.string(),
+  boundClaims: z.unknown(),
+  boundSubject: z.string().nullable().optional(),
+  createdAt: z.date(),
+  updatedAt: z.date()
+});
+
+export type TIdentityOidcAuths = z.infer<typeof IdentityOidcAuthsSchema>;
+export type TIdentityOidcAuthsInsert = Omit<z.input<typeof IdentityOidcAuthsSchema>, TImmutableDBKeys>;
+export type TIdentityOidcAuthsUpdate = Partial<Omit<z.input<typeof IdentityOidcAuthsSchema>, TImmutableDBKeys>>;
diff --git a/backend/src/db/schemas/identity-token-auths.ts b/backend/src/db/schemas/identity-token-auths.ts
new file mode 100644
index 0000000000..0f3c8c9ff6
--- /dev/null
+++ b/backend/src/db/schemas/identity-token-auths.ts
@@ -0,0 +1,23 @@
+// Code generated by automation script, DO NOT EDIT.
+// Automated by pulling database and generating zod schema
+// To update. Just run npm run generate:schema
+// Written by akhilmhdh.
+
+import { z } from "zod";
+
+import { TImmutableDBKeys } from "./models";
+
+export const IdentityTokenAuthsSchema = z.object({
+  id: z.string().uuid(),
+  accessTokenTTL: z.coerce.number().default(7200),
+  accessTokenMaxTTL: z.coerce.number().default(7200),
+  accessTokenNumUsesLimit: z.coerce.number().default(0),
+  accessTokenTrustedIps: z.unknown(),
+  createdAt: z.date(),
+  updatedAt: z.date(),
+  identityId: z.string().uuid()
+});
+
+export type TIdentityTokenAuths = z.infer<typeof IdentityTokenAuthsSchema>;
+export type TIdentityTokenAuthsInsert = Omit<z.input<typeof IdentityTokenAuthsSchema>, TImmutableDBKeys>;
+export type TIdentityTokenAuthsUpdate = Partial<Omit<z.input<typeof IdentityTokenAuthsSchema>, TImmutableDBKeys>>;
diff --git a/backend/src/db/schemas/index.ts b/backend/src/db/schemas/index.ts
index 1eaa86c871..74741a8ff8 100644
--- a/backend/src/db/schemas/index.ts
+++ b/backend/src/db/schemas/index.ts
@@ -8,8 +8,18 @@ export * from "./audit-logs";
 export * from "./auth-token-sessions";
 export * from "./auth-tokens";
 export * from "./backup-private-key";
+export * from "./certificate-authorities";
+export * from "./certificate-authority-certs";
+export * from "./certificate-authority-crl";
+export * from "./certificate-authority-secret";
+export * from "./certificate-bodies";
+export * from "./certificate-secrets";
+export * from "./certificate-template-est-configs";
+export * from "./certificate-templates";
+export * from "./certificates";
 export * from "./dynamic-secret-leases";
 export * from "./dynamic-secrets";
+export * from "./external-kms";
 export * from "./git-app-install-sessions";
 export * from "./git-app-org";
 export * from "./group-project-membership-roles";
@@ -21,60 +31,82 @@ export * from "./identity-aws-auths";
 export * from "./identity-azure-auths";
 export * from "./identity-gcp-auths";
 export * from "./identity-kubernetes-auths";
+export * from "./identity-metadata";
+export * from "./identity-oidc-auths";
 export * from "./identity-org-memberships";
 export * from "./identity-project-additional-privilege";
 export * from "./identity-project-membership-role";
 export * from "./identity-project-memberships";
+export * from "./identity-token-auths";
 export * from "./identity-ua-client-secrets";
 export * from "./identity-universal-auths";
 export * from "./incident-contacts";
 export * from "./integration-auths";
export * from "./integrations"; +export * from "./internal-kms"; export * from "./kms-key-versions"; export * from "./kms-keys"; export * from "./kms-root-config"; export * from "./ldap-configs"; export * from "./ldap-group-maps"; export * from "./models"; +export * from "./oidc-configs"; export * from "./org-bots"; export * from "./org-memberships"; export * from "./org-roles"; export * from "./organizations"; +export * from "./pki-alerts"; +export * from "./pki-collection-items"; +export * from "./pki-collections"; export * from "./project-bots"; export * from "./project-environments"; export * from "./project-keys"; export * from "./project-memberships"; export * from "./project-roles"; +export * from "./project-slack-configs"; +export * from "./project-templates"; export * from "./project-user-additional-privilege"; export * from "./project-user-membership-roles"; export * from "./projects"; +export * from "./rate-limit"; export * from "./saml-configs"; export * from "./scim-tokens"; export * from "./secret-approval-policies"; export * from "./secret-approval-policies-approvers"; export * from "./secret-approval-request-secret-tags"; +export * from "./secret-approval-request-secret-tags-v2"; export * from "./secret-approval-requests"; export * from "./secret-approval-requests-reviewers"; export * from "./secret-approval-requests-secrets"; +export * from "./secret-approval-requests-secrets-v2"; export * from "./secret-blind-indexes"; export * from "./secret-folder-versions"; export * from "./secret-folders"; export * from "./secret-imports"; export * from "./secret-references"; +export * from "./secret-references-v2"; +export * from "./secret-rotation-output-v2"; export * from "./secret-rotation-outputs"; export * from "./secret-rotations"; export * from "./secret-scanning-git-risks"; export * from "./secret-sharing"; export * from "./secret-snapshot-folders"; export * from "./secret-snapshot-secrets"; +export * from "./secret-snapshot-secrets-v2"; export * from "./secret-snapshots"; export * from "./secret-tag-junction"; export * from "./secret-tags"; +export * from "./secret-v2-tag-junction"; export * from "./secret-version-tag-junction"; +export * from "./secret-version-v2-tag-junction"; export * from "./secret-versions"; +export * from "./secret-versions-v2"; export * from "./secrets"; +export * from "./secrets-v2"; export * from "./service-tokens"; +export * from "./slack-integrations"; export * from "./super-admin"; +export * from "./totp-configs"; export * from "./trusted-ips"; export * from "./user-actions"; export * from "./user-aliases"; @@ -82,3 +114,4 @@ export * from "./user-encryption-keys"; export * from "./user-group-membership"; export * from "./users"; export * from "./webhooks"; +export * from "./workflow-integrations"; diff --git a/backend/src/db/schemas/integration-auths.ts b/backend/src/db/schemas/integration-auths.ts index 185beae366..85368eff7d 100644 --- a/backend/src/db/schemas/integration-auths.ts +++ b/backend/src/db/schemas/integration-auths.ts @@ -5,6 +5,8 @@ import { z } from "zod"; +import { zodBuffer } from "@app/lib/zod"; + import { TImmutableDBKeys } from "./models"; export const IntegrationAuthsSchema = z.object({ @@ -29,7 +31,14 @@ export const IntegrationAuthsSchema = z.object({ keyEncoding: z.string(), projectId: z.string(), createdAt: z.date(), - updatedAt: z.date() + updatedAt: z.date(), + awsAssumeIamRoleArnCipherText: z.string().nullable().optional(), + awsAssumeIamRoleArnIV: z.string().nullable().optional(), + awsAssumeIamRoleArnTag: 
z.string().nullable().optional(),
+  encryptedAccess: zodBuffer.nullable().optional(),
+  encryptedAccessId: zodBuffer.nullable().optional(),
+  encryptedRefresh: zodBuffer.nullable().optional(),
+  encryptedAwsAssumeIamRoleArn: zodBuffer.nullable().optional()
 });
 
 export type TIntegrationAuths = z.infer<typeof IntegrationAuthsSchema>;
diff --git a/backend/src/db/schemas/internal-kms-key-version.ts b/backend/src/db/schemas/internal-kms-key-version.ts
new file mode 100644
index 0000000000..fc1e3c3db1
--- /dev/null
+++ b/backend/src/db/schemas/internal-kms-key-version.ts
@@ -0,0 +1,21 @@
+// Code generated by automation script, DO NOT EDIT.
+// Automated by pulling database and generating zod schema
+// To update. Just run npm run generate:schema
+// Written by akhilmhdh.
+
+import { z } from "zod";
+
+import { zodBuffer } from "@app/lib/zod";
+
+import { TImmutableDBKeys } from "./models";
+
+export const InternalKmsKeyVersionSchema = z.object({
+  id: z.string().uuid(),
+  encryptedKey: zodBuffer,
+  version: z.number(),
+  internalKmsId: z.string().uuid()
+});
+
+export type TInternalKmsKeyVersion = z.infer<typeof InternalKmsKeyVersionSchema>;
+export type TInternalKmsKeyVersionInsert = Omit<z.input<typeof InternalKmsKeyVersionSchema>, TImmutableDBKeys>;
+export type TInternalKmsKeyVersionUpdate = Partial<Omit<z.input<typeof InternalKmsKeyVersionSchema>, TImmutableDBKeys>>;
diff --git a/backend/src/db/schemas/internal-kms.ts b/backend/src/db/schemas/internal-kms.ts
new file mode 100644
index 0000000000..38e64dc5b7
--- /dev/null
+++ b/backend/src/db/schemas/internal-kms.ts
@@ -0,0 +1,22 @@
+// Code generated by automation script, DO NOT EDIT.
+// Automated by pulling database and generating zod schema
+// To update. Just run npm run generate:schema
+// Written by akhilmhdh.
+
+import { z } from "zod";
+
+import { zodBuffer } from "@app/lib/zod";
+
+import { TImmutableDBKeys } from "./models";
+
+export const InternalKmsSchema = z.object({
+  id: z.string().uuid(),
+  encryptedKey: zodBuffer,
+  encryptionAlgorithm: z.string(),
+  version: z.number().default(1),
+  kmsKeyId: z.string().uuid()
+});
+
+export type TInternalKms = z.infer<typeof InternalKmsSchema>;
+export type TInternalKmsInsert = Omit<z.input<typeof InternalKmsSchema>, TImmutableDBKeys>;
+export type TInternalKmsUpdate = Partial<Omit<z.input<typeof InternalKmsSchema>, TImmutableDBKeys>>;
diff --git a/backend/src/db/schemas/kms-keys.ts b/backend/src/db/schemas/kms-keys.ts
index 503c270d9a..dffaeec24c 100644
--- a/backend/src/db/schemas/kms-keys.ts
+++ b/backend/src/db/schemas/kms-keys.ts
@@ -5,20 +5,19 @@ import { z } from "zod";
-import { zodBuffer } from "@app/lib/zod";
-
 import { TImmutableDBKeys } from "./models";
 
 export const KmsKeysSchema = z.object({
   id: z.string().uuid(),
-  encryptedKey: zodBuffer,
-  encryptionAlgorithm: z.string(),
-  version: z.number().default(1),
   description: z.string().nullable().optional(),
   isDisabled: z.boolean().default(false).nullable().optional(),
   isReserved: z.boolean().default(true).nullable().optional(),
+  orgId: z.string().uuid(),
+  name: z.string(),
+  createdAt: z.date(),
+  updatedAt: z.date(),
   projectId: z.string().nullable().optional(),
-  orgId: z.string().uuid().nullable().optional()
+  slug: z.string().nullable().optional()
 });
 
 export type TKmsKeys = z.infer<typeof KmsKeysSchema>;
diff --git a/backend/src/db/schemas/kms-root-config.ts b/backend/src/db/schemas/kms-root-config.ts
index d2c0edbc5e..d15e1dff89 100644
--- a/backend/src/db/schemas/kms-root-config.ts
+++ b/backend/src/db/schemas/kms-root-config.ts
@@ -11,7 +11,10 @@ import { TImmutableDBKeys } from "./models";
 
 export const KmsRootConfigSchema = z.object({
   id: z.string().uuid(),
-  encryptedRootKey: zodBuffer
+  encryptedRootKey: zodBuffer,
+  encryptionStrategy: z.string(),
+  createdAt: z.date(),
+  updatedAt: z.date()
 });
 
 export type TKmsRootConfig = z.infer<typeof KmsRootConfigSchema>;
diff --git a/backend/src/db/schemas/ldap-configs.ts b/backend/src/db/schemas/ldap-configs.ts
index 86fd6acb6a..460c2cff66 100644
--- a/backend/src/db/schemas/ldap-configs.ts
+++ b/backend/src/db/schemas/ldap-configs.ts
@@ -26,7 +26,8 @@ export const LdapConfigsSchema = z.object({
   updatedAt: z.date(),
   groupSearchBase: z.string().default(""),
   groupSearchFilter: z.string().default(""),
-  searchFilter: z.string().default("")
+  searchFilter: z.string().default(""),
+  uniqueUserAttribute: z.string().default("")
 });
 
 export type TLdapConfigs = z.infer<typeof LdapConfigsSchema>;
diff --git a/backend/src/db/schemas/models.ts b/backend/src/db/schemas/models.ts
index f9c8436dfd..171931f7ea 100644
--- a/backend/src/db/schemas/models.ts
+++ b/backend/src/db/schemas/models.ts
@@ -2,9 +2,22 @@ import { z } from "zod";
 
 export enum TableName {
   Users = "users",
+  CertificateAuthority = "certificate_authorities",
+  CertificateTemplateEstConfig = "certificate_template_est_configs",
+  CertificateAuthorityCert = "certificate_authority_certs",
+  CertificateAuthoritySecret = "certificate_authority_secret",
+  CertificateAuthorityCrl = "certificate_authority_crl",
+  Certificate = "certificates",
+  CertificateBody = "certificate_bodies",
+  CertificateSecret = "certificate_secrets",
+  CertificateTemplate = "certificate_templates",
+  PkiAlert = "pki_alerts",
+  PkiCollection = "pki_collections",
+  PkiCollectionItem = "pki_collection_items",
   Groups = "groups",
   GroupProjectMembership = "group_project_memberships",
   GroupProjectMembershipRole = "group_project_membership_roles",
+  ExternalGroupOrgRoleMapping = "external_group_org_role_mappings",
   UserGroupMembership = "user_group_membership",
   UserAliases = "user_aliases",
   UserEncryptionKey = "user_encryption_keys",
@@ -18,6 +31,7 @@ export enum TableName {
   IncidentContact = "incident_contacts",
   UserAction = "user_actions",
   SuperAdmin = "super_admin",
+  RateLimit = "rate_limit",
   ApiKey = "api_keys",
   Project = "projects",
   ProjectBot = "project_bots",
@@ -27,6 +41,7 @@ export enum TableName {
   ProjectUserAdditionalPrivilege = "project_user_additional_privilege",
   ProjectUserMembershipRole = "project_user_membership_roles",
   ProjectKeys = "project_keys",
+  ProjectTemplates = "project_templates",
   Secret = "secrets",
   SecretReference = "secret_references",
   SecretSharing = "secret_sharing",
@@ -45,16 +60,20 @@ export enum TableName {
   Webhook = "webhooks",
   Identity = "identities",
   IdentityAccessToken = "identity_access_tokens",
+  IdentityTokenAuth = "identity_token_auths",
   IdentityUniversalAuth = "identity_universal_auths",
   IdentityKubernetesAuth = "identity_kubernetes_auths",
   IdentityGcpAuth = "identity_gcp_auths",
   IdentityAzureAuth = "identity_azure_auths",
   IdentityUaClientSecret = "identity_ua_client_secrets",
   IdentityAwsAuth = "identity_aws_auths",
+  IdentityOidcAuth = "identity_oidc_auths",
   IdentityOrgMembership = "identity_org_memberships",
   IdentityProjectMembership = "identity_project_memberships",
   IdentityProjectMembershipRole = "identity_project_membership_role",
   IdentityProjectAdditionalPrivilege = "identity_project_additional_privilege",
+  // used by both identity and users
+  IdentityMetadata = "identity_metadata",
   ScimToken = "scim_tokens",
   AccessApprovalPolicy = "access_approval_policies",
   AccessApprovalPolicyApprover = "access_approval_policies_approvers",
@@ -70,6 +89,7 @@ export enum TableName {
   SecretRotationOutput = "secret_rotation_outputs",
   SamlConfig = "saml_configs",
   LdapConfig = "ldap_configs",
+  OidcConfig = "oidc_configs",
   LdapGroupMap = "ldap_group_maps",
   AuditLog = "audit_logs",
   AuditLogStream = "audit_log_streams",
@@ -79,13 +99,30 @@ export enum TableName {
   TrustedIps = "trusted_ips",
   DynamicSecret = "dynamic_secrets",
   DynamicSecretLease = "dynamic_secret_leases",
+  SecretV2 = "secrets_v2",
+  SecretReferenceV2 = "secret_references_v2",
+  SecretVersionV2 = "secret_versions_v2",
+  SecretApprovalRequestSecretV2 = "secret_approval_requests_secrets_v2",
+  SecretApprovalRequestSecretTagV2 = "secret_approval_request_secret_tags_v2",
+  SnapshotSecretV2 = "secret_snapshot_secrets_v2",
   // junction tables with tags
+  SecretV2JnTag = "secret_v2_tag_junction",
   JnSecretTag = "secret_tag_junction",
   SecretVersionTag = "secret_version_tag_junction",
+  SecretVersionV2Tag = "secret_version_v2_tag_junction",
+  SecretRotationOutputV2 = "secret_rotation_output_v2",
   // KMS Service
   KmsServerRootConfig = "kms_root_config",
   KmsKey = "kms_keys",
-  KmsKeyVersion = "kms_key_versions"
+  ExternalKms = "external_kms",
+  InternalKms = "internal_kms",
+  InternalKmsKeyVersion = "internal_kms_key_version",
+  TotpConfig = "totp_configs",
+  // @deprecated
+  KmsKeyVersion = "kms_key_versions",
+  WorkflowIntegrations = "workflow_integrations",
+  SlackIntegrations = "slack_integrations",
+  ProjectSlackConfigs = "project_slack_configs"
 }
 
 export type TImmutableDBKeys = "id" | "createdAt" | "updatedAt";
@@ -142,7 +179,8 @@ export enum SecretType {
 
 export enum ProjectVersion {
   V1 = 1,
-  V2 = 2
+  V2 = 2,
+  V3 = 3
 }
 
 export enum ProjectUpgradeStatus {
@@ -152,9 +190,11 @@ export enum ProjectUpgradeStatus {
 }
 
 export enum IdentityAuthMethod {
-  Univeral = "universal-auth",
+  TOKEN_AUTH = "token-auth",
+  UNIVERSAL_AUTH = "universal-auth",
   KUBERNETES_AUTH = "kubernetes-auth",
   GCP_AUTH = "gcp-auth",
   AWS_AUTH = "aws-auth",
-  AZURE_AUTH = "azure-auth"
+  AZURE_AUTH = "azure-auth",
+  OIDC_AUTH = "oidc-auth"
 }
diff --git a/backend/src/db/schemas/oidc-configs.ts b/backend/src/db/schemas/oidc-configs.ts
new file mode 100644
index 0000000000..e8030267d7
--- /dev/null
+++ b/backend/src/db/schemas/oidc-configs.ts
@@ -0,0 +1,35 @@
+// Code generated by automation script, DO NOT EDIT.
+// Automated by pulling database and generating zod schema
+// To update. Just run npm run generate:schema
+// Written by akhilmhdh.
+
+import { z } from "zod";
+
+import { TImmutableDBKeys } from "./models";
+
+export const OidcConfigsSchema = z.object({
+  id: z.string().uuid(),
+  discoveryURL: z.string().nullable().optional(),
+  issuer: z.string().nullable().optional(),
+  authorizationEndpoint: z.string().nullable().optional(),
+  jwksUri: z.string().nullable().optional(),
+  tokenEndpoint: z.string().nullable().optional(),
+  userinfoEndpoint: z.string().nullable().optional(),
+  encryptedClientId: z.string(),
+  configurationType: z.string(),
+  clientIdIV: z.string(),
+  clientIdTag: z.string(),
+  encryptedClientSecret: z.string(),
+  clientSecretIV: z.string(),
+  clientSecretTag: z.string(),
+  allowedEmailDomains: z.string().nullable().optional(),
+  isActive: z.boolean(),
+  createdAt: z.date(),
+  updatedAt: z.date(),
+  orgId: z.string().uuid(),
+  lastUsed: z.date().nullable().optional()
+});
+
+export type TOidcConfigs = z.infer<typeof OidcConfigsSchema>;
+export type TOidcConfigsInsert = Omit<z.input<typeof OidcConfigsSchema>, TImmutableDBKeys>;
+export type TOidcConfigsUpdate = Partial<Omit<z.input<typeof OidcConfigsSchema>, TImmutableDBKeys>>;
diff --git a/backend/src/db/schemas/org-memberships.ts b/backend/src/db/schemas/org-memberships.ts
index 585addb7ce..e77b6e9c92 100644
--- a/backend/src/db/schemas/org-memberships.ts
+++ b/backend/src/db/schemas/org-memberships.ts
@@ -16,7 +16,9 @@ export const OrgMembershipsSchema = z.object({
   updatedAt: z.date(),
   userId: z.string().uuid().nullable().optional(),
   orgId: z.string().uuid(),
-  roleId: z.string().uuid().nullable().optional()
+  roleId: z.string().uuid().nullable().optional(),
+  projectFavorites: z.string().array().nullable().optional(),
+  isActive: z.boolean().default(true)
 });
 
 export type TOrgMemberships = z.infer<typeof OrgMembershipsSchema>;
diff --git a/backend/src/db/schemas/organizations.ts b/backend/src/db/schemas/organizations.ts
index f2933af866..3f40447add 100644
--- a/backend/src/db/schemas/organizations.ts
+++ b/backend/src/db/schemas/organizations.ts
@@ -5,6 +5,8 @@ import { z } from "zod";
+import { zodBuffer } from "@app/lib/zod";
+
 import { TImmutableDBKeys } from "./models";
 
 export const OrganizationsSchema = z.object({
@@ -15,7 +17,12 @@ export const OrganizationsSchema = z.object({
   createdAt: z.date(),
   updatedAt: z.date(),
   authEnforced: z.boolean().default(false).nullable().optional(),
-  scimEnabled: z.boolean().default(false).nullable().optional()
+  scimEnabled: z.boolean().default(false).nullable().optional(),
+  kmsDefaultKeyId: z.string().uuid().nullable().optional(),
+  kmsEncryptedDataKey: zodBuffer.nullable().optional(),
+  defaultMembershipRole: z.string().default("member"),
+  enforceMfa: z.boolean().default(false),
+  selectedMfaMethod: z.string().nullable().optional()
 });
 
 export type TOrganizations = z.infer<typeof OrganizationsSchema>;
diff --git a/backend/src/db/schemas/pki-alerts.ts b/backend/src/db/schemas/pki-alerts.ts
new file mode 100644
index 0000000000..7bc9d2d7ca
--- /dev/null
+++ b/backend/src/db/schemas/pki-alerts.ts
@@ -0,0 +1,23 @@
+// Code generated by automation script, DO NOT EDIT.
+// Automated by pulling database and generating zod schema
+// To update. Just run npm run generate:schema
+// Written by akhilmhdh.
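// Illustrative sketch, not from this PR: fields with .default() (e.g. isActive above, or
// defaultMembershipRole on OrganizationsSchema) are filled in by zod when absent from the
// input, so z.input and z.infer diverge. This is why the generated Insert/Update types
// are built from z.input rather than z.infer:
import { z } from "zod";

const Demo = z.object({ isActive: z.boolean().default(true) });
type DemoInput = z.input<typeof Demo>; // { isActive?: boolean | undefined }
type DemoOutput = z.infer<typeof Demo>; // { isActive: boolean }

const parsed: DemoOutput = Demo.parse({} satisfies DemoInput); // { isActive: true }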
+
+import { z } from "zod";
+
+import { TImmutableDBKeys } from "./models";
+
+export const PkiAlertsSchema = z.object({
+  id: z.string().uuid(),
+  createdAt: z.date(),
+  updatedAt: z.date(),
+  projectId: z.string(),
+  pkiCollectionId: z.string().uuid(),
+  name: z.string(),
+  alertBeforeDays: z.number(),
+  recipientEmails: z.string()
+});
+
+export type TPkiAlerts = z.infer<typeof PkiAlertsSchema>;
+export type TPkiAlertsInsert = Omit<z.input<typeof PkiAlertsSchema>, TImmutableDBKeys>;
+export type TPkiAlertsUpdate = Partial<Omit<z.input<typeof PkiAlertsSchema>, TImmutableDBKeys>>;
diff --git a/backend/src/db/schemas/pki-collection-items.ts b/backend/src/db/schemas/pki-collection-items.ts
new file mode 100644
index 0000000000..f04f5a1ee0
--- /dev/null
+++ b/backend/src/db/schemas/pki-collection-items.ts
@@ -0,0 +1,21 @@
+// Code generated by automation script, DO NOT EDIT.
+// Automated by pulling database and generating zod schema
+// To update. Just run npm run generate:schema
+// Written by akhilmhdh.
+
+import { z } from "zod";
+
+import { TImmutableDBKeys } from "./models";
+
+export const PkiCollectionItemsSchema = z.object({
+  id: z.string().uuid(),
+  createdAt: z.date(),
+  updatedAt: z.date(),
+  pkiCollectionId: z.string().uuid(),
+  caId: z.string().uuid().nullable().optional(),
+  certId: z.string().uuid().nullable().optional()
+});
+
+export type TPkiCollectionItems = z.infer<typeof PkiCollectionItemsSchema>;
+export type TPkiCollectionItemsInsert = Omit<z.input<typeof PkiCollectionItemsSchema>, TImmutableDBKeys>;
+export type TPkiCollectionItemsUpdate = Partial<Omit<z.input<typeof PkiCollectionItemsSchema>, TImmutableDBKeys>>;
diff --git a/backend/src/db/schemas/pki-collections.ts b/backend/src/db/schemas/pki-collections.ts
new file mode 100644
index 0000000000..0f9a5675fb
--- /dev/null
+++ b/backend/src/db/schemas/pki-collections.ts
@@ -0,0 +1,21 @@
+// Code generated by automation script, DO NOT EDIT.
+// Automated by pulling database and generating zod schema
+// To update. Just run npm run generate:schema
+// Written by akhilmhdh.
+
+import { z } from "zod";
+
+import { TImmutableDBKeys } from "./models";
+
+export const PkiCollectionsSchema = z.object({
+  id: z.string().uuid(),
+  createdAt: z.date(),
+  updatedAt: z.date(),
+  projectId: z.string(),
+  name: z.string(),
+  description: z.string()
+});
+
+export type TPkiCollections = z.infer<typeof PkiCollectionsSchema>;
+export type TPkiCollectionsInsert = Omit<z.input<typeof PkiCollectionsSchema>, TImmutableDBKeys>;
+export type TPkiCollectionsUpdate = Partial<Omit<z.input<typeof PkiCollectionsSchema>, TImmutableDBKeys>>;
diff --git a/backend/src/db/schemas/project-roles.ts b/backend/src/db/schemas/project-roles.ts
index e10f6fd4c5..37eb58fcb0 100644
--- a/backend/src/db/schemas/project-roles.ts
+++ b/backend/src/db/schemas/project-roles.ts
@@ -15,7 +15,8 @@ export const ProjectRolesSchema = z.object({
   permissions: z.unknown(),
   createdAt: z.date(),
   updatedAt: z.date(),
-  projectId: z.string()
+  projectId: z.string(),
+  version: z.number().default(1)
 });
 
 export type TProjectRoles = z.infer<typeof ProjectRolesSchema>;
diff --git a/backend/src/db/schemas/project-slack-configs.ts b/backend/src/db/schemas/project-slack-configs.ts
new file mode 100644
index 0000000000..0a46e5aae7
--- /dev/null
+++ b/backend/src/db/schemas/project-slack-configs.ts
@@ -0,0 +1,24 @@
+// Code generated by automation script, DO NOT EDIT.
+// Automated by pulling database and generating zod schema
+// To update. Just run npm run generate:schema
+// Written by akhilmhdh.
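// Illustrative sketch, not from this PR: `recipientEmails` on PkiAlertsSchema above is a
// single string column; assuming it stores a delimited list (comma-separated here, which
// is an assumption, not confirmed by this diff), a consumer might split it before
// dispatching expiry notifications:
import { TPkiAlerts } from "./pki-alerts";

function recipientsOf(alert: TPkiAlerts): string[] {
  return alert.recipientEmails
    .split(",")
    .map((email) => email.trim())
    .filter(Boolean);
}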
+
+import { z } from "zod";
+
+import { TImmutableDBKeys } from "./models";
+
+export const ProjectSlackConfigsSchema = z.object({
+  id: z.string().uuid(),
+  projectId: z.string(),
+  slackIntegrationId: z.string().uuid(),
+  isAccessRequestNotificationEnabled: z.boolean().default(false),
+  accessRequestChannels: z.string().default(""),
+  isSecretRequestNotificationEnabled: z.boolean().default(false),
+  secretRequestChannels: z.string().default(""),
+  createdAt: z.date(),
+  updatedAt: z.date()
+});
+
+export type TProjectSlackConfigs = z.infer<typeof ProjectSlackConfigsSchema>;
+export type TProjectSlackConfigsInsert = Omit<z.input<typeof ProjectSlackConfigsSchema>, TImmutableDBKeys>;
+export type TProjectSlackConfigsUpdate = Partial<Omit<z.input<typeof ProjectSlackConfigsSchema>, TImmutableDBKeys>>;
diff --git a/backend/src/db/schemas/project-templates.ts b/backend/src/db/schemas/project-templates.ts
new file mode 100644
index 0000000000..68f37d256d
--- /dev/null
+++ b/backend/src/db/schemas/project-templates.ts
@@ -0,0 +1,23 @@
+// Code generated by automation script, DO NOT EDIT.
+// Automated by pulling database and generating zod schema
+// To update. Just run npm run generate:schema
+// Written by akhilmhdh.
+
+import { z } from "zod";
+
+import { TImmutableDBKeys } from "./models";
+
+export const ProjectTemplatesSchema = z.object({
+  id: z.string().uuid(),
+  name: z.string(),
+  description: z.string().nullable().optional(),
+  roles: z.unknown(),
+  environments: z.unknown(),
+  orgId: z.string().uuid(),
+  createdAt: z.date(),
+  updatedAt: z.date()
+});
+
+export type TProjectTemplates = z.infer<typeof ProjectTemplatesSchema>;
+export type TProjectTemplatesInsert = Omit<z.input<typeof ProjectTemplatesSchema>, TImmutableDBKeys>;
+export type TProjectTemplatesUpdate = Partial<Omit<z.input<typeof ProjectTemplatesSchema>, TImmutableDBKeys>>;
diff --git a/backend/src/db/schemas/project-user-additional-privilege.ts b/backend/src/db/schemas/project-user-additional-privilege.ts
index 0fd0e5faad..e657fc9454 100644
--- a/backend/src/db/schemas/project-user-additional-privilege.ts
+++ b/backend/src/db/schemas/project-user-additional-privilege.ts
@@ -10,7 +10,7 @@ import { TImmutableDBKeys } from "./models";
 
 export const ProjectUserAdditionalPrivilegeSchema = z.object({
   id: z.string().uuid(),
   slug: z.string(),
-  projectMembershipId: z.string().uuid(),
+  projectMembershipId: z.string().uuid().nullable().optional(),
   isTemporary: z.boolean().default(false),
   temporaryMode: z.string().nullable().optional(),
   temporaryRange: z.string().nullable().optional(),
@@ -18,7 +18,9 @@ export const ProjectUserAdditionalPrivilegeSchema = z.object({
   temporaryAccessEndTime: z.date().nullable().optional(),
   permissions: z.unknown(),
   createdAt: z.date(),
-  updatedAt: z.date()
+  updatedAt: z.date(),
+  userId: z.string().uuid(),
+  projectId: z.string()
 });
 
 export type TProjectUserAdditionalPrivilege = z.infer<typeof ProjectUserAdditionalPrivilegeSchema>;
diff --git a/backend/src/db/schemas/projects.ts b/backend/src/db/schemas/projects.ts
index 3965e24c0a..deba51b9a4 100644
--- a/backend/src/db/schemas/projects.ts
+++ b/backend/src/db/schemas/projects.ts
@@ -5,6 +5,8 @@ import { z } from "zod";
+import { zodBuffer } from "@app/lib/zod";
+
 import { TImmutableDBKeys } from "./models";
 
 export const ProjectsSchema = z.object({
@@ -16,7 +18,12 @@ export const ProjectsSchema = z.object({
   createdAt: z.date(),
   updatedAt: z.date(),
   version: z.number().default(1),
-  upgradeStatus: z.string().nullable().optional()
+  upgradeStatus: z.string().nullable().optional(),
+  pitVersionLimit: z.number().default(10),
+  kmsCertificateKeyId: z.string().uuid().nullable().optional(),
+  auditLogsRetentionDays: z.number().nullable().optional(),
+  kmsSecretManagerKeyId: z.string().uuid().nullable().optional(),
+  kmsSecretManagerEncryptedDataKey: zodBuffer.nullable().optional()
 });
 
 export type TProjects = z.infer<typeof ProjectsSchema>;
diff --git a/backend/src/db/schemas/rate-limit.ts b/backend/src/db/schemas/rate-limit.ts
new file mode 100644
index 0000000000..233f6cdbc4
--- /dev/null
+++ b/backend/src/db/schemas/rate-limit.ts
@@ -0,0 +1,25 @@
+// Code generated by automation script, DO NOT EDIT.
+// Automated by pulling database and generating zod schema
+// To update. Just run npm run generate:schema
+// Written by akhilmhdh.
+
+import { z } from "zod";
+
+import { TImmutableDBKeys } from "./models";
+
+export const RateLimitSchema = z.object({
+  id: z.string().uuid(),
+  readRateLimit: z.number().default(600),
+  writeRateLimit: z.number().default(200),
+  secretsRateLimit: z.number().default(60),
+  authRateLimit: z.number().default(60),
+  inviteUserRateLimit: z.number().default(30),
+  mfaRateLimit: z.number().default(20),
+  publicEndpointLimit: z.number().default(30),
+  createdAt: z.date(),
+  updatedAt: z.date()
+});
+
+export type TRateLimit = z.infer<typeof RateLimitSchema>;
+export type TRateLimitInsert = Omit<z.input<typeof RateLimitSchema>, TImmutableDBKeys>;
+export type TRateLimitUpdate = Partial<Omit<z.input<typeof RateLimitSchema>, TImmutableDBKeys>>;
diff --git a/backend/src/db/schemas/secret-approval-policies-approvers.ts b/backend/src/db/schemas/secret-approval-policies-approvers.ts
index 12a3119e6d..f9aebf0194 100644
--- a/backend/src/db/schemas/secret-approval-policies-approvers.ts
+++ b/backend/src/db/schemas/secret-approval-policies-approvers.ts
@@ -9,10 +9,11 @@ import { TImmutableDBKeys } from "./models";
 
 export const SecretApprovalPoliciesApproversSchema = z.object({
   id: z.string().uuid(),
-  approverId: z.string().uuid(),
   policyId: z.string().uuid(),
   createdAt: z.date(),
-  updatedAt: z.date()
+  updatedAt: z.date(),
+  approverUserId: z.string().uuid().nullable().optional(),
+  approverGroupId: z.string().uuid().nullable().optional()
 });
 
 export type TSecretApprovalPoliciesApprovers = z.infer<typeof SecretApprovalPoliciesApproversSchema>;
diff --git a/backend/src/db/schemas/secret-approval-policies.ts b/backend/src/db/schemas/secret-approval-policies.ts
index d907ef1e0f..94aeba0509 100644
--- a/backend/src/db/schemas/secret-approval-policies.ts
+++ b/backend/src/db/schemas/secret-approval-policies.ts
@@ -14,7 +14,8 @@ export const SecretApprovalPoliciesSchema = z.object({
   approvals: z.number().default(1),
   envId: z.string().uuid(),
   createdAt: z.date(),
-  updatedAt: z.date()
+  updatedAt: z.date(),
+  enforcementLevel: z.string().default("hard")
 });
 
 export type TSecretApprovalPolicies = z.infer<typeof SecretApprovalPoliciesSchema>;
diff --git a/backend/src/db/schemas/secret-approval-request-secret-tags-v2.ts b/backend/src/db/schemas/secret-approval-request-secret-tags-v2.ts
new file mode 100644
index 0000000000..6ded05a5dd
--- /dev/null
+++ b/backend/src/db/schemas/secret-approval-request-secret-tags-v2.ts
@@ -0,0 +1,25 @@
+// Code generated by automation script, DO NOT EDIT.
+// Automated by pulling database and generating zod schema
+// To update. Just run npm run generate:schema
+// Written by akhilmhdh.
+
+import { z } from "zod";
+
+import { TImmutableDBKeys } from "./models";
+
+export const SecretApprovalRequestSecretTagsV2Schema = z.object({
+  id: z.string().uuid(),
+  secretId: z.string().uuid(),
+  tagId: z.string().uuid(),
+  createdAt: z.date(),
+  updatedAt: z.date()
+});
+
+export type TSecretApprovalRequestSecretTagsV2 = z.infer<typeof SecretApprovalRequestSecretTagsV2Schema>;
+export type TSecretApprovalRequestSecretTagsV2Insert = Omit<
+  z.input<typeof SecretApprovalRequestSecretTagsV2Schema>,
+  TImmutableDBKeys
+>;
+export type TSecretApprovalRequestSecretTagsV2Update = Partial<
+  Omit<z.input<typeof SecretApprovalRequestSecretTagsV2Schema>, TImmutableDBKeys>
+>;
diff --git a/backend/src/db/schemas/secret-approval-requests-reviewers.ts b/backend/src/db/schemas/secret-approval-requests-reviewers.ts
index f3ff880473..a5c4455878 100644
--- a/backend/src/db/schemas/secret-approval-requests-reviewers.ts
+++ b/backend/src/db/schemas/secret-approval-requests-reviewers.ts
@@ -9,11 +9,11 @@ import { TImmutableDBKeys } from "./models";
 
 export const SecretApprovalRequestsReviewersSchema = z.object({
   id: z.string().uuid(),
-  member: z.string().uuid(),
   status: z.string(),
   requestId: z.string().uuid(),
   createdAt: z.date(),
-  updatedAt: z.date()
+  updatedAt: z.date(),
+  reviewerUserId: z.string().uuid()
 });
 
 export type TSecretApprovalRequestsReviewers = z.infer<typeof SecretApprovalRequestsReviewersSchema>;
diff --git a/backend/src/db/schemas/secret-approval-requests-secrets-v2.ts b/backend/src/db/schemas/secret-approval-requests-secrets-v2.ts
new file mode 100644
index 0000000000..ee25ed6ef6
--- /dev/null
+++ b/backend/src/db/schemas/secret-approval-requests-secrets-v2.ts
@@ -0,0 +1,37 @@
+// Code generated by automation script, DO NOT EDIT.
+// Automated by pulling database and generating zod schema
+// To update. Just run npm run generate:schema
+// Written by akhilmhdh.
+
+import { z } from "zod";
+
+import { zodBuffer } from "@app/lib/zod";
+
+import { TImmutableDBKeys } from "./models";
+
+export const SecretApprovalRequestsSecretsV2Schema = z.object({
+  id: z.string().uuid(),
+  version: z.number().default(1).nullable().optional(),
+  key: z.string(),
+  encryptedValue: zodBuffer.nullable().optional(),
+  encryptedComment: zodBuffer.nullable().optional(),
+  reminderNote: z.string().nullable().optional(),
+  reminderRepeatDays: z.number().nullable().optional(),
+  skipMultilineEncoding: z.boolean().default(false).nullable().optional(),
+  metadata: z.unknown().nullable().optional(),
+  createdAt: z.date(),
+  updatedAt: z.date(),
+  requestId: z.string().uuid(),
+  op: z.string(),
+  secretId: z.string().uuid().nullable().optional(),
+  secretVersion: z.string().uuid().nullable().optional()
+});
+
+export type TSecretApprovalRequestsSecretsV2 = z.infer<typeof SecretApprovalRequestsSecretsV2Schema>;
+export type TSecretApprovalRequestsSecretsV2Insert = Omit<
+  z.input<typeof SecretApprovalRequestsSecretsV2Schema>,
+  TImmutableDBKeys
+>;
+export type TSecretApprovalRequestsSecretsV2Update = Partial<
+  Omit<z.input<typeof SecretApprovalRequestsSecretsV2Schema>, TImmutableDBKeys>
+>;
diff --git a/backend/src/db/schemas/secret-approval-requests.ts b/backend/src/db/schemas/secret-approval-requests.ts
index 77ad370b7f..218a0f922f 100644
--- a/backend/src/db/schemas/secret-approval-requests.ts
+++ b/backend/src/db/schemas/secret-approval-requests.ts
@@ -15,11 +15,12 @@ export const SecretApprovalRequestsSchema = z.object({
   conflicts: z.unknown().nullable().optional(),
   slug: z.string(),
   folderId: z.string().uuid(),
-  statusChangeBy: z.string().uuid().nullable().optional(),
-  committerId: z.string().uuid(),
   createdAt: z.date(),
   updatedAt: z.date(),
-  isReplicated: z.boolean().nullable().optional()
+  isReplicated: z.boolean().nullable().optional(),
+  committerUserId: z.string().uuid(),
+  statusChangedByUserId: z.string().uuid().nullable().optional(),
+  bypassReason: z.string().nullable().optional()
 });
 
 export type TSecretApprovalRequests = z.infer<typeof SecretApprovalRequestsSchema>;
diff --git a/backend/src/db/schemas/secret-references-v2.ts b/backend/src/db/schemas/secret-references-v2.ts
new file mode 100644
index 0000000000..ed339e65c5
--- /dev/null
+++ b/backend/src/db/schemas/secret-references-v2.ts
@@ -0,0 +1,20 @@
+// Code generated by automation script, DO NOT EDIT.
+// Automated by pulling database and generating zod schema
+// To update. Just run npm run generate:schema
+// Written by akhilmhdh.
+
+import { z } from "zod";
+
+import { TImmutableDBKeys } from "./models";
+
+export const SecretReferencesV2Schema = z.object({
+  id: z.string().uuid(),
+  environment: z.string(),
+  secretPath: z.string(),
+  secretKey: z.string(),
+  secretId: z.string().uuid()
+});
+
+export type TSecretReferencesV2 = z.infer<typeof SecretReferencesV2Schema>;
+export type TSecretReferencesV2Insert = Omit<z.input<typeof SecretReferencesV2Schema>, TImmutableDBKeys>;
+export type TSecretReferencesV2Update = Partial<Omit<z.input<typeof SecretReferencesV2Schema>, TImmutableDBKeys>>;
diff --git a/backend/src/db/schemas/secret-rotation-output-v2.ts b/backend/src/db/schemas/secret-rotation-output-v2.ts
new file mode 100644
index 0000000000..28d45413a7
--- /dev/null
+++ b/backend/src/db/schemas/secret-rotation-output-v2.ts
@@ -0,0 +1,21 @@
+// Code generated by automation script, DO NOT EDIT.
+// Automated by pulling database and generating zod schema
+// To update. Just run npm run generate:schema
+// Written by akhilmhdh.
+
+import { z } from "zod";
+
+import { TImmutableDBKeys } from "./models";
+
+export const SecretRotationOutputV2Schema = z.object({
+  id: z.string().uuid(),
+  key: z.string(),
+  secretId: z.string().uuid(),
+  rotationId: z.string().uuid()
+});
+
+export type TSecretRotationOutputV2 = z.infer<typeof SecretRotationOutputV2Schema>;
+export type TSecretRotationOutputV2Insert = Omit<z.input<typeof SecretRotationOutputV2Schema>, TImmutableDBKeys>;
+export type TSecretRotationOutputV2Update = Partial<
+  Omit<z.input<typeof SecretRotationOutputV2Schema>, TImmutableDBKeys>
+>;
diff --git a/backend/src/db/schemas/secret-sharing.ts b/backend/src/db/schemas/secret-sharing.ts
index 6fa104ebeb..d47f288b22 100644
--- a/backend/src/db/schemas/secret-sharing.ts
+++ b/backend/src/db/schemas/secret-sharing.ts
@@ -5,20 +5,28 @@ import { z } from "zod";
+import { zodBuffer } from "@app/lib/zod";
+
 import { TImmutableDBKeys } from "./models";
 
 export const SecretSharingSchema = z.object({
   id: z.string().uuid(),
-  encryptedValue: z.string(),
-  iv: z.string(),
-  tag: z.string(),
-  hashedHex: z.string(),
+  encryptedValue: z.string().nullable().optional(),
+  iv: z.string().nullable().optional(),
+  tag: z.string().nullable().optional(),
+  hashedHex: z.string().nullable().optional(),
   expiresAt: z.date(),
-  userId: z.string().uuid(),
-  orgId: z.string().uuid(),
+  userId: z.string().uuid().nullable().optional(),
+  orgId: z.string().uuid().nullable().optional(),
   createdAt: z.date(),
   updatedAt: z.date(),
-  expiresAfterViews: z.number().nullable().optional()
+  expiresAfterViews: z.number().nullable().optional(),
+  accessType: z.string().default("anyone"),
+  name: z.string().nullable().optional(),
+  lastViewedAt: z.date().nullable().optional(),
+  password: z.string().nullable().optional(),
+  encryptedSecret: zodBuffer.nullable().optional(),
+  identifier: z.string().nullable().optional()
 });
 
 export type TSecretSharing = z.infer<typeof SecretSharingSchema>;
diff --git a/backend/src/db/schemas/secret-snapshot-secrets-v2.ts b/backend/src/db/schemas/secret-snapshot-secrets-v2.ts
new file mode 100644
index 0000000000..d643f7f8c7
--- /dev/null
+++ b/backend/src/db/schemas/secret-snapshot-secrets-v2.ts
@@ -0,0 +1,23 @@
+// Code generated by automation script, DO NOT EDIT.
+// Automated by pulling database and generating zod schema
+// To update. Just run npm run generate:schema
+// Written by akhilmhdh.
+
+import { z } from "zod";
+
+import { TImmutableDBKeys } from "./models";
+
+export const SecretSnapshotSecretsV2Schema = z.object({
+  id: z.string().uuid(),
+  envId: z.string().uuid(),
+  secretVersionId: z.string().uuid(),
+  snapshotId: z.string().uuid(),
+  createdAt: z.date(),
+  updatedAt: z.date()
+});
+
+export type TSecretSnapshotSecretsV2 = z.infer<typeof SecretSnapshotSecretsV2Schema>;
+export type TSecretSnapshotSecretsV2Insert = Omit<z.input<typeof SecretSnapshotSecretsV2Schema>, TImmutableDBKeys>;
+export type TSecretSnapshotSecretsV2Update = Partial<
+  Omit<z.input<typeof SecretSnapshotSecretsV2Schema>, TImmutableDBKeys>
+>;
diff --git a/backend/src/db/schemas/secret-tags.ts b/backend/src/db/schemas/secret-tags.ts
index f94e1e2629..7ba7204a4d 100644
--- a/backend/src/db/schemas/secret-tags.ts
+++ b/backend/src/db/schemas/secret-tags.ts
@@ -9,13 +9,13 @@ import { TImmutableDBKeys } from "./models";
 
 export const SecretTagsSchema = z.object({
   id: z.string().uuid(),
-  name: z.string(),
   slug: z.string(),
   color: z.string().nullable().optional(),
   createdAt: z.date(),
   updatedAt: z.date(),
   createdBy: z.string().uuid().nullable().optional(),
-  projectId: z.string()
+  projectId: z.string(),
+  createdByActorType: z.string().default("user")
 });
 
 export type TSecretTags = z.infer<typeof SecretTagsSchema>;
diff --git a/backend/src/db/schemas/secret-v2-tag-junction.ts b/backend/src/db/schemas/secret-v2-tag-junction.ts
new file mode 100644
index 0000000000..982e11f505
--- /dev/null
+++ b/backend/src/db/schemas/secret-v2-tag-junction.ts
@@ -0,0 +1,18 @@
+// Code generated by automation script, DO NOT EDIT.
+// Automated by pulling database and generating zod schema
+// To update. Just run npm run generate:schema
+// Written by akhilmhdh.
+
+import { z } from "zod";
+
+import { TImmutableDBKeys } from "./models";
+
+export const SecretV2TagJunctionSchema = z.object({
+  id: z.string().uuid(),
+  secrets_v2Id: z.string().uuid(),
+  secret_tagsId: z.string().uuid()
+});
+
+export type TSecretV2TagJunction = z.infer<typeof SecretV2TagJunctionSchema>;
+export type TSecretV2TagJunctionInsert = Omit<z.input<typeof SecretV2TagJunctionSchema>, TImmutableDBKeys>;
+export type TSecretV2TagJunctionUpdate = Partial<Omit<z.input<typeof SecretV2TagJunctionSchema>, TImmutableDBKeys>>;
diff --git a/backend/src/db/schemas/secret-version-v2-tag-junction.ts b/backend/src/db/schemas/secret-version-v2-tag-junction.ts
new file mode 100644
index 0000000000..4679116a0c
--- /dev/null
+++ b/backend/src/db/schemas/secret-version-v2-tag-junction.ts
@@ -0,0 +1,23 @@
+// Code generated by automation script, DO NOT EDIT.
+// Automated by pulling database and generating zod schema
+// To update. Just run npm run generate:schema
+// Written by akhilmhdh.
+
+import { z } from "zod";
+
+import { TImmutableDBKeys } from "./models";
+
+export const SecretVersionV2TagJunctionSchema = z.object({
+  id: z.string().uuid(),
+  secret_versions_v2Id: z.string().uuid(),
+  secret_tagsId: z.string().uuid()
+});
+
+export type TSecretVersionV2TagJunction = z.infer<typeof SecretVersionV2TagJunctionSchema>;
+export type TSecretVersionV2TagJunctionInsert = Omit<
+  z.input<typeof SecretVersionV2TagJunctionSchema>,
+  TImmutableDBKeys
+>;
+export type TSecretVersionV2TagJunctionUpdate = Partial<
+  Omit<z.input<typeof SecretVersionV2TagJunctionSchema>, TImmutableDBKeys>
+>;
diff --git a/backend/src/db/schemas/secret-versions-v2.ts b/backend/src/db/schemas/secret-versions-v2.ts
new file mode 100644
index 0000000000..160ed1c144
--- /dev/null
+++ b/backend/src/db/schemas/secret-versions-v2.ts
@@ -0,0 +1,33 @@
+// Code generated by automation script, DO NOT EDIT.
+// Automated by pulling database and generating zod schema
+// To update. Just run npm run generate:schema
+// Written by akhilmhdh.
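// Illustrative sketch, not from this PR: the `secrets_v2Id` / `secret_tagsId` column
// names above appear to follow the `<tableName>Id` convention of the createJunctionTable
// helper in backend/src/db/utils.ts (an assumption based on that helper's signature).
// A tag attachment row would then look like:
import { randomUUID } from "crypto";

import { TSecretV2TagJunctionInsert } from "./secret-v2-tag-junction";

const attachTag: TSecretV2TagJunctionInsert = {
  secrets_v2Id: randomUUID(), // hypothetical secret id
  secret_tagsId: randomUUID() // hypothetical tag id
};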
+
+import { z } from "zod";
+
+import { zodBuffer } from "@app/lib/zod";
+
+import { TImmutableDBKeys } from "./models";
+
+export const SecretVersionsV2Schema = z.object({
+  id: z.string().uuid(),
+  version: z.number().default(1),
+  type: z.string().default("shared"),
+  key: z.string(),
+  encryptedValue: zodBuffer.nullable().optional(),
+  encryptedComment: zodBuffer.nullable().optional(),
+  reminderNote: z.string().nullable().optional(),
+  reminderRepeatDays: z.number().nullable().optional(),
+  skipMultilineEncoding: z.boolean().default(false).nullable().optional(),
+  metadata: z.unknown().nullable().optional(),
+  envId: z.string().uuid().nullable().optional(),
+  secretId: z.string().uuid(),
+  folderId: z.string().uuid(),
+  userId: z.string().uuid().nullable().optional(),
+  createdAt: z.date(),
+  updatedAt: z.date()
+});
+
+export type TSecretVersionsV2 = z.infer<typeof SecretVersionsV2Schema>;
+export type TSecretVersionsV2Insert = Omit<z.input<typeof SecretVersionsV2Schema>, TImmutableDBKeys>;
+export type TSecretVersionsV2Update = Partial<Omit<z.input<typeof SecretVersionsV2Schema>, TImmutableDBKeys>>;
diff --git a/backend/src/db/schemas/secrets-v2.ts b/backend/src/db/schemas/secrets-v2.ts
new file mode 100644
index 0000000000..18fa3b4762
--- /dev/null
+++ b/backend/src/db/schemas/secrets-v2.ts
@@ -0,0 +1,31 @@
+// Code generated by automation script, DO NOT EDIT.
+// Automated by pulling database and generating zod schema
+// To update. Just run npm run generate:schema
+// Written by akhilmhdh.
+
+import { z } from "zod";
+
+import { zodBuffer } from "@app/lib/zod";
+
+import { TImmutableDBKeys } from "./models";
+
+export const SecretsV2Schema = z.object({
+  id: z.string().uuid(),
+  version: z.number().default(1),
+  type: z.string().default("shared"),
+  key: z.string(),
+  encryptedValue: zodBuffer.nullable().optional(),
+  encryptedComment: zodBuffer.nullable().optional(),
+  reminderNote: z.string().nullable().optional(),
+  reminderRepeatDays: z.number().nullable().optional(),
+  skipMultilineEncoding: z.boolean().default(false).nullable().optional(),
+  metadata: z.unknown().nullable().optional(),
+  userId: z.string().uuid().nullable().optional(),
+  folderId: z.string().uuid(),
+  createdAt: z.date(),
+  updatedAt: z.date()
+});
+
+export type TSecretsV2 = z.infer<typeof SecretsV2Schema>;
+export type TSecretsV2Insert = Omit<z.input<typeof SecretsV2Schema>, TImmutableDBKeys>;
+export type TSecretsV2Update = Partial<Omit<z.input<typeof SecretsV2Schema>, TImmutableDBKeys>>;
diff --git a/backend/src/db/schemas/slack-integrations.ts b/backend/src/db/schemas/slack-integrations.ts
new file mode 100644
index 0000000000..c989d4224c
--- /dev/null
+++ b/backend/src/db/schemas/slack-integrations.ts
@@ -0,0 +1,27 @@
+// Code generated by automation script, DO NOT EDIT.
+// Automated by pulling database and generating zod schema
+// To update. Just run npm run generate:schema
+// Written by akhilmhdh.
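// Illustrative sketch, not from this PR: SecretVersionsV2Schema mirrors SecretsV2Schema
// but pins a `secretId` and `version`, so a point-in-time view can be reconstructed by
// picking each secret's highest version. This is an editor's example, not the PR's own
// snapshot algorithm:
import { TSecretVersionsV2 } from "./secret-versions-v2";

function latestPerSecret(versions: TSecretVersionsV2[]): Map<string, TSecretVersionsV2> {
  const bySecret = new Map<string, TSecretVersionsV2>();
  for (const v of versions) {
    const prev = bySecret.get(v.secretId);
    if (!prev || v.version > prev.version) bySecret.set(v.secretId, v);
  }
  return bySecret;
}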
+
+import { z } from "zod";
+
+import { zodBuffer } from "@app/lib/zod";
+
+import { TImmutableDBKeys } from "./models";
+
+export const SlackIntegrationsSchema = z.object({
+  id: z.string().uuid(),
+  teamId: z.string(),
+  teamName: z.string(),
+  slackUserId: z.string(),
+  slackAppId: z.string(),
+  encryptedBotAccessToken: zodBuffer,
+  slackBotId: z.string(),
+  slackBotUserId: z.string(),
+  createdAt: z.date(),
+  updatedAt: z.date()
+});
+
+export type TSlackIntegrations = z.infer<typeof SlackIntegrationsSchema>;
+export type TSlackIntegrationsInsert = Omit<z.input<typeof SlackIntegrationsSchema>, TImmutableDBKeys>;
+export type TSlackIntegrationsUpdate = Partial<Omit<z.input<typeof SlackIntegrationsSchema>, TImmutableDBKeys>>;
diff --git a/backend/src/db/schemas/super-admin.ts b/backend/src/db/schemas/super-admin.ts
index 417d4e05e6..edab3a0e9b 100644
--- a/backend/src/db/schemas/super-admin.ts
+++ b/backend/src/db/schemas/super-admin.ts
@@ -5,6 +5,8 @@ import { z } from "zod";
+import { zodBuffer } from "@app/lib/zod";
+
 import { TImmutableDBKeys } from "./models";
 
 export const SuperAdminSchema = z.object({
@@ -16,7 +18,12 @@ export const SuperAdminSchema = z.object({
   allowedSignUpDomain: z.string().nullable().optional(),
   instanceId: z.string().uuid().default("00000000-0000-0000-0000-000000000000"),
   trustSamlEmails: z.boolean().default(false).nullable().optional(),
-  trustLdapEmails: z.boolean().default(false).nullable().optional()
+  trustLdapEmails: z.boolean().default(false).nullable().optional(),
+  trustOidcEmails: z.boolean().default(false).nullable().optional(),
+  defaultAuthOrgId: z.string().uuid().nullable().optional(),
+  enabledLoginMethods: z.string().array().nullable().optional(),
+  encryptedSlackClientId: zodBuffer.nullable().optional(),
+  encryptedSlackClientSecret: zodBuffer.nullable().optional()
 });
 
 export type TSuperAdmin = z.infer<typeof SuperAdminSchema>;
diff --git a/backend/src/db/schemas/totp-configs.ts b/backend/src/db/schemas/totp-configs.ts
new file mode 100644
index 0000000000..d6ec115925
--- /dev/null
+++ b/backend/src/db/schemas/totp-configs.ts
@@ -0,0 +1,24 @@
+// Code generated by automation script, DO NOT EDIT.
+// Automated by pulling database and generating zod schema
+// To update. Just run npm run generate:schema
+// Written by akhilmhdh.
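// Illustrative sketch, not from this PR: `enabledLoginMethods` on SuperAdminSchema above
// is a nullable string array; a guard consuming it could treat null/undefined as "all
// methods allowed" (an assumption, since the server-side policy is not shown in this diff):
import { TSuperAdmin } from "./super-admin";

function isLoginMethodEnabled(cfg: TSuperAdmin, method: string): boolean {
  return !cfg.enabledLoginMethods || cfg.enabledLoginMethods.includes(method);
}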
+
+import { z } from "zod";
+
+import { zodBuffer } from "@app/lib/zod";
+
+import { TImmutableDBKeys } from "./models";
+
+export const TotpConfigsSchema = z.object({
+  id: z.string().uuid(),
+  userId: z.string().uuid(),
+  isVerified: z.boolean().default(false),
+  encryptedRecoveryCodes: zodBuffer,
+  encryptedSecret: zodBuffer,
+  createdAt: z.date(),
+  updatedAt: z.date()
+});
+
+export type TTotpConfigs = z.infer<typeof TotpConfigsSchema>;
+export type TTotpConfigsInsert = Omit<z.input<typeof TotpConfigsSchema>, TImmutableDBKeys>;
+export type TTotpConfigsUpdate = Partial<Omit<z.input<typeof TotpConfigsSchema>, TImmutableDBKeys>>;
diff --git a/backend/src/db/schemas/user-encryption-keys.ts b/backend/src/db/schemas/user-encryption-keys.ts
index 693b73b4c4..fd9d21a9d5 100644
--- a/backend/src/db/schemas/user-encryption-keys.ts
+++ b/backend/src/db/schemas/user-encryption-keys.ts
@@ -21,7 +21,12 @@ export const UserEncryptionKeysSchema = z.object({
   tag: z.string(),
   salt: z.string(),
   verifier: z.string(),
-  userId: z.string().uuid()
+  userId: z.string().uuid(),
+  hashedPassword: z.string().nullable().optional(),
+  serverEncryptedPrivateKey: z.string().nullable().optional(),
+  serverEncryptedPrivateKeyIV: z.string().nullable().optional(),
+  serverEncryptedPrivateKeyTag: z.string().nullable().optional(),
+  serverEncryptedPrivateKeyEncoding: z.string().nullable().optional()
 });
 
 export type TUserEncryptionKeys = z.infer<typeof UserEncryptionKeysSchema>;
diff --git a/backend/src/db/schemas/users.ts b/backend/src/db/schemas/users.ts
index 5134f3ee60..1c1f579ead 100644
--- a/backend/src/db/schemas/users.ts
+++ b/backend/src/db/schemas/users.ts
@@ -26,7 +26,8 @@ export const UsersSchema = z.object({
   consecutiveFailedMfaAttempts: z.number().default(0).nullable().optional(),
   isLocked: z.boolean().default(false).nullable().optional(),
   temporaryLockDateEnd: z.date().nullable().optional(),
-  consecutiveFailedPasswordAttempts: z.number().default(0).nullable().optional()
+  consecutiveFailedPasswordAttempts: z.number().default(0).nullable().optional(),
+  selectedMfaMethod: z.string().nullable().optional()
 });
 
 export type TUsers = z.infer<typeof UsersSchema>;
diff --git a/backend/src/db/schemas/webhooks.ts b/backend/src/db/schemas/webhooks.ts
index 44aa8c5da9..a7aac29339 100644
--- a/backend/src/db/schemas/webhooks.ts
+++ b/backend/src/db/schemas/webhooks.ts
@@ -21,7 +21,11 @@ export const WebhooksSchema = z.object({
   keyEncoding: z.string().nullable().optional(),
   createdAt: z.date(),
   updatedAt: z.date(),
-  envId: z.string().uuid()
+  envId: z.string().uuid(),
+  urlCipherText: z.string().nullable().optional(),
+  urlIV: z.string().nullable().optional(),
+  urlTag: z.string().nullable().optional(),
+  type: z.string().default("general").nullable().optional()
 });
 
 export type TWebhooks = z.infer<typeof WebhooksSchema>;
diff --git a/backend/src/db/schemas/workflow-integrations.ts b/backend/src/db/schemas/workflow-integrations.ts
new file mode 100644
index 0000000000..ae1ae9a25c
--- /dev/null
+++ b/backend/src/db/schemas/workflow-integrations.ts
@@ -0,0 +1,22 @@
+// Code generated by automation script, DO NOT EDIT.
+// Automated by pulling database and generating zod schema
+// To update. Just run npm run generate:schema
+// Written by akhilmhdh.
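// Illustrative sketch, not from this PR: TotpConfigsSchema above stores the TOTP secret
// and recovery codes encrypted (zodBuffer columns, assumed here to infer to Node's
// Buffer) alongside an isVerified flag, so a plausible enrollment flow flips isVerified
// only after the first successful code check. `verifyTotpCode` is hypothetical:
import { TTotpConfigs, TTotpConfigsUpdate } from "./totp-configs";

declare function verifyTotpCode(encryptedSecret: Buffer, code: string): boolean;

function markVerified(cfg: TTotpConfigs, code: string): TTotpConfigsUpdate | null {
  return verifyTotpCode(cfg.encryptedSecret, code) ? { isVerified: true } : null;
}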
+
+import { z } from "zod";
+
+import { TImmutableDBKeys } from "./models";
+
+export const WorkflowIntegrationsSchema = z.object({
+  id: z.string().uuid(),
+  integration: z.string(),
+  slug: z.string(),
+  orgId: z.string().uuid(),
+  description: z.string().nullable().optional(),
+  createdAt: z.date(),
+  updatedAt: z.date()
+});
+
+export type TWorkflowIntegrations = z.infer<typeof WorkflowIntegrationsSchema>;
+export type TWorkflowIntegrationsInsert = Omit<z.input<typeof WorkflowIntegrationsSchema>, TImmutableDBKeys>;
+export type TWorkflowIntegrationsUpdate = Partial<Omit<z.input<typeof WorkflowIntegrationsSchema>, TImmutableDBKeys>>;
diff --git a/backend/src/db/seed-data.ts b/backend/src/db/seed-data.ts
index 5f4ea1b4fa..47ef15d907 100644
--- a/backend/src/db/seed-data.ts
+++ b/backend/src/db/seed-data.ts
@@ -33,6 +33,11 @@ export const seedData1 = {
     name: "first project",
     slug: "first-project"
   },
+  projectV3: {
+    id: "77fa7aed-9288-401e-a4c9-3a9430be62a4",
+    name: "first project v2",
+    slug: "first-project-v2"
+  },
   environment: {
     name: "Development",
     slug: "dev"
diff --git a/backend/src/db/seeds/2-org.ts b/backend/src/db/seeds/2-org.ts
index ba2f65a36f..a02224dbca 100644
--- a/backend/src/db/seeds/2-org.ts
+++ b/backend/src/db/seeds/2-org.ts
@@ -29,7 +29,8 @@ export async function seed(knex: Knex): Promise<void> {
       role: OrgMembershipRole.Admin,
       orgId: org.id,
       status: OrgMembershipStatus.Accepted,
-      userId: user.id
+      userId: user.id,
+      isActive: true
     }
   ]);
 }
diff --git a/backend/src/db/seeds/4-project-v3.ts b/backend/src/db/seeds/4-project-v3.ts
new file mode 100644
index 0000000000..60431919db
--- /dev/null
+++ b/backend/src/db/seeds/4-project-v3.ts
@@ -0,0 +1,50 @@
+import { Knex } from "knex";
+
+import { ProjectMembershipRole, ProjectVersion, TableName } from "../schemas";
+import { seedData1 } from "../seed-data";
+
+export const DEFAULT_PROJECT_ENVS = [
+  { name: "Development", slug: "dev" },
+  { name: "Staging", slug: "staging" },
+  { name: "Production", slug: "prod" }
+];
+
+export async function seed(knex: Knex): Promise<void> {
+  const [projectV2] = await knex(TableName.Project)
+    .insert({
+      name: seedData1.projectV3.name,
+      orgId: seedData1.organization.id,
+      slug: seedData1.projectV3.slug,
+      version: ProjectVersion.V3,
+      // eslint-disable-next-line
+      // @ts-ignore
+      id: seedData1.projectV3.id
+    })
+    .returning("*");
+
+  const projectMembershipV3 = await knex(TableName.ProjectMembership)
+    .insert({
+      projectId: projectV2.id,
+      userId: seedData1.id
+    })
+    .returning("*");
+  await knex(TableName.ProjectUserMembershipRole).insert({
+    role: ProjectMembershipRole.Admin,
+    projectMembershipId: projectMembershipV3[0].id
+  });
+
+  // create default environments and default folders
+  const projectV3Envs = await knex(TableName.Environment)
+    .insert(
+      DEFAULT_PROJECT_ENVS.map(({ name, slug }, index) => ({
+        name,
+        slug,
+        projectId: seedData1.projectV3.id,
+        position: index + 1
+      }))
+    )
+    .returning("*");
+  await knex(TableName.SecretFolder).insert(
+    projectV3Envs.map(({ id }) => ({ name: "root", envId: id, parentId: null }))
+  );
+}
diff --git a/backend/src/db/seeds/4-machine-identity.ts b/backend/src/db/seeds/5-machine-identity.ts
similarity index 85%
rename from backend/src/db/seeds/4-machine-identity.ts
rename to backend/src/db/seeds/5-machine-identity.ts
index 662232e02f..3798d4bf32 100644
--- a/backend/src/db/seeds/4-machine-identity.ts
+++ b/backend/src/db/seeds/5-machine-identity.ts
@@ -16,7 +16,7 @@ export async function seed(knex: Knex): Promise<void> {
       // @ts-ignore
       id: seedData1.machineIdentity.id,
       name: seedData1.machineIdentity.name,
-      authMethod: IdentityAuthMethod.Univeral
+      authMethod:
IdentityAuthMethod.UNIVERSAL_AUTH } ]); const identityUa = await knex(TableName.IdentityUniversalAuth) @@ -86,4 +86,15 @@ export async function seed(knex: Knex): Promise { role: ProjectMembershipRole.Admin, projectMembershipId: identityProjectMembership[0].id }); + const identityProjectMembershipV3 = await knex(TableName.IdentityProjectMembership) + .insert({ + identityId: seedData1.machineIdentity.id, + projectId: seedData1.projectV3.id + }) + .returning("*"); + + await knex(TableName.IdentityProjectMembershipRole).insert({ + role: ProjectMembershipRole.Admin, + projectMembershipId: identityProjectMembershipV3[0].id + }); } diff --git a/backend/src/db/utils.ts b/backend/src/db/utils.ts index 68c4005963..e06cdd3f19 100644 --- a/backend/src/db/utils.ts +++ b/backend/src/db/utils.ts @@ -2,6 +2,9 @@ import { Knex } from "knex"; import { TableName } from "./schemas"; +interface PgTriggerResult { + rows: Array<{ exists: boolean }>; +} export const createJunctionTable = (knex: Knex, tableName: TableName, table1Name: TableName, table2Name: TableName) => knex.schema.createTable(tableName, (table) => { table.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid()); @@ -28,13 +31,26 @@ DROP FUNCTION IF EXISTS on_update_timestamp() CASCADE; // we would be using this to apply updatedAt where ever we wanta // remember to set `timestamps(true,true,true)` before this on schema -export const createOnUpdateTrigger = (knex: Knex, tableName: string) => - knex.raw(` -CREATE TRIGGER "${tableName}_updatedAt" -BEFORE UPDATE ON ${tableName} -FOR EACH ROW -EXECUTE PROCEDURE on_update_timestamp(); -`); +export const createOnUpdateTrigger = async (knex: Knex, tableName: string) => { + const triggerExists = await knex.raw(` + SELECT EXISTS ( + SELECT 1 + FROM pg_trigger + WHERE tgname = '${tableName}_updatedAt' + ); + `); + + if (!triggerExists?.rows?.[0]?.exists) { + return knex.raw(` + CREATE TRIGGER "${tableName}_updatedAt" + BEFORE UPDATE ON ${tableName} + FOR EACH ROW + EXECUTE PROCEDURE on_update_timestamp(); + `); + } + + return null; +}; export const dropOnUpdateTrigger = (knex: Knex, tableName: string) => knex.raw(`DROP TRIGGER IF EXISTS "${tableName}_updatedAt" ON ${tableName}`); diff --git a/backend/src/ee/routes/est/certificate-est-router.ts b/backend/src/ee/routes/est/certificate-est-router.ts new file mode 100644 index 0000000000..7f401216e4 --- /dev/null +++ b/backend/src/ee/routes/est/certificate-est-router.ts @@ -0,0 +1,173 @@ +import bcrypt from "bcrypt"; +import { z } from "zod"; + +import { getConfig } from "@app/lib/config/env"; +import { BadRequestError, UnauthorizedError } from "@app/lib/errors"; +import { readLimit, writeLimit } from "@app/server/config/rateLimiter"; + +export const registerCertificateEstRouter = async (server: FastifyZodProvider) => { + const appCfg = getConfig(); + + // add support for CSR bodies + server.addContentTypeParser("application/pkcs10", { parseAs: "string" }, (_, body, done) => { + try { + let csrBody = body as string; + // some EST clients send CSRs in PEM format and some in base64 format + // for CSRs sent in PEM, we leave them as is + // for CSRs sent in base64, we preprocess them to remove new lines and spaces + if (!csrBody.includes("BEGIN CERTIFICATE REQUEST")) { + csrBody = csrBody.replace(/\n/g, "").replace(/ /g, ""); + } + + done(null, csrBody); + } catch (err) { + const error = err as Error; + done(error, undefined); + } + }); + + // Authenticate EST client using Passphrase + server.addHook("onRequest", async (req, res) => { + const { authorization } = 
req.headers; + const urlFragments = req.url.split("/"); + + // cacerts endpoint should not have any authentication + if (urlFragments[urlFragments.length - 1] === "cacerts") { + return; + } + + if (!authorization) { + const wwwAuthenticateHeader = "WWW-Authenticate"; + const errAuthRequired = "Authentication required"; + + await res.hijack(); + + // definitive connection timeout to clean-up open connections and prevent memory leak + res.raw.setTimeout(10 * 1000, () => { + res.raw.end(); + }); + + res.raw.setHeader(wwwAuthenticateHeader, `Basic realm="infisical"`); + res.raw.setHeader("Content-Length", 0); + res.raw.statusCode = 401; + + // Write the error message to the response without ending the connection + res.raw.write(errAuthRequired); + + // flush headers + res.raw.flushHeaders(); + return; + } + + const certificateTemplateId = urlFragments.slice(-2)[0]; + const estConfig = await server.services.certificateTemplate.getEstConfiguration({ + isInternal: true, + certificateTemplateId + }); + + if (!estConfig.isEnabled) { + throw new BadRequestError({ + message: "EST is disabled" + }); + } + + const rawCredential = authorization?.split(" ").pop(); + if (!rawCredential) { + throw new UnauthorizedError({ message: "Missing HTTP credentials" }); + } + + // expected format is user:password + const basicCredential = atob(rawCredential); + const password = basicCredential.split(":").pop(); + if (!password) { + throw new BadRequestError({ + message: "No password provided" + }); + } + + const isPasswordValid = await bcrypt.compare(password, estConfig.hashedPassphrase); + if (!isPasswordValid) { + throw new UnauthorizedError({ + message: "Invalid credentials" + }); + } + }); + + server.route({ + method: "POST", + url: "/:certificateTemplateId/simpleenroll", + config: { + rateLimit: writeLimit + }, + schema: { + body: z.string().min(1), + params: z.object({ + certificateTemplateId: z.string().min(1) + }), + response: { + 200: z.string() + } + }, + handler: async (req, res) => { + void res.header("Content-Type", "application/pkcs7-mime; smime-type=certs-only"); + void res.header("Content-Transfer-Encoding", "base64"); + + return server.services.certificateEst.simpleEnroll({ + csr: req.body, + certificateTemplateId: req.params.certificateTemplateId, + sslClientCert: req.headers[appCfg.SSL_CLIENT_CERTIFICATE_HEADER_KEY] as string + }); + } + }); + + server.route({ + method: "POST", + url: "/:certificateTemplateId/simplereenroll", + config: { + rateLimit: writeLimit + }, + schema: { + body: z.string().min(1), + params: z.object({ + certificateTemplateId: z.string().min(1) + }), + response: { + 200: z.string() + } + }, + handler: async (req, res) => { + void res.header("Content-Type", "application/pkcs7-mime; smime-type=certs-only"); + void res.header("Content-Transfer-Encoding", "base64"); + + return server.services.certificateEst.simpleReenroll({ + csr: req.body, + certificateTemplateId: req.params.certificateTemplateId, + sslClientCert: req.headers[appCfg.SSL_CLIENT_CERTIFICATE_HEADER_KEY] as string + }); + } + }); + + server.route({ + method: "GET", + url: "/:certificateTemplateId/cacerts", + config: { + rateLimit: readLimit + }, + schema: { + params: z.object({ + certificateTemplateId: z.string().min(1) + }), + response: { + 200: z.string() + } + }, + handler: async (req, res) => { + void res.header("Content-Type", "application/pkcs7-mime; smime-type=certs-only"); + void res.header("Content-Transfer-Encoding", "base64"); + + return server.services.certificateEst.getCaCerts({ + 
certificateTemplateId: req.params.certificateTemplateId + }); + } + }); +}; diff --git a/backend/src/ee/routes/v1/access-approval-policy-router.ts b/backend/src/ee/routes/v1/access-approval-policy-router.ts index 3b8949d3bb..814d19841a 100644 --- a/backend/src/ee/routes/v1/access-approval-policy-router.ts +++ b/backend/src/ee/routes/v1/access-approval-policy-router.ts @@ -1,6 +1,9 @@ import { nanoid } from "nanoid"; import { z } from "zod"; +import { ApproverType } from "@app/ee/services/access-approval-policy/access-approval-policy-types"; +import { EnforcementLevel } from "@app/lib/types"; +import { readLimit, writeLimit } from "@app/server/config/rateLimiter"; import { verifyAuth } from "@app/server/plugins/auth/verify-auth"; import { sapPubSchema } from "@app/server/routes/sanitizedSchemas"; import { AuthMode } from "@app/services/auth/auth-type"; @@ -9,27 +12,32 @@ export const registerAccessApprovalPolicyRouter = async (server: FastifyZodProvi server.route({ url: "/", method: "POST", + config: { + rateLimit: writeLimit + }, schema: { - body: z - .object({ - projectSlug: z.string().trim(), - name: z.string().optional(), - secretPath: z.string().trim().default("/"), - environment: z.string(), - approvers: z.string().array().min(1), - approvals: z.number().min(1).default(1) - }) - .refine((data) => data.approvals <= data.approvers.length, { - path: ["approvals"], - message: "The number of approvals should be lower than the number of approvers." - }), + body: z.object({ + projectSlug: z.string().trim(), + name: z.string().optional(), + secretPath: z.string().trim().default("/"), + environment: z.string(), + approvers: z + .discriminatedUnion("type", [ + z.object({ type: z.literal(ApproverType.Group), id: z.string() }), + z.object({ type: z.literal(ApproverType.User), id: z.string().optional(), name: z.string().optional() }) + ]) + .array() + .min(1, { message: "At least one approver should be provided" }), + approvals: z.number().min(1).default(1), + enforcementLevel: z.nativeEnum(EnforcementLevel).default(EnforcementLevel.Hard) + }), response: { 200: z.object({ approval: sapPubSchema }) } }, - onRequest: verifyAuth([AuthMode.JWT]), + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), handler: async (req) => { const approval = await server.services.accessApprovalPolicy.createAccessApprovalPolicy({ actor: req.permission.type, @@ -38,7 +46,8 @@ export const registerAccessApprovalPolicyRouter = async (server: FastifyZodProvi actorOrgId: req.permission.orgId, ...req.body, projectSlug: req.body.projectSlug, - name: req.body.name ?? `${req.body.environment}-${nanoid(3)}` + name: req.body.name ?? 
`${req.body.environment}-${nanoid(3)}`, + enforcementLevel: req.body.enforcementLevel }); return { approval }; } @@ -47,13 +56,26 @@ export const registerAccessApprovalPolicyRouter = async (server: FastifyZodProvi server.route({ url: "/", method: "GET", + config: { + rateLimit: readLimit + }, schema: { querystring: z.object({ projectSlug: z.string().trim() }), response: { 200: z.object({ - approvals: sapPubSchema.extend({ approvers: z.string().array(), secretPath: z.string().optional() }).array() + approvals: sapPubSchema + .extend({ + approvers: z + .object({ type: z.nativeEnum(ApproverType), id: z.string().nullable().optional() }) + .array() + .nullable() + .optional() + }) + .array() + .nullable() + .optional() }) } }, @@ -66,6 +88,7 @@ export const registerAccessApprovalPolicyRouter = async (server: FastifyZodProvi actorOrgId: req.permission.orgId, projectSlug: req.query.projectSlug }); + return { approvals }; } }); @@ -102,32 +125,37 @@ export const registerAccessApprovalPolicyRouter = async (server: FastifyZodProvi server.route({ url: "/:policyId", method: "PATCH", + config: { + rateLimit: writeLimit + }, schema: { params: z.object({ policyId: z.string() }), - body: z - .object({ - name: z.string().optional(), - secretPath: z - .string() - .trim() - .optional() - .transform((val) => (val === "" ? "/" : val)), - approvers: z.string().array().min(1), - approvals: z.number().min(1).default(1) - }) - .refine((data) => data.approvals <= data.approvers.length, { - path: ["approvals"], - message: "The number of approvals should be lower than the number of approvers." - }), + body: z.object({ + name: z.string().optional(), + secretPath: z + .string() + .trim() + .optional() + .transform((val) => (val === "" ? "/" : val)), + approvers: z + .discriminatedUnion("type", [ + z.object({ type: z.literal(ApproverType.Group), id: z.string() }), + z.object({ type: z.literal(ApproverType.User), id: z.string().optional(), name: z.string().optional() }) + ]) + .array() + .min(1, { message: "At least one approver should be provided" }), + approvals: z.number().min(1).optional(), + enforcementLevel: z.nativeEnum(EnforcementLevel).default(EnforcementLevel.Hard) + }), response: { 200: z.object({ approval: sapPubSchema }) } }, - onRequest: verifyAuth([AuthMode.JWT]), + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), handler: async (req) => { await server.services.accessApprovalPolicy.updateAccessApprovalPolicy({ policyId: req.params.policyId, @@ -143,6 +171,9 @@ export const registerAccessApprovalPolicyRouter = async (server: FastifyZodProvi server.route({ url: "/:policyId", method: "DELETE", + config: { + rateLimit: writeLimit + }, schema: { params: z.object({ policyId: z.string() @@ -153,7 +184,7 @@ export const registerAccessApprovalPolicyRouter = async (server: FastifyZodProvi }) } }, - onRequest: verifyAuth([AuthMode.JWT]), + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), handler: async (req) => { const approval = await server.services.accessApprovalPolicy.deleteAccessApprovalPolicy({ actor: req.permission.type, @@ -165,4 +196,44 @@ export const registerAccessApprovalPolicyRouter = async (server: FastifyZodProvi return { approval }; } }); + + server.route({ + url: "/:policyId", + method: "GET", + config: { + rateLimit: readLimit + }, + schema: { + params: z.object({ + policyId: z.string() + }), + response: { + 200: z.object({ + approval: sapPubSchema.extend({ + approvers: z + .object({ + type: z.nativeEnum(ApproverType), + id: 
z.string().nullable().optional(), + name: z.string().nullable().optional() + }) + .array() + .nullable() + .optional() + }) + }) + } + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + handler: async (req) => { + const approval = await server.services.accessApprovalPolicy.getAccessApprovalPolicyById({ + actor: req.permission.type, + actorId: req.permission.id, + actorAuthMethod: req.permission.authMethod, + actorOrgId: req.permission.orgId, + ...req.params + }); + + return { approval }; + } + }); }; diff --git a/backend/src/ee/routes/v1/access-approval-request-router.ts b/backend/src/ee/routes/v1/access-approval-request-router.ts index 4b173cfa76..7dbb62fc22 100644 --- a/backend/src/ee/routes/v1/access-approval-request-router.ts +++ b/backend/src/ee/routes/v1/access-approval-request-router.ts @@ -1,10 +1,19 @@ import { z } from "zod"; -import { AccessApprovalRequestsReviewersSchema, AccessApprovalRequestsSchema } from "@app/db/schemas"; +import { AccessApprovalRequestsReviewersSchema, AccessApprovalRequestsSchema, UsersSchema } from "@app/db/schemas"; import { ApprovalStatus } from "@app/ee/services/access-approval-request/access-approval-request-types"; import { verifyAuth } from "@app/server/plugins/auth/verify-auth"; import { AuthMode } from "@app/services/auth/auth-type"; +const approvalRequestUser = z.object({ userId: z.string() }).merge( + UsersSchema.pick({ + email: true, + firstName: true, + lastName: true, + username: true + }) +); + export const registerAccessApprovalRequestRouter = async (server: FastifyZodProvider) => { server.route({ url: "/", @@ -99,14 +108,16 @@ export const registerAccessApprovalRequestRouter = async (server: FastifyZodProv approvals: z.number(), approvers: z.string().array(), secretPath: z.string().nullish(), - envId: z.string() + envId: z.string(), + enforcementLevel: z.string() }), reviewers: z .object({ - member: z.string(), + userId: z.string(), status: z.string() }) - .array() + .array(), + requestedByUser: approvalRequestUser }).array() }) } diff --git a/backend/src/ee/routes/v1/certificate-authority-crl-router.ts b/backend/src/ee/routes/v1/certificate-authority-crl-router.ts new file mode 100644 index 0000000000..f617060252 --- /dev/null +++ b/backend/src/ee/routes/v1/certificate-authority-crl-router.ts @@ -0,0 +1,55 @@ +/* eslint-disable @typescript-eslint/no-floating-promises */ +import { z } from "zod"; + +import { CA_CRLS } from "@app/lib/api-docs"; +import { readLimit } from "@app/server/config/rateLimiter"; + +export const registerCaCrlRouter = async (server: FastifyZodProvider) => { + server.route({ + method: "GET", + url: "/:crlId", + config: { + rateLimit: readLimit + }, + schema: { + description: "Get CRL in DER format (deprecated)", + params: z.object({ + crlId: z.string().trim().describe(CA_CRLS.GET.crlId) + }), + response: { + 200: z.instanceof(Buffer) + } + }, + handler: async (req, res) => { + const { crl } = await server.services.certificateAuthorityCrl.getCrlById(req.params.crlId); + + res.header("Content-Type", "application/pkix-crl"); + + return Buffer.from(crl); + } + }); + + server.route({ + method: "GET", + url: "/:crlId/der", + config: { + rateLimit: readLimit + }, + schema: { + description: "Get CRL in DER format", + params: z.object({ + crlId: z.string().trim().describe(CA_CRLS.GET.crlId) + }), + response: { + 200: z.instanceof(Buffer) + } + }, + handler: async (req, res) => { + const { crl } = await server.services.certificateAuthorityCrl.getCrlById(req.params.crlId); + + 
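+      // application/pkix-crl (set below) is the registered media type for DER-encoded CRLs (RFC 2585)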
res.header("Content-Type", "application/pkix-crl"); + + return Buffer.from(crl); + } + }); +}; diff --git a/backend/src/ee/routes/v1/dynamic-secret-lease-router.ts b/backend/src/ee/routes/v1/dynamic-secret-lease-router.ts index 5ef9f7eeb8..c19af4d229 100644 --- a/backend/src/ee/routes/v1/dynamic-secret-lease-router.ts +++ b/backend/src/ee/routes/v1/dynamic-secret-lease-router.ts @@ -131,7 +131,7 @@ export const registerDynamicSecretLeaseRouter = async (server: FastifyZodProvide .default("/") .transform(removeTrailingSlash) .describe(DYNAMIC_SECRET_LEASES.RENEW.path), - environmentSlug: z.string().min(1).describe(DYNAMIC_SECRET_LEASES.RENEW.ttl) + environmentSlug: z.string().min(1).describe(DYNAMIC_SECRET_LEASES.RENEW.environmentSlug) }), response: { 200: z.object({ diff --git a/backend/src/ee/routes/v1/dynamic-secret-router.ts b/backend/src/ee/routes/v1/dynamic-secret-router.ts index 049370743e..4b1566c557 100644 --- a/backend/src/ee/routes/v1/dynamic-secret-router.ts +++ b/backend/src/ee/routes/v1/dynamic-secret-router.ts @@ -77,6 +77,39 @@ export const registerDynamicSecretRouter = async (server: FastifyZodProvider) => } }); + server.route({ + method: "POST", + url: "/entra-id/users", + config: { + rateLimit: readLimit + }, + schema: { + body: z.object({ + tenantId: z.string().min(1).describe("The tenant ID of the Azure Entra ID"), + applicationId: z.string().min(1).describe("The application ID of the Azure Entra ID App Registration"), + clientSecret: z.string().min(1).describe("The client secret of the Azure Entra ID App Registration") + }), + response: { + 200: z + .object({ + name: z.string().min(1).describe("The name of the user"), + id: z.string().min(1).describe("The ID of the user"), + email: z.string().min(1).describe("The email of the user") + }) + .array() + } + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + handler: async (req) => { + const data = await server.services.dynamicSecret.fetchAzureEntraIdUsers({ + tenantId: req.body.tenantId, + applicationId: req.body.applicationId, + clientSecret: req.body.clientSecret + }); + return data; + } + }); + server.route({ method: "PATCH", url: "/:name", @@ -237,7 +270,7 @@ export const registerDynamicSecretRouter = async (server: FastifyZodProvider) => }, onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), handler: async (req) => { - const dynamicSecretCfgs = await server.services.dynamicSecret.list({ + const dynamicSecretCfgs = await server.services.dynamicSecret.listDynamicSecretsByEnv({ actor: req.permission.type, actorId: req.permission.id, actorAuthMethod: req.permission.authMethod, diff --git a/backend/src/ee/routes/v1/external-kms-router.ts b/backend/src/ee/routes/v1/external-kms-router.ts new file mode 100644 index 0000000000..4e43d6ed91 --- /dev/null +++ b/backend/src/ee/routes/v1/external-kms-router.ts @@ -0,0 +1,289 @@ +import { z } from "zod"; + +import { ExternalKmsSchema, KmsKeysSchema } from "@app/db/schemas"; +import { EventType } from "@app/ee/services/audit-log/audit-log-types"; +import { + ExternalKmsAwsSchema, + ExternalKmsInputSchema, + ExternalKmsInputUpdateSchema +} from "@app/ee/services/external-kms/providers/model"; +import { readLimit, writeLimit } from "@app/server/config/rateLimiter"; +import { verifyAuth } from "@app/server/plugins/auth/verify-auth"; +import { AuthMode } from "@app/services/auth/auth-type"; + +const sanitizedExternalSchema = KmsKeysSchema.extend({ + external: ExternalKmsSchema.pick({ + id: true, + status: true, + statusDetails: true, + 
provider: true + }) +}); + +const sanitizedExternalSchemaForGetAll = KmsKeysSchema.pick({ + id: true, + description: true, + isDisabled: true, + createdAt: true, + updatedAt: true, + name: true +}) + .extend({ + externalKms: ExternalKmsSchema.pick({ + provider: true, + status: true, + statusDetails: true + }) + }) + .array(); + +const sanitizedExternalSchemaForGetById = KmsKeysSchema.extend({ + external: ExternalKmsSchema.pick({ + id: true, + status: true, + statusDetails: true, + provider: true + }).extend({ + providerInput: ExternalKmsAwsSchema + }) +}); + +export const registerExternalKmsRouter = async (server: FastifyZodProvider) => { + server.route({ + method: "POST", + url: "/", + config: { + rateLimit: writeLimit + }, + schema: { + body: z.object({ + name: z.string().min(1).trim().toLowerCase(), + description: z.string().trim().optional(), + provider: ExternalKmsInputSchema + }), + response: { + 200: z.object({ + externalKms: sanitizedExternalSchema + }) + } + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + handler: async (req) => { + const externalKms = await server.services.externalKms.create({ + actor: req.permission.type, + actorId: req.permission.id, + actorAuthMethod: req.permission.authMethod, + actorOrgId: req.permission.orgId, + name: req.body.name, + provider: req.body.provider, + description: req.body.description + }); + + await server.services.auditLog.createAuditLog({ + ...req.auditLogInfo, + orgId: req.permission.orgId, + event: { + type: EventType.CREATE_KMS, + metadata: { + kmsId: externalKms.id, + provider: req.body.provider.type, + name: req.body.name, + description: req.body.description + } + } + }); + + return { externalKms }; + } + }); + + server.route({ + method: "PATCH", + url: "/:id", + config: { + rateLimit: writeLimit + }, + schema: { + params: z.object({ + id: z.string().trim().min(1) + }), + body: z.object({ + name: z.string().min(1).trim().toLowerCase().optional(), + description: z.string().trim().optional(), + provider: ExternalKmsInputUpdateSchema + }), + response: { + 200: z.object({ + externalKms: sanitizedExternalSchema + }) + } + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + handler: async (req) => { + const externalKms = await server.services.externalKms.updateById({ + actor: req.permission.type, + actorId: req.permission.id, + actorAuthMethod: req.permission.authMethod, + actorOrgId: req.permission.orgId, + name: req.body.name, + provider: req.body.provider, + description: req.body.description, + id: req.params.id + }); + + await server.services.auditLog.createAuditLog({ + ...req.auditLogInfo, + orgId: req.permission.orgId, + event: { + type: EventType.UPDATE_KMS, + metadata: { + kmsId: externalKms.id, + provider: req.body.provider.type, + name: req.body.name, + description: req.body.description + } + } + }); + + return { externalKms }; + } + }); + + server.route({ + method: "DELETE", + url: "/:id", + config: { + rateLimit: writeLimit + }, + schema: { + params: z.object({ + id: z.string().trim().min(1) + }), + response: { + 200: z.object({ + externalKms: sanitizedExternalSchema + }) + } + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + handler: async (req) => { + const externalKms = await server.services.externalKms.deleteById({ + actor: req.permission.type, + actorId: req.permission.id, + actorAuthMethod: req.permission.authMethod, + actorOrgId: req.permission.orgId, + id: req.params.id + }); + + await server.services.auditLog.createAuditLog({ + 
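+        // req.auditLogInfo presumably carries request context (actor, IP, user agent) captured by upstream middleware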
...req.auditLogInfo, + orgId: req.permission.orgId, + event: { + type: EventType.DELETE_KMS, + metadata: { + kmsId: externalKms.id, + name: externalKms.name + } + } + }); + + return { externalKms }; + } + }); + + server.route({ + method: "GET", + url: "/:id", + config: { + rateLimit: readLimit + }, + schema: { + params: z.object({ + id: z.string().trim().min(1) + }), + response: { + 200: z.object({ + externalKms: sanitizedExternalSchemaForGetById + }) + } + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + handler: async (req) => { + const externalKms = await server.services.externalKms.findById({ + actor: req.permission.type, + actorId: req.permission.id, + actorAuthMethod: req.permission.authMethod, + actorOrgId: req.permission.orgId, + id: req.params.id + }); + + await server.services.auditLog.createAuditLog({ + ...req.auditLogInfo, + orgId: req.permission.orgId, + event: { + type: EventType.GET_KMS, + metadata: { + kmsId: externalKms.id, + name: externalKms.name + } + } + }); + + return { externalKms }; + } + }); + + server.route({ + method: "GET", + url: "/", + config: { + rateLimit: readLimit + }, + schema: { + response: { + 200: z.object({ + externalKmsList: sanitizedExternalSchemaForGetAll + }) + } + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + handler: async (req) => { + const externalKmsList = await server.services.externalKms.list({ + actor: req.permission.type, + actorId: req.permission.id, + actorAuthMethod: req.permission.authMethod, + actorOrgId: req.permission.orgId + }); + return { externalKmsList }; + } + }); + + server.route({ + method: "GET", + url: "/name/:name", + config: { + rateLimit: readLimit + }, + schema: { + params: z.object({ + name: z.string().trim().min(1) + }), + response: { + 200: z.object({ + externalKms: sanitizedExternalSchemaForGetById + }) + } + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + handler: async (req) => { + const externalKms = await server.services.externalKms.findByName({ + actor: req.permission.type, + actorId: req.permission.id, + actorAuthMethod: req.permission.authMethod, + actorOrgId: req.permission.orgId, + name: req.params.name + }); + return { externalKms }; + } + }); +}; diff --git a/backend/src/ee/routes/v1/group-router.ts b/backend/src/ee/routes/v1/group-router.ts index d267564f2c..780e5ec005 100644 --- a/backend/src/ee/routes/v1/group-router.ts +++ b/backend/src/ee/routes/v1/group-router.ts @@ -10,7 +10,7 @@ export const registerGroupRouter = async (server: FastifyZodProvider) => { server.route({ url: "/", method: "POST", - onRequest: verifyAuth([AuthMode.JWT]), + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), schema: { body: z.object({ name: z.string().trim().min(1).max(50).describe(GROUPS.CREATE.name), @@ -43,12 +43,59 @@ export const registerGroupRouter = async (server: FastifyZodProvider) => { }); server.route({ - url: "/:currentSlug", - method: "PATCH", - onRequest: verifyAuth([AuthMode.JWT]), + url: "/:id", + method: "GET", + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), schema: { params: z.object({ - currentSlug: z.string().trim().describe(GROUPS.UPDATE.currentSlug) + id: z.string().trim().describe(GROUPS.GET_BY_ID.id) + }), + response: { + 200: GroupsSchema + } + }, + handler: async (req) => { + const group = await server.services.group.getGroupById({ + actor: req.permission.type, + actorId: req.permission.id, + actorAuthMethod: req.permission.authMethod, + actorOrgId: 
req.permission.orgId, + id: req.params.id + }); + + return group; + } + }); + + server.route({ + url: "/", + method: "GET", + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + schema: { + response: { + 200: GroupsSchema.array() + } + }, + handler: async (req) => { + const groups = await server.services.org.getOrgGroups({ + actor: req.permission.type, + actorId: req.permission.id, + orgId: req.permission.orgId, + actorAuthMethod: req.permission.authMethod, + actorOrgId: req.permission.orgId + }); + + return groups; + } + }); + + server.route({ + url: "/:id", + method: "PATCH", + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + schema: { + params: z.object({ + id: z.string().trim().describe(GROUPS.UPDATE.id) }), body: z .object({ @@ -70,7 +117,7 @@ export const registerGroupRouter = async (server: FastifyZodProvider) => { }, handler: async (req) => { const group = await server.services.group.updateGroup({ - currentSlug: req.params.currentSlug, + id: req.params.id, actor: req.permission.type, actorId: req.permission.id, actorAuthMethod: req.permission.authMethod, @@ -83,12 +130,12 @@ export const registerGroupRouter = async (server: FastifyZodProvider) => { }); server.route({ - url: "/:slug", + url: "/:id", method: "DELETE", - onRequest: verifyAuth([AuthMode.JWT]), + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), schema: { params: z.object({ - slug: z.string().trim().describe(GROUPS.DELETE.slug) + id: z.string().trim().describe(GROUPS.DELETE.id) }), response: { 200: GroupsSchema @@ -96,7 +143,7 @@ export const registerGroupRouter = async (server: FastifyZodProvider) => { }, handler: async (req) => { const group = await server.services.group.deleteGroup({ - groupSlug: req.params.slug, + id: req.params.id, actor: req.permission.type, actorId: req.permission.id, actorAuthMethod: req.permission.authMethod, @@ -109,16 +156,17 @@ export const registerGroupRouter = async (server: FastifyZodProvider) => { server.route({ method: "GET", - url: "/:slug/users", - onRequest: verifyAuth([AuthMode.JWT]), + url: "/:id/users", + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), schema: { params: z.object({ - slug: z.string().trim().describe(GROUPS.LIST_USERS.slug) + id: z.string().trim().describe(GROUPS.LIST_USERS.id) }), querystring: z.object({ offset: z.coerce.number().min(0).max(100).default(0).describe(GROUPS.LIST_USERS.offset), limit: z.coerce.number().min(1).max(100).default(10).describe(GROUPS.LIST_USERS.limit), - username: z.string().optional().describe(GROUPS.LIST_USERS.username) + username: z.string().trim().optional().describe(GROUPS.LIST_USERS.username), + search: z.string().trim().optional().describe(GROUPS.LIST_USERS.search) }), response: { 200: z.object({ @@ -141,24 +189,25 @@ export const registerGroupRouter = async (server: FastifyZodProvider) => { }, handler: async (req) => { const { users, totalCount } = await server.services.group.listGroupUsers({ - groupSlug: req.params.slug, + id: req.params.id, actor: req.permission.type, actorId: req.permission.id, actorAuthMethod: req.permission.authMethod, actorOrgId: req.permission.orgId, ...req.query }); + return { users, totalCount }; } }); server.route({ method: "POST", - url: "/:slug/users/:username", - onRequest: verifyAuth([AuthMode.JWT]), + url: "/:id/users/:username", + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), schema: { params: z.object({ - slug: z.string().trim().describe(GROUPS.ADD_USER.slug), + id: 
z.string().trim().describe(GROUPS.ADD_USER.id), username: z.string().trim().describe(GROUPS.ADD_USER.username) }), response: { @@ -173,7 +222,7 @@ export const registerGroupRouter = async (server: FastifyZodProvider) => { }, handler: async (req) => { const user = await server.services.group.addUserToGroup({ - groupSlug: req.params.slug, + id: req.params.id, username: req.params.username, actor: req.permission.type, actorId: req.permission.id, @@ -187,11 +236,11 @@ export const registerGroupRouter = async (server: FastifyZodProvider) => { server.route({ method: "DELETE", - url: "/:slug/users/:username", - onRequest: verifyAuth([AuthMode.JWT]), + url: "/:id/users/:username", + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), schema: { params: z.object({ - slug: z.string().trim().describe(GROUPS.DELETE_USER.slug), + id: z.string().trim().describe(GROUPS.DELETE_USER.id), username: z.string().trim().describe(GROUPS.DELETE_USER.username) }), response: { @@ -206,7 +255,7 @@ export const registerGroupRouter = async (server: FastifyZodProvider) => { }, handler: async (req) => { const user = await server.services.group.removeUserFromGroup({ - groupSlug: req.params.slug, + id: req.params.id, username: req.params.username, actor: req.permission.type, actorId: req.permission.id, diff --git a/backend/src/ee/routes/v1/identity-project-additional-privilege-router.ts b/backend/src/ee/routes/v1/identity-project-additional-privilege-router.ts index 58c6793d71..d342f95ce7 100644 --- a/backend/src/ee/routes/v1/identity-project-additional-privilege-router.ts +++ b/backend/src/ee/routes/v1/identity-project-additional-privilege-router.ts @@ -1,11 +1,11 @@ -import { packRules } from "@casl/ability/extra"; import slugify from "@sindresorhus/slugify"; import ms from "ms"; import { z } from "zod"; import { IdentityProjectAdditionalPrivilegeTemporaryMode } from "@app/ee/services/identity-project-additional-privilege/identity-project-additional-privilege-types"; +import { backfillPermissionV1SchemaToV2Schema } from "@app/ee/services/permission/project-permission"; import { IDENTITY_ADDITIONAL_PRIVILEGE } from "@app/lib/api-docs"; -import { BadRequestError } from "@app/lib/errors"; +import { UnauthorizedError } from "@app/lib/errors"; import { alphaNumericNanoId } from "@app/lib/nanoid"; import { readLimit, writeLimit } from "@app/server/config/rateLimiter"; import { verifyAuth } from "@app/server/plugins/auth/verify-auth"; @@ -61,7 +61,7 @@ export const registerIdentityProjectAdditionalPrivilegeRouter = async (server: F handler: async (req) => { const { permissions, privilegePermission } = req.body; if (!permissions && !privilegePermission) { - throw new BadRequestError({ message: "Permission or privilegePermission must be provided" }); + throw new UnauthorizedError({ message: "Permission or privilegePermission must be provided" }); } const permission = privilegePermission @@ -79,7 +79,9 @@ export const registerIdentityProjectAdditionalPrivilegeRouter = async (server: F ...req.body, slug: req.body.slug ? 
slugify(req.body.slug) : slugify(alphaNumericNanoId(12)), isTemporary: false, - permissions: JSON.stringify(packRules(permission)) + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-expect-error this is valid ts + permissions: backfillPermissionV1SchemaToV2Schema(permission) }); return { privilege }; } @@ -140,7 +142,7 @@ export const registerIdentityProjectAdditionalPrivilegeRouter = async (server: F handler: async (req) => { const { permissions, privilegePermission } = req.body; if (!permissions && !privilegePermission) { - throw new BadRequestError({ message: "Permission or privilegePermission must be provided" }); + throw new UnauthorizedError({ message: "Permission or privilegePermission must be provided" }); } const permission = privilegePermission @@ -159,7 +161,9 @@ export const registerIdentityProjectAdditionalPrivilegeRouter = async (server: F ...req.body, slug: req.body.slug ? slugify(req.body.slug) : slugify(alphaNumericNanoId(12)), isTemporary: true, - permissions: JSON.stringify(packRules(permission)) + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-expect-error this is valid ts + permissions: backfillPermissionV1SchemaToV2Schema(permission) }); return { privilege }; } @@ -224,7 +228,7 @@ export const registerIdentityProjectAdditionalPrivilegeRouter = async (server: F handler: async (req) => { const { permissions, privilegePermission, ...updatedInfo } = req.body.privilegeDetails; if (!permissions && !privilegePermission) { - throw new BadRequestError({ message: "Permission or privilegePermission must be provided" }); + throw new UnauthorizedError({ message: "Permission or privilegePermission must be provided" }); } const permission = privilegePermission @@ -244,7 +248,13 @@ export const registerIdentityProjectAdditionalPrivilegeRouter = async (server: F projectSlug: req.body.projectSlug, data: { ...updatedInfo, - permissions: permission ? JSON.stringify(packRules(permission)) : undefined + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-expect-error this is valid ts + permissions: permission + ? 
// eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-expect-error this is valid ts + backfillPermissionV1SchemaToV2Schema(permission) + : undefined } }); return { privilege };
diff --git a/backend/src/ee/routes/v1/index.ts b/backend/src/ee/routes/v1/index.ts index 16e23eb887..5e3a0eafe6 100644 --- a/backend/src/ee/routes/v1/index.ts +++ b/backend/src/ee/routes/v1/index.ts @@ -1,15 +1,21 @@ +import { registerProjectTemplateRouter } from "@app/ee/routes/v1/project-template-router"; + import { registerAccessApprovalPolicyRouter } from "./access-approval-policy-router"; import { registerAccessApprovalRequestRouter } from "./access-approval-request-router"; import { registerAuditLogStreamRouter } from "./audit-log-stream-router"; +import { registerCaCrlRouter } from "./certificate-authority-crl-router"; import { registerDynamicSecretLeaseRouter } from "./dynamic-secret-lease-router"; import { registerDynamicSecretRouter } from "./dynamic-secret-router"; +import { registerExternalKmsRouter } from "./external-kms-router"; import { registerGroupRouter } from "./group-router"; import { registerIdentityProjectAdditionalPrivilegeRouter } from "./identity-project-additional-privilege-router"; import { registerLdapRouter } from "./ldap-router"; import { registerLicenseRouter } from "./license-router"; +import { registerOidcRouter } from "./oidc-router"; import { registerOrgRoleRouter } from "./org-role-router"; import { registerProjectRoleRouter } from "./project-role-router"; import { registerProjectRouter } from "./project-router"; +import { registerRateLimitRouter } from "./rate-limit-router"; import { registerSamlRouter } from "./saml-router"; import { registerScimRouter } from "./scim-router"; import { registerSecretApprovalPolicyRouter } from "./secret-approval-policy-router"; @@ -45,6 +51,7 @@ export const registerV1EERoutes = async (server: FastifyZodProvider) => { await server.register(registerAccessApprovalPolicyRouter, { prefix: "/access-approvals/policies" }); await server.register(registerAccessApprovalRequestRouter, { prefix: "/access-approvals/requests" }); + await server.register(registerRateLimitRouter, { prefix: "/rate-limit" }); await server.register( async (dynamicSecretRouter) => { @@ -54,7 +61,21 @@ export const registerV1EERoutes = async (server: FastifyZodProvider) => { { prefix: "/dynamic-secrets" } ); - await server.register(registerSamlRouter, { prefix: "/sso" }); + await server.register( + async (pkiRouter) => { + await pkiRouter.register(registerCaCrlRouter, { prefix: "/crl" }); + }, + { prefix: "/pki" } + ); + + await server.register( + async (ssoRouter) => { + await ssoRouter.register(registerSamlRouter); + await ssoRouter.register(registerOidcRouter, { prefix: "/oidc" }); + }, + { prefix: "/sso" } + ); + await server.register(registerScimRouter, { prefix: "/scim" }); await server.register(registerLdapRouter, { prefix: "/ldap" }); await server.register(registerSecretScanningRouter, { prefix: "/secret-scanning" }); @@ -62,11 +83,17 @@ export const registerV1EERoutes = async (server: FastifyZodProvider) => { await server.register(registerSecretVersionRouter, { prefix: "/secret" }); await server.register(registerGroupRouter, { prefix: "/groups" }); await server.register(registerAuditLogStreamRouter, { prefix: "/audit-log-streams" }); + await server.register(registerUserAdditionalPrivilegeRouter, { prefix: "/user-project-additional-privilege" }); await server.register( async (privilegeRouter) => { - await 
privilegeRouter.register(registerUserAdditionalPrivilegeRouter, { prefix: "/users" }); await privilegeRouter.register(registerIdentityProjectAdditionalPrivilegeRouter, { prefix: "/identity" }); }, { prefix: "/additional-privilege" } ); + + await server.register(registerExternalKmsRouter, { + prefix: "/external-kms" + }); + + await server.register(registerProjectTemplateRouter, { prefix: "/project-templates" }); };
diff --git a/backend/src/ee/routes/v1/ldap-router.ts b/backend/src/ee/routes/v1/ldap-router.ts index e146668c24..735ba632c0 100644 --- a/backend/src/ee/routes/v1/ldap-router.ts +++ b/backend/src/ee/routes/v1/ldap-router.ts @@ -53,7 +53,7 @@ export const registerLdapRouter = async (server: FastifyZodProvider) => { // eslint-disable-next-line async (req: IncomingMessage, user, cb) => { try { - if (!user.email) throw new BadRequestError({ message: "Invalid request. Missing email." }); + if (!user.mail) throw new BadRequestError({ message: "Invalid request. Missing mail attribute on user." }); const ldapConfig = (req as unknown as FastifyRequest).ldapConfig as TLDAPConfig; let groups: { dn: string; cn: string }[] | undefined; @@ -70,10 +70,13 @@ export const registerLdapRouter = async (server: FastifyZodProvider) => { groups = await searchGroups(ldapConfig, groupSearchFilter, ldapConfig.groupSearchBase); } + const externalId = ldapConfig.uniqueUserAttribute ? user[ldapConfig.uniqueUserAttribute] : user.uidNumber; + const username = ldapConfig.uniqueUserAttribute ? externalId : user.uid; + const { isUserCompleted, providerAuthToken } = await server.services.ldap.ldapLogin({ + externalId, + username, ldapConfigId: ldapConfig.id, - externalId: user.uidNumber, - username: user.uid, firstName: user.givenName ?? user.cn ?? "", lastName: user.sn ?? "", email: user.mail, @@ -138,6 +141,7 @@ export const registerLdapRouter = async (server: FastifyZodProvider) => { url: z.string(), bindDN: z.string(), bindPass: z.string(), + uniqueUserAttribute: z.string(), searchBase: z.string(), searchFilter: z.string(), groupSearchBase: z.string(), @@ -172,6 +176,7 @@ export const registerLdapRouter = async (server: FastifyZodProvider) => { url: z.string().trim(), bindDN: z.string().trim(), bindPass: z.string().trim(), + uniqueUserAttribute: z.string().trim().default("uidNumber"), searchBase: z.string().trim(), searchFilter: z.string().trim().default("(uid={{username}})"), groupSearchBase: z.string().trim(), @@ -213,6 +218,7 @@ export const registerLdapRouter = async (server: FastifyZodProvider) => { url: z.string().trim(), bindDN: z.string().trim(), bindPass: z.string().trim(), + uniqueUserAttribute: z.string().trim(), searchBase: z.string().trim(), searchFilter: z.string().trim(), groupSearchBase: z.string().trim(),
diff --git a/backend/src/ee/routes/v1/license-router.ts b/backend/src/ee/routes/v1/license-router.ts index fbf1af43ba..b19faaf707 100644 --- a/backend/src/ee/routes/v1/license-router.ts +++ b/backend/src/ee/routes/v1/license-router.ts @@ -1,6 +1,6 @@ /* eslint-disable @typescript-eslint/no-unsafe-return */ /* eslint-disable @typescript-eslint/no-unsafe-assignment */ -// TODO(akhilmhdh): Fix this when licence service gets it type +// TODO(akhilmhdh): Fix this when the license service gets its types import { z } from "zod"; import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
diff --git a/backend/src/ee/routes/v1/oidc-router.ts b/backend/src/ee/routes/v1/oidc-router.ts new file mode 100644 index 0000000000..e675121e97 --- /dev/null +++ b/backend/src/ee/routes/v1/oidc-router.ts @@ 
-0,0 +1,355 @@ +/* eslint-disable @typescript-eslint/no-explicit-any */ +/* eslint-disable @typescript-eslint/no-unsafe-return */ +/* eslint-disable @typescript-eslint/no-unsafe-member-access */ +/* eslint-disable @typescript-eslint/no-unsafe-assignment */ +/* eslint-disable @typescript-eslint/no-unsafe-call */ +/* eslint-disable @typescript-eslint/no-unsafe-argument */ +// All the any rules are disabled because passport's type support with fastify is really poor + +import { Authenticator, Strategy } from "@fastify/passport"; +import fastifySession from "@fastify/session"; +import RedisStore from "connect-redis"; +import { Redis } from "ioredis"; +import { z } from "zod"; + +import { OidcConfigsSchema } from "@app/db/schemas/oidc-configs"; +import { OIDCConfigurationType } from "@app/ee/services/oidc/oidc-config-types"; +import { getConfig } from "@app/lib/config/env"; +import { authRateLimit, readLimit, writeLimit } from "@app/server/config/rateLimiter"; +import { verifyAuth } from "@app/server/plugins/auth/verify-auth"; +import { AuthMode } from "@app/services/auth/auth-type"; + +export const registerOidcRouter = async (server: FastifyZodProvider) => { + const appCfg = getConfig(); + const redis = new Redis(appCfg.REDIS_URL); + const passport = new Authenticator({ key: "oidc", userProperty: "passportUser" }); + + /* + - OIDC protocol cannot work without sessions: https://github.com/panva/node-openid-client/issues/190 + - Current redis usage is not ideal and will eventually have to be refactored to use a better structure + - Fastify session <> Redis structure is based on the following: https://github.com/fastify/session/blob/master/examples/redis.js + */ + const redisStore = new RedisStore({ + client: redis, + prefix: "oidc-session:", + ttl: 600 // 10 minutes + }); + + await server.register(fastifySession, { + secret: appCfg.COOKIE_SECRET_SIGN_KEY, + store: redisStore, + cookie: { + secure: appCfg.HTTPS_ENABLED, + sameSite: "lax" // we want cookies to be sent to Infisical in redirects originating from the IDP server + } + }); + + await server.register(passport.initialize()); + await server.register(passport.secureSession()); + + // redirect to IDP for login + server.route({ + url: "/login", + method: "GET", + config: { + rateLimit: authRateLimit + }, + schema: { + querystring: z.object({ + orgSlug: z.string().trim(), + callbackPort: z.string().trim().optional() + }) + }, + preValidation: [ + async (req, res) => { + const { orgSlug, callbackPort } = req.query; + + // ensure fresh session state per login attempt + await req.session.regenerate(); + + req.session.set("oidcOrgSlug", orgSlug); + + if (callbackPort) { + req.session.set("callbackPort", callbackPort); + } + + const oidcStrategy = await server.services.oidc.getOrgAuthStrategy(orgSlug, callbackPort); + return ( + passport.authenticate(oidcStrategy as Strategy, { + scope: "profile email openid" + }) as any + )(req, res); + } + ], + handler: () => {} + }); + + // callback route after login from IDP + server.route({ + url: "/callback", + method: "GET", + preValidation: [ + async (req, res) => { + const oidcOrgSlug = req.session.get("oidcOrgSlug"); + const callbackPort = req.session.get("callbackPort"); + const oidcStrategy = await server.services.oidc.getOrgAuthStrategy(oidcOrgSlug, callbackPort); + + return ( + passport.authenticate(oidcStrategy as Strategy, { + failureRedirect: "/api/v1/sso/oidc/login/error", + session: false, + failureMessage: true + }) as any + )(req, res); + } + ], + handler: async (req, res) => { + await req.session.destroy(); 
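+      // the session only exists to carry OIDC state across the IDP round trip; from here the
+      // provider auth token is handed to the web client via the redirect URL below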
+ if (req.passportUser.isUserCompleted) { + return res.redirect( + `${appCfg.SITE_URL}/login/sso?token=${encodeURIComponent(req.passportUser.providerAuthToken)}` + ); + } + + // signup + return res.redirect( + `${appCfg.SITE_URL}/signup/sso?token=${encodeURIComponent(req.passportUser.providerAuthToken)}` + ); + } + }); + + server.route({ + url: "/login/error", + method: "GET", + handler: async (req, res) => { + await req.session.destroy(); + + return res.status(500).send({ + error: "Authentication error", + details: req.query + }); + } + }); + + server.route({ + url: "/config", + method: "GET", + config: { + rateLimit: readLimit + }, + onRequest: verifyAuth([AuthMode.JWT]), + schema: { + querystring: z.object({ + orgSlug: z.string().trim() + }), + response: { + 200: OidcConfigsSchema.pick({ + id: true, + issuer: true, + authorizationEndpoint: true, + jwksUri: true, + tokenEndpoint: true, + userinfoEndpoint: true, + configurationType: true, + discoveryURL: true, + isActive: true, + orgId: true, + allowedEmailDomains: true + }).extend({ + clientId: z.string(), + clientSecret: z.string() + }) + } + }, + handler: async (req) => { + const { orgSlug } = req.query; + const oidc = await server.services.oidc.getOidc({ + orgSlug, + type: "external", + actor: req.permission.type, + actorId: req.permission.id, + actorOrgId: req.permission.orgId, + actorAuthMethod: req.permission.authMethod + }); + + return oidc; + } + }); + + server.route({ + method: "PATCH", + url: "/config", + config: { + rateLimit: writeLimit + }, + onRequest: verifyAuth([AuthMode.JWT]), + schema: { + body: z + .object({ + allowedEmailDomains: z + .string() + .trim() + .optional() + .default("") + .transform((data) => { + if (data === "") return ""; + // Trim each domain and join with ', ' to ensure formatting + return data + .split(",") + .map((id) => id.trim()) + .join(", "); + }), + discoveryURL: z.string().trim(), + configurationType: z.nativeEnum(OIDCConfigurationType), + issuer: z.string().trim(), + authorizationEndpoint: z.string().trim(), + jwksUri: z.string().trim(), + tokenEndpoint: z.string().trim(), + userinfoEndpoint: z.string().trim(), + clientId: z.string().trim(), + clientSecret: z.string().trim(), + isActive: z.boolean() + }) + .partial() + .merge(z.object({ orgSlug: z.string() })), + response: { + 200: OidcConfigsSchema.pick({ + id: true, + issuer: true, + authorizationEndpoint: true, + configurationType: true, + discoveryURL: true, + jwksUri: true, + tokenEndpoint: true, + userinfoEndpoint: true, + orgId: true, + allowedEmailDomains: true, + isActive: true + }) + } + }, + handler: async (req) => { + const oidc = await server.services.oidc.updateOidcCfg({ + actor: req.permission.type, + actorId: req.permission.id, + actorAuthMethod: req.permission.authMethod, + actorOrgId: req.permission.orgId, + ...req.body + }); + return oidc; + } + }); + + server.route({ + method: "POST", + url: "/config", + config: { + rateLimit: writeLimit + }, + onRequest: verifyAuth([AuthMode.JWT]), + schema: { + body: z + .object({ + allowedEmailDomains: z + .string() + .trim() + .optional() + .default("") + .transform((data) => { + if (data === "") return ""; + // Trim each domain and join with ', ' to ensure formatting + return data + .split(",") + .map((id) => id.trim()) + .join(", "); + }), + configurationType: z.nativeEnum(OIDCConfigurationType), + issuer: z.string().trim().optional().default(""), + discoveryURL: z.string().trim().optional().default(""), + authorizationEndpoint: z.string().trim().optional().default(""), + jwksUri: 
z.string().trim().optional().default(""), + tokenEndpoint: z.string().trim().optional().default(""), + userinfoEndpoint: z.string().trim().optional().default(""), + clientId: z.string().trim(), + clientSecret: z.string().trim(), + isActive: z.boolean(), + orgSlug: z.string().trim() + }) + .superRefine((data, ctx) => { + if (data.configurationType === OIDCConfigurationType.CUSTOM) { + if (!data.issuer) { + ctx.addIssue({ + path: ["issuer"], + message: "Issuer is required", + code: z.ZodIssueCode.custom + }); + } + if (!data.authorizationEndpoint) { + ctx.addIssue({ + path: ["authorizationEndpoint"], + message: "Authorization endpoint is required", + code: z.ZodIssueCode.custom + }); + } + if (!data.jwksUri) { + ctx.addIssue({ + path: ["jwksUri"], + message: "JWKS URI is required", + code: z.ZodIssueCode.custom + }); + } + if (!data.tokenEndpoint) { + ctx.addIssue({ + path: ["tokenEndpoint"], + message: "Token endpoint is required", + code: z.ZodIssueCode.custom + }); + } + if (!data.userinfoEndpoint) { + ctx.addIssue({ + path: ["userinfoEndpoint"], + message: "Userinfo endpoint is required", + code: z.ZodIssueCode.custom + }); + } + } else { + // eslint-disable-next-line no-lonely-if + if (!data.discoveryURL) { + ctx.addIssue({ + path: ["discoveryURL"], + message: "Discovery URL is required", + code: z.ZodIssueCode.custom + }); + } + } + }), + response: { + 200: OidcConfigsSchema.pick({ + id: true, + issuer: true, + authorizationEndpoint: true, + configurationType: true, + discoveryURL: true, + jwksUri: true, + tokenEndpoint: true, + userinfoEndpoint: true, + orgId: true, + isActive: true, + allowedEmailDomains: true + }) + } + }, + + handler: async (req) => { + const oidc = await server.services.oidc.createOidcCfg({ + actor: req.permission.type, + actorId: req.permission.id, + actorAuthMethod: req.permission.authMethod, + actorOrgId: req.permission.orgId, + ...req.body + }); + return oidc; + } + }); +}; diff --git a/backend/src/ee/routes/v1/org-role-router.ts b/backend/src/ee/routes/v1/org-role-router.ts index 6691032a85..232f4b0b53 100644 --- a/backend/src/ee/routes/v1/org-role-router.ts +++ b/backend/src/ee/routes/v1/org-role-router.ts @@ -52,6 +52,36 @@ export const registerOrgRoleRouter = async (server: FastifyZodProvider) => { } }); + server.route({ + method: "GET", + url: "/:organizationId/roles/:roleId", + config: { + rateLimit: readLimit + }, + schema: { + params: z.object({ + organizationId: z.string().trim(), + roleId: z.string().trim() + }), + response: { + 200: z.object({ + role: OrgRolesSchema + }) + } + }, + onRequest: verifyAuth([AuthMode.JWT]), + handler: async (req) => { + const role = await server.services.orgRole.getRole( + req.permission.id, + req.params.organizationId, + req.params.roleId, + req.permission.authMethod, + req.permission.orgId + ); + return { role }; + } + }); + server.route({ method: "PATCH", url: "/:organizationId/roles/:roleId", @@ -69,7 +99,7 @@ export const registerOrgRoleRouter = async (server: FastifyZodProvider) => { .trim() .optional() .refine( - (val) => typeof val === "undefined" || Object.keys(OrgMembershipRole).includes(val), + (val) => typeof val !== "undefined" && !Object.keys(OrgMembershipRole).includes(val), "Please choose a different slug, the slug you have entered is reserved." 
) .refine((val) => typeof val === "undefined" || slugify(val) === val, { @@ -77,7 +107,7 @@ export const registerOrgRoleRouter = async (server: FastifyZodProvider) => { }), name: z.string().trim().optional(), description: z.string().trim().optional(), - permissions: z.any().array() + permissions: z.any().array().optional() }), response: { 200: z.object({
diff --git a/backend/src/ee/routes/v1/project-role-router.ts b/backend/src/ee/routes/v1/project-role-router.ts index 69038a0575..ba2c0aa9f5 100644 --- a/backend/src/ee/routes/v1/project-role-router.ts +++ b/backend/src/ee/routes/v1/project-role-router.ts @@ -3,11 +3,16 @@ import slugify from "@sindresorhus/slugify"; import { z } from "zod"; import { ProjectMembershipRole, ProjectMembershipsSchema, ProjectRolesSchema } from "@app/db/schemas"; +import { + backfillPermissionV1SchemaToV2Schema, + ProjectPermissionV1Schema +} from "@app/ee/services/permission/project-permission"; import { PROJECT_ROLE } from "@app/lib/api-docs"; import { readLimit, writeLimit } from "@app/server/config/rateLimiter"; import { verifyAuth } from "@app/server/plugins/auth/verify-auth"; -import { ProjectPermissionSchema, SanitizedRoleSchema } from "@app/server/routes/sanitizedSchemas"; +import { SanitizedRoleSchemaV1 } from "@app/server/routes/sanitizedSchemas"; import { AuthMode } from "@app/services/auth/auth-type"; +import { ProjectRoleServiceIdentifierType } from "@app/services/project-role/project-role-types"; export const registerProjectRoleRouter = async (server: FastifyZodProvider) => { server.route({ @@ -42,11 +47,11 @@ export const registerProjectRoleRouter = async (server: FastifyZodProvider) => { .describe(PROJECT_ROLE.CREATE.slug), name: z.string().min(1).trim().describe(PROJECT_ROLE.CREATE.name), description: z.string().trim().optional().describe(PROJECT_ROLE.CREATE.description), - permissions: ProjectPermissionSchema.array().describe(PROJECT_ROLE.CREATE.permissions) + permissions: ProjectPermissionV1Schema.array().describe(PROJECT_ROLE.CREATE.permissions) }), response: { 200: z.object({ - role: SanitizedRoleSchema + role: SanitizedRoleSchemaV1 }) } }, @@ -57,12 +62,16 @@ export const registerProjectRoleRouter = async (server: FastifyZodProvider) => { actorId: req.permission.id, actorOrgId: req.permission.orgId, actor: req.permission.type, - projectSlug: req.params.projectSlug, + filter: { + type: ProjectRoleServiceIdentifierType.SLUG, + projectSlug: req.params.projectSlug + }, data: { ...req.body, - permissions: JSON.stringify(packRules(req.body.permissions)) + permissions: JSON.stringify(packRules(backfillPermissionV1SchemaToV2Schema(req.body.permissions, true))) } }); + return { role }; } }); @@ -101,11 +110,12 @@ export const registerProjectRoleRouter = async (server: FastifyZodProvider) => { message: "Slug must be a valid slug" }), name: z.string().trim().optional().describe(PROJECT_ROLE.UPDATE.name), - permissions: ProjectPermissionSchema.array().describe(PROJECT_ROLE.UPDATE.permissions) + description: z.string().trim().optional().describe(PROJECT_ROLE.UPDATE.description), + permissions: ProjectPermissionV1Schema.array().describe(PROJECT_ROLE.UPDATE.permissions).optional() }), response: { 200: z.object({ - role: SanitizedRoleSchema + role: SanitizedRoleSchemaV1 }) } }, @@ -116,11 +126,12 @@ export const registerProjectRoleRouter = async (server: FastifyZodProvider) => { actorId: req.permission.id, actorOrgId: req.permission.orgId, actor: req.permission.type, - projectSlug: req.params.projectSlug, roleId: req.params.roleId, data: { ...req.body, - 
permissions: JSON.stringify(packRules(req.body.permissions)) + permissions: req.body.permissions + ? JSON.stringify(packRules(backfillPermissionV1SchemaToV2Schema(req.body.permissions, true))) + : undefined } }); return { role }; @@ -146,7 +157,7 @@ export const registerProjectRoleRouter = async (server: FastifyZodProvider) => { }), response: { 200: z.object({ - role: SanitizedRoleSchema + role: SanitizedRoleSchemaV1 }) } }, @@ -157,7 +168,6 @@ export const registerProjectRoleRouter = async (server: FastifyZodProvider) => { actorId: req.permission.id, actorOrgId: req.permission.orgId, actor: req.permission.type, - projectSlug: req.params.projectSlug, roleId: req.params.roleId }); return { role }; @@ -182,7 +192,7 @@ export const registerProjectRoleRouter = async (server: FastifyZodProvider) => { }), response: { 200: z.object({ - roles: ProjectRolesSchema.omit({ permissions: true }).array() + roles: ProjectRolesSchema.omit({ permissions: true, version: true }).array() }) } }, @@ -193,7 +203,10 @@ export const registerProjectRoleRouter = async (server: FastifyZodProvider) => { actorId: req.permission.id, actorOrgId: req.permission.orgId, actor: req.permission.type, - projectSlug: req.params.projectSlug + filter: { + type: ProjectRoleServiceIdentifierType.SLUG, + projectSlug: req.params.projectSlug + } }); return { roles }; } @@ -212,7 +225,7 @@ export const registerProjectRoleRouter = async (server: FastifyZodProvider) => { }), response: { 200: z.object({ - role: SanitizedRoleSchema + role: SanitizedRoleSchemaV1.omit({ version: true }) }) } }, @@ -223,9 +236,13 @@ export const registerProjectRoleRouter = async (server: FastifyZodProvider) => { actorId: req.permission.id, actorOrgId: req.permission.orgId, actor: req.permission.type, - projectSlug: req.params.projectSlug, + filter: { + type: ProjectRoleServiceIdentifierType.SLUG, + projectSlug: req.params.projectSlug + }, roleSlug: req.params.slug }); + return { role }; } });
diff --git a/backend/src/ee/routes/v1/project-router.ts b/backend/src/ee/routes/v1/project-router.ts index 9795aaf863..e3956731eb 100644 --- a/backend/src/ee/routes/v1/project-router.ts +++ b/backend/src/ee/routes/v1/project-router.ts @@ -4,9 +4,10 @@ import { AuditLogsSchema, SecretSnapshotsSchema } from "@app/db/schemas"; import { EventType, UserAgentType } from "@app/ee/services/audit-log/audit-log-types"; import { AUDIT_LOGS, PROJECTS } from "@app/lib/api-docs"; import { getLastMidnightDateISO, removeTrailingSlash } from "@app/lib/fn"; -import { readLimit } from "@app/server/config/rateLimiter"; +import { readLimit, writeLimit } from "@app/server/config/rateLimiter"; import { verifyAuth } from "@app/server/plugins/auth/verify-auth"; import { AuthMode } from "@app/services/auth/auth-type"; +import { KmsType } from "@app/services/kms/kms-types"; export const registerProjectRouter = async (server: FastifyZodProvider) => { server.route({ @@ -86,6 +87,12 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => { } }); + /* + * Daniel: This endpoint is no longer in use. + * We are keeping it for now because it has been exposed in our public API docs for a while, so by removing it we are likely to break users' workflows. + * + * Please refer to the new endpoint, GET /api/v1/organization/audit-logs, for the same (and more) functionality. 
+ */ server.route({ method: "GET", url: "/:workspaceId/audit-logs", @@ -100,7 +107,7 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => { } ], params: z.object({ - workspaceId: z.string().trim().describe(AUDIT_LOGS.EXPORT.workspaceId) + workspaceId: z.string().trim().describe(AUDIT_LOGS.EXPORT.projectId) }), querystring: z.object({ eventType: z.nativeEnum(EventType).optional().describe(AUDIT_LOGS.EXPORT.eventType), @@ -121,6 +128,12 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => { }) .merge( z.object({ + project: z + .object({ + name: z.string(), + slug: z.string() + }) + .optional(), event: z.object({ type: z.string(), metadata: z.any() @@ -137,15 +150,20 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => { }, onRequest: verifyAuth([AuthMode.JWT, AuthMode.API_KEY, AuthMode.IDENTITY_ACCESS_TOKEN]), handler: async (req) => { - const auditLogs = await server.services.auditLog.listProjectAuditLogs({ + const auditLogs = await server.services.auditLog.listAuditLogs({ actorId: req.permission.id, actorOrgId: req.permission.orgId, actorAuthMethod: req.permission.authMethod, - projectId: req.params.workspaceId, - ...req.query, - startDate: req.query.endDate || getLastMidnightDateISO(), - auditLogActor: req.query.actor, - actor: req.permission.type + actor: req.permission.type, + + filter: { + ...req.query, + projectId: req.params.workspaceId, + endDate: req.query.endDate, + startDate: req.query.startDate || getLastMidnightDateISO(), + auditLogActorId: req.query.actor, + eventType: req.query.eventType ? [req.query.eventType] : undefined + } }); return { auditLogs }; } @@ -170,4 +188,212 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => { onRequest: verifyAuth([AuthMode.JWT]), handler: async () => ({ actors: [] }) }); + + server.route({ + method: "GET", + url: "/:workspaceId/kms", + config: { + rateLimit: readLimit + }, + schema: { + params: z.object({ + workspaceId: z.string().trim() + }), + response: { + 200: z.object({ + secretManagerKmsKey: z.object({ + id: z.string(), + name: z.string(), + isExternal: z.boolean() + }) + }) + } + }, + onRequest: verifyAuth([AuthMode.JWT]), + handler: async (req) => { + const kmsKey = await server.services.project.getProjectKmsKeys({ + actor: req.permission.type, + actorId: req.permission.id, + actorAuthMethod: req.permission.authMethod, + actorOrgId: req.permission.orgId, + projectId: req.params.workspaceId + }); + + return kmsKey; + } + }); + + server.route({ + method: "PATCH", + url: "/:workspaceId/kms", + config: { + rateLimit: writeLimit + }, + schema: { + params: z.object({ + workspaceId: z.string().trim() + }), + body: z.object({ + kms: z.discriminatedUnion("type", [ + z.object({ type: z.literal(KmsType.Internal) }), + z.object({ type: z.literal(KmsType.External), kmsId: z.string() }) + ]) + }), + response: { + 200: z.object({ + secretManagerKmsKey: z.object({ + id: z.string(), + name: z.string(), + isExternal: z.boolean() + }) + }) + } + }, + onRequest: verifyAuth([AuthMode.JWT]), + handler: async (req) => { + const { secretManagerKmsKey } = await server.services.project.updateProjectKmsKey({ + actor: req.permission.type, + actorId: req.permission.id, + actorAuthMethod: req.permission.authMethod, + actorOrgId: req.permission.orgId, + projectId: req.params.workspaceId, + ...req.body + }); + + await server.services.auditLog.createAuditLog({ + ...req.auditLogInfo, + projectId: req.params.workspaceId, + event: { + type: EventType.UPDATE_PROJECT_KMS, 
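+          // the metadata records which KMS key the project switched to, so the change is traceable in the audit trail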
+ metadata: { + secretManagerKmsKey: { + id: secretManagerKmsKey.id, + name: secretManagerKmsKey.name + } + } + } + }); + + return { + secretManagerKmsKey + }; + } + }); + + server.route({ + method: "GET", + url: "/:workspaceId/kms/backup", + config: { + rateLimit: readLimit + }, + schema: { + params: z.object({ + workspaceId: z.string().trim() + }), + response: { + 200: z.object({ + secretManager: z.string() + }) + } + }, + onRequest: verifyAuth([AuthMode.JWT]), + handler: async (req) => { + const backup = await server.services.project.getProjectKmsBackup({ + actor: req.permission.type, + actorId: req.permission.id, + actorAuthMethod: req.permission.authMethod, + actorOrgId: req.permission.orgId, + projectId: req.params.workspaceId + }); + + await server.services.auditLog.createAuditLog({ + ...req.auditLogInfo, + projectId: req.params.workspaceId, + event: { + type: EventType.GET_PROJECT_KMS_BACKUP, + metadata: {} + } + }); + + return backup; + } + }); + + server.route({ + method: "POST", + url: "/:workspaceId/kms/backup", + config: { + rateLimit: writeLimit + }, + schema: { + params: z.object({ + workspaceId: z.string().trim() + }), + body: z.object({ + backup: z.string().min(1) + }), + response: { + 200: z.object({ + secretManagerKmsKey: z.object({ + id: z.string(), + name: z.string(), + isExternal: z.boolean() + }) + }) + } + }, + onRequest: verifyAuth([AuthMode.JWT]), + handler: async (req) => { + const backup = await server.services.project.loadProjectKmsBackup({ + actor: req.permission.type, + actorId: req.permission.id, + actorAuthMethod: req.permission.authMethod, + actorOrgId: req.permission.orgId, + projectId: req.params.workspaceId, + backup: req.body.backup + }); + + await server.services.auditLog.createAuditLog({ + ...req.auditLogInfo, + projectId: req.params.workspaceId, + event: { + type: EventType.LOAD_PROJECT_KMS_BACKUP, + metadata: {} + } + }); + + return backup; + } + }); + + server.route({ + method: "POST", + url: "/:workspaceId/migrate-v3", + config: { + rateLimit: writeLimit + }, + schema: { + params: z.object({ + workspaceId: z.string().trim() + }), + + response: { + 200: z.object({ + message: z.string() + }) + } + }, + onRequest: verifyAuth([AuthMode.JWT]), + handler: async (req) => { + const migration = await server.services.secret.startSecretV2Migration({ + actor: req.permission.type, + actorId: req.permission.id, + actorAuthMethod: req.permission.authMethod, + actorOrgId: req.permission.orgId, + projectId: req.params.workspaceId + }); + + return migration; + } + }); }; diff --git a/backend/src/ee/routes/v1/project-template-router.ts b/backend/src/ee/routes/v1/project-template-router.ts new file mode 100644 index 0000000000..5b115ab4ec --- /dev/null +++ b/backend/src/ee/routes/v1/project-template-router.ts @@ -0,0 +1,309 @@ +import slugify from "@sindresorhus/slugify"; +import { z } from "zod"; + +import { ProjectMembershipRole, ProjectTemplatesSchema } from "@app/db/schemas"; +import { EventType } from "@app/ee/services/audit-log/audit-log-types"; +import { ProjectPermissionV2Schema } from "@app/ee/services/permission/project-permission"; +import { ProjectTemplateDefaultEnvironments } from "@app/ee/services/project-template/project-template-constants"; +import { isInfisicalProjectTemplate } from "@app/ee/services/project-template/project-template-fns"; +import { ProjectTemplates } from "@app/lib/api-docs"; +import { readLimit, writeLimit } from "@app/server/config/rateLimiter"; +import { verifyAuth } from "@app/server/plugins/auth/verify-auth"; +import { 
UnpackedPermissionSchema } from "@app/server/routes/santizedSchemas/permission"; +import { AuthMode } from "@app/services/auth/auth-type"; + +const MAX_JSON_SIZE_LIMIT_IN_BYTES = 32_768; + +const SlugSchema = z + .string() + .trim() + .min(1) + .max(32) + .refine((val) => val.toLowerCase() === val, "Must be lowercase") + .refine((v) => slugify(v) === v, { + message: "Must be valid slug format" + }); + +const isReservedRoleSlug = (slug: string) => + Object.values(ProjectMembershipRole).includes(slug as ProjectMembershipRole); + +const isReservedRoleName = (name: string) => + ["custom", "admin", "viewer", "developer", "no access"].includes(name.toLowerCase()); + +const SanitizedProjectTemplateSchema = ProjectTemplatesSchema.extend({ + roles: z + .object({ + name: z.string().trim().min(1), + slug: SlugSchema, + permissions: UnpackedPermissionSchema.array() + }) + .array(), + environments: z + .object({ + name: z.string().trim().min(1), + slug: SlugSchema, + position: z.number().min(1) + }) + .array() +}); + +const ProjectTemplateRolesSchema = z + .object({ + name: z.string().trim().min(1), + slug: SlugSchema, + permissions: ProjectPermissionV2Schema.array() + }) + .array() + .superRefine((roles, ctx) => { + if (!roles.length) return; + + if (Buffer.byteLength(JSON.stringify(roles)) > MAX_JSON_SIZE_LIMIT_IN_BYTES) + ctx.addIssue({ code: z.ZodIssueCode.custom, message: "Size limit exceeded" }); + + if (new Set(roles.map((v) => v.slug)).size !== roles.length) + ctx.addIssue({ code: z.ZodIssueCode.custom, message: "Role slugs must be unique" }); + + if (new Set(roles.map((v) => v.name)).size !== roles.length) + ctx.addIssue({ code: z.ZodIssueCode.custom, message: "Role names must be unique" }); + + roles.forEach((role) => { + if (isReservedRoleSlug(role.slug)) + ctx.addIssue({ code: z.ZodIssueCode.custom, message: `Role slug "${role.slug}" is reserved` }); + + if (isReservedRoleName(role.name)) + ctx.addIssue({ code: z.ZodIssueCode.custom, message: `Role name "${role.name}" is reserved` }); + }); + }); + +const ProjectTemplateEnvironmentsSchema = z + .object({ + name: z.string().trim().min(1), + slug: SlugSchema, + position: z.number().min(1) + }) + .array() + .min(1) + .superRefine((environments, ctx) => { + if (Buffer.byteLength(JSON.stringify(environments)) > MAX_JSON_SIZE_LIMIT_IN_BYTES) + ctx.addIssue({ code: z.ZodIssueCode.custom, message: "Size limit exceeded" }); + + if (new Set(environments.map((v) => v.name)).size !== environments.length) + ctx.addIssue({ code: z.ZodIssueCode.custom, message: "Environment names must be unique" }); + + if (new Set(environments.map((v) => v.slug)).size !== environments.length) + ctx.addIssue({ code: z.ZodIssueCode.custom, message: "Environment slugs must be unique" }); + + if ( + environments.some((env) => env.position < 1 || env.position > environments.length) || + new Set(environments.map((env) => env.position)).size !== environments.length + ) + ctx.addIssue({ + code: z.ZodIssueCode.custom, + message: "One or more of the positions specified is invalid. Positions must be sequential starting from 1." 
+ }); + }); + +export const registerProjectTemplateRouter = async (server: FastifyZodProvider) => { + server.route({ + method: "GET", + url: "/", + config: { + rateLimit: readLimit + }, + schema: { + description: "List project templates for the current organization.", + response: { + 200: z.object({ + projectTemplates: SanitizedProjectTemplateSchema.array() + }) + } + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + handler: async (req) => { + const projectTemplates = await server.services.projectTemplate.listProjectTemplatesByOrg(req.permission); + + const auditTemplates = projectTemplates.filter((template) => !isInfisicalProjectTemplate(template.name)); + + await server.services.auditLog.createAuditLog({ + ...req.auditLogInfo, + orgId: req.permission.orgId, + event: { + type: EventType.GET_PROJECT_TEMPLATES, + metadata: { + count: auditTemplates.length, + templateIds: auditTemplates.map((template) => template.id) + } + } + }); + + return { projectTemplates }; + } + }); + + server.route({ + method: "GET", + url: "/:templateId", + config: { + rateLimit: readLimit + }, + schema: { + description: "Get a project template by ID.", + params: z.object({ + templateId: z.string().uuid() + }), + response: { + 200: z.object({ + projectTemplate: SanitizedProjectTemplateSchema + }) + } + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + handler: async (req) => { + const projectTemplate = await server.services.projectTemplate.findProjectTemplateById( + req.params.templateId, + req.permission + ); + + await server.services.auditLog.createAuditLog({ + ...req.auditLogInfo, + orgId: req.permission.orgId, + event: { + type: EventType.GET_PROJECT_TEMPLATE, + metadata: { + templateId: req.params.templateId + } + } + }); + + return { projectTemplate }; + } + }); + + server.route({ + method: "POST", + url: "/", + config: { + rateLimit: writeLimit + }, + schema: { + description: "Create a project template.", + body: z.object({ + name: SlugSchema.refine((val) => !isInfisicalProjectTemplate(val), { + message: `The requested project template name is reserved.` + }).describe(ProjectTemplates.CREATE.name), + description: z.string().max(256).trim().optional().describe(ProjectTemplates.CREATE.description), + roles: ProjectTemplateRolesSchema.default([]).describe(ProjectTemplates.CREATE.roles), + environments: ProjectTemplateEnvironmentsSchema.default(ProjectTemplateDefaultEnvironments).describe( + ProjectTemplates.CREATE.environments + ) + }), + response: { + 200: z.object({ + projectTemplate: SanitizedProjectTemplateSchema + }) + } + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + handler: async (req) => { + const projectTemplate = await server.services.projectTemplate.createProjectTemplate(req.body, req.permission); + + await server.services.auditLog.createAuditLog({ + ...req.auditLogInfo, + orgId: req.permission.orgId, + event: { + type: EventType.CREATE_PROJECT_TEMPLATE, + metadata: req.body + } + }); + + return { projectTemplate }; + } + }); + + server.route({ + method: "PATCH", + url: "/:templateId", + config: { + rateLimit: writeLimit + }, + schema: { + description: "Update a project template.", + params: z.object({ templateId: z.string().uuid().describe(ProjectTemplates.UPDATE.templateId) }), + body: z.object({ + name: SlugSchema.refine((val) => !isInfisicalProjectTemplate(val), { + message: `The requested project template name is reserved.` + }) + .optional() + .describe(ProjectTemplates.UPDATE.name), + description: 
z.string().max(256).trim().optional().describe(ProjectTemplates.UPDATE.description), + roles: ProjectTemplateRolesSchema.optional().describe(ProjectTemplates.UPDATE.roles), + environments: ProjectTemplateEnvironmentsSchema.optional().describe(ProjectTemplates.UPDATE.environments) + }), + response: { + 200: z.object({ + projectTemplate: SanitizedProjectTemplateSchema + }) + } + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + handler: async (req) => { + const projectTemplate = await server.services.projectTemplate.updateProjectTemplateById( + req.params.templateId, + req.body, + req.permission + ); + + await server.services.auditLog.createAuditLog({ + ...req.auditLogInfo, + orgId: req.permission.orgId, + event: { + type: EventType.UPDATE_PROJECT_TEMPLATE, + metadata: { + templateId: req.params.templateId, + ...req.body + } + } + }); + + return { projectTemplate }; + } + }); + + server.route({ + method: "DELETE", + url: "/:templateId", + config: { + rateLimit: writeLimit + }, + schema: { + description: "Delete a project template.", + params: z.object({ templateId: z.string().uuid().describe(ProjectTemplates.DELETE.templateId) }), + + response: { + 200: z.object({ + projectTemplate: SanitizedProjectTemplateSchema + }) + } + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + handler: async (req) => { + const projectTemplate = await server.services.projectTemplate.deleteProjectTemplateById( + req.params.templateId, + req.permission + ); + + await server.services.auditLog.createAuditLog({ + ...req.auditLogInfo, + orgId: req.permission.orgId, + event: { + type: EventType.DELETE_PROJECT_TEMPLATE, + metadata: { + templateId: req.params.templateId + } + } + }); + + return { projectTemplate }; + } + }); +}; diff --git a/backend/src/ee/routes/v1/rate-limit-router.ts b/backend/src/ee/routes/v1/rate-limit-router.ts new file mode 100644 index 0000000000..1ba65405aa --- /dev/null +++ b/backend/src/ee/routes/v1/rate-limit-router.ts @@ -0,0 +1,74 @@ +import { z } from "zod"; + +import { RateLimitSchema } from "@app/db/schemas"; +import { NotFoundError } from "@app/lib/errors"; +import { readLimit } from "@app/server/config/rateLimiter"; +import { verifySuperAdmin } from "@app/server/plugins/auth/superAdmin"; +import { verifyAuth } from "@app/server/plugins/auth/verify-auth"; +import { AuthMode } from "@app/services/auth/auth-type"; + +export const registerRateLimitRouter = async (server: FastifyZodProvider) => { + server.route({ + method: "GET", + url: "/", + config: { + rateLimit: readLimit + }, + schema: { + response: { + 200: z.object({ + rateLimit: RateLimitSchema + }) + } + }, + onRequest: (req, res, done) => { + verifyAuth([AuthMode.JWT])(req, res, () => { + verifySuperAdmin(req, res, done); + }); + }, + handler: async () => { + const rateLimit = await server.services.rateLimit.getRateLimits(); + if (!rateLimit) { + throw new NotFoundError({ + name: "Get Rate Limit Error", + message: "Rate limit configuration does not exist." 
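// Note: both rate-limit endpoints gate access by chaining hooks manually rather
// than passing an array: verifyAuth([AuthMode.JWT]) runs first and, only on
// success, invokes verifySuperAdmin, which is what finally calls done():
//   onRequest: (req, res, done) => {
//     verifyAuth([AuthMode.JWT])(req, res, () => {
//       verifySuperAdmin(req, res, done);
//     });
//   },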
+ }); + } + return { rateLimit }; + } + }); + + server.route({ + method: "PUT", + url: "/", + config: { + rateLimit: readLimit + }, + onRequest: (req, res, done) => { + verifyAuth([AuthMode.JWT])(req, res, () => { + verifySuperAdmin(req, res, done); + }); + }, + + schema: { + body: z.object({ + readRateLimit: z.number(), + writeRateLimit: z.number(), + secretsRateLimit: z.number(), + authRateLimit: z.number(), + inviteUserRateLimit: z.number(), + mfaRateLimit: z.number(), + publicEndpointLimit: z.number() + }), + response: { + 200: z.object({ + rateLimit: RateLimitSchema + }) + } + }, + handler: async (req) => { + const rateLimit = await server.services.rateLimit.updateRateLimit(req.body); + return { rateLimit }; + } + }); +}; diff --git a/backend/src/ee/routes/v1/saml-router.ts b/backend/src/ee/routes/v1/saml-router.ts index 6001b8b6ec..b195d32b35 100644 --- a/backend/src/ee/routes/v1/saml-router.ts +++ b/backend/src/ee/routes/v1/saml-router.ts @@ -61,7 +61,7 @@ export const registerSamlRouter = async (server: FastifyZodProvider) => { id: samlConfigId }; } else { - throw new BadRequestError({ message: "Missing sso identitier or org slug" }); + throw new BadRequestError({ message: "Missing sso identifier or org slug" }); } const ssoConfig = await server.services.saml.getSaml(ssoLookupDetails); @@ -100,25 +100,57 @@ export const registerSamlRouter = async (server: FastifyZodProvider) => { async (req, profile, cb) => { try { if (!profile) throw new BadRequestError({ message: "Missing profile" }); - const email = profile?.email ?? (profile?.emailAddress as string); // emailRippling is added because in Rippling the field `email` reserved - if (!email || !profile.firstName) { - throw new BadRequestError({ message: "Invalid request. Missing email or first name" }); + const email = + profile?.email ?? + // entra sends data in this format + (profile["http://schemas.xmlsoap.org/ws/2005/05/identity/claims/email"] as string) ?? + (profile?.emailAddress as string); // emailRippling is added because in Rippling the field `email` reserved\ + + const firstName = (profile.firstName ?? + // entra sends data in this format + profile["http://schemas.xmlsoap.org/ws/2005/05/identity/claims/firstName"]) as string; + + const lastName = + profile.lastName ?? profile["http://schemas.xmlsoap.org/ws/2005/05/identity/claims/lastName"]; + + if (!email || !firstName) { + logger.info( + { + err: new Error("Invalid saml request. Missing email or first name"), + profile + }, + `email: ${email} firstName: ${profile.firstName as string}` + ); + + throw new Error("Invalid saml request. Missing email or first name"); } + const userMetadata = Object.keys(profile.attributes || {}) + .map((key) => { + // for the ones like in format: http://schemas.xmlsoap.org/ws/2005/05/identity/claims/email + const formatedKey = key.startsWith("http") ? 
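// Note: URI-style SAML claim keys are shortened to their last path segment
// before being stored as user metadata, e.g.
// "http://schemas.xmlsoap.org/ws/2005/05/identity/claims/email" -> "email";
// values are capped at 1020 characters, and email/firstName/lastName are
// filtered out below because they are consumed explicitly above.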
key.split("/").at(-1) || "" : key; + return { + key: formatedKey, + value: String((profile.attributes as Record)[key]).substring(0, 1020) + }; + }) + .filter((el) => el.key && !["email", "firstName", "lastName"].includes(el.key)); + const { isUserCompleted, providerAuthToken } = await server.services.saml.samlLogin({ externalId: profile.nameID, email, - firstName: profile.firstName as string, - lastName: profile.lastName as string, + firstName, + lastName: lastName as string, relayState: (req.body as { RelayState?: string }).RelayState, authProvider: (req as unknown as FastifyRequest).ssoConfig?.authProvider as string, - orgId: (req as unknown as FastifyRequest).ssoConfig?.orgId as string + orgId: (req as unknown as FastifyRequest).ssoConfig?.orgId as string, + metadata: userMetadata }); cb(null, { isUserCompleted, providerAuthToken }); } catch (error) { logger.error(error); - cb(null, {}); + cb(error as Error); } }, () => {} diff --git a/backend/src/ee/routes/v1/scim-router.ts b/backend/src/ee/routes/v1/scim-router.ts index 8965c28f3b..cd5f2f9f38 100644 --- a/backend/src/ee/routes/v1/scim-router.ts +++ b/backend/src/ee/routes/v1/scim-router.ts @@ -5,19 +5,47 @@ import { readLimit, writeLimit } from "@app/server/config/rateLimiter"; import { verifyAuth } from "@app/server/plugins/auth/verify-auth"; import { AuthMode } from "@app/services/auth/auth-type"; +const ScimUserSchema = z.object({ + schemas: z.array(z.string()), + id: z.string().trim(), + userName: z.string().trim(), + name: z + .object({ + familyName: z.string().trim().optional(), + givenName: z.string().trim().optional() + }) + .optional(), + emails: z + .array( + z.object({ + primary: z.boolean(), + value: z.string().email(), + type: z.string().trim().default("work") + }) + ) + .optional(), + displayName: z.string().trim(), + active: z.boolean() +}); + +const ScimGroupSchema = z.object({ + schemas: z.array(z.string()), + id: z.string().trim(), + displayName: z.string().trim(), + members: z + .array( + z.object({ + value: z.string(), + display: z.string().optional() + }) + ) + .optional(), + meta: z.object({ + resourceType: z.string().trim() + }) +}); + export const registerScimRouter = async (server: FastifyZodProvider) => { - server.addContentTypeParser("application/scim+json", { parseAs: "string" }, (_, body, done) => { - try { - const strBody = body instanceof Buffer ? 
body.toString() : body; - - const json: unknown = JSON.parse(strBody); - done(null, json); - } catch (err) { - const error = err as Error; - done(error, undefined); - } - }); - server.route({ url: "/scim-tokens", method: "POST", @@ -124,25 +152,7 @@ export const registerScimRouter = async (server: FastifyZodProvider) => { }), response: { 200: z.object({ - Resources: z.array( - z.object({ - id: z.string().trim(), - userName: z.string().trim(), - name: z.object({ - familyName: z.string().trim(), - givenName: z.string().trim() - }), - emails: z.array( - z.object({ - primary: z.boolean(), - value: z.string(), - type: z.string().trim() - }) - ), - displayName: z.string().trim(), - active: z.boolean() - }) - ), + Resources: z.array(ScimUserSchema), itemsPerPage: z.number(), schemas: z.array(z.string()), startIndex: z.number(), @@ -170,24 +180,7 @@ export const registerScimRouter = async (server: FastifyZodProvider) => { orgMembershipId: z.string().trim() }), response: { - 201: z.object({ - schemas: z.array(z.string()), - id: z.string().trim(), - userName: z.string().trim(), - name: z.object({ - familyName: z.string().trim(), - givenName: z.string().trim() - }), - emails: z.array( - z.object({ - primary: z.boolean(), - value: z.string(), - type: z.string().trim() - }) - ), - displayName: z.string().trim(), - active: z.boolean() - }) + 200: ScimUserSchema } }, onRequest: verifyAuth([AuthMode.SCIM_TOKEN]), @@ -207,41 +200,24 @@ export const registerScimRouter = async (server: FastifyZodProvider) => { body: z.object({ schemas: z.array(z.string()), userName: z.string().trim(), - name: z.object({ - familyName: z.string().trim(), - givenName: z.string().trim() - }), + name: z + .object({ + familyName: z.string().trim().optional(), + givenName: z.string().trim().optional() + }) + .optional(), emails: z .array( z.object({ primary: z.boolean(), - value: z.string().email(), - type: z.string().trim() + value: z.string().email() }) ) .optional(), - // displayName: z.string().trim(), - active: z.boolean() + active: z.boolean().default(true) }), response: { - 200: z.object({ - schemas: z.array(z.string()), - id: z.string().trim(), - userName: z.string().trim(), - name: z.object({ - familyName: z.string().trim(), - givenName: z.string().trim() - }), - emails: z.array( - z.object({ - primary: z.boolean(), - value: z.string().email(), - type: z.string().trim() - }) - ), - displayName: z.string().trim(), - active: z.boolean() - }) + 200: ScimUserSchema } }, onRequest: verifyAuth([AuthMode.SCIM_TOKEN]), @@ -251,8 +227,8 @@ export const registerScimRouter = async (server: FastifyZodProvider) => { const user = await req.server.services.scim.createScimUser({ externalId: req.body.userName, email: primaryEmail, - firstName: req.body.name.givenName, - lastName: req.body.name.familyName, + firstName: req.body?.name?.givenName, + lastName: req.body?.name?.familyName, orgId: req.permission.orgId }); @@ -282,6 +258,115 @@ export const registerScimRouter = async (server: FastifyZodProvider) => { } }); + server.route({ + url: "/Users/:orgMembershipId", + method: "PUT", + schema: { + params: z.object({ + orgMembershipId: z.string().trim() + }), + body: z.object({ + schemas: z.array(z.string()), + id: z.string().trim(), + userName: z.string().trim(), + name: z + .object({ + familyName: z.string().trim().optional(), + givenName: z.string().trim().optional() + }) + .optional(), + displayName: z.string().trim(), + emails: z + .array( + z.object({ + primary: z.boolean(), + value: z.string().email() + }) + ) + .optional(), + active: 
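// Note: PUT /Users/:orgMembershipId is a full-replace operation in SCIM terms.
// A hypothetical IdP payload this body schema would accept (illustrative only):
//   {
//     "schemas": ["urn:ietf:params:scim:schemas:core:2.0:User"],
//     "id": "<scim-id>", "userName": "jane@acme.com", "displayName": "Jane Doe",
//     "name": { "givenName": "Jane", "familyName": "Doe" },
//     "emails": [{ "primary": true, "value": "jane@acme.com" }],
//     "active": true
//   }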
z.boolean() + }), + response: { + 200: z.object({ + schemas: z.array(z.string()), + id: z.string().trim(), + userName: z.string().trim(), + name: z.object({ + familyName: z.string().trim(), + givenName: z.string().trim() + }), + emails: z.array( + z.object({ + primary: z.boolean(), + value: z.string().email(), + type: z.string().trim().default("work") + }) + ), + displayName: z.string().trim(), + active: z.boolean() + }) + } + }, + onRequest: verifyAuth([AuthMode.SCIM_TOKEN]), + handler: async (req) => { + const primaryEmail = req.body.emails?.find((email) => email.primary)?.value; + const user = await req.server.services.scim.replaceScimUser({ + orgMembershipId: req.params.orgMembershipId, + orgId: req.permission.orgId, + firstName: req.body?.name?.givenName, + lastName: req.body?.name?.familyName, + active: req.body?.active, + email: primaryEmail, + externalId: req.body.userName + }); + return user; + } + }); + + server.route({ + url: "/Users/:orgMembershipId", + method: "PATCH", + schema: { + params: z.object({ + orgMembershipId: z.string().trim() + }), + body: z.object({ + schemas: z.array(z.string()), + Operations: z.array( + z.union([ + z.object({ + op: z.union([z.literal("remove"), z.literal("Remove")]), + path: z.string().trim(), + value: z + .object({ + value: z.string() + }) + .array() + .optional() + }), + z.object({ + op: z.union([z.literal("add"), z.literal("Add"), z.literal("replace"), z.literal("Replace")]), + path: z.string().trim().optional(), + value: z.any().optional() + }) + ]) + ) + }), + response: { + 200: ScimUserSchema + } + }, + onRequest: verifyAuth([AuthMode.SCIM_TOKEN]), + handler: async (req) => { + const user = await req.server.services.scim.updateScimUser({ + orgMembershipId: req.params.orgMembershipId, + orgId: req.permission.orgId, + operations: req.body.Operations + }); + + return user; + } + }); server.route({ url: "/Groups", method: "POST", @@ -296,25 +381,10 @@ export const registerScimRouter = async (server: FastifyZodProvider) => { display: z.string() }) ) - .optional() // okta-specific + .optional() }), response: { - 200: z.object({ - schemas: z.array(z.string()), - id: z.string().trim(), - displayName: z.string().trim(), - members: z - .array( - z.object({ - value: z.string(), - display: z.string() - }) - ) - .optional(), - meta: z.object({ - resourceType: z.string().trim() - }) - }) + 200: ScimGroupSchema } }, onRequest: verifyAuth([AuthMode.SCIM_TOKEN]), @@ -335,21 +405,12 @@ export const registerScimRouter = async (server: FastifyZodProvider) => { querystring: z.object({ startIndex: z.coerce.number().default(1), count: z.coerce.number().default(20), - filter: z.string().trim().optional() + filter: z.string().trim().optional(), + excludedAttributes: z.string().trim().optional() }), response: { 200: z.object({ - Resources: z.array( - z.object({ - schemas: z.array(z.string()), - id: z.string().trim(), - displayName: z.string().trim(), - members: z.array(z.any()).length(0), - meta: z.object({ - resourceType: z.string().trim() - }) - }) - ), + Resources: z.array(ScimGroupSchema), itemsPerPage: z.number(), schemas: z.array(z.string()), startIndex: z.number(), @@ -362,7 +423,9 @@ export const registerScimRouter = async (server: FastifyZodProvider) => { const groups = await req.server.services.scim.listScimGroups({ orgId: req.permission.orgId, startIndex: req.query.startIndex, - limit: req.query.count + filter: req.query.filter, + limit: req.query.count, + isMembersExcluded: req.query.excludedAttributes === "members" }); return groups; @@ -377,20 +440,7 
@@ export const registerScimRouter = async (server: FastifyZodProvider) => { groupId: z.string().trim() }), response: { - 200: z.object({ - schemas: z.array(z.string()), - id: z.string().trim(), - displayName: z.string().trim(), - members: z.array( - z.object({ - value: z.string(), - display: z.string() - }) - ), - meta: z.object({ - resourceType: z.string().trim() - }) - }) + 200: ScimGroupSchema } }, onRequest: verifyAuth([AuthMode.SCIM_TOKEN]), @@ -399,6 +449,7 @@ export const registerScimRouter = async (server: FastifyZodProvider) => { groupId: req.params.groupId, orgId: req.permission.orgId }); + return group; } }); @@ -416,31 +467,18 @@ export const registerScimRouter = async (server: FastifyZodProvider) => { displayName: z.string().trim(), members: z.array( z.object({ - value: z.string(), // infisical orgMembershipId + value: z.string(), display: z.string() }) ) }), response: { - 200: z.object({ - schemas: z.array(z.string()), - id: z.string().trim(), - displayName: z.string().trim(), - members: z.array( - z.object({ - value: z.string(), - display: z.string() - }) - ), - meta: z.object({ - resourceType: z.string().trim() - }) - }) + 200: ScimGroupSchema } }, onRequest: verifyAuth([AuthMode.SCIM_TOKEN]), handler: async (req) => { - const group = await req.server.services.scim.updateScimGroupNamePut({ + const group = await req.server.services.scim.replaceScimGroup({ groupId: req.params.groupId, orgId: req.permission.orgId, ...req.body @@ -462,51 +500,34 @@ export const registerScimRouter = async (server: FastifyZodProvider) => { Operations: z.array( z.union([ z.object({ - op: z.literal("replace"), - value: z.object({ - id: z.string().trim(), - displayName: z.string().trim() - }) + op: z.union([z.literal("remove"), z.literal("Remove")]), + path: z.string().trim(), + value: z + .object({ + value: z.string() + }) + .array() + .optional() }), z.object({ - op: z.literal("remove"), - path: z.string().trim() - }), - z.object({ - op: z.literal("add"), - value: z.object({ - value: z.string().trim(), - display: z.string().trim().optional() - }) + op: z.union([z.literal("add"), z.literal("Add"), z.literal("replace"), z.literal("Replace")]), + path: z.string().trim().optional(), + value: z.any() }) ]) ) }), response: { - 200: z.object({ - schemas: z.array(z.string()), - id: z.string().trim(), - displayName: z.string().trim(), - members: z.array( - z.object({ - value: z.string(), - display: z.string() - }) - ), - meta: z.object({ - resourceType: z.string().trim() - }) - }) + 200: ScimGroupSchema } }, onRequest: verifyAuth([AuthMode.SCIM_TOKEN]), handler: async (req) => { - const group = await req.server.services.scim.updateScimGroupNamePatch({ + const group = await req.server.services.scim.updateScimGroup({ groupId: req.params.groupId, orgId: req.permission.orgId, operations: req.body.Operations }); - return group; } }); @@ -532,54 +553,4 @@ export const registerScimRouter = async (server: FastifyZodProvider) => { return group; } }); - - server.route({ - url: "/Users/:orgMembershipId", - method: "PUT", - schema: { - params: z.object({ - orgMembershipId: z.string().trim() - }), - body: z.object({ - schemas: z.array(z.string()), - id: z.string().trim(), - userName: z.string().trim(), - name: z.object({ - familyName: z.string().trim(), - givenName: z.string().trim() - }), - displayName: z.string().trim(), - active: z.boolean() - }), - response: { - 200: z.object({ - schemas: z.array(z.string()), - id: z.string().trim(), - userName: z.string().trim(), - name: z.object({ - familyName: 
z.string().trim(), - givenName: z.string().trim() - }), - emails: z.array( - z.object({ - primary: z.boolean(), - value: z.string().email(), - type: z.string().trim() - }) - ), - displayName: z.string().trim(), - active: z.boolean() - }) - } - }, - onRequest: verifyAuth([AuthMode.SCIM_TOKEN]), - handler: async (req) => { - const user = await req.server.services.scim.replaceScimUser({ - orgMembershipId: req.params.orgMembershipId, - orgId: req.permission.orgId, - active: req.body.active - }); - return user; - } - }); }; diff --git a/backend/src/ee/routes/v1/secret-approval-policy-router.ts b/backend/src/ee/routes/v1/secret-approval-policy-router.ts index f6a9556258..40f0a71bda 100644 --- a/backend/src/ee/routes/v1/secret-approval-policy-router.ts +++ b/backend/src/ee/routes/v1/secret-approval-policy-router.ts @@ -1,6 +1,9 @@ import { nanoid } from "nanoid"; import { z } from "zod"; +import { ApproverType } from "@app/ee/services/access-approval-policy/access-approval-policy-types"; +import { removeTrailingSlash } from "@app/lib/fn"; +import { EnforcementLevel } from "@app/lib/types"; import { readLimit, writeLimit } from "@app/server/config/rateLimiter"; import { verifyAuth } from "@app/server/plugins/auth/verify-auth"; import { sapPubSchema } from "@app/server/routes/sanitizedSchemas"; @@ -14,26 +17,33 @@ export const registerSecretApprovalPolicyRouter = async (server: FastifyZodProvi rateLimit: writeLimit }, schema: { - body: z - .object({ - workspaceId: z.string(), - name: z.string().optional(), - environment: z.string(), - secretPath: z.string().optional().nullable(), - approvers: z.string().array().min(1), - approvals: z.number().min(1).default(1) - }) - .refine((data) => data.approvals <= data.approvers.length, { - path: ["approvals"], - message: "The number of approvals should be lower than the number of approvers." - }), + body: z.object({ + workspaceId: z.string(), + name: z.string().optional(), + environment: z.string(), + secretPath: z + .string() + .optional() + .nullable() + .default("/") + .transform((val) => (val ? removeTrailingSlash(val) : val)), + approvers: z + .discriminatedUnion("type", [ + z.object({ type: z.literal(ApproverType.Group), id: z.string() }), + z.object({ type: z.literal(ApproverType.User), id: z.string().optional(), name: z.string().optional() }) + ]) + .array() + .min(1, { message: "At least one approver should be provided" }), + approvals: z.number().min(1).default(1), + enforcementLevel: z.nativeEnum(EnforcementLevel).default(EnforcementLevel.Hard) + }), response: { 200: z.object({ approval: sapPubSchema }) } }, - onRequest: verifyAuth([AuthMode.JWT]), + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), handler: async (req) => { const approval = await server.services.secretApprovalPolicy.createSecretApprovalPolicy({ actor: req.permission.type, @@ -42,7 +52,8 @@ export const registerSecretApprovalPolicyRouter = async (server: FastifyZodProvi actorOrgId: req.permission.orgId, projectId: req.body.workspaceId, ...req.body, - name: req.body.name ?? `${req.body.environment}-${nanoid(3)}` + name: req.body.name ?? 
`${req.body.environment}-${nanoid(3)}`, + enforcementLevel: req.body.enforcementLevel }); return { approval }; } @@ -58,24 +69,31 @@ export const registerSecretApprovalPolicyRouter = async (server: FastifyZodProvi params: z.object({ sapId: z.string() }), - body: z - .object({ - name: z.string().optional(), - approvers: z.string().array().min(1), - approvals: z.number().min(1).default(1), - secretPath: z.string().optional().nullable() - }) - .refine((data) => data.approvals <= data.approvers.length, { - path: ["approvals"], - message: "The number of approvals should be lower than the number of approvers." - }), + body: z.object({ + name: z.string().optional(), + approvers: z + .discriminatedUnion("type", [ + z.object({ type: z.literal(ApproverType.Group), id: z.string() }), + z.object({ type: z.literal(ApproverType.User), id: z.string().optional(), name: z.string().optional() }) + ]) + .array() + .min(1, { message: "At least one approver should be provided" }), + approvals: z.number().min(1).default(1), + secretPath: z + .string() + .optional() + .nullable() + .transform((val) => (val ? removeTrailingSlash(val) : val)) + .transform((val) => (val === "" ? "/" : val)), + enforcementLevel: z.nativeEnum(EnforcementLevel).optional() + }), response: { 200: z.object({ approval: sapPubSchema }) } }, - onRequest: verifyAuth([AuthMode.JWT]), + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), handler: async (req) => { const approval = await server.services.secretApprovalPolicy.updateSecretApprovalPolicy({ actor: req.permission.type, @@ -105,7 +123,7 @@ export const registerSecretApprovalPolicyRouter = async (server: FastifyZodProvi }) } }, - onRequest: verifyAuth([AuthMode.JWT]), + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), handler: async (req) => { const approval = await server.services.secretApprovalPolicy.deleteSecretApprovalPolicy({ actor: req.permission.type, @@ -130,7 +148,16 @@ export const registerSecretApprovalPolicyRouter = async (server: FastifyZodProvi }), response: { 200: z.object({ - approvals: sapPubSchema.merge(z.object({ approvers: z.string().array() })).array() + approvals: sapPubSchema + .extend({ + approvers: z + .object({ + id: z.string().nullable().optional(), + type: z.nativeEnum(ApproverType) + }) + .array() + }) + .array() }) } }, @@ -147,6 +174,44 @@ export const registerSecretApprovalPolicyRouter = async (server: FastifyZodProvi } }); + server.route({ + url: "/:sapId", + method: "GET", + config: { + rateLimit: readLimit + }, + schema: { + params: z.object({ + sapId: z.string() + }), + response: { + 200: z.object({ + approval: sapPubSchema.extend({ + approvers: z + .object({ + id: z.string().nullable().optional(), + type: z.nativeEnum(ApproverType), + name: z.string().nullable().optional() + }) + .array() + }) + }) + } + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + handler: async (req) => { + const approval = await server.services.secretApprovalPolicy.getSecretApprovalPolicyById({ + actor: req.permission.type, + actorId: req.permission.id, + actorAuthMethod: req.permission.authMethod, + actorOrgId: req.permission.orgId, + ...req.params + }); + + return { approval }; + } + }); + server.route({ url: "/board", method: "GET", @@ -157,11 +222,15 @@ export const registerSecretApprovalPolicyRouter = async (server: FastifyZodProvi querystring: z.object({ workspaceId: z.string().trim(), environment: z.string().trim(), - secretPath: z.string().trim() + secretPath: 
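// Note: the transform below strips trailing slashes from the queried path so it
// compares like-for-like with stored policy paths, which the create/update
// bodies above normalize the same way (e.g. "/deep/path/" -> "/deep/path",
// with "" mapped to "/").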
z.string().trim().transform(removeTrailingSlash) }), response: { 200: z.object({ - policy: sapPubSchema.merge(z.object({ approvers: z.string().array() })).optional() + policy: sapPubSchema + .extend({ + userApprovers: z.object({ userId: z.string().nullable().optional() }).array() + }) + .optional() }) } }, diff --git a/backend/src/ee/routes/v1/secret-approval-request-router.ts b/backend/src/ee/routes/v1/secret-approval-request-router.ts index b7204f72e2..5fbf784f61 100644 --- a/backend/src/ee/routes/v1/secret-approval-request-router.ts +++ b/backend/src/ee/routes/v1/secret-approval-request-router.ts @@ -3,17 +3,25 @@ import { z } from "zod"; import { SecretApprovalRequestsReviewersSchema, SecretApprovalRequestsSchema, - SecretApprovalRequestsSecretsSchema, - SecretsSchema, SecretTagsSchema, - SecretVersionsSchema + UsersSchema } from "@app/db/schemas"; import { EventType } from "@app/ee/services/audit-log/audit-log-types"; import { ApprovalStatus, RequestState } from "@app/ee/services/secret-approval-request/secret-approval-request-types"; import { readLimit, writeLimit } from "@app/server/config/rateLimiter"; import { verifyAuth } from "@app/server/plugins/auth/verify-auth"; +import { secretRawSchema } from "@app/server/routes/sanitizedSchemas"; import { AuthMode } from "@app/services/auth/auth-type"; +const approvalRequestUser = z.object({ userId: z.string().nullable().optional() }).merge( + UsersSchema.pick({ + email: true, + firstName: true, + lastName: true, + username: true + }) +); + export const registerSecretApprovalRequestRouter = async (server: FastifyZodProvider) => { server.route({ method: "GET", @@ -38,13 +46,23 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv id: z.string(), name: z.string(), approvals: z.number(), - approvers: z.string().array(), - secretPath: z.string().optional().nullable() + approvers: z + .object({ + userId: z.string().nullable().optional() + }) + .array(), + secretPath: z.string().optional().nullable(), + enforcementLevel: z.string() }), + committerUser: approvalRequestUser, commits: z.object({ op: z.string(), secretId: z.string().nullable().optional() }).array(), environment: z.string(), - reviewers: z.object({ member: z.string(), status: z.string() }).array(), - approvers: z.string().array() + reviewers: z.object({ userId: z.string(), status: z.string() }).array(), + approvers: z + .object({ + userId: z.string().nullable().optional() + }) + .array() }).array() }) } @@ -105,6 +123,9 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv params: z.object({ id: z.string() }), + body: z.object({ + bypassReason: z.string().optional() + }), response: { 200: z.object({ approval: SecretApprovalRequestsSchema @@ -118,7 +139,8 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv actor: req.permission.type, actorAuthMethod: req.permission.authMethod, actorOrgId: req.permission.orgId, - approvalId: req.params.id + approvalId: req.params.id, + bypassReason: req.body.bypassReason }); return { approval }; } @@ -195,7 +217,7 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv type: isClosing ? EventType.SECRET_APPROVAL_CLOSED : EventType.SECRET_APPROVAL_REOPENED, // eslint-disable-next-line metadata: { - [isClosing ? ("closedBy" as const) : ("reopenedBy" as const)]: approval.statusChangeBy as string, + [isClosing ? 
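// Note: the computed property below selects the audit metadata key from the new
// state, i.e. { closedBy: <userId> } when closing and { reopenedBy: <userId> }
// when reopening, now read from statusChangedByUserId rather than the removed
// statusChangeBy field.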
("closedBy" as const) : ("reopenedBy" as const)]: approval.statusChangedByUserId as string, secretApprovalRequestId: approval.id, secretApprovalRequestSlug: approval.slug // eslint-disable-next-line @@ -216,6 +238,7 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv }) .array() .optional(); + server.route({ method: "GET", url: "/:id", @@ -235,53 +258,41 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv id: z.string(), name: z.string(), approvals: z.number(), - approvers: z.string().array(), - secretPath: z.string().optional().nullable() + approvers: approvalRequestUser.array(), + secretPath: z.string().optional().nullable(), + enforcementLevel: z.string() }), environment: z.string(), - reviewers: z.object({ member: z.string(), status: z.string() }).array(), - approvers: z.string().array(), + statusChangedByUser: approvalRequestUser.optional(), + committerUser: approvalRequestUser, + reviewers: approvalRequestUser.extend({ status: z.string() }).array(), secretPath: z.string(), - commits: SecretApprovalRequestsSecretsSchema.omit({ secretBlindIndex: true }) - .merge( - z.object({ - tags: tagSchema, - secret: SecretsSchema.pick({ - id: true, - version: true, - secretKeyIV: true, - secretKeyTag: true, - secretKeyCiphertext: true, - secretValueIV: true, - secretValueTag: true, - secretValueCiphertext: true, - secretCommentIV: true, - secretCommentTag: true, - secretCommentCiphertext: true + commits: secretRawSchema + .omit({ _id: true, environment: true, workspace: true, type: true, version: true }) + .extend({ + op: z.string(), + tags: tagSchema, + secret: z + .object({ + id: z.string(), + version: z.number(), + secretKey: z.string(), + secretValue: z.string().optional(), + secretComment: z.string().optional() }) - .optional() - .nullable(), - secretVersion: SecretVersionsSchema.pick({ - id: true, - version: true, - secretKeyIV: true, - secretKeyTag: true, - secretKeyCiphertext: true, - secretValueIV: true, - secretValueTag: true, - secretValueCiphertext: true, - secretCommentIV: true, - secretCommentTag: true, - secretCommentCiphertext: true + .optional() + .nullable(), + secretVersion: z + .object({ + id: z.string(), + version: z.number(), + secretKey: z.string(), + secretValue: z.string().optional(), + secretComment: z.string().optional(), + tags: tagSchema }) - .merge( - z.object({ - tags: tagSchema - }) - ) - .optional() - }) - ) + .optional() + }) .array() }) ) diff --git a/backend/src/ee/routes/v1/secret-rotation-router.ts b/backend/src/ee/routes/v1/secret-rotation-router.ts index d951eb7446..936459fa16 100644 --- a/backend/src/ee/routes/v1/secret-rotation-router.ts +++ b/backend/src/ee/routes/v1/secret-rotation-router.ts @@ -1,6 +1,6 @@ import { z } from "zod"; -import { SecretRotationOutputsSchema, SecretRotationsSchema, SecretsSchema } from "@app/db/schemas"; +import { SecretRotationOutputsSchema, SecretRotationsSchema } from "@app/db/schemas"; import { removeTrailingSlash } from "@app/lib/fn"; import { readLimit, writeLimit } from "@app/server/config/rateLimiter"; import { verifyAuth } from "@app/server/plugins/auth/verify-auth"; @@ -112,18 +112,10 @@ export const registerSecretRotationRouter = async (server: FastifyZodProvider) = outputs: z .object({ key: z.string(), - secret: SecretsSchema.pick({ - id: true, - version: true, - secretKeyIV: true, - secretKeyTag: true, - secretKeyCiphertext: true, - secretValueIV: true, - secretValueTag: true, - secretValueCiphertext: true, - secretCommentIV: true, - secretCommentTag: 
true, - secretCommentCiphertext: true + secret: z.object({ + secretKey: z.string(), + id: z.string(), + version: z.number() }) }) .array() diff --git a/backend/src/ee/routes/v1/secret-scanning-router.ts b/backend/src/ee/routes/v1/secret-scanning-router.ts index 2604d72324..89784600a6 100644 --- a/backend/src/ee/routes/v1/secret-scanning-router.ts +++ b/backend/src/ee/routes/v1/secret-scanning-router.ts @@ -2,6 +2,8 @@ import { z } from "zod"; import { GitAppOrgSchema, SecretScanningGitRisksSchema } from "@app/db/schemas"; import { SecretScanningRiskStatus } from "@app/ee/services/secret-scanning/secret-scanning-types"; +import { getConfig } from "@app/lib/config/env"; +import { BadRequestError } from "@app/lib/errors"; import { readLimit, writeLimit } from "@app/server/config/rateLimiter"; import { verifyAuth } from "@app/server/plugins/auth/verify-auth"; import { AuthMode } from "@app/services/auth/auth-type"; @@ -23,6 +25,13 @@ export const registerSecretScanningRouter = async (server: FastifyZodProvider) = }, onRequest: verifyAuth([AuthMode.JWT]), handler: async (req) => { + const appCfg = getConfig(); + if (!appCfg.SECRET_SCANNING_ORG_WHITELIST?.includes(req.auth.orgId)) { + throw new BadRequestError({ + message: "Secret scanning is temporarily unavailable." + }); + } + const session = await server.services.secretScanning.createInstallationSession({ actor: req.permission.type, actorId: req.permission.id, @@ -30,6 +39,7 @@ export const registerSecretScanningRouter = async (server: FastifyZodProvider) = actorOrgId: req.permission.orgId, orgId: req.body.organizationId }); + return session; } }); diff --git a/backend/src/ee/routes/v1/secret-version-router.ts b/backend/src/ee/routes/v1/secret-version-router.ts index 0604135ba8..11443ebfee 100644 --- a/backend/src/ee/routes/v1/secret-version-router.ts +++ b/backend/src/ee/routes/v1/secret-version-router.ts @@ -1,8 +1,8 @@ import { z } from "zod"; -import { SecretVersionsSchema } from "@app/db/schemas"; import { readLimit } from "@app/server/config/rateLimiter"; import { verifyAuth } from "@app/server/plugins/auth/verify-auth"; +import { secretRawSchema } from "@app/server/routes/sanitizedSchemas"; import { AuthMode } from "@app/services/auth/auth-type"; export const registerSecretVersionRouter = async (server: FastifyZodProvider) => { @@ -22,7 +22,7 @@ export const registerSecretVersionRouter = async (server: FastifyZodProvider) => }), response: { 200: z.object({ - secretVersions: SecretVersionsSchema.omit({ secretBlindIndex: true }).array() + secretVersions: secretRawSchema.array() }) } }, diff --git a/backend/src/ee/routes/v1/snapshot-router.ts b/backend/src/ee/routes/v1/snapshot-router.ts index 6767f83834..a716aabd7a 100644 --- a/backend/src/ee/routes/v1/snapshot-router.ts +++ b/backend/src/ee/routes/v1/snapshot-router.ts @@ -1,9 +1,10 @@ import { z } from "zod"; -import { SecretSnapshotsSchema, SecretTagsSchema, SecretVersionsSchema } from "@app/db/schemas"; +import { SecretSnapshotsSchema, SecretTagsSchema } from "@app/db/schemas"; import { PROJECTS } from "@app/lib/api-docs"; import { readLimit, writeLimit } from "@app/server/config/rateLimiter"; import { verifyAuth } from "@app/server/plugins/auth/verify-auth"; +import { secretRawSchema } from "@app/server/routes/sanitizedSchemas"; import { AuthMode } from "@app/services/auth/auth-type"; export const registerSnapshotRouter = async (server: FastifyZodProvider) => { @@ -27,17 +28,17 @@ export const registerSnapshotRouter = async (server: FastifyZodProvider) => { slug: z.string(), name: 
z.string() }), - secretVersions: SecretVersionsSchema.omit({ secretBlindIndex: true }) - .merge( - z.object({ - tags: SecretTagsSchema.pick({ - id: true, - slug: true, - name: true, - color: true - }).array() - }) - ) + secretVersions: secretRawSchema + .omit({ _id: true, environment: true, workspace: true, type: true }) + .extend({ + secretId: z.string(), + tags: SecretTagsSchema.pick({ + id: true, + slug: true, + name: true, + color: true + }).array() + }) .array(), folderVersion: z.object({ id: z.string(), name: z.string() }).array(), createdAt: z.date(), diff --git a/backend/src/ee/routes/v1/user-additional-privilege-router.ts b/backend/src/ee/routes/v1/user-additional-privilege-router.ts index 7225caecf0..e58a6335b6 100644 --- a/backend/src/ee/routes/v1/user-additional-privilege-router.ts +++ b/backend/src/ee/routes/v1/user-additional-privilege-router.ts @@ -2,17 +2,18 @@ import slugify from "@sindresorhus/slugify"; import ms from "ms"; import { z } from "zod"; -import { ProjectUserAdditionalPrivilegeSchema } from "@app/db/schemas"; +import { ProjectPermissionV2Schema } from "@app/ee/services/permission/project-permission"; import { ProjectUserAdditionalPrivilegeTemporaryMode } from "@app/ee/services/project-user-additional-privilege/project-user-additional-privilege-types"; import { PROJECT_USER_ADDITIONAL_PRIVILEGE } from "@app/lib/api-docs"; import { alphaNumericNanoId } from "@app/lib/nanoid"; import { readLimit, writeLimit } from "@app/server/config/rateLimiter"; import { verifyAuth } from "@app/server/plugins/auth/verify-auth"; +import { SanitizedUserProjectAdditionalPrivilegeSchema } from "@app/server/routes/santizedSchemas/user-additional-privilege"; import { AuthMode } from "@app/services/auth/auth-type"; export const registerUserAdditionalPrivilegeRouter = async (server: FastifyZodProvider) => { server.route({ - url: "/permanent", + url: "/", method: "POST", config: { rateLimit: writeLimit @@ -31,66 +32,30 @@ export const registerUserAdditionalPrivilegeRouter = async (server: FastifyZodPr }) .optional() .describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.CREATE.slug), - permissions: z.any().array().describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.CREATE.permissions) - }), - response: { - 200: z.object({ - privilege: ProjectUserAdditionalPrivilegeSchema - }) - } - }, - onRequest: verifyAuth([AuthMode.JWT]), - handler: async (req) => { - const privilege = await server.services.projectUserAdditionalPrivilege.create({ - actorId: req.permission.id, - actor: req.permission.type, - actorOrgId: req.permission.orgId, - actorAuthMethod: req.permission.authMethod, - ...req.body, - slug: req.body.slug ? 
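// Note: the /permanent and /temporary creation routes are merged into the
// single POST / shown here; the body's `type` field (below) is a discriminated
// union on isTemporary. A hypothetical temporary-privilege body (illustrative
// only; the temporaryMode string value is assumed):
//   {
//     "projectMembershipId": "<membership-id>",
//     "permissions": [ /* ProjectPermissionV2Schema entries */ ],
//     "type": {
//       "isTemporary": true,
//       "temporaryMode": "relative",
//       "temporaryRange": "1h",
//       "temporaryAccessStartTime": "2024-01-01T00:00:00Z"
//     }
//   }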
slugify(req.body.slug) : slugify(alphaNumericNanoId(12)), - isTemporary: false, - permissions: JSON.stringify(req.body.permissions) - }); - return { privilege }; - } - }); - - server.route({ - method: "POST", - url: "/temporary", - config: { - rateLimit: writeLimit - }, - schema: { - body: z.object({ - projectMembershipId: z.string().min(1).describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.CREATE.projectMembershipId), - slug: z - .string() - .min(1) - .max(60) - .trim() - .refine((v) => v.toLowerCase() === v, "Slug must be lowercase") - .refine((v) => slugify(v) === v, { - message: "Slug must be a valid slug" + permissions: ProjectPermissionV2Schema.array().describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.CREATE.permissions), + type: z.discriminatedUnion("isTemporary", [ + z.object({ + isTemporary: z.literal(false) + }), + z.object({ + isTemporary: z.literal(true), + temporaryMode: z + .nativeEnum(ProjectUserAdditionalPrivilegeTemporaryMode) + .describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.CREATE.temporaryMode), + temporaryRange: z + .string() + .refine((val) => ms(val) > 0, "Temporary range must be a positive number") + .describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.CREATE.temporaryRange), + temporaryAccessStartTime: z + .string() + .datetime() + .describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.CREATE.temporaryAccessStartTime) }) - .optional() - .describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.CREATE.slug), - permissions: z.any().array().describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.CREATE.permissions), - temporaryMode: z - .nativeEnum(ProjectUserAdditionalPrivilegeTemporaryMode) - .describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.CREATE.temporaryMode), - temporaryRange: z - .string() - .refine((val) => ms(val) > 0, "Temporary range must be a positive number") - .describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.CREATE.temporaryRange), - temporaryAccessStartTime: z - .string() - .datetime() - .describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.CREATE.temporaryAccessStartTime) + ]) }), response: { 200: z.object({ - privilege: ProjectUserAdditionalPrivilegeSchema + privilege: SanitizedUserProjectAdditionalPrivilegeSchema }) } }, @@ -101,10 +66,10 @@ export const registerUserAdditionalPrivilegeRouter = async (server: FastifyZodPr actor: req.permission.type, actorOrgId: req.permission.orgId, actorAuthMethod: req.permission.authMethod, - ...req.body, - slug: req.body.slug ? 
slugify(req.body.slug) : `privilege-${slugify(alphaNumericNanoId(12))}`, - isTemporary: true, - permissions: JSON.stringify(req.body.permissions) + projectMembershipId: req.body.projectMembershipId, + ...req.body.type, + slug: req.body.slug || slugify(alphaNumericNanoId(8)), + permissions: req.body.permissions }); return { privilege }; } @@ -131,24 +96,31 @@ export const registerUserAdditionalPrivilegeRouter = async (server: FastifyZodPr message: "Slug must be a valid slug" }) .describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.UPDATE.slug), - permissions: z.any().array().describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.UPDATE.permissions), - isTemporary: z.boolean().describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.UPDATE.isTemporary), - temporaryMode: z - .nativeEnum(ProjectUserAdditionalPrivilegeTemporaryMode) - .describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.UPDATE.temporaryMode), - temporaryRange: z - .string() - .refine((val) => ms(val) > 0, "Temporary range must be a positive number") - .describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.UPDATE.temporaryRange), - temporaryAccessStartTime: z - .string() - .datetime() - .describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.UPDATE.temporaryAccessStartTime) + permissions: ProjectPermissionV2Schema.array() + .optional() + .describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.UPDATE.permissions), + type: z.discriminatedUnion("isTemporary", [ + z.object({ isTemporary: z.literal(false).describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.UPDATE.isTemporary) }), + z.object({ + isTemporary: z.literal(true).describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.UPDATE.isTemporary), + temporaryMode: z + .nativeEnum(ProjectUserAdditionalPrivilegeTemporaryMode) + .describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.UPDATE.temporaryMode), + temporaryRange: z + .string() + .refine((val) => typeof val === "undefined" || ms(val) > 0, "Temporary range must be a positive number") + .describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.UPDATE.temporaryRange), + temporaryAccessStartTime: z + .string() + .datetime() + .describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.UPDATE.temporaryAccessStartTime) + }) + ]) }) .partial(), response: { 200: z.object({ - privilege: ProjectUserAdditionalPrivilegeSchema + privilege: SanitizedUserProjectAdditionalPrivilegeSchema }) } }, @@ -160,7 +132,12 @@ export const registerUserAdditionalPrivilegeRouter = async (server: FastifyZodPr actorOrgId: req.permission.orgId, actorAuthMethod: req.permission.authMethod, ...req.body, - permissions: req.body.permissions ? JSON.stringify(req.body.permissions) : undefined, + ...req.body.type, + permissions: req.body.permissions + ? 
// eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore-error this is valid ts + req.body.permissions + : undefined, privilegeId: req.params.privilegeId }); return { privilege }; @@ -179,7 +156,7 @@ export const registerUserAdditionalPrivilegeRouter = async (server: FastifyZodPr }), response: { 200: z.object({ - privilege: ProjectUserAdditionalPrivilegeSchema + privilege: SanitizedUserProjectAdditionalPrivilegeSchema }) } }, @@ -208,7 +185,7 @@ export const registerUserAdditionalPrivilegeRouter = async (server: FastifyZodPr }), response: { 200: z.object({ - privileges: ProjectUserAdditionalPrivilegeSchema.array() + privileges: SanitizedUserProjectAdditionalPrivilegeSchema.omit({ permissions: true }).array() }) } }, @@ -233,11 +210,11 @@ export const registerUserAdditionalPrivilegeRouter = async (server: FastifyZodPr }, schema: { params: z.object({ - privilegeId: z.string().describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.GET_BY_PRIVILEGEID.privilegeId) + privilegeId: z.string().describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.GET_BY_PRIVILEGE_ID.privilegeId) }), response: { 200: z.object({ - privilege: ProjectUserAdditionalPrivilegeSchema + privilege: SanitizedUserProjectAdditionalPrivilegeSchema }) } }, diff --git a/backend/src/ee/routes/v2/identity-project-additional-privilege-router.ts b/backend/src/ee/routes/v2/identity-project-additional-privilege-router.ts new file mode 100644 index 0000000000..5df03f68d1 --- /dev/null +++ b/backend/src/ee/routes/v2/identity-project-additional-privilege-router.ts @@ -0,0 +1,305 @@ +import slugify from "@sindresorhus/slugify"; +import ms from "ms"; +import { z } from "zod"; + +import { IdentityProjectAdditionalPrivilegeTemporaryMode } from "@app/ee/services/identity-project-additional-privilege-v2/identity-project-additional-privilege-v2-types"; +import { ProjectPermissionV2Schema } from "@app/ee/services/permission/project-permission"; +import { IDENTITY_ADDITIONAL_PRIVILEGE_V2 } from "@app/lib/api-docs"; +import { alphaNumericNanoId } from "@app/lib/nanoid"; +import { readLimit, writeLimit } from "@app/server/config/rateLimiter"; +import { verifyAuth } from "@app/server/plugins/auth/verify-auth"; +import { SanitizedIdentityPrivilegeSchema } from "@app/server/routes/santizedSchemas/identitiy-additional-privilege"; +import { AuthMode } from "@app/services/auth/auth-type"; + +export const registerIdentityProjectAdditionalPrivilegeRouter = async (server: FastifyZodProvider) => { + server.route({ + method: "POST", + url: "/", + config: { + rateLimit: writeLimit + }, + schema: { + description: "Add an additional privilege for identity.", + security: [ + { + bearerAuth: [] + } + ], + body: z.object({ + identityId: z.string().min(1).describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.CREATE.identityId), + projectId: z.string().min(1).describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.CREATE.projectId), + slug: z + .string() + .min(1) + .max(60) + .trim() + .refine((val) => val.toLowerCase() === val, "Must be lowercase") + .refine((v) => slugify(v) === v, { + message: "Slug must be a valid slug" + }) + .optional() + .describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.CREATE.slug), + permissions: ProjectPermissionV2Schema.array().describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.CREATE.permission), + type: z.discriminatedUnion("isTemporary", [ + z.object({ + isTemporary: z.literal(false) + }), + z.object({ + isTemporary: z.literal(true), + temporaryMode: z + .nativeEnum(IdentityProjectAdditionalPrivilegeTemporaryMode) + .describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.CREATE.temporaryMode), 
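// Note: temporaryRange is validated with the ms() library, so human-readable
// durations such as "30m", "2h", or "7d" satisfy the refine (ms(val) > 0) while
// arbitrary strings do not; the same isTemporary union shape is reused by the
// PATCH route further down.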
+ temporaryRange: z + .string() + .refine((val) => ms(val) > 0, "Temporary range must be a positive number") + .describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.CREATE.temporaryRange), + temporaryAccessStartTime: z + .string() + .datetime() + .describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.CREATE.temporaryAccessStartTime) + }) + ]) + }), + response: { + 200: z.object({ + privilege: SanitizedIdentityPrivilegeSchema + }) + } + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + handler: async (req) => { + const privilege = await server.services.identityProjectAdditionalPrivilegeV2.create({ + actorAuthMethod: req.permission.authMethod, + actorId: req.permission.id, + actorOrgId: req.permission.orgId, + actor: req.permission.type, + projectId: req.body.projectId, + identityId: req.body.identityId, + ...req.body.type, + slug: req.body.slug || slugify(alphaNumericNanoId(8)), + permissions: req.body.permissions + }); + return { privilege }; + } + }); + + server.route({ + method: "PATCH", + url: "/:id", + config: { + rateLimit: writeLimit + }, + schema: { + description: "Update a specific identity privilege.", + security: [ + { + bearerAuth: [] + } + ], + params: z.object({ + id: z.string().trim().describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.UPDATE.id) + }), + body: z.object({ + slug: z + .string() + .min(1) + .max(60) + .trim() + .refine((val) => val.toLowerCase() === val, "Must be lowercase") + .refine((v) => slugify(v) === v, { + message: "Slug must be a valid slug" + }) + .describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.UPDATE.slug), + permissions: ProjectPermissionV2Schema.array() + .optional() + .describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.UPDATE.privilegePermission), + type: z.discriminatedUnion("isTemporary", [ + z.object({ isTemporary: z.literal(false).describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.UPDATE.isTemporary) }), + z.object({ + isTemporary: z.literal(true).describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.UPDATE.isTemporary), + temporaryMode: z + .nativeEnum(IdentityProjectAdditionalPrivilegeTemporaryMode) + .describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.UPDATE.temporaryMode), + temporaryRange: z + .string() + .refine((val) => typeof val === "undefined" || ms(val) > 0, "Temporary range must be a positive number") + .describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.UPDATE.temporaryRange), + temporaryAccessStartTime: z + .string() + .datetime() + .describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.UPDATE.temporaryAccessStartTime) + }) + ]) + }), + response: { + 200: z.object({ + privilege: SanitizedIdentityPrivilegeSchema + }) + } + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + handler: async (req) => { + const privilege = await server.services.identityProjectAdditionalPrivilegeV2.updateById({ + actorId: req.permission.id, + actor: req.permission.type, + actorOrgId: req.permission.orgId, + actorAuthMethod: req.permission.authMethod, + id: req.params.id, + data: { + ...req.body, + ...req.body.type, + permissions: req.body.permissions || undefined + } + }); + return { privilege }; + } + }); + + server.route({ + method: "DELETE", + url: "/:id", + config: { + rateLimit: writeLimit + }, + schema: { + description: "Delete the specified identity privilege.", + security: [ + { + bearerAuth: [] + } + ], + params: z.object({ + id: z.string().trim().describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.DELETE.id) + }), + response: { + 200: z.object({ + privilege: SanitizedIdentityPrivilegeSchema + }) + } + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + handler: async 
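// A minimal sketch of a body the POST route above would accept. The permission
// shape and the temporaryMode value are assumptions for illustration, not taken
// from this diff; only the field names come from the schema:
const exampleCreateBody = {
  identityId: "<identity-id>",
  projectId: "<project-id>",
  slug: "temp-prod-read",
  permissions: [{ action: ["read"], subject: "secrets" }], // shape per ProjectPermissionV2Schema (assumed)
  type: {
    isTemporary: true,
    temporaryMode: "relative", // assumed IdentityProjectAdditionalPrivilegeTemporaryMode value
    temporaryRange: "1h",
    temporaryAccessStartTime: new Date().toISOString()
  }
};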
(req) => { + const privilege = await server.services.identityProjectAdditionalPrivilegeV2.deleteById({ + actorId: req.permission.id, + actor: req.permission.type, + actorAuthMethod: req.permission.authMethod, + actorOrgId: req.permission.orgId, + id: req.params.id + }); + return { privilege }; + } + }); + + server.route({ + method: "GET", + url: "/:id", + config: { + rateLimit: readLimit + }, + schema: { + description: "Retrieve details of a specific privilege by id.", + security: [ + { + bearerAuth: [] + } + ], + params: z.object({ + id: z.string().min(1).describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.GET_BY_ID.id) + }), + response: { + 200: z.object({ + privilege: SanitizedIdentityPrivilegeSchema + }) + } + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + handler: async (req) => { + const privilege = await server.services.identityProjectAdditionalPrivilegeV2.getPrivilegeDetailsById({ + actorId: req.permission.id, + actorAuthMethod: req.permission.authMethod, + actor: req.permission.type, + actorOrgId: req.permission.orgId, + id: req.params.id + }); + return { privilege }; + } + }); + + server.route({ + method: "GET", + url: "/slug/:privilegeSlug", + config: { + rateLimit: readLimit + }, + schema: { + description: "Retrieve details of a specific privilege by slug.", + security: [ + { + bearerAuth: [] + } + ], + params: z.object({ + privilegeSlug: z.string().min(1).describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.GET_BY_SLUG.slug) + }), + querystring: z.object({ + identityId: z.string().min(1).describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.GET_BY_SLUG.identityId), + projectSlug: z.string().min(1).describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.GET_BY_SLUG.projectSlug) + }), + response: { + 200: z.object({ + privilege: SanitizedIdentityPrivilegeSchema + }) + } + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + handler: async (req) => { + const privilege = await server.services.identityProjectAdditionalPrivilegeV2.getPrivilegeDetailsBySlug({ + actorId: req.permission.id, + actorAuthMethod: req.permission.authMethod, + actor: req.permission.type, + actorOrgId: req.permission.orgId, + slug: req.params.privilegeSlug, + ...req.query + }); + return { privilege }; + } + }); + + server.route({ + method: "GET", + url: "/", + config: { + rateLimit: readLimit + }, + schema: { + description: "List privileges for the specified identity by project.", + security: [ + { + bearerAuth: [] + } + ], + querystring: z.object({ + identityId: z.string().min(1).describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.LIST.identityId), + projectId: z.string().min(1).describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.LIST.projectId) + }), + response: { + 200: z.object({ + privileges: SanitizedIdentityPrivilegeSchema.omit({ permissions: true }).array() + }) + } + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + handler: async (req) => { + const privileges = await server.services.identityProjectAdditionalPrivilegeV2.listIdentityProjectPrivileges({ + actorId: req.permission.id, + actor: req.permission.type, + actorAuthMethod: req.permission.authMethod, + actorOrgId: req.permission.orgId, + ...req.query + }); + return { + privileges + }; + } + }); +}; diff --git a/backend/src/ee/routes/v2/index.ts b/backend/src/ee/routes/v2/index.ts new file mode 100644 index 0000000000..bede5a1cf8 --- /dev/null +++ b/backend/src/ee/routes/v2/index.ts @@ -0,0 +1,16 @@ +import { registerIdentityProjectAdditionalPrivilegeRouter } from "./identity-project-additional-privilege-router"; +import { 
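// Note how the list endpoint above responds with
// SanitizedIdentityPrivilegeSchema.omit({ permissions: true }).array(): zod's
// .omit() derives a narrower object schema, so serialized list items drop the
// potentially large permissions document while detail endpoints keep it.
// A generic sketch of the mechanism (names hypothetical):
const Row = z.object({ id: z.string(), permissions: z.unknown() });
const ListRow = Row.omit({ permissions: true }); // equivalent to z.object({ id: z.string() })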
registerProjectRoleRouter } from "./project-role-router"; + +export const registerV2EERoutes = async (server: FastifyZodProvider) => { + // project role routes are registered under the /workspace prefix + await server.register( + async (projectRouter) => { + await projectRouter.register(registerProjectRoleRouter); + }, + { prefix: "/workspace" } + ); + + await server.register(registerIdentityProjectAdditionalPrivilegeRouter, { + prefix: "/identity-project-additional-privilege" + }); +}; diff --git a/backend/src/ee/routes/v2/project-role-router.ts b/backend/src/ee/routes/v2/project-role-router.ts new file mode 100644 index 0000000000..70511ce87e --- /dev/null +++ b/backend/src/ee/routes/v2/project-role-router.ts @@ -0,0 +1,242 @@ +import { packRules } from "@casl/ability/extra"; +import slugify from "@sindresorhus/slugify"; +import { z } from "zod"; + +import { ProjectMembershipRole, ProjectRolesSchema } from "@app/db/schemas"; +import { ProjectPermissionV2Schema } from "@app/ee/services/permission/project-permission"; +import { PROJECT_ROLE } from "@app/lib/api-docs"; +import { readLimit, writeLimit } from "@app/server/config/rateLimiter"; +import { verifyAuth } from "@app/server/plugins/auth/verify-auth"; +import { SanitizedRoleSchema } from "@app/server/routes/sanitizedSchemas"; +import { AuthMode } from "@app/services/auth/auth-type"; +import { ProjectRoleServiceIdentifierType } from "@app/services/project-role/project-role-types"; + +export const registerProjectRoleRouter = async (server: FastifyZodProvider) => { + server.route({ + method: "POST", + url: "/:projectId/roles", + config: { + rateLimit: writeLimit + }, + schema: { + description: "Create a project role", + security: [ + { + bearerAuth: [] + } + ], + params: z.object({ + projectId: z.string().trim().describe(PROJECT_ROLE.CREATE.projectId) + }), + body: z.object({ + slug: z + .string() + .toLowerCase() + .trim() + .min(1) + .refine( + (val) => !Object.values(ProjectMembershipRole).includes(val as ProjectMembershipRole), + "Please choose a different slug, the slug you have entered is reserved" + ) + .refine((v) => slugify(v) === v, { + message: "Slug must be a valid slug" + }) + .describe(PROJECT_ROLE.CREATE.slug), + name: z.string().min(1).trim().describe(PROJECT_ROLE.CREATE.name), + description: z.string().trim().optional().describe(PROJECT_ROLE.CREATE.description), + permissions: ProjectPermissionV2Schema.array().describe(PROJECT_ROLE.CREATE.permissions) + }), + response: { + 200: z.object({ + role: SanitizedRoleSchema + }) + } + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + handler: async (req) => { + const role = await server.services.projectRole.createRole({ + actorAuthMethod: req.permission.authMethod, + actorId: req.permission.id, + actorOrgId: req.permission.orgId, + actor: req.permission.type, + filter: { + type: ProjectRoleServiceIdentifierType.ID, + projectId: req.params.projectId + }, + data: { + ...req.body, + permissions: JSON.stringify(packRules(req.body.permissions)) + } + }); + return { role }; + } + }); + + server.route({ + method: "PATCH", + url: "/:projectId/roles/:roleId", + config: { + rateLimit: writeLimit + }, + schema: { + description: "Update a project role", + security: [ + { + bearerAuth: [] + } + ], + params: z.object({ + projectId: z.string().trim().describe(PROJECT_ROLE.UPDATE.projectId), + roleId: z.string().trim().describe(PROJECT_ROLE.UPDATE.roleId) + }), + body: z.object({ + slug: z + .string() + .toLowerCase() + .trim() + .optional() + .describe(PROJECT_ROLE.UPDATE.slug) + .refine( + (val) => 
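// The slug refines in this router reject the built-in role slugs before any
// service call is made. Roughly how the reserved check behaves (the enum
// values shown are illustrative, not taken from this diff):
const reserved = Object.values(ProjectMembershipRole); // e.g. ["admin", "member", "viewer", ...]
reserved.includes("admin" as ProjectMembershipRole);       // true  -> "slug ... is reserved"
reserved.includes("release-eng" as ProjectMembershipRole); // false -> accepted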
+ typeof val === "undefined" || + !Object.values(ProjectMembershipRole).includes(val as ProjectMembershipRole), + "Please choose a different slug, the slug you have entered is reserved" + ) + .refine((val) => typeof val === "undefined" || slugify(val) === val, { + message: "Slug must be a valid slug" + }), + name: z.string().trim().optional().describe(PROJECT_ROLE.UPDATE.name), + description: z.string().trim().optional().describe(PROJECT_ROLE.UPDATE.description), + permissions: ProjectPermissionV2Schema.array().describe(PROJECT_ROLE.UPDATE.permissions).optional() + }), + response: { + 200: z.object({ + role: SanitizedRoleSchema + }) + } + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + handler: async (req) => { + const role = await server.services.projectRole.updateRole({ + actorAuthMethod: req.permission.authMethod, + actorId: req.permission.id, + actorOrgId: req.permission.orgId, + actor: req.permission.type, + roleId: req.params.roleId, + data: { + ...req.body, + permissions: req.body.permissions ? JSON.stringify(packRules(req.body.permissions)) : undefined + } + }); + return { role }; + } + }); + + server.route({ + method: "DELETE", + url: "/:projectId/roles/:roleId", + config: { + rateLimit: writeLimit + }, + schema: { + description: "Delete a project role", + security: [ + { + bearerAuth: [] + } + ], + params: z.object({ + projectId: z.string().trim().describe(PROJECT_ROLE.DELETE.projectId), + roleId: z.string().trim().describe(PROJECT_ROLE.DELETE.roleId) + }), + response: { + 200: z.object({ + role: SanitizedRoleSchema + }) + } + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + handler: async (req) => { + const role = await server.services.projectRole.deleteRole({ + actorAuthMethod: req.permission.authMethod, + actorId: req.permission.id, + actorOrgId: req.permission.orgId, + actor: req.permission.type, + roleId: req.params.roleId + }); + return { role }; + } + }); + + server.route({ + method: "GET", + url: "/:projectId/roles", + config: { + rateLimit: readLimit + }, + schema: { + description: "List project roles", + security: [ + { + bearerAuth: [] + } + ], + params: z.object({ + projectId: z.string().trim().describe(PROJECT_ROLE.LIST.projectId) + }), + response: { + 200: z.object({ + roles: ProjectRolesSchema.omit({ permissions: true, version: true }).array() + }) + } + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + handler: async (req) => { + const roles = await server.services.projectRole.listRoles({ + actorAuthMethod: req.permission.authMethod, + actorId: req.permission.id, + actorOrgId: req.permission.orgId, + actor: req.permission.type, + filter: { + type: ProjectRoleServiceIdentifierType.ID, + projectId: req.params.projectId + } + }); + return { roles }; + } + }); + + server.route({ + method: "GET", + url: "/:projectId/roles/slug/:roleSlug", + config: { + rateLimit: readLimit + }, + schema: { + params: z.object({ + projectId: z.string().trim().describe(PROJECT_ROLE.GET_ROLE_BY_SLUG.projectId), + roleSlug: z.string().trim().describe(PROJECT_ROLE.GET_ROLE_BY_SLUG.roleSlug) + }), + response: { + 200: z.object({ + role: SanitizedRoleSchema.omit({ version: true }) + }) + } + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + handler: async (req) => { + const role = await server.services.projectRole.getRoleBySlug({ + actorAuthMethod: req.permission.authMethod, + actorId: req.permission.id, + actorOrgId: req.permission.orgId, + actor: req.permission.type, + filter: { 
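// Both the create and update handlers above pack the CASL rules before
// persisting: packRules (from @casl/ability/extra) compacts rule objects into
// tuples, which are then JSON.stringify-ed into the role row, and unpackRules
// restores them on read. A rough sketch (the packed shape shown is indicative):
const rules = [{ action: "read", subject: "secrets" }];
const packed = packRules(rules);      // e.g. [["read", "secrets"]]
const restored = unpackRules(packed); // back to full rule objects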
type: ProjectRoleServiceIdentifierType.ID, + projectId: req.params.projectId + }, + roleSlug: req.params.roleSlug + }); + return { role }; + } + }); +}; diff --git a/backend/src/ee/services/access-approval-policy/access-approval-policy-dal.ts b/backend/src/ee/services/access-approval-policy/access-approval-policy-dal.ts index 88e2888329..220701410f 100644 --- a/backend/src/ee/services/access-approval-policy/access-approval-policy-dal.ts +++ b/backend/src/ee/services/access-approval-policy/access-approval-policy-dal.ts @@ -1,26 +1,42 @@ import { Knex } from "knex"; import { TDbClient } from "@app/db"; -import { TableName, TAccessApprovalPolicies } from "@app/db/schemas"; +import { AccessApprovalPoliciesSchema, TableName, TAccessApprovalPolicies } from "@app/db/schemas"; import { DatabaseError } from "@app/lib/errors"; -import { buildFindFilter, mergeOneToManyRelation, ormify, selectAllTableCols, TFindFilter } from "@app/lib/knex"; +import { buildFindFilter, ormify, selectAllTableCols, sqlNestRelationships, TFindFilter } from "@app/lib/knex"; + +import { ApproverType } from "./access-approval-policy-types"; export type TAccessApprovalPolicyDALFactory = ReturnType; export const accessApprovalPolicyDALFactory = (db: TDbClient) => { const accessApprovalPolicyOrm = ormify(db, TableName.AccessApprovalPolicy); - const accessApprovalPolicyFindQuery = async (tx: Knex, filter: TFindFilter) => { + const accessApprovalPolicyFindQuery = async ( + tx: Knex, + filter: TFindFilter, + customFilter?: { + policyId?: string; + } + ) => { const result = await tx(TableName.AccessApprovalPolicy) // eslint-disable-next-line .where(buildFindFilter(filter)) + .where((qb) => { + if (customFilter?.policyId) { + void qb.where(`${TableName.AccessApprovalPolicy}.id`, "=", customFilter.policyId); + } + }) .join(TableName.Environment, `${TableName.AccessApprovalPolicy}.envId`, `${TableName.Environment}.id`) - .join( + .leftJoin( TableName.AccessApprovalPolicyApprover, `${TableName.AccessApprovalPolicy}.id`, `${TableName.AccessApprovalPolicyApprover}.policyId` ) - .select(tx.ref("approverId").withSchema(TableName.AccessApprovalPolicyApprover)) + .leftJoin(TableName.Users, `${TableName.AccessApprovalPolicyApprover}.approverUserId`, `${TableName.Users}.id`) + .select(tx.ref("username").withSchema(TableName.Users).as("approverUsername")) + .select(tx.ref("approverUserId").withSchema(TableName.AccessApprovalPolicyApprover)) + .select(tx.ref("approverGroupId").withSchema(TableName.AccessApprovalPolicyApprover)) .select(tx.ref("name").withSchema(TableName.Environment).as("envName")) .select(tx.ref("slug").withSchema(TableName.Environment).as("envSlug")) .select(tx.ref("id").withSchema(TableName.Environment).as("envId")) @@ -30,43 +46,94 @@ export const accessApprovalPolicyDALFactory = (db: TDbClient) => { return result; }; - const findById = async (id: string, tx?: Knex) => { + const findById = async (policyId: string, tx?: Knex) => { try { - const doc = await accessApprovalPolicyFindQuery(tx || db, { - [`${TableName.AccessApprovalPolicy}.id` as "id"]: id + const doc = await accessApprovalPolicyFindQuery(tx || db.replicaNode(), { + [`${TableName.AccessApprovalPolicy}.id` as "id"]: policyId }); - const formatedDoc = mergeOneToManyRelation( - doc, - "id", - ({ approverId, envId, envName: name, envSlug: slug, ...el }) => ({ - ...el, - envId, - environment: { id: envId, name, slug } + const formattedDoc = sqlNestRelationships({ + data: doc, + key: "id", + parentMapper: (data) => ({ + environment: { + id: data.envId, + name: 
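// The approver left joins above fan each policy out into one row per
// (policy, approver) pair; sqlNestRelationships collapses rows sharing `key`
// into a single parent via parentMapper and accumulates the childrenMapper
// outputs (defined below) into labeled arrays. Indicative shape of the
// transform, with hypothetical values:
const joinedRows = [
  { id: "p1", approverUserId: "u1", approverGroupId: null },
  { id: "p1", approverUserId: null, approverGroupId: "g1" }
];
const nested = [{ id: "p1", approvers: [{ id: "u1", type: "user" }, { id: "g1", type: "group" }] }];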
data.envName, + slug: data.envSlug + }, + projectId: data.projectId, + ...AccessApprovalPoliciesSchema.parse(data) }), - ({ approverId }) => approverId, - "approvers" - ); - return formatedDoc?.[0]; + childrenMapper: [ + { + key: "approverUserId", + label: "approvers" as const, + mapper: ({ approverUserId: id }) => ({ + id, + type: "user" + }) + }, + { + key: "approverGroupId", + label: "approvers" as const, + mapper: ({ approverGroupId: id }) => ({ + id, + type: "group" + }) + } + ] + }); + + return formattedDoc?.[0]; } catch (error) { throw new DatabaseError({ error, name: "FindById" }); } }; - const find = async (filter: TFindFilter, tx?: Knex) => { + const find = async ( + filter: TFindFilter, + customFilter?: { + policyId?: string; + }, + tx?: Knex + ) => { try { - const docs = await accessApprovalPolicyFindQuery(tx || db, filter); - const formatedDoc = mergeOneToManyRelation( - docs, - "id", - ({ approverId, envId, envName: name, envSlug: slug, ...el }) => ({ - ...el, - envId, - environment: { id: envId, name, slug } + const docs = await accessApprovalPolicyFindQuery(tx || db.replicaNode(), filter, customFilter); + + const formattedDocs = sqlNestRelationships({ + data: docs, + key: "id", + parentMapper: (data) => ({ + environment: { + id: data.envId, + name: data.envName, + slug: data.envSlug + }, + projectId: data.projectId, + ...AccessApprovalPoliciesSchema.parse(data) + // secretPath: data.secretPath || undefined, }), - ({ approverId }) => approverId, - "approvers" - ); - return formatedDoc.map((policy) => ({ ...policy, secretPath: policy.secretPath || undefined })); + childrenMapper: [ + { + key: "approverUserId", + label: "approvers" as const, + mapper: ({ approverUserId: id, approverUsername }) => ({ + id, + type: ApproverType.User, + name: approverUsername + }) + }, + { + key: "approverGroupId", + label: "approvers" as const, + mapper: ({ approverGroupId: id }) => ({ + id, + type: ApproverType.Group + }) + } + ] + }); + + return formattedDocs; } catch (error) { throw new DatabaseError({ error, name: "Find" }); } diff --git a/backend/src/ee/services/access-approval-policy/access-approval-policy-fns.ts b/backend/src/ee/services/access-approval-policy/access-approval-policy-fns.ts deleted file mode 100644 index 7b0a2681fa..0000000000 --- a/backend/src/ee/services/access-approval-policy/access-approval-policy-fns.ts +++ /dev/null @@ -1,36 +0,0 @@ -import { ForbiddenError, subject } from "@casl/ability"; - -import { BadRequestError } from "@app/lib/errors"; -import { ActorType } from "@app/services/auth/auth-type"; - -import { ProjectPermissionActions, ProjectPermissionSub } from "../permission/project-permission"; -import { TVerifyApprovers } from "./access-approval-policy-types"; - -export const verifyApprovers = async ({ - userIds, - projectId, - orgId, - envSlug, - actorAuthMethod, - secretPath, - permissionService -}: TVerifyApprovers) => { - for await (const userId of userIds) { - try { - const { permission: approverPermission } = await permissionService.getProjectPermission( - ActorType.USER, - userId, - projectId, - actorAuthMethod, - orgId - ); - - ForbiddenError.from(approverPermission).throwUnlessCan( - ProjectPermissionActions.Create, - subject(ProjectPermissionSub.Secrets, { environment: envSlug, secretPath }) - ); - } catch (err) { - throw new BadRequestError({ message: "One or more approvers doesn't have access to be specified secret path" }); - } - } -}; diff --git a/backend/src/ee/services/access-approval-policy/access-approval-policy-service.ts 
b/backend/src/ee/services/access-approval-policy/access-approval-policy-service.ts index 51a51abb5a..ee7cf25729 100644 --- a/backend/src/ee/services/access-approval-policy/access-approval-policy-service.ts +++ b/backend/src/ee/services/access-approval-policy/access-approval-policy-service.ts @@ -2,17 +2,20 @@ import { ForbiddenError } from "@casl/ability"; import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service"; import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission"; -import { BadRequestError } from "@app/lib/errors"; +import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors"; import { TProjectDALFactory } from "@app/services/project/project-dal"; import { TProjectEnvDALFactory } from "@app/services/project-env/project-env-dal"; import { TProjectMembershipDALFactory } from "@app/services/project-membership/project-membership-dal"; +import { TUserDALFactory } from "@app/services/user/user-dal"; +import { TGroupDALFactory } from "../group/group-dal"; import { TAccessApprovalPolicyApproverDALFactory } from "./access-approval-policy-approver-dal"; import { TAccessApprovalPolicyDALFactory } from "./access-approval-policy-dal"; -import { verifyApprovers } from "./access-approval-policy-fns"; import { + ApproverType, TCreateAccessApprovalPolicy, TDeleteAccessApprovalPolicy, + TGetAccessApprovalPolicyByIdDTO, TGetAccessPolicyCountByEnvironmentDTO, TListAccessApprovalPoliciesDTO, TUpdateAccessApprovalPolicy @@ -25,6 +28,8 @@ type TSecretApprovalPolicyServiceFactoryDep = { projectEnvDAL: Pick; accessApprovalPolicyApproverDAL: TAccessApprovalPolicyApproverDALFactory; projectMembershipDAL: Pick; + groupDAL: TGroupDALFactory; + userDAL: Pick; }; export type TAccessApprovalPolicyServiceFactory = ReturnType; @@ -32,10 +37,11 @@ export type TAccessApprovalPolicyServiceFactory = ReturnType { const createAccessApprovalPolicy = async ({ name, @@ -47,12 +53,27 @@ export const accessApprovalPolicyServiceFactory = ({ approvals, approvers, projectSlug, - environment + environment, + enforcementLevel }: TCreateAccessApprovalPolicy) => { const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId); - if (!project) throw new BadRequestError({ message: "Project not found" }); + if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` }); - if (approvals > approvers.length) + // If there is a group approver people might be added to the group later to meet the approvers quota + const groupApprovers = approvers + .filter((approver) => approver.type === ApproverType.Group) + .map((approver) => approver.id) as string[]; + + const userApprovers = approvers + .filter((approver) => approver.type === ApproverType.User) + .map((approver) => approver.id) + .filter(Boolean) as string[]; + + const userApproverNames = approvers + .map((approver) => (approver.type === ApproverType.User ? 
approver.name : undefined)) + .filter(Boolean) as string[]; + + if (!groupApprovers.length && approvals > userApprovers.length + userApproverNames.length) throw new BadRequestError({ message: "Approvals cannot be greater than approvers" }); const { permission } = await permissionService.getProjectPermission( @@ -67,26 +88,50 @@ export const accessApprovalPolicyServiceFactory = ({ ProjectPermissionSub.SecretApproval ); const env = await projectEnvDAL.findOne({ slug: environment, projectId: project.id }); - if (!env) throw new BadRequestError({ message: "Environment not found" }); + if (!env) throw new NotFoundError({ message: `Environment with slug '${environment}' not found` }); - const secretApprovers = await projectMembershipDAL.find({ - projectId: project.id, - $in: { id: approvers } - }); + let approverUserIds = userApprovers; + if (userApproverNames.length) { + const approverUsers = await userDAL.find({ + $in: { + username: userApproverNames + } + }); - if (secretApprovers.length !== approvers.length) { - throw new BadRequestError({ message: "Approver not found in project" }); + const approverNamesFromDb = approverUsers.map((user) => user.username); + const invalidUsernames = userApproverNames.filter((username) => !approverNamesFromDb.includes(username)); + + if (invalidUsernames.length) { + throw new BadRequestError({ + message: `Invalid approver user: ${invalidUsernames.join(", ")}` + }); + } + + approverUserIds = approverUserIds.concat(approverUsers.map((user) => user.id)); } - await verifyApprovers({ - projectId: project.id, - orgId: actorOrgId, - envSlug: environment, - secretPath, - actorAuthMethod, - permissionService, - userIds: secretApprovers.map((approver) => approver.userId) - }); + const usersPromises: Promise< + { + id: string; + email: string | null | undefined; + username: string; + firstName: string | null | undefined; + lastName: string | null | undefined; + isPartOfGroup: boolean; + }[] + >[] = []; + const verifyAllApprovers = [...approverUserIds]; + + for (const groupId of groupApprovers) { + usersPromises.push( + groupDAL.findAllGroupPossibleMembers({ orgId: actorOrgId, groupId, offset: 0 }).then((group) => group.members) + ); + } + const verifyGroupApprovers = (await Promise.all(usersPromises)) + .flat() + .filter((user) => user.isPartOfGroup) + .map((user) => user.id); + verifyAllApprovers.push(...verifyGroupApprovers); const accessApproval = await accessApprovalPolicyDAL.transaction(async (tx) => { const doc = await accessApprovalPolicyDAL.create( @@ -94,17 +139,31 @@ export const accessApprovalPolicyServiceFactory = ({ envId: env.id, approvals, secretPath, - name + name, + enforcementLevel }, tx ); - await accessApprovalPolicyApproverDAL.insertMany( - secretApprovers.map(({ id }) => ({ - approverId: id, - policyId: doc.id - })), - tx - ); + if (approverUserIds.length) { + await accessApprovalPolicyApproverDAL.insertMany( + approverUserIds.map((userId) => ({ + approverUserId: userId, + policyId: doc.id + })), + tx + ); + } + + if (groupApprovers.length) { + await accessApprovalPolicyApproverDAL.insertMany( + groupApprovers.map((groupId) => ({ + approverGroupId: groupId, + policyId: doc.id + })), + tx + ); + } + return doc; }); return { ...accessApproval, environment: env, projectId: project.id }; @@ -118,7 +177,7 @@ export const accessApprovalPolicyServiceFactory = ({ projectSlug }: TListAccessApprovalPoliciesDTO) => { const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId); - if (!project) throw new BadRequestError({ message: "Project not found" }); + if 
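// The transaction above writes one AccessApprovalPolicy row plus one
// AccessApprovalPolicyApprover row per approver, with exactly one of
// approverUserId / approverGroupId set per row. Hypothetical resulting rows:
const approverRows = [
  { policyId: "pol-1", approverUserId: "user-1", approverGroupId: null },
  { policyId: "pol-1", approverUserId: null, approverGroupId: "group-1" }
];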
(!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` }); // Anyone in the project should be able to get the policies. /* const { permission } = */ await permissionService.getProjectPermission( @@ -143,10 +202,35 @@ export const accessApprovalPolicyServiceFactory = ({ actor, actorOrgId, actorAuthMethod, - approvals + approvals, + enforcementLevel }: TUpdateAccessApprovalPolicy) => { + const groupApprovers = approvers + .filter((approver) => approver.type === ApproverType.Group) + .map((approver) => approver.id) as string[]; + + const userApprovers = approvers + .filter((approver) => approver.type === ApproverType.User) + .map((approver) => approver.id) + .filter(Boolean) as string[]; + + const userApproverNames = approvers + .map((approver) => (approver.type === ApproverType.User ? approver.name : undefined)) + .filter(Boolean) as string[]; + const accessApprovalPolicy = await accessApprovalPolicyDAL.findById(policyId); - if (!accessApprovalPolicy) throw new BadRequestError({ message: "Secret approval policy not found" }); + if (!accessApprovalPolicy) { + throw new NotFoundError({ message: `Secret approval policy with ID '${policyId}' not found` }); + } + + const currentApprovals = approvals || accessApprovalPolicy.approvals; + if ( + groupApprovers?.length === 0 && + userApprovers && + currentApprovals > userApprovers.length + userApproverNames.length + ) { + throw new BadRequestError({ message: "Approvals cannot be greater than approvers" }); + } const { permission } = await permissionService.getProjectPermission( actor, actorId, @@ -163,41 +247,54 @@ export const accessApprovalPolicyServiceFactory = ({ { approvals, secretPath, - name + name, + enforcementLevel }, tx ); - if (approvers) { - // Find the workspace project memberships of the users passed in the approvers array - const secretApprovers = await projectMembershipDAL.find( - { - projectId: accessApprovalPolicy.projectId, - $in: { id: approvers } - }, - { tx } - ); - await verifyApprovers({ - projectId: accessApprovalPolicy.projectId, - orgId: actorOrgId, - envSlug: accessApprovalPolicy.environment.slug, - secretPath: doc.secretPath!, - actorAuthMethod, - permissionService, - userIds: secretApprovers.map((approver) => approver.userId) - }); + await accessApprovalPolicyApproverDAL.delete({ policyId: doc.id }, tx); + + if (userApprovers.length || userApproverNames.length) { + let userApproverIds = userApprovers; + if (userApproverNames.length) { + const approverUsers = await userDAL.find({ + $in: { + username: userApproverNames + } + }); + + const approverNamesFromDb = approverUsers.map((user) => user.username); + const invalidUsernames = userApproverNames.filter((username) => !approverNamesFromDb.includes(username)); + + if (invalidUsernames.length) { + throw new BadRequestError({ + message: `Invalid approver user: ${invalidUsernames.join(", ")}` + }); + } + + userApproverIds = userApproverIds.concat(approverUsers.map((user) => user.id)); + } - if (secretApprovers.length !== approvers.length) - throw new BadRequestError({ message: "Approvals cannot be greater than approvers" }); - await accessApprovalPolicyApproverDAL.delete({ policyId: doc.id }, tx); await accessApprovalPolicyApproverDAL.insertMany( - secretApprovers.map(({ id }) => ({ - approverId: id, + userApproverIds.map((userId) => ({ + approverUserId: userId, policyId: doc.id })), tx ); } + + if (groupApprovers.length) { + await accessApprovalPolicyApproverDAL.insertMany( + groupApprovers.map((groupId) => ({ + approverGroupId: groupId, 
policyId: doc.id + })), + tx + ); + } + return doc; }); return { @@ -215,7 +312,7 @@ export const accessApprovalPolicyServiceFactory = ({ actorOrgId }: TDeleteAccessApprovalPolicy) => { const policy = await accessApprovalPolicyDAL.findById(policyId); - if (!policy) throw new BadRequestError({ message: "Secret approval policy not found" }); + if (!policy) throw new NotFoundError({ message: `Secret approval policy with ID '${policyId}' not found` }); const { permission } = await permissionService.getProjectPermission( actor, @@ -243,7 +340,7 @@ export const accessApprovalPolicyServiceFactory = ({ }: TGetAccessPolicyCountByEnvironmentDTO) => { const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId); - if (!project) throw new BadRequestError({ message: "Project not found" }); + if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` }); const { membership } = await permissionService.getProjectPermission( actor, @@ -252,22 +349,53 @@ export const accessApprovalPolicyServiceFactory = ({ actorAuthMethod, actorOrgId ); - if (!membership) throw new BadRequestError({ message: "User not found in project" }); + if (!membership) { + throw new ForbiddenRequestError({ message: "You are not a member of this project" }); + } const environment = await projectEnvDAL.findOne({ projectId: project.id, slug: envSlug }); - if (!environment) throw new BadRequestError({ message: "Environment not found" }); + if (!environment) throw new NotFoundError({ message: `Environment with slug '${envSlug}' not found` }); const policies = await accessApprovalPolicyDAL.find({ envId: environment.id, projectId: project.id }); - if (!policies) throw new BadRequestError({ message: "No policies found" }); + if (!policies) throw new NotFoundError({ message: `No policies found in environment with slug '${envSlug}'` }); return { count: policies.length }; }; + const getAccessApprovalPolicyById = async ({ + actorId, + actor, + actorOrgId, + actorAuthMethod, + policyId + }: TGetAccessApprovalPolicyByIdDTO) => { + const [policy] = await accessApprovalPolicyDAL.find({}, { policyId }); + + if (!policy) { + throw new NotFoundError({ + message: `Cannot find access approval policy with ID ${policyId}` + }); + } + + const { permission } = await permissionService.getProjectPermission( + actor, + actorId, + policy.projectId, + actorAuthMethod, + actorOrgId + ); + + ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.SecretApproval); + + return policy; + }; + return { getAccessPolicyCountByEnvSlug, createAccessApprovalPolicy, deleteAccessApprovalPolicy, updateAccessApprovalPolicy, - getAccessApprovalPolicyByProjectSlug + getAccessApprovalPolicyByProjectSlug, + getAccessApprovalPolicyById }; }; diff --git a/backend/src/ee/services/access-approval-policy/access-approval-policy-types.ts b/backend/src/ee/services/access-approval-policy/access-approval-policy-types.ts index 601561b680..a42c89e7a7 100644 --- a/backend/src/ee/services/access-approval-policy/access-approval-policy-types.ts +++ b/backend/src/ee/services/access-approval-policy/access-approval-policy-types.ts @@ -1,9 +1,9 @@ -import { TProjectPermission } from "@app/lib/types"; +import { EnforcementLevel, TProjectPermission } from "@app/lib/types"; import { ActorAuthMethod } from "@app/services/auth/auth-type"; import { TPermissionServiceFactory } from "../permission/permission-service"; -export type TVerifyApprovers = { +export type TIsApproversValid = { userIds: string[]; 
permissionService: Pick; envSlug: string; @@ -13,21 +13,28 @@ export type TVerifyApprovers = { orgId: string; }; +export enum ApproverType { + Group = "group", + User = "user" +} + export type TCreateAccessApprovalPolicy = { approvals: number; secretPath: string; environment: string; - approvers: string[]; + approvers: ({ type: ApproverType.Group; id: string } | { type: ApproverType.User; id?: string; name?: string })[]; projectSlug: string; name: string; + enforcementLevel: EnforcementLevel; } & Omit; export type TUpdateAccessApprovalPolicy = { policyId: string; approvals?: number; - approvers?: string[]; + approvers: ({ type: ApproverType.Group; id: string } | { type: ApproverType.User; id?: string; name?: string })[]; secretPath?: string; name?: string; + enforcementLevel?: EnforcementLevel; } & Omit; export type TDeleteAccessApprovalPolicy = { @@ -39,6 +46,10 @@ export type TGetAccessPolicyCountByEnvironmentDTO = { projectSlug: string; } & Omit; +export type TGetAccessApprovalPolicyByIdDTO = { + policyId: string; +} & Omit; + export type TListAccessApprovalPoliciesDTO = { projectSlug: string; } & Omit; diff --git a/backend/src/ee/services/access-approval-request/access-approval-request-dal.ts b/backend/src/ee/services/access-approval-request/access-approval-request-dal.ts index c3f4c72a66..8784d05e2c 100644 --- a/backend/src/ee/services/access-approval-request/access-approval-request-dal.ts +++ b/backend/src/ee/services/access-approval-request/access-approval-request-dal.ts @@ -1,7 +1,7 @@ import { Knex } from "knex"; import { TDbClient } from "@app/db"; -import { AccessApprovalRequestsSchema, TableName, TAccessApprovalRequests } from "@app/db/schemas"; +import { AccessApprovalRequestsSchema, TableName, TAccessApprovalRequests, TUsers } from "@app/db/schemas"; import { DatabaseError } from "@app/lib/errors"; import { ormify, selectAllTableCols, sqlNestRelationships, TFindFilter } from "@app/lib/knex"; @@ -14,7 +14,8 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => { const findRequestsWithPrivilegeByPolicyIds = async (policyIds: string[]) => { try { - const docs = await db(TableName.AccessApprovalRequest) + const docs = await db + .replicaNode()(TableName.AccessApprovalRequest) .whereIn(`${TableName.AccessApprovalRequest}.policyId`, policyIds) .leftJoin( @@ -38,6 +39,18 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => { `${TableName.AccessApprovalPolicy}.id`, `${TableName.AccessApprovalPolicyApprover}.policyId` ) + .leftJoin( + TableName.UserGroupMembership, + `${TableName.AccessApprovalPolicyApprover}.approverGroupId`, + `${TableName.UserGroupMembership}.groupId` + ) + .leftJoin(TableName.Users, `${TableName.UserGroupMembership}.userId`, `${TableName.Users}.id`) + + .join( + db(TableName.Users).as("requestedByUser"), + `${TableName.AccessApprovalRequest}.requestedByUserId`, + `requestedByUser.id` + ) .leftJoin(TableName.Environment, `${TableName.AccessApprovalPolicy}.envId`, `${TableName.Environment}.id`) @@ -47,10 +60,12 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => { db.ref("name").withSchema(TableName.AccessApprovalPolicy).as("policyName"), db.ref("approvals").withSchema(TableName.AccessApprovalPolicy).as("policyApprovals"), db.ref("secretPath").withSchema(TableName.AccessApprovalPolicy).as("policySecretPath"), + db.ref("enforcementLevel").withSchema(TableName.AccessApprovalPolicy).as("policyEnforcementLevel"), db.ref("envId").withSchema(TableName.AccessApprovalPolicy).as("policyEnvId") ) - 
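// The approvers union above lets callers reference a user approver either by
// id or by username (`name`), while group approvers are always referenced by
// id. Values that type-check against it (ids hypothetical):
const exampleApprovers: TCreateAccessApprovalPolicy["approvers"] = [
  { type: ApproverType.User, id: "user-uuid" },
  { type: ApproverType.User, name: "alice@example.com" },
  { type: ApproverType.Group, id: "group-uuid" }
];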
.select(db.ref("approverId").withSchema(TableName.AccessApprovalPolicyApprover)) + .select(db.ref("approverUserId").withSchema(TableName.AccessApprovalPolicyApprover)) + .select(db.ref("userId").withSchema(TableName.UserGroupMembership).as("approverGroupUserId")) .select( db.ref("projectId").withSchema(TableName.Environment), @@ -59,15 +74,20 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => { ) .select( - db.ref("member").withSchema(TableName.AccessApprovalRequestReviewer).as("reviewerMemberId"), + db.ref("reviewerUserId").withSchema(TableName.AccessApprovalRequestReviewer).as("reviewerUserId"), db.ref("status").withSchema(TableName.AccessApprovalRequestReviewer).as("reviewerStatus") ) + // TODO: ADD SUPPORT FOR GROUPS!!!! .select( - db - .ref("projectMembershipId") - .withSchema(TableName.ProjectUserAdditionalPrivilege) - .as("privilegeMembershipId"), + db.ref("email").withSchema("requestedByUser").as("requestedByUserEmail"), + db.ref("username").withSchema("requestedByUser").as("requestedByUserUsername"), + db.ref("firstName").withSchema("requestedByUser").as("requestedByUserFirstName"), + db.ref("lastName").withSchema("requestedByUser").as("requestedByUserLastName"), + + db.ref("userId").withSchema(TableName.ProjectUserAdditionalPrivilege).as("privilegeUserId"), + db.ref("projectId").withSchema(TableName.ProjectUserAdditionalPrivilege).as("privilegeMembershipId"), + db.ref("isTemporary").withSchema(TableName.ProjectUserAdditionalPrivilege).as("privilegeIsTemporary"), db.ref("temporaryMode").withSchema(TableName.ProjectUserAdditionalPrivilege).as("privilegeTemporaryMode"), db.ref("temporaryRange").withSchema(TableName.ProjectUserAdditionalPrivilege).as("privilegeTemporaryRange"), @@ -97,11 +117,21 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => { name: doc.policyName, approvals: doc.policyApprovals, secretPath: doc.policySecretPath, + enforcementLevel: doc.policyEnforcementLevel, envId: doc.policyEnvId }, + requestedByUser: { + userId: doc.requestedByUserId, + email: doc.requestedByUserEmail, + firstName: doc.requestedByUserFirstName, + lastName: doc.requestedByUserLastName, + username: doc.requestedByUserUsername + }, privilege: doc.privilegeId ? { membershipId: doc.privilegeMembershipId, + userId: doc.privilegeUserId, + projectId: doc.projectId, isTemporary: doc.privilegeIsTemporary, temporaryMode: doc.privilegeTemporaryMode, temporaryRange: doc.privilegeTemporaryRange, @@ -115,11 +145,16 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => { }), childrenMapper: [ { - key: "reviewerMemberId", + key: "reviewerUserId", label: "reviewers" as const, - mapper: ({ reviewerMemberId: member, reviewerStatus: status }) => (member ? { member, status } : undefined) + mapper: ({ reviewerUserId: userId, reviewerStatus: status }) => (userId ? 
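// The two childrenMapper entries just below share the label "approvers", so
// direct approvers (approverUserId) and group-derived approvers
// (approverGroupUserId, from the UserGroupMembership join) are merged into a
// single approvers array per request, each keyed independently so repeated
// join rows deduplicate. Indicative merged result (ids hypothetical):
const mergedApprovers = ["user-1" /* direct */, "user-2" /* via group */];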
{ userId, status } : undefined) }, - { key: "approverId", label: "approvers" as const, mapper: ({ approverId }) => approverId } + { key: "approverUserId", label: "approvers" as const, mapper: ({ approverUserId }) => approverUserId }, + { + key: "approverGroupUserId", + label: "approvers" as const, + mapper: ({ approverGroupUserId }) => approverGroupUserId + } ] }); @@ -143,34 +178,86 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => { `${TableName.AccessApprovalPolicy}.id` ) - .join( + .join( + db(TableName.Users).as("requestedByUser"), + `${TableName.AccessApprovalRequest}.requestedByUserId`, + `requestedByUser.id` + ) + + .leftJoin( TableName.AccessApprovalPolicyApprover, `${TableName.AccessApprovalPolicy}.id`, `${TableName.AccessApprovalPolicyApprover}.policyId` ) + + .leftJoin( + db(TableName.Users).as("accessApprovalPolicyApproverUser"), + `${TableName.AccessApprovalPolicyApprover}.approverUserId`, + "accessApprovalPolicyApproverUser.id" + ) + .leftJoin( + TableName.UserGroupMembership, + `${TableName.AccessApprovalPolicyApprover}.approverGroupId`, + `${TableName.UserGroupMembership}.groupId` + ) + + .leftJoin( + db(TableName.Users).as("accessApprovalPolicyGroupApproverUser"), + `${TableName.UserGroupMembership}.userId`, + "accessApprovalPolicyGroupApproverUser.id" + ) + .leftJoin( TableName.AccessApprovalRequestReviewer, `${TableName.AccessApprovalRequest}.id`, `${TableName.AccessApprovalRequestReviewer}.requestId` ) + .leftJoin( + db(TableName.Users).as("accessApprovalReviewerUser"), + `${TableName.AccessApprovalRequestReviewer}.reviewerUserId`, + `accessApprovalReviewerUser.id` + ) + .leftJoin(TableName.Environment, `${TableName.AccessApprovalPolicy}.envId`, `${TableName.Environment}.id`) .select(selectAllTableCols(TableName.AccessApprovalRequest)) .select( - tx.ref("member").withSchema(TableName.AccessApprovalRequestReviewer).as("reviewerMemberId"), + tx.ref("approverUserId").withSchema(TableName.AccessApprovalPolicyApprover), + tx.ref("userId").withSchema(TableName.UserGroupMembership), + tx.ref("email").withSchema("accessApprovalPolicyApproverUser").as("approverEmail"), + tx.ref("email").withSchema("accessApprovalPolicyGroupApproverUser").as("approverGroupEmail"), + tx.ref("username").withSchema("accessApprovalPolicyApproverUser").as("approverUsername"), + tx.ref("username").withSchema("accessApprovalPolicyGroupApproverUser").as("approverGroupUsername"), + tx.ref("firstName").withSchema("accessApprovalPolicyApproverUser").as("approverFirstName"), + tx.ref("firstName").withSchema("accessApprovalPolicyGroupApproverUser").as("approverGroupFirstName"), + tx.ref("lastName").withSchema("accessApprovalPolicyApproverUser").as("approverLastName"), + tx.ref("lastName").withSchema("accessApprovalPolicyGroupApproverUser").as("approverGroupLastName"), + tx.ref("email").withSchema("requestedByUser").as("requestedByUserEmail"), + tx.ref("username").withSchema("requestedByUser").as("requestedByUserUsername"), + tx.ref("firstName").withSchema("requestedByUser").as("requestedByUserFirstName"), + tx.ref("lastName").withSchema("requestedByUser").as("requestedByUserLastName"), + + tx.ref("reviewerUserId").withSchema(TableName.AccessApprovalRequestReviewer), + tx.ref("status").withSchema(TableName.AccessApprovalRequestReviewer).as("reviewerStatus"), + + tx.ref("email").withSchema("accessApprovalReviewerUser").as("reviewerEmail"), + tx.ref("username").withSchema("accessApprovalReviewerUser").as("reviewerUsername"), + 
tx.ref("firstName").withSchema("accessApprovalReviewerUser").as("reviewerFirstName"), + tx.ref("lastName").withSchema("accessApprovalReviewerUser").as("reviewerLastName"), + tx.ref("id").withSchema(TableName.AccessApprovalPolicy).as("policyId"), tx.ref("name").withSchema(TableName.AccessApprovalPolicy).as("policyName"), tx.ref("projectId").withSchema(TableName.Environment), tx.ref("slug").withSchema(TableName.Environment).as("environment"), tx.ref("secretPath").withSchema(TableName.AccessApprovalPolicy).as("policySecretPath"), - tx.ref("approvals").withSchema(TableName.AccessApprovalPolicy).as("policyApprovals"), - tx.ref("approverId").withSchema(TableName.AccessApprovalPolicyApprover) + tx.ref("enforcementLevel").withSchema(TableName.AccessApprovalPolicy).as("policyEnforcementLevel"), + tx.ref("approvals").withSchema(TableName.AccessApprovalPolicy).as("policyApprovals") ); const findById = async (id: string, tx?: Knex) => { try { - const sql = findQuery({ [`${TableName.AccessApprovalRequest}.id` as "id"]: id }, tx || db); + const sql = findQuery({ [`${TableName.AccessApprovalRequest}.id` as "id"]: id }, tx || db.replicaNode()); const docs = await sql; const formatedDoc = sqlNestRelationships({ data: docs, @@ -183,16 +270,64 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => { id: el.policyId, name: el.policyName, approvals: el.policyApprovals, - secretPath: el.policySecretPath + secretPath: el.policySecretPath, + enforcementLevel: el.policyEnforcementLevel + }, + requestedByUser: { + userId: el.requestedByUserId, + email: el.requestedByUserEmail, + firstName: el.requestedByUserFirstName, + lastName: el.requestedByUserLastName, + username: el.requestedByUserUsername } }), childrenMapper: [ { - key: "reviewerMemberId", + key: "reviewerUserId", label: "reviewers" as const, - mapper: ({ reviewerMemberId: member, reviewerStatus: status }) => (member ? { member, status } : undefined) + mapper: ({ + reviewerUserId: userId, + reviewerStatus: status, + reviewerEmail: email, + reviewerLastName: lastName, + reviewerUsername: username, + reviewerFirstName: firstName + }) => (userId ? 
{ userId, status, email, firstName, lastName, username } : undefined) }, - { key: "approverId", label: "approvers" as const, mapper: ({ approverId }) => approverId } + { + key: "approverUserId", + label: "approvers" as const, + mapper: ({ + approverUserId, + approverEmail: email, + approverUsername: username, + approverLastName: lastName, + approverFirstName: firstName + }) => ({ + userId: approverUserId, + email, + firstName, + lastName, + username + }) + }, + { + key: "userId", + label: "approvers" as const, + mapper: ({ + userId, + approverGroupEmail: email, + approverGroupUsername: username, + approverGroupLastName: lastName, + approverGroupFirstName: firstName + }) => ({ + userId, + email, + firstName, + lastName, + username + }) + } ] }); if (!formatedDoc?.[0]) return; @@ -207,7 +342,8 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => { const getCount = async ({ projectId }: { projectId: string }) => { try { - const accessRequests = await db(TableName.AccessApprovalRequest) + const accessRequests = await db + .replicaNode()(TableName.AccessApprovalRequest) .leftJoin( TableName.AccessApprovalPolicy, `${TableName.AccessApprovalRequest}.policyId`, @@ -229,7 +365,7 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => { .where(`${TableName.Environment}.projectId`, projectId) .select(selectAllTableCols(TableName.AccessApprovalRequest)) .select(db.ref("status").withSchema(TableName.AccessApprovalRequestReviewer).as("reviewerStatus")) - .select(db.ref("member").withSchema(TableName.AccessApprovalRequestReviewer).as("reviewerMemberId")); + .select(db.ref("reviewerUserId").withSchema(TableName.AccessApprovalRequestReviewer).as("reviewerUserId")); const formattedRequests = sqlNestRelationships({ data: accessRequests, @@ -239,9 +375,10 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => { }), childrenMapper: [ { - key: "reviewerMemberId", + key: "reviewerUserId", label: "reviewers" as const, - mapper: ({ reviewerMemberId: member, reviewerStatus: status }) => (member ? { member, status } : undefined) + mapper: ({ reviewerUserId: reviewer, reviewerStatus: status }) => + reviewer ? 
{ reviewer, status } : undefined } ] }); diff --git a/backend/src/ee/services/access-approval-request/access-approval-request-fns.ts b/backend/src/ee/services/access-approval-request/access-approval-request-fns.ts index 90b42aaf7a..fad19f12fc 100644 --- a/backend/src/ee/services/access-approval-request/access-approval-request-fns.ts +++ b/backend/src/ee/services/access-approval-request/access-approval-request-fns.ts @@ -1,6 +1,6 @@ import { PackRule, unpackRules } from "@casl/ability/extra"; -import { UnauthorizedError } from "@app/lib/errors"; +import { BadRequestError } from "@app/lib/errors"; import { TVerifyPermission } from "./access-approval-request-types"; @@ -19,7 +19,7 @@ export const verifyRequestedPermissions = ({ permissions }: TVerifyPermission) = ); if (!permission || !permission.length) { - throw new UnauthorizedError({ message: "No permission provided" }); + throw new BadRequestError({ message: "No permission provided" }); } const requestedPermissions: string[] = []; @@ -39,10 +39,10 @@ export const verifyRequestedPermissions = ({ permissions }: TVerifyPermission) = const permissionEnv = firstPermission.conditions?.environment; if (!permissionEnv || typeof permissionEnv !== "string") { - throw new UnauthorizedError({ message: "Permission environment is not a string" }); + throw new BadRequestError({ message: "Permission environment is not a string" }); } if (!permissionSecretPath || typeof permissionSecretPath !== "string") { - throw new UnauthorizedError({ message: "Permission path is not a string" }); + throw new BadRequestError({ message: "Permission path is not a string" }); } return { diff --git a/backend/src/ee/services/access-approval-request/access-approval-request-service.ts b/backend/src/ee/services/access-approval-request/access-approval-request-service.ts index becdb78daf..14accff41f 100644 --- a/backend/src/ee/services/access-approval-request/access-approval-request-service.ts +++ b/backend/src/ee/services/access-approval-request/access-approval-request-service.ts @@ -3,17 +3,21 @@ import ms from "ms"; import { ProjectMembershipRole } from "@app/db/schemas"; import { getConfig } from "@app/lib/config/env"; -import { BadRequestError, UnauthorizedError } from "@app/lib/errors"; +import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors"; import { alphaNumericNanoId } from "@app/lib/nanoid"; +import { TKmsServiceFactory } from "@app/services/kms/kms-service"; import { TProjectDALFactory } from "@app/services/project/project-dal"; import { TProjectEnvDALFactory } from "@app/services/project-env/project-env-dal"; import { TProjectMembershipDALFactory } from "@app/services/project-membership/project-membership-dal"; +import { TProjectSlackConfigDALFactory } from "@app/services/slack/project-slack-config-dal"; +import { triggerSlackNotification } from "@app/services/slack/slack-fns"; +import { SlackTriggerFeature } from "@app/services/slack/slack-types"; import { SmtpTemplates, TSmtpService } from "@app/services/smtp/smtp-service"; import { TUserDALFactory } from "@app/services/user/user-dal"; import { TAccessApprovalPolicyApproverDALFactory } from "../access-approval-policy/access-approval-policy-approver-dal"; import { TAccessApprovalPolicyDALFactory } from "../access-approval-policy/access-approval-policy-dal"; -import { verifyApprovers } from "../access-approval-policy/access-approval-policy-fns"; +import { TGroupDALFactory } from "../group/group-dal"; import { TPermissionServiceFactory } from "../permission/permission-service"; import 
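// Swapping UnauthorizedError for BadRequestError in the permission checks
// above moves these failures from a 401 (bad credentials) to a 400 (malformed
// input), matching the actual cause: the caller supplied missing or non-string
// permission fields, not bad auth. Assuming the conventional status mapping
// used throughout this diff:
//   BadRequestError       -> HTTP 400
//   UnauthorizedError     -> HTTP 401
//   ForbiddenRequestError -> HTTP 403
//   NotFoundError         -> HTTP 404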
{ TProjectUserAdditionalPrivilegeDALFactory } from "../project-user-additional-privilege/project-user-additional-privilege-dal"; import { ProjectUserAdditionalPrivilegeTemporaryMode } from "../project-user-additional-privilege/project-user-additional-privilege-types"; @@ -33,7 +37,10 @@ type TSecretApprovalRequestServiceFactoryDep = { permissionService: Pick; accessApprovalPolicyApproverDAL: Pick; projectEnvDAL: Pick; - projectDAL: Pick; + projectDAL: Pick< + TProjectDALFactory, + "checkProjectUpgradeStatus" | "findProjectBySlug" | "findProjectWithOrg" | "findById" + >; accessApprovalRequestDAL: Pick< TAccessApprovalRequestDALFactory, | "create" @@ -50,25 +57,33 @@ type TSecretApprovalRequestServiceFactoryDep = { TAccessApprovalRequestReviewerDALFactory, "create" | "find" | "findOne" | "transaction" >; + groupDAL: Pick; projectMembershipDAL: Pick; smtpService: Pick; - userDAL: Pick; + userDAL: Pick< + TUserDALFactory, + "findUserByProjectMembershipId" | "findUsersByProjectMembershipIds" | "find" | "findById" + >; + kmsService: Pick; + projectSlackConfigDAL: Pick; }; export type TAccessApprovalRequestServiceFactory = ReturnType; export const accessApprovalRequestServiceFactory = ({ + groupDAL, projectDAL, projectEnvDAL, permissionService, accessApprovalRequestDAL, accessApprovalRequestReviewerDAL, - projectMembershipDAL, accessApprovalPolicyDAL, accessApprovalPolicyApproverDAL, additionalPrivilegeDAL, smtpService, - userDAL + userDAL, + kmsService, + projectSlackConfigDAL }: TSecretApprovalRequestServiceFactoryDep) => { const createAccessApprovalRequest = async ({ isTemporary, @@ -82,7 +97,7 @@ export const accessApprovalRequestServiceFactory = ({ }: TCreateAccessApprovalRequestDTO) => { const cfg = getConfig(); const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId); - if (!project) throw new UnauthorizedError({ message: "Project not found" }); + if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` }); // Anyone can create an access approval request. const { membership } = await permissionService.getProjectPermission( @@ -92,35 +107,68 @@ export const accessApprovalRequestServiceFactory = ({ actorAuthMethod, actorOrgId ); - if (!membership) throw new UnauthorizedError({ message: "You are not a member of this project" }); + if (!membership) { + throw new ForbiddenRequestError({ message: "You are not a member of this project" }); + } - const requestedByUser = await userDAL.findUserByProjectMembershipId(membership.id); - if (!requestedByUser) throw new UnauthorizedError({ message: "User not found" }); + const requestedByUser = await userDAL.findById(actorId); + if (!requestedByUser) throw new ForbiddenRequestError({ message: "User not found" }); await projectDAL.checkProjectUpgradeStatus(project.id); const { envSlug, secretPath, accessTypes } = verifyRequestedPermissions({ permissions: requestedPermissions }); const environment = await projectEnvDAL.findOne({ projectId: project.id, slug: envSlug }); - if (!environment) throw new UnauthorizedError({ message: "Environment not found" }); + if (!environment) throw new NotFoundError({ message: `Environment with slug '${envSlug}' not found` }); const policy = await accessApprovalPolicyDAL.findOne({ envId: environment.id, secretPath }); - if (!policy) throw new UnauthorizedError({ message: "No policy matching criteria was found." 
}); + if (!policy) { + throw new NotFoundError({ + message: `No policy in environment with slug '${environment.slug}' and with secret path '${secretPath}' was found.` + }); + } + + const approverIds: string[] = []; + const approverGroupIds: string[] = []; const approvers = await accessApprovalPolicyApproverDAL.find({ policyId: policy.id }); - const approverUsers = await userDAL.findUsersByProjectMembershipIds( - approvers.map((approver) => approver.approverId) - ); + approvers.forEach((approver) => { + if (approver.approverUserId) { + approverIds.push(approver.approverUserId); + } else if (approver.approverGroupId) { + approverGroupIds.push(approver.approverGroupId); + } + }); + + const groupUsers = ( + await Promise.all( + approverGroupIds.map((groupApproverId) => + groupDAL + .findAllGroupPossibleMembers({ + orgId: actorOrgId, + groupId: groupApproverId + }) + .then((group) => group.members) + ) + ) + ).flat(); + approverIds.push(...groupUsers.filter((user) => user.isPartOfGroup).map((user) => user.id)); + + const approverUsers = await userDAL.find({ + $in: { + id: [...new Set(approverIds)] + } + }); const duplicateRequests = await accessApprovalRequestDAL.find({ policyId: policy.id, - requestedBy: membership.id, + requestedByUserId: actorId, permissions: JSON.stringify(requestedPermissions), isTemporary }); @@ -153,7 +201,7 @@ export const accessApprovalRequestServiceFactory = ({ const approvalRequest = await accessApprovalRequestDAL.create( { policyId: policy.id, - requestedBy: membership.id, + requestedByUserId: actorId, temporaryRange: temporaryRange || null, permissions: JSON.stringify(requestedPermissions), isTemporary @@ -161,13 +209,36 @@ export const accessApprovalRequestServiceFactory = ({ tx ); + const requesterFullName = `${requestedByUser.firstName} ${requestedByUser.lastName}`; + const approvalUrl = `${cfg.SITE_URL}/project/${project.id}/approval`; + + await triggerSlackNotification({ + projectId: project.id, + projectSlackConfigDAL, + projectDAL, + kmsService, + notification: { + type: SlackTriggerFeature.ACCESS_REQUEST, + payload: { + projectName: project.name, + requesterFullName, + isTemporary, + requesterEmail: requestedByUser.email as string, + secretPath, + environment: envSlug, + permissions: accessTypes, + approvalUrl + } + } + }); + await smtpService.sendMail({ recipients: approverUsers.filter((approver) => approver.email).map((approver) => approver.email!), subjectLine: "Access Approval Request", substitutions: { projectName: project.name, - requesterFullName: `${requestedByUser.firstName} ${requestedByUser.lastName}`, + requesterFullName, requesterEmail: requestedByUser.email, isTemporary, ...(isTemporary && { @@ -176,7 +247,7 @@ export const accessApprovalRequestServiceFactory = ({ secretPath, environment: envSlug, permissions: accessTypes, - approvalUrl: `${cfg.SITE_URL}/project/${project.id}/approval` + approvalUrl }, template: SmtpTemplates.AccessApprovalRequest }); @@ -197,7 +268,7 @@ export const accessApprovalRequestServiceFactory = ({ actorAuthMethod }: TListApprovalRequestsDTO) => { const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId); - if (!project) throw new UnauthorizedError({ message: "Project not found" }); + if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` }); const { membership } = await permissionService.getProjectPermission( actor, @@ -206,13 +277,15 @@ export const accessApprovalRequestServiceFactory = ({ actorAuthMethod, actorOrgId ); - if (!membership) throw new 
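// approverIds can contain duplicates when someone is both a direct approver
// and a member of an approver group; the `[...new Set(approverIds)]` spread in
// the user lookup above collapses them before the fetch. For example:
[...new Set(["u1", "u2", "u1"])]; // -> ["u1", "u2"]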
UnauthorizedError({ message: "You are not a member of this project" }); + if (!membership) { + throw new ForbiddenRequestError({ message: "You are not a member of this project" }); + } const policies = await accessApprovalPolicyDAL.find({ projectId: project.id }); let requests = await accessApprovalRequestDAL.findRequestsWithPrivilegeByPolicyIds(policies.map((p) => p.id)); if (authorProjectMembershipId) { - requests = requests.filter((request) => request.requestedBy === authorProjectMembershipId); + requests = requests.filter((request) => request.requestedByUserId === actorId); } if (envSlug) { @@ -231,7 +304,9 @@ export const accessApprovalRequestServiceFactory = ({ actorOrgId }: TReviewAccessRequestDTO) => { const accessApprovalRequest = await accessApprovalRequestDAL.findById(requestId); - if (!accessApprovalRequest) throw new BadRequestError({ message: "Secret approval request not found" }); + if (!accessApprovalRequest) { + throw new NotFoundError({ message: `Secret approval request with ID '${requestId}' not found` }); + } const { policy } = accessApprovalRequest; const { membership, hasRole } = await permissionService.getProjectPermission( @@ -242,28 +317,18 @@ export const accessApprovalRequestServiceFactory = ({ actorOrgId ); - if (!membership) throw new UnauthorizedError({ message: "You are not a member of this project" }); + if (!membership) { + throw new ForbiddenRequestError({ message: "You are not a member of this project" }); + } if ( !hasRole(ProjectMembershipRole.Admin) && - accessApprovalRequest.requestedBy !== membership.id && // The request wasn't made by the current user - !policy.approvers.find((approverId) => approverId === membership.id) // The request isn't performed by an assigned approver + accessApprovalRequest.requestedByUserId !== actorId && // The request wasn't made by the current user + !policy.approvers.find((approver) => approver.userId === actorId) // The request isn't performed by an assigned approver ) { - throw new UnauthorizedError({ message: "You are not authorized to approve this request" }); + throw new ForbiddenRequestError({ message: "You are not authorized to approve this request" }); } - const reviewerProjectMembership = await projectMembershipDAL.findById(membership.id); - - await verifyApprovers({ - projectId: accessApprovalRequest.projectId, - orgId: actorOrgId, - envSlug: accessApprovalRequest.environment, - secretPath: accessApprovalRequest.policy.secretPath!, - actorAuthMethod, - permissionService, - userIds: [reviewerProjectMembership.userId] - }); - const existingReviews = await accessApprovalRequestReviewerDAL.find({ requestId: accessApprovalRequest.id }); if (existingReviews.some((review) => review.status === ApprovalStatus.REJECTED)) { throw new BadRequestError({ message: "The request has already been rejected by another reviewer" }); @@ -273,7 +338,7 @@ export const accessApprovalRequestServiceFactory = ({ const review = await accessApprovalRequestReviewerDAL.findOne( { requestId: accessApprovalRequest.id, - member: membership.id + reviewerUserId: actorId }, tx ); @@ -282,7 +347,7 @@ export const accessApprovalRequestServiceFactory = ({ { status, requestId: accessApprovalRequest.id, - member: membership.id + reviewerUserId: actorId }, tx ); @@ -303,7 +368,8 @@ export const accessApprovalRequestServiceFactory = ({ // Permanent access const privilege = await additionalPrivilegeDAL.create( { - projectMembershipId: accessApprovalRequest.requestedBy, + userId: accessApprovalRequest.requestedByUserId, + projectId: 
accessApprovalRequest.projectId, slug: `requested-privilege-${slugify(alphaNumericNanoId(12))}`, permissions: JSON.stringify(accessApprovalRequest.permissions) }, @@ -317,7 +383,8 @@ const privilege = await additionalPrivilegeDAL.create( { - projectMembershipId: accessApprovalRequest.requestedBy, + userId: accessApprovalRequest.requestedByUserId, + projectId: accessApprovalRequest.projectId, slug: `requested-privilege-${slugify(alphaNumericNanoId(12))}`, permissions: JSON.stringify(accessApprovalRequest.permissions), isTemporary: true, @@ -344,7 +411,7 @@ const getCount = async ({ projectSlug, actor, actorAuthMethod, actorId, actorOrgId }: TGetAccessRequestCountDTO) => { const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId); - if (!project) throw new UnauthorizedError({ message: "Project not found" }); + if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` }); const { membership } = await permissionService.getProjectPermission( actor, @@ -353,7 +420,9 @@ actorAuthMethod, actorOrgId ); - if (!membership) throw new BadRequestError({ message: "User not found in project" }); + if (!membership) { + throw new ForbiddenRequestError({ message: "You are not a member of this project" }); + } const count = await accessApprovalRequestDAL.getCount({ projectId: project.id }); diff --git a/backend/src/ee/services/audit-log-stream/audit-log-stream-service.ts b/backend/src/ee/services/audit-log-stream/audit-log-stream-service.ts index 0e313b59bb..4f080dac34 100644 --- a/backend/src/ee/services/audit-log-stream/audit-log-stream-service.ts +++ b/backend/src/ee/services/audit-log-stream/audit-log-stream-service.ts @@ -2,10 +2,11 @@ import { ForbiddenError } from "@casl/ability"; import { RawAxiosRequestHeaders } from "axios"; import { SecretKeyEncoding } from "@app/db/schemas"; +import { getConfig } from "@app/lib/config/env"; import { request } from "@app/lib/config/request"; import { infisicalSymmetricDecrypt, infisicalSymmetricEncypt } from "@app/lib/crypto/encryption"; -import { BadRequestError } from "@app/lib/errors"; -import { validateLocalIps } from "@app/lib/validator"; +import { BadRequestError, NotFoundError, UnauthorizedError } from "@app/lib/errors"; +import { blockLocalAndPrivateIpAddresses } from "@app/lib/validator"; import { AUDIT_LOG_STREAM_TIMEOUT } from "../audit-log/audit-log-queue"; import { TLicenseServiceFactory } from "../license/license-service"; @@ -42,13 +43,15 @@ actorOrgId, actorAuthMethod }: TCreateAuditLogStreamDTO) => { - if (!actorOrgId) throw new BadRequestError({ message: "Missing org id from token" }); + if (!actorOrgId) throw new UnauthorizedError({ message: "No organization ID attached to authentication token" }); + const appCfg = getConfig(); const plan = await licenseService.getPlan(actorOrgId); - if (!plan.auditLogStreams) + if (!plan.auditLogStreams) { throw new BadRequestError({ message: "Failed to create audit log streams due to plan restriction. Upgrade plan to create audit log streams." 
}); + } const { permission } = await permissionService.getOrgPermission( actor, @@ -59,7 +62,9 @@ export const auditLogStreamServiceFactory = ({ ); ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Create, OrgPermissionSubjects.Settings); - validateLocalIps(url); + if (appCfg.isCloud) { + blockLocalAndPrivateIpAddresses(url); + } const totalStreams = await auditLogStreamDAL.find({ orgId: actorOrgId }); if (totalStreams.length >= plan.auditLogStreamLimit) { @@ -116,7 +121,7 @@ export const auditLogStreamServiceFactory = ({ actorOrgId, actorAuthMethod }: TUpdateAuditLogStreamDTO) => { - if (!actorOrgId) throw new BadRequestError({ message: "Missing org id from token" }); + if (!actorOrgId) throw new UnauthorizedError({ message: "No organization ID attached to authentication token" }); const plan = await licenseService.getPlan(actorOrgId); if (!plan.auditLogStreams) @@ -125,13 +130,14 @@ export const auditLogStreamServiceFactory = ({ }); const logStream = await auditLogStreamDAL.findById(id); - if (!logStream) throw new BadRequestError({ message: "Audit log stream not found" }); + if (!logStream) throw new NotFoundError({ message: `Audit log stream with ID '${id}' not found` }); const { orgId } = logStream; const { permission } = await permissionService.getOrgPermission(actor, actorId, orgId, actorAuthMethod, actorOrgId); ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Edit, OrgPermissionSubjects.Settings); - if (url) validateLocalIps(url); + const appCfg = getConfig(); + if (url && appCfg.isCloud) blockLocalAndPrivateIpAddresses(url); // testing connection first const streamHeaders: RawAxiosRequestHeaders = { "Content-Type": "application/json" }; @@ -173,10 +179,10 @@ export const auditLogStreamServiceFactory = ({ }; const deleteById = async ({ id, actor, actorId, actorOrgId, actorAuthMethod }: TDeleteAuditLogStreamDTO) => { - if (!actorOrgId) throw new BadRequestError({ message: "Missing org id from token" }); + if (!actorOrgId) throw new UnauthorizedError({ message: "No organization ID attached to authentication token" }); const logStream = await auditLogStreamDAL.findById(id); - if (!logStream) throw new BadRequestError({ message: "Audit log stream not found" }); + if (!logStream) throw new NotFoundError({ message: `Audit log stream with ID '${id}' not found` }); const { orgId } = logStream; const { permission } = await permissionService.getOrgPermission(actor, actorId, orgId, actorAuthMethod, actorOrgId); @@ -188,7 +194,7 @@ export const auditLogStreamServiceFactory = ({ const getById = async ({ id, actor, actorId, actorOrgId, actorAuthMethod }: TGetDetailsAuditLogStreamDTO) => { const logStream = await auditLogStreamDAL.findById(id); - if (!logStream) throw new BadRequestError({ message: "Audit log stream not found" }); + if (!logStream) throw new NotFoundError({ message: `Audit log stream with ID '${id}' not found` }); const { orgId } = logStream; const { permission } = await permissionService.getOrgPermission(actor, actorId, orgId, actorAuthMethod, actorOrgId); diff --git a/backend/src/ee/services/audit-log/audit-log-dal.ts b/backend/src/ee/services/audit-log/audit-log-dal.ts index b3ad8c2b6e..b2c80aa0b5 100644 --- a/backend/src/ee/services/audit-log/audit-log-dal.ts +++ b/backend/src/ee/services/audit-log/audit-log-dal.ts @@ -1,9 +1,15 @@ -import { Knex } from "knex"; +// weird commonjs-related error in the CI requires us to do the import like this +import knex from "knex"; import { TDbClient } from "@app/db"; import { TableName } from 
"@app/db/schemas"; -import { DatabaseError } from "@app/lib/errors"; -import { ormify, stripUndefinedInWhere } from "@app/lib/knex"; +import { DatabaseError, GatewayTimeoutError } from "@app/lib/errors"; +import { ormify, selectAllTableCols } from "@app/lib/knex"; +import { logger } from "@app/lib/logger"; +import { QueueName } from "@app/queue"; +import { ActorType } from "@app/services/auth/auth-type"; + +import { EventType } from "./audit-log-types"; export type TAuditLogDALFactory = ReturnType; @@ -23,45 +29,135 @@ export const auditLogDALFactory = (db: TDbClient) => { const auditLogOrm = ormify(db, TableName.AuditLog); const find = async ( - { orgId, projectId, userAgentType, startDate, endDate, limit = 20, offset = 0, actor, eventType }: TFindQuery, - tx?: Knex + { + orgId, + projectId, + userAgentType, + startDate, + endDate, + limit = 20, + offset = 0, + actorId, + actorType, + eventType, + eventMetadata + }: Omit & { + actorId?: string; + actorType?: ActorType; + eventType?: EventType[]; + eventMetadata?: Record; + }, + tx?: knex.Knex ) => { + if (!orgId && !projectId) { + throw new Error("Either orgId or projectId must be provided"); + } + try { - const sqlQuery = (tx || db)(TableName.AuditLog) - .where( - stripUndefinedInWhere({ - projectId, - orgId, - eventType, - actor, - userAgentType - }) - ) + // Find statements + const sqlQuery = (tx || db.replicaNode())(TableName.AuditLog) + // eslint-disable-next-line func-names + .where(function () { + if (orgId) { + void this.where(`${TableName.AuditLog}.orgId`, orgId); + } else if (projectId) { + void this.where(`${TableName.AuditLog}.projectId`, projectId); + } + }); + + if (userAgentType) { + void sqlQuery.where("userAgentType", userAgentType); + } + + // Select statements + void sqlQuery + .select(selectAllTableCols(TableName.AuditLog)) .limit(limit) .offset(offset) - .orderBy("createdAt", "desc"); + .orderBy(`${TableName.AuditLog}.createdAt`, "desc"); + + // Special case: Filter by actor ID + if (actorId) { + void sqlQuery.whereRaw(`"actorMetadata" @> jsonb_build_object('userId', ?::text)`, [actorId]); + } + + // Special case: Filter by key/value pairs in eventMetadata field + if (eventMetadata && Object.keys(eventMetadata).length) { + Object.entries(eventMetadata).forEach(([key, value]) => { + void sqlQuery.whereRaw(`"eventMetadata" @> jsonb_build_object(?::text, ?::text)`, [key, value]); + }); + } + + // Filter by actor type + if (actorType) { + void sqlQuery.where("actor", actorType); + } + + // Filter by event types + if (eventType?.length) { + void sqlQuery.whereIn("eventType", eventType); + } + + // Filter by date range if (startDate) { - void sqlQuery.where("createdAt", ">=", startDate); + void sqlQuery.where(`${TableName.AuditLog}.createdAt`, ">=", startDate); } if (endDate) { - void sqlQuery.where("createdAt", "<=", endDate); + void sqlQuery.where(`${TableName.AuditLog}.createdAt`, "<=", endDate); } - const docs = await sqlQuery; + + // we timeout long running queries to prevent DB resource issues (2 minutes) + const docs = await sqlQuery.timeout(1000 * 120); + return docs; } catch (error) { + if (error instanceof knex.KnexTimeoutError) { + throw new GatewayTimeoutError({ + error, + message: "Failed to fetch audit logs due to timeout. Add more search filters." 
+ });
+ }
+ throw new DatabaseError({ error }); } }; // delete all audit log that have expired - const pruneAuditLog = async (tx?: Knex) => { - try { - const today = new Date(); - const docs = await (tx || db)(TableName.AuditLog).where("expiresAt", "<", today).del(); - return docs; - } catch (error) { - throw new DatabaseError({ error, name: "PruneAuditLog" }); - } + const pruneAuditLog = async (tx?: knex.Knex) => { + const AUDIT_LOG_PRUNE_BATCH_SIZE = 10000; + const MAX_RETRY_ON_FAILURE = 3; + + const today = new Date(); + let deletedAuditLogIds: { id: string }[] = []; + let numberOfRetryOnFailure = 0; + let isRetrying = false; + + logger.info(`${QueueName.DailyResourceCleanUp}: audit log started`); + do { + try { + const findExpiredLogSubQuery = (tx || db)(TableName.AuditLog) + .where("expiresAt", "<", today) + .select("id") + .limit(AUDIT_LOG_PRUNE_BATCH_SIZE); + + // eslint-disable-next-line no-await-in-loop + deletedAuditLogIds = await (tx || db)(TableName.AuditLog) + .whereIn("id", findExpiredLogSubQuery) + .del() + .returning("id"); + numberOfRetryOnFailure = 0; // reset + } catch (error) { + numberOfRetryOnFailure += 1; + logger.error(error, "Failed to delete audit log on pruning"); + } finally { + // eslint-disable-next-line no-await-in-loop + await new Promise((resolve) => { + setTimeout(resolve, 10); // time to breathe for db + }); + } + isRetrying = numberOfRetryOnFailure > 0; + } while (deletedAuditLogIds.length > 0 || (isRetrying && numberOfRetryOnFailure < MAX_RETRY_ON_FAILURE)); + logger.info(`${QueueName.DailyResourceCleanUp}: audit log completed`); }; return { ...auditLogOrm, pruneAuditLog, find }; diff --git a/backend/src/ee/services/audit-log/audit-log-queue.ts b/backend/src/ee/services/audit-log/audit-log-queue.ts index f93b391a59..83a2fafa6f 100644 --- a/backend/src/ee/services/audit-log/audit-log-queue.ts +++ b/backend/src/ee/services/audit-log/audit-log-queue.ts @@ -45,24 +45,36 @@ const { actor, event, ipAddress, projectId, userAgent, userAgentType } = job.data; let { orgId } = job.data; const MS_IN_DAY = 24 * 60 * 60 * 1000; + let project; if (!orgId) { // it will never be undefined for both org and project id // TODO(akhilmhdh): use caching here in dal to avoid db calls - const project = await projectDAL.findById(projectId as string); + project = await projectDAL.findById(projectId as string); orgId = project.orgId; } const plan = await licenseService.getPlan(orgId); - const ttl = plan.auditLogsRetentionDays * MS_IN_DAY; - // skip inserting if audit log retention is 0 meaning its not supported - if (ttl === 0) return; + if (plan.auditLogsRetentionDays === 0) { + // skip inserting if audit log retention is 0, meaning it's not supported + return; + } + + // For project actions, set TTL to project-level audit log retention config + // This condition ensures that the plan's audit log retention days cannot be bypassed + const ttlInDays = + project?.auditLogsRetentionDays && project.auditLogsRetentionDays < plan.auditLogsRetentionDays + ? 
project.auditLogsRetentionDays + : plan.auditLogsRetentionDays; + + const ttl = ttlInDays * MS_IN_DAY; const auditLog = await auditLogDAL.create({ actor: actor.type, actorMetadata: actor.metadata, userAgent, projectId, + projectName: project?.name, ipAddress, orgId, eventType: event.type, diff --git a/backend/src/ee/services/audit-log/audit-log-service.ts b/backend/src/ee/services/audit-log/audit-log-service.ts index 1564c6dcb5..747c53c1af 100644 --- a/backend/src/ee/services/audit-log/audit-log-service.ts +++ b/backend/src/ee/services/audit-log/audit-log-service.ts @@ -1,7 +1,9 @@ import { ForbiddenError } from "@casl/ability"; +import { getConfig } from "@app/lib/config/env"; import { BadRequestError } from "@app/lib/errors"; +import { OrgPermissionActions, OrgPermissionSubjects } from "../permission/org-permission"; import { TPermissionServiceFactory } from "../permission/permission-service"; import { ProjectPermissionActions, ProjectPermissionSub } from "../permission/project-permission"; import { TAuditLogDALFactory } from "./audit-log-dal"; @@ -10,7 +12,7 @@ import { EventType, TCreateAuditLogDTO, TListProjectAuditLogDTO } from "./audit- type TAuditLogServiceFactoryDep = { auditLogDAL: TAuditLogDALFactory; - permissionService: Pick; + permissionService: Pick; auditLogQueue: TAuditLogQueueServiceFactory; }; @@ -21,38 +23,48 @@ export const auditLogServiceFactory = ({ auditLogQueue, permissionService }: TAuditLogServiceFactoryDep) => { - const listProjectAuditLogs = async ({ - userAgentType, - eventType, - offset, - limit, - endDate, - startDate, - actor, - actorId, - actorOrgId, - actorAuthMethod, - projectId, - auditLogActor - }: TListProjectAuditLogDTO) => { - const { permission } = await permissionService.getProjectPermission( - actor, - actorId, - projectId, - actorAuthMethod, - actorOrgId - ); - ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.AuditLogs); + const listAuditLogs = async ({ actorAuthMethod, actorId, actorOrgId, actor, filter }: TListProjectAuditLogDTO) => { + // Filter logs for specific project + if (filter.projectId) { + const { permission } = await permissionService.getProjectPermission( + actor, + actorId, + filter.projectId, + actorAuthMethod, + actorOrgId + ); + ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.AuditLogs); + } else { + // Organization-wide logs + const { permission } = await permissionService.getOrgPermission( + actor, + actorId, + actorOrgId, + actorAuthMethod, + actorOrgId + ); + + /** + * NOTE (dangtony98): Update this to organization-level audit log permission check once audit logs are moved + * to the organization level ✅ + */ + ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Read, OrgPermissionSubjects.AuditLogs); + } + + // If project ID is not provided, then we need to return all the audit logs for the organization itself. const auditLogs = await auditLogDAL.find({ - startDate, - endDate, - limit, - offset, - eventType, - userAgentType, - actor: auditLogActor, - projectId + startDate: filter.startDate, + endDate: filter.endDate, + limit: filter.limit, + offset: filter.offset, + eventType: filter.eventType, + userAgentType: filter.userAgentType, + actorId: filter.auditLogActorId, + actorType: filter.actorType, + eventMetadata: filter.eventMetadata, + ...(filter.projectId ? 
{ projectId: filter.projectId } : { orgId: actorOrgId }) }); + return auditLogs.map(({ eventType: logEventType, actor: eActor, actorMetadata, eventMetadata, ...el }) => ({ ...el, event: { type: logEventType, metadata: eventMetadata }, @@ -61,6 +73,10 @@ export const auditLogServiceFactory = ({ }; const createAuditLog = async (data: TCreateAuditLogDTO) => { + const appCfg = getConfig(); + if (appCfg.DISABLE_AUDIT_LOG_GENERATION) { + return; + } // add all cases in which project id or org id cannot be added if (data.event.type !== EventType.LOGIN_IDENTITY_UNIVERSAL_AUTH) { if (!data.projectId && !data.orgId) throw new BadRequestError({ message: "Must either project id or org id" }); @@ -71,6 +87,6 @@ export const auditLogServiceFactory = ({ return { createAuditLog, - listProjectAuditLogs + listAuditLogs }; }; diff --git a/backend/src/ee/services/audit-log/audit-log-types.ts b/backend/src/ee/services/audit-log/audit-log-types.ts index 4158149982..51090e594d 100644 --- a/backend/src/ee/services/audit-log/audit-log-types.ts +++ b/backend/src/ee/services/audit-log/audit-log-types.ts @@ -1,21 +1,32 @@ +import { + TCreateProjectTemplateDTO, + TUpdateProjectTemplateDTO +} from "@app/ee/services/project-template/project-template-types"; +import { SymmetricEncryption } from "@app/lib/crypto/cipher"; import { TProjectPermission } from "@app/lib/types"; import { ActorType } from "@app/services/auth/auth-type"; +import { CaStatus } from "@app/services/certificate-authority/certificate-authority-types"; import { TIdentityTrustedIp } from "@app/services/identity/identity-types"; +import { PkiItemType } from "@app/services/pki-collection/pki-collection-types"; export type TListProjectAuditLogDTO = { - auditLogActor?: string; - projectId: string; - eventType?: string; - startDate?: string; - endDate?: string; - userAgentType?: string; - limit?: number; - offset?: number; -} & TProjectPermission; + filter: { + userAgentType?: UserAgentType; + eventType?: EventType[]; + offset?: number; + limit: number; + endDate?: string; + startDate?: string; + projectId?: string; + auditLogActorId?: string; + actorType?: ActorType; + eventMetadata?: Record; + }; +} & Omit; export type TCreateAuditLogDTO = { event: Event; - actor: UserActor | IdentityActor | ServiceActor | ScimClientActor; + actor: UserActor | IdentityActor | ServiceActor | ScimClientActor | PlatformActor; orgId?: string; projectId?: string; } & BaseAuthData; @@ -44,6 +55,7 @@ export enum EventType { CREATE_SECRETS = "create-secrets", UPDATE_SECRET = "update-secret", UPDATE_SECRETS = "update-secrets", + MOVE_SECRETS = "move-secrets", DELETE_SECRET = "delete-secret", DELETE_SECRETS = "delete-secrets", GET_WORKSPACE_KEY = "get-workspace-key", @@ -64,28 +76,47 @@ export enum EventType { ADD_IDENTITY_UNIVERSAL_AUTH = "add-identity-universal-auth", UPDATE_IDENTITY_UNIVERSAL_AUTH = "update-identity-universal-auth", GET_IDENTITY_UNIVERSAL_AUTH = "get-identity-universal-auth", + REVOKE_IDENTITY_UNIVERSAL_AUTH = "revoke-identity-universal-auth", + CREATE_TOKEN_IDENTITY_TOKEN_AUTH = "create-token-identity-token-auth", + UPDATE_TOKEN_IDENTITY_TOKEN_AUTH = "update-token-identity-token-auth", + GET_TOKENS_IDENTITY_TOKEN_AUTH = "get-tokens-identity-token-auth", + ADD_IDENTITY_TOKEN_AUTH = "add-identity-token-auth", + UPDATE_IDENTITY_TOKEN_AUTH = "update-identity-token-auth", + GET_IDENTITY_TOKEN_AUTH = "get-identity-token-auth", + REVOKE_IDENTITY_TOKEN_AUTH = "revoke-identity-token-auth", LOGIN_IDENTITY_KUBERNETES_AUTH = "login-identity-kubernetes-auth", 
ADD_IDENTITY_KUBERNETES_AUTH = "add-identity-kubernetes-auth", UPDATE_IDENTITY_KUBENETES_AUTH = "update-identity-kubernetes-auth", GET_IDENTITY_KUBERNETES_AUTH = "get-identity-kubernetes-auth", + REVOKE_IDENTITY_KUBERNETES_AUTH = "revoke-identity-kubernetes-auth", + LOGIN_IDENTITY_OIDC_AUTH = "login-identity-oidc-auth", + ADD_IDENTITY_OIDC_AUTH = "add-identity-oidc-auth", + UPDATE_IDENTITY_OIDC_AUTH = "update-identity-oidc-auth", + GET_IDENTITY_OIDC_AUTH = "get-identity-oidc-auth", + REVOKE_IDENTITY_OIDC_AUTH = "revoke-identity-oidc-auth", CREATE_IDENTITY_UNIVERSAL_AUTH_CLIENT_SECRET = "create-identity-universal-auth-client-secret", REVOKE_IDENTITY_UNIVERSAL_AUTH_CLIENT_SECRET = "revoke-identity-universal-auth-client-secret", GET_IDENTITY_UNIVERSAL_AUTH_CLIENT_SECRETS = "get-identity-universal-auth-client-secret", + GET_IDENTITY_UNIVERSAL_AUTH_CLIENT_SECRET_BY_ID = "get-identity-universal-auth-client-secret-by-id", LOGIN_IDENTITY_GCP_AUTH = "login-identity-gcp-auth", ADD_IDENTITY_GCP_AUTH = "add-identity-gcp-auth", UPDATE_IDENTITY_GCP_AUTH = "update-identity-gcp-auth", + REVOKE_IDENTITY_GCP_AUTH = "revoke-identity-gcp-auth", GET_IDENTITY_GCP_AUTH = "get-identity-gcp-auth", LOGIN_IDENTITY_AWS_AUTH = "login-identity-aws-auth", ADD_IDENTITY_AWS_AUTH = "add-identity-aws-auth", UPDATE_IDENTITY_AWS_AUTH = "update-identity-aws-auth", + REVOKE_IDENTITY_AWS_AUTH = "revoke-identity-aws-auth", GET_IDENTITY_AWS_AUTH = "get-identity-aws-auth", LOGIN_IDENTITY_AZURE_AUTH = "login-identity-azure-auth", ADD_IDENTITY_AZURE_AUTH = "add-identity-azure-auth", UPDATE_IDENTITY_AZURE_AUTH = "update-identity-azure-auth", GET_IDENTITY_AZURE_AUTH = "get-identity-azure-auth", + REVOKE_IDENTITY_AZURE_AUTH = "revoke-identity-azure-auth", CREATE_ENVIRONMENT = "create-environment", UPDATE_ENVIRONMENT = "update-environment", DELETE_ENVIRONMENT = "delete-environment", + GET_ENVIRONMENT = "get-environment", ADD_WORKSPACE_MEMBER = "add-workspace-member", ADD_BATCH_WORKSPACE_MEMBER = "add-workspace-members", REMOVE_WORKSPACE_MEMBER = "remove-workspace-member", @@ -96,6 +127,7 @@ export enum EventType { UPDATE_WEBHOOK_STATUS = "update-webhook-status", DELETE_WEBHOOK = "delete-webhook", GET_SECRET_IMPORTS = "get-secret-imports", + GET_SECRET_IMPORT = "get-secret-import", CREATE_SECRET_IMPORT = "create-secret-import", UPDATE_SECRET_IMPORT = "update-secret-import", DELETE_SECRET_IMPORT = "delete-secret-import", @@ -104,7 +136,73 @@ export enum EventType { SECRET_APPROVAL_MERGED = "secret-approval-merged", SECRET_APPROVAL_REQUEST = "secret-approval-request", SECRET_APPROVAL_CLOSED = "secret-approval-closed", - SECRET_APPROVAL_REOPENED = "secret-approval-reopened" + SECRET_APPROVAL_REOPENED = "secret-approval-reopened", + CREATE_CA = "create-certificate-authority", + GET_CA = "get-certificate-authority", + UPDATE_CA = "update-certificate-authority", + DELETE_CA = "delete-certificate-authority", + RENEW_CA = "renew-certificate-authority", + GET_CA_CSR = "get-certificate-authority-csr", + GET_CA_CERTS = "get-certificate-authority-certs", + GET_CA_CERT = "get-certificate-authority-cert", + SIGN_INTERMEDIATE = "sign-intermediate", + IMPORT_CA_CERT = "import-certificate-authority-cert", + GET_CA_CRLS = "get-certificate-authority-crls", + ISSUE_CERT = "issue-cert", + SIGN_CERT = "sign-cert", + GET_CA_CERTIFICATE_TEMPLATES = "get-ca-certificate-templates", + GET_CERT = "get-cert", + DELETE_CERT = "delete-cert", + REVOKE_CERT = "revoke-cert", + GET_CERT_BODY = "get-cert-body", + CREATE_PKI_ALERT = "create-pki-alert", + GET_PKI_ALERT = 
"get-pki-alert", + UPDATE_PKI_ALERT = "update-pki-alert", + DELETE_PKI_ALERT = "delete-pki-alert", + CREATE_PKI_COLLECTION = "create-pki-collection", + GET_PKI_COLLECTION = "get-pki-collection", + UPDATE_PKI_COLLECTION = "update-pki-collection", + DELETE_PKI_COLLECTION = "delete-pki-collection", + GET_PKI_COLLECTION_ITEMS = "get-pki-collection-items", + ADD_PKI_COLLECTION_ITEM = "add-pki-collection-item", + DELETE_PKI_COLLECTION_ITEM = "delete-pki-collection-item", + CREATE_KMS = "create-kms", + UPDATE_KMS = "update-kms", + DELETE_KMS = "delete-kms", + GET_KMS = "get-kms", + UPDATE_PROJECT_KMS = "update-project-kms", + GET_PROJECT_KMS_BACKUP = "get-project-kms-backup", + LOAD_PROJECT_KMS_BACKUP = "load-project-kms-backup", + ORG_ADMIN_ACCESS_PROJECT = "org-admin-accessed-project", + CREATE_CERTIFICATE_TEMPLATE = "create-certificate-template", + UPDATE_CERTIFICATE_TEMPLATE = "update-certificate-template", + DELETE_CERTIFICATE_TEMPLATE = "delete-certificate-template", + GET_CERTIFICATE_TEMPLATE = "get-certificate-template", + CREATE_CERTIFICATE_TEMPLATE_EST_CONFIG = "create-certificate-template-est-config", + UPDATE_CERTIFICATE_TEMPLATE_EST_CONFIG = "update-certificate-template-est-config", + GET_CERTIFICATE_TEMPLATE_EST_CONFIG = "get-certificate-template-est-config", + ATTEMPT_CREATE_SLACK_INTEGRATION = "attempt-create-slack-integration", + ATTEMPT_REINSTALL_SLACK_INTEGRATION = "attempt-reinstall-slack-integration", + GET_SLACK_INTEGRATION = "get-slack-integration", + UPDATE_SLACK_INTEGRATION = "update-slack-integration", + DELETE_SLACK_INTEGRATION = "delete-slack-integration", + GET_PROJECT_SLACK_CONFIG = "get-project-slack-config", + UPDATE_PROJECT_SLACK_CONFIG = "update-project-slack-config", + INTEGRATION_SYNCED = "integration-synced", + CREATE_CMEK = "create-cmek", + UPDATE_CMEK = "update-cmek", + DELETE_CMEK = "delete-cmek", + GET_CMEKS = "get-cmeks", + CMEK_ENCRYPT = "cmek-encrypt", + CMEK_DECRYPT = "cmek-decrypt", + UPDATE_EXTERNAL_GROUP_ORG_ROLE_MAPPINGS = "update-external-group-org-role-mapping", + GET_EXTERNAL_GROUP_ORG_ROLE_MAPPINGS = "get-external-group-org-role-mapping", + GET_PROJECT_TEMPLATES = "get-project-templates", + GET_PROJECT_TEMPLATE = "get-project-template", + CREATE_PROJECT_TEMPLATE = "create-project-template", + UPDATE_PROJECT_TEMPLATE = "update-project-template", + DELETE_PROJECT_TEMPLATE = "delete-project-template", + APPLY_PROJECT_TEMPLATE = "apply-project-template" } interface UserActorMetadata { @@ -125,6 +223,8 @@ interface IdentityActorMetadata { interface ScimClientActorMetadata {} +interface PlatformActorMetadata {} + export interface UserActor { type: ActorType.USER; metadata: UserActorMetadata; @@ -135,6 +235,11 @@ export interface ServiceActor { metadata: ServiceActorMetadata; } +export interface PlatformActor { + type: ActorType.PLATFORM; + metadata: PlatformActorMetadata; +} + export interface IdentityActor { type: ActorType.IDENTITY; metadata: IdentityActorMetadata; @@ -145,7 +250,7 @@ export interface ScimClientActor { metadata: ScimClientActorMetadata; } -export type Actor = UserActor | ServiceActor | IdentityActor | ScimClientActor; +export type Actor = UserActor | ServiceActor | IdentityActor | ScimClientActor | PlatformActor; interface GetSecretsEvent { type: EventType.GET_SECRETS; @@ -207,6 +312,17 @@ interface UpdateSecretBatchEvent { }; } +interface MoveSecretsEvent { + type: EventType.MOVE_SECRETS; + metadata: { + sourceEnvironment: string; + sourceSecretPath: string; + destinationEnvironment: string; + destinationSecretPath: string; + 
secretIds: string[]; + }; +} + interface DeleteSecretEvent { type: EventType.DELETE_SECRET; metadata: { @@ -283,6 +399,7 @@ interface DeleteIntegrationEvent { targetServiceId?: string; path?: string; region?: string; + shouldDeleteIntegrationSecrets?: boolean; }; } @@ -419,6 +536,73 @@ interface GetIdentityUniversalAuthEvent { }; } +interface DeleteIdentityUniversalAuthEvent { + type: EventType.REVOKE_IDENTITY_UNIVERSAL_AUTH; + metadata: { + identityId: string; + }; +} + +interface CreateTokenIdentityTokenAuthEvent { + type: EventType.CREATE_TOKEN_IDENTITY_TOKEN_AUTH; + metadata: { + identityId: string; + identityAccessTokenId: string; + }; +} + +interface UpdateTokenIdentityTokenAuthEvent { + type: EventType.UPDATE_TOKEN_IDENTITY_TOKEN_AUTH; + metadata: { + identityId: string; + tokenId: string; + name?: string; + }; +} + +interface GetTokensIdentityTokenAuthEvent { + type: EventType.GET_TOKENS_IDENTITY_TOKEN_AUTH; + metadata: { + identityId: string; + }; +} + +interface AddIdentityTokenAuthEvent { + type: EventType.ADD_IDENTITY_TOKEN_AUTH; + metadata: { + identityId: string; + accessTokenTTL: number; + accessTokenMaxTTL: number; + accessTokenNumUsesLimit: number; + accessTokenTrustedIps: Array; + }; +} + +interface UpdateIdentityTokenAuthEvent { + type: EventType.UPDATE_IDENTITY_TOKEN_AUTH; + metadata: { + identityId: string; + accessTokenTTL?: number; + accessTokenMaxTTL?: number; + accessTokenNumUsesLimit?: number; + accessTokenTrustedIps?: Array; + }; +} + +interface GetIdentityTokenAuthEvent { + type: EventType.GET_IDENTITY_TOKEN_AUTH; + metadata: { + identityId: string; + }; +} + +interface DeleteIdentityTokenAuthEvent { + type: EventType.REVOKE_IDENTITY_TOKEN_AUTH; + metadata: { + identityId: string; + }; +} + interface LoginIdentityKubernetesAuthEvent { type: EventType.LOGIN_IDENTITY_KUBERNETES_AUTH; metadata: { @@ -442,6 +626,13 @@ interface AddIdentityKubernetesAuthEvent { }; } +interface DeleteIdentityKubernetesAuthEvent { + type: EventType.REVOKE_IDENTITY_KUBERNETES_AUTH; + metadata: { + identityId: string; + }; +} + interface UpdateIdentityKubernetesAuthEvent { type: EventType.UPDATE_IDENTITY_KUBENETES_AUTH; metadata: { @@ -478,6 +669,14 @@ interface GetIdentityUniversalAuthClientSecretsEvent { }; } +interface GetIdentityUniversalAuthClientSecretByIdEvent { + type: EventType.GET_IDENTITY_UNIVERSAL_AUTH_CLIENT_SECRET_BY_ID; + metadata: { + identityId: string; + clientSecretId: string; + }; +} + interface RevokeIdentityUniversalAuthClientSecretEvent { type: EventType.REVOKE_IDENTITY_UNIVERSAL_AUTH_CLIENT_SECRET; metadata: { @@ -510,6 +709,13 @@ interface AddIdentityGcpAuthEvent { }; } +interface DeleteIdentityGcpAuthEvent { + type: EventType.REVOKE_IDENTITY_GCP_AUTH; + metadata: { + identityId: string; + }; +} + interface UpdateIdentityGcpAuthEvent { type: EventType.UPDATE_IDENTITY_GCP_AUTH; metadata: { @@ -555,6 +761,13 @@ interface AddIdentityAwsAuthEvent { }; } +interface DeleteIdentityAwsAuthEvent { + type: EventType.REVOKE_IDENTITY_AWS_AUTH; + metadata: { + identityId: string; + }; +} + interface UpdateIdentityAwsAuthEvent { type: EventType.UPDATE_IDENTITY_AWS_AUTH; metadata: { @@ -598,6 +811,13 @@ interface AddIdentityAzureAuthEvent { }; } +interface DeleteIdentityAzureAuthEvent { + type: EventType.REVOKE_IDENTITY_AZURE_AUTH; + metadata: { + identityId: string; + }; +} + interface UpdateIdentityAzureAuthEvent { type: EventType.UPDATE_IDENTITY_AZURE_AUTH; metadata: { @@ -618,6 +838,63 @@ interface GetIdentityAzureAuthEvent { }; } +interface LoginIdentityOidcAuthEvent { + 
type: EventType.LOGIN_IDENTITY_OIDC_AUTH; + metadata: { + identityId: string; + identityOidcAuthId: string; + identityAccessTokenId: string; + }; +} + +interface AddIdentityOidcAuthEvent { + type: EventType.ADD_IDENTITY_OIDC_AUTH; + metadata: { + identityId: string; + oidcDiscoveryUrl: string; + caCert: string; + boundIssuer: string; + boundAudiences: string; + boundClaims: Record; + boundSubject: string; + accessTokenTTL: number; + accessTokenMaxTTL: number; + accessTokenNumUsesLimit: number; + accessTokenTrustedIps: Array; + }; +} + +interface DeleteIdentityOidcAuthEvent { + type: EventType.REVOKE_IDENTITY_OIDC_AUTH; + metadata: { + identityId: string; + }; +} + +interface UpdateIdentityOidcAuthEvent { + type: EventType.UPDATE_IDENTITY_OIDC_AUTH; + metadata: { + identityId: string; + oidcDiscoveryUrl?: string; + caCert?: string; + boundIssuer?: string; + boundAudiences?: string; + boundClaims?: Record; + boundSubject?: string; + accessTokenTTL?: number; + accessTokenMaxTTL?: number; + accessTokenNumUsesLimit?: number; + accessTokenTrustedIps?: Array; + }; +} + +interface GetIdentityOidcAuthEvent { + type: EventType.GET_IDENTITY_OIDC_AUTH; + metadata: { + identityId: string; + }; +} + interface CreateEnvironmentEvent { type: EventType.CREATE_ENVIRONMENT; metadata: { @@ -626,6 +903,13 @@ interface CreateEnvironmentEvent { }; } +interface GetEnvironmentEvent { + type: EventType.GET_ENVIRONMENT; + metadata: { + id: string; + }; +} + interface UpdateEnvironmentEvent { type: EventType.UPDATE_ENVIRONMENT; metadata: { @@ -707,7 +991,6 @@ interface CreateWebhookEvent { webhookId: string; environment: string; secretPath: string; - webhookUrl: string; isDisabled: boolean; }; } @@ -718,7 +1001,6 @@ interface UpdateWebhookStatusEvent { webhookId: string; environment: string; secretPath: string; - webhookUrl: string; isDisabled: boolean; }; } @@ -729,7 +1011,6 @@ interface DeleteWebhookEvent { webhookId: string; environment: string; secretPath: string; - webhookUrl: string; isDisabled: boolean; }; } @@ -743,6 +1024,14 @@ interface GetSecretImportsEvent { }; } +interface GetSecretImportEvent { + type: EventType.GET_SECRET_IMPORT; + metadata: { + secretImportId: string; + folderId: string; + }; +} + interface CreateSecretImportEvent { type: EventType.CREATE_SECRET_IMPORT; metadata: { @@ -843,6 +1132,542 @@ interface SecretApprovalRequest { }; } +interface CreateCa { + type: EventType.CREATE_CA; + metadata: { + caId: string; + dn: string; + }; +} + +interface GetCa { + type: EventType.GET_CA; + metadata: { + caId: string; + dn: string; + }; +} + +interface UpdateCa { + type: EventType.UPDATE_CA; + metadata: { + caId: string; + dn: string; + status: CaStatus; + }; +} + +interface DeleteCa { + type: EventType.DELETE_CA; + metadata: { + caId: string; + dn: string; + }; +} + +interface RenewCa { + type: EventType.RENEW_CA; + metadata: { + caId: string; + dn: string; + }; +} + +interface GetCaCsr { + type: EventType.GET_CA_CSR; + metadata: { + caId: string; + dn: string; + }; +} + +interface GetCaCerts { + type: EventType.GET_CA_CERTS; + metadata: { + caId: string; + dn: string; + }; +} + +interface GetCaCert { + type: EventType.GET_CA_CERT; + metadata: { + caId: string; + dn: string; + }; +} + +interface SignIntermediate { + type: EventType.SIGN_INTERMEDIATE; + metadata: { + caId: string; + dn: string; + serialNumber: string; + }; +} + +interface ImportCaCert { + type: EventType.IMPORT_CA_CERT; + metadata: { + caId: string; + dn: string; + }; +} + +interface GetCaCrls { + type: EventType.GET_CA_CRLS; + 
metadata: { + caId: string; + dn: string; + }; +} + +interface IssueCert { + type: EventType.ISSUE_CERT; + metadata: { + caId: string; + dn: string; + serialNumber: string; + }; +} + +interface SignCert { + type: EventType.SIGN_CERT; + metadata: { + caId: string; + dn: string; + serialNumber: string; + }; +} + +interface GetCaCertificateTemplates { + type: EventType.GET_CA_CERTIFICATE_TEMPLATES; + metadata: { + caId: string; + dn: string; + }; +} + +interface GetCert { + type: EventType.GET_CERT; + metadata: { + certId: string; + cn: string; + serialNumber: string; + }; +} + +interface DeleteCert { + type: EventType.DELETE_CERT; + metadata: { + certId: string; + cn: string; + serialNumber: string; + }; +} + +interface RevokeCert { + type: EventType.REVOKE_CERT; + metadata: { + certId: string; + cn: string; + serialNumber: string; + }; +} + +interface GetCertBody { + type: EventType.GET_CERT_BODY; + metadata: { + certId: string; + cn: string; + serialNumber: string; + }; +} + +interface CreatePkiAlert { + type: EventType.CREATE_PKI_ALERT; + metadata: { + pkiAlertId: string; + pkiCollectionId: string; + name: string; + alertBeforeDays: number; + recipientEmails: string; + }; +} +interface GetPkiAlert { + type: EventType.GET_PKI_ALERT; + metadata: { + pkiAlertId: string; + }; +} + +interface UpdatePkiAlert { + type: EventType.UPDATE_PKI_ALERT; + metadata: { + pkiAlertId: string; + pkiCollectionId?: string; + name?: string; + alertBeforeDays?: number; + recipientEmails?: string; + }; +} +interface DeletePkiAlert { + type: EventType.DELETE_PKI_ALERT; + metadata: { + pkiAlertId: string; + }; +} + +interface CreatePkiCollection { + type: EventType.CREATE_PKI_COLLECTION; + metadata: { + pkiCollectionId: string; + name: string; + }; +} + +interface GetPkiCollection { + type: EventType.GET_PKI_COLLECTION; + metadata: { + pkiCollectionId: string; + }; +} + +interface UpdatePkiCollection { + type: EventType.UPDATE_PKI_COLLECTION; + metadata: { + pkiCollectionId: string; + name?: string; + }; +} + +interface DeletePkiCollection { + type: EventType.DELETE_PKI_COLLECTION; + metadata: { + pkiCollectionId: string; + }; +} + +interface GetPkiCollectionItems { + type: EventType.GET_PKI_COLLECTION_ITEMS; + metadata: { + pkiCollectionId: string; + }; +} + +interface AddPkiCollectionItem { + type: EventType.ADD_PKI_COLLECTION_ITEM; + metadata: { + pkiCollectionItemId: string; + pkiCollectionId: string; + type: PkiItemType; + itemId: string; + }; +} + +interface DeletePkiCollectionItem { + type: EventType.DELETE_PKI_COLLECTION_ITEM; + metadata: { + pkiCollectionItemId: string; + pkiCollectionId: string; + }; +} + +interface CreateKmsEvent { + type: EventType.CREATE_KMS; + metadata: { + kmsId: string; + provider: string; + name: string; + description?: string; + }; +} + +interface DeleteKmsEvent { + type: EventType.DELETE_KMS; + metadata: { + kmsId: string; + name: string; + }; +} + +interface UpdateKmsEvent { + type: EventType.UPDATE_KMS; + metadata: { + kmsId: string; + provider: string; + name?: string; + description?: string; + }; +} + +interface GetKmsEvent { + type: EventType.GET_KMS; + metadata: { + kmsId: string; + name: string; + }; +} + +interface UpdateProjectKmsEvent { + type: EventType.UPDATE_PROJECT_KMS; + metadata: { + secretManagerKmsKey: { + id: string; + name: string; + }; + }; +} + +interface GetProjectKmsBackupEvent { + type: EventType.GET_PROJECT_KMS_BACKUP; + metadata: Record; // no metadata yet +} + +interface LoadProjectKmsBackupEvent { + type: EventType.LOAD_PROJECT_KMS_BACKUP; + 
metadata: Record; // no metadata yet +} + +interface CreateCertificateTemplate { + type: EventType.CREATE_CERTIFICATE_TEMPLATE; + metadata: { + certificateTemplateId: string; + caId: string; + pkiCollectionId?: string; + name: string; + commonName: string; + subjectAlternativeName: string; + ttl: string; + }; +} + +interface GetCertificateTemplate { + type: EventType.GET_CERTIFICATE_TEMPLATE; + metadata: { + certificateTemplateId: string; + }; +} + +interface UpdateCertificateTemplate { + type: EventType.UPDATE_CERTIFICATE_TEMPLATE; + metadata: { + certificateTemplateId: string; + caId: string; + pkiCollectionId?: string; + name: string; + commonName: string; + subjectAlternativeName: string; + ttl: string; + }; +} + +interface DeleteCertificateTemplate { + type: EventType.DELETE_CERTIFICATE_TEMPLATE; + metadata: { + certificateTemplateId: string; + }; +} + +interface OrgAdminAccessProjectEvent { + type: EventType.ORG_ADMIN_ACCESS_PROJECT; + metadata: { + userId: string; + username: string; + email: string; + projectId: string; + }; +} + +interface CreateCertificateTemplateEstConfig { + type: EventType.CREATE_CERTIFICATE_TEMPLATE_EST_CONFIG; + metadata: { + certificateTemplateId: string; + isEnabled: boolean; + }; +} + +interface UpdateCertificateTemplateEstConfig { + type: EventType.UPDATE_CERTIFICATE_TEMPLATE_EST_CONFIG; + metadata: { + certificateTemplateId: string; + isEnabled: boolean; + }; +} + +interface GetCertificateTemplateEstConfig { + type: EventType.GET_CERTIFICATE_TEMPLATE_EST_CONFIG; + metadata: { + certificateTemplateId: string; + }; +} + +interface AttemptCreateSlackIntegration { + type: EventType.ATTEMPT_CREATE_SLACK_INTEGRATION; + metadata: { + slug: string; + description?: string; + }; +} + +interface AttemptReinstallSlackIntegration { + type: EventType.ATTEMPT_REINSTALL_SLACK_INTEGRATION; + metadata: { + id: string; + }; +} + +interface UpdateSlackIntegration { + type: EventType.UPDATE_SLACK_INTEGRATION; + metadata: { + id: string; + slug: string; + description?: string; + }; +} + +interface DeleteSlackIntegration { + type: EventType.DELETE_SLACK_INTEGRATION; + metadata: { + id: string; + }; +} + +interface GetSlackIntegration { + type: EventType.GET_SLACK_INTEGRATION; + metadata: { + id: string; + }; +} + +interface UpdateProjectSlackConfig { + type: EventType.UPDATE_PROJECT_SLACK_CONFIG; + metadata: { + id: string; + slackIntegrationId: string; + isAccessRequestNotificationEnabled: boolean; + accessRequestChannels: string; + isSecretRequestNotificationEnabled: boolean; + secretRequestChannels: string; + }; +} + +interface GetProjectSlackConfig { + type: EventType.GET_PROJECT_SLACK_CONFIG; + metadata: { + id: string; + }; +} +interface IntegrationSyncedEvent { + type: EventType.INTEGRATION_SYNCED; + metadata: { + integrationId: string; + lastSyncJobId: string; + lastUsed: Date; + syncMessage: string; + isSynced: boolean; + }; +} + +interface CreateCmekEvent { + type: EventType.CREATE_CMEK; + metadata: { + keyId: string; + name: string; + description?: string; + encryptionAlgorithm: SymmetricEncryption; + }; +} + +interface DeleteCmekEvent { + type: EventType.DELETE_CMEK; + metadata: { + keyId: string; + }; +} + +interface UpdateCmekEvent { + type: EventType.UPDATE_CMEK; + metadata: { + keyId: string; + name?: string; + description?: string; + }; +} + +interface GetCmeksEvent { + type: EventType.GET_CMEKS; + metadata: { + keyIds: string[]; + }; +} + +interface CmekEncryptEvent { + type: EventType.CMEK_ENCRYPT; + metadata: { + keyId: string; + }; +} + 
+interface CmekDecryptEvent { + type: EventType.CMEK_DECRYPT; + metadata: { + keyId: string; + }; +} + +interface GetExternalGroupOrgRoleMappingsEvent { + type: EventType.GET_EXTERNAL_GROUP_ORG_ROLE_MAPPINGS; + metadata?: Record; // not needed, based off orgId +} + +interface UpdateExternalGroupOrgRoleMappingsEvent { + type: EventType.UPDATE_EXTERNAL_GROUP_ORG_ROLE_MAPPINGS; + metadata: { + mappings: { groupName: string; roleSlug: string }[]; + }; +} + +interface GetProjectTemplatesEvent { + type: EventType.GET_PROJECT_TEMPLATES; + metadata: { + count: number; + templateIds: string[]; + }; +} + +interface GetProjectTemplateEvent { + type: EventType.GET_PROJECT_TEMPLATE; + metadata: { + templateId: string; + }; +} + +interface CreateProjectTemplateEvent { + type: EventType.CREATE_PROJECT_TEMPLATE; + metadata: TCreateProjectTemplateDTO; +} + +interface UpdateProjectTemplateEvent { + type: EventType.UPDATE_PROJECT_TEMPLATE; + metadata: TUpdateProjectTemplateDTO & { templateId: string }; +} + +interface DeleteProjectTemplateEvent { + type: EventType.DELETE_PROJECT_TEMPLATE; + metadata: { + templateId: string; + }; +} + +interface ApplyProjectTemplateEvent { + type: EventType.APPLY_PROJECT_TEMPLATE; + metadata: { + template: string; + projectId: string; + }; +} + export type Event = | GetSecretsEvent | GetSecretEvent @@ -850,6 +1675,7 @@ export type Event = | CreateSecretBatchEvent | UpdateSecretEvent | UpdateSecretBatchEvent + | MoveSecretsEvent | DeleteSecretEvent | DeleteSecretBatchEvent | GetWorkspaceKeyEvent @@ -869,27 +1695,46 @@ export type Event = | LoginIdentityUniversalAuthEvent | AddIdentityUniversalAuthEvent | UpdateIdentityUniversalAuthEvent + | DeleteIdentityUniversalAuthEvent | GetIdentityUniversalAuthEvent + | CreateTokenIdentityTokenAuthEvent + | UpdateTokenIdentityTokenAuthEvent + | GetTokensIdentityTokenAuthEvent + | AddIdentityTokenAuthEvent + | UpdateIdentityTokenAuthEvent + | GetIdentityTokenAuthEvent + | DeleteIdentityTokenAuthEvent | LoginIdentityKubernetesAuthEvent + | DeleteIdentityKubernetesAuthEvent | AddIdentityKubernetesAuthEvent | UpdateIdentityKubernetesAuthEvent | GetIdentityKubernetesAuthEvent | CreateIdentityUniversalAuthClientSecretEvent | GetIdentityUniversalAuthClientSecretsEvent + | GetIdentityUniversalAuthClientSecretByIdEvent | RevokeIdentityUniversalAuthClientSecretEvent | LoginIdentityGcpAuthEvent | AddIdentityGcpAuthEvent + | DeleteIdentityGcpAuthEvent | UpdateIdentityGcpAuthEvent | GetIdentityGcpAuthEvent | LoginIdentityAwsAuthEvent | AddIdentityAwsAuthEvent | UpdateIdentityAwsAuthEvent | GetIdentityAwsAuthEvent + | DeleteIdentityAwsAuthEvent | LoginIdentityAzureAuthEvent | AddIdentityAzureAuthEvent + | DeleteIdentityAzureAuthEvent | UpdateIdentityAzureAuthEvent | GetIdentityAzureAuthEvent + | LoginIdentityOidcAuthEvent + | AddIdentityOidcAuthEvent + | DeleteIdentityOidcAuthEvent + | UpdateIdentityOidcAuthEvent + | GetIdentityOidcAuthEvent | CreateEnvironmentEvent + | GetEnvironmentEvent | UpdateEnvironmentEvent | DeleteEnvironmentEvent | AddWorkspaceMemberEvent @@ -902,6 +1747,7 @@ export type Event = | UpdateWebhookStatusEvent | DeleteWebhookEvent | GetSecretImportsEvent + | GetSecretImportEvent | CreateSecretImportEvent | UpdateSecretImportEvent | DeleteSecretImportEvent @@ -910,4 +1756,70 @@ export type Event = | SecretApprovalMerge | SecretApprovalClosed | SecretApprovalRequest - | SecretApprovalReopened; + | SecretApprovalReopened + | CreateCa + | GetCa + | UpdateCa + | DeleteCa + | RenewCa + | GetCaCsr + | GetCaCerts + | GetCaCert + | 
SignIntermediate + | ImportCaCert + | GetCaCrls + | IssueCert + | SignCert + | GetCaCertificateTemplates + | GetCert + | DeleteCert + | RevokeCert + | GetCertBody + | CreatePkiAlert + | GetPkiAlert + | UpdatePkiAlert + | DeletePkiAlert + | CreatePkiCollection + | GetPkiCollection + | UpdatePkiCollection + | DeletePkiCollection + | GetPkiCollectionItems + | AddPkiCollectionItem + | DeletePkiCollectionItem + | CreateKmsEvent + | UpdateKmsEvent + | DeleteKmsEvent + | GetKmsEvent + | UpdateProjectKmsEvent + | GetProjectKmsBackupEvent + | LoadProjectKmsBackupEvent + | OrgAdminAccessProjectEvent + | CreateCertificateTemplate + | UpdateCertificateTemplate + | GetCertificateTemplate + | DeleteCertificateTemplate + | CreateCertificateTemplateEstConfig + | UpdateCertificateTemplateEstConfig + | GetCertificateTemplateEstConfig + | AttemptCreateSlackIntegration + | AttemptReinstallSlackIntegration + | UpdateSlackIntegration + | DeleteSlackIntegration + | GetSlackIntegration + | UpdateProjectSlackConfig + | GetProjectSlackConfig + | IntegrationSyncedEvent + | CreateCmekEvent + | UpdateCmekEvent + | DeleteCmekEvent + | GetCmeksEvent + | CmekEncryptEvent + | CmekDecryptEvent + | GetExternalGroupOrgRoleMappingsEvent + | UpdateExternalGroupOrgRoleMappingsEvent + | GetProjectTemplatesEvent + | GetProjectTemplateEvent + | CreateProjectTemplateEvent + | UpdateProjectTemplateEvent + | DeleteProjectTemplateEvent + | ApplyProjectTemplateEvent; diff --git a/backend/src/ee/services/certificate-authority-crl/certificate-authority-crl-dal.ts b/backend/src/ee/services/certificate-authority-crl/certificate-authority-crl-dal.ts new file mode 100644 index 0000000000..d367e1616e --- /dev/null +++ b/backend/src/ee/services/certificate-authority-crl/certificate-authority-crl-dal.ts @@ -0,0 +1,10 @@ +import { TDbClient } from "@app/db"; +import { TableName } from "@app/db/schemas"; +import { ormify } from "@app/lib/knex"; + +export type TCertificateAuthorityCrlDALFactory = ReturnType; + +export const certificateAuthorityCrlDALFactory = (db: TDbClient) => { + const caCrlOrm = ormify(db, TableName.CertificateAuthorityCrl); + return caCrlOrm; +}; diff --git a/backend/src/ee/services/certificate-authority-crl/certificate-authority-crl-service.ts b/backend/src/ee/services/certificate-authority-crl/certificate-authority-crl-service.ts new file mode 100644 index 0000000000..7282b0a291 --- /dev/null +++ b/backend/src/ee/services/certificate-authority-crl/certificate-authority-crl-service.ts @@ -0,0 +1,200 @@ +import { ForbiddenError } from "@casl/ability"; +import * as x509 from "@peculiar/x509"; + +import { TCertificateAuthorityCrlDALFactory } from "@app/ee/services/certificate-authority-crl/certificate-authority-crl-dal"; +import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service"; +import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission"; +import { NotFoundError } from "@app/lib/errors"; +import { TCertificateAuthorityDALFactory } from "@app/services/certificate-authority/certificate-authority-dal"; +import { TKmsServiceFactory } from "@app/services/kms/kms-service"; +import { TProjectDALFactory } from "@app/services/project/project-dal"; +import { getProjectKmsCertificateKeyId } from "@app/services/project/project-fns"; + +import { TGetCaCrlsDTO, TGetCrlById } from "./certificate-authority-crl-types"; + +type TCertificateAuthorityCrlServiceFactoryDep = { + certificateAuthorityDAL: Pick; + certificateAuthorityCrlDAL: Pick; + projectDAL: Pick; 
+ kmsService: Pick; + permissionService: Pick; +}; + +export type TCertificateAuthorityCrlServiceFactory = ReturnType; + +export const certificateAuthorityCrlServiceFactory = ({ + certificateAuthorityDAL, + certificateAuthorityCrlDAL, + projectDAL, + kmsService, + permissionService // licenseService +}: TCertificateAuthorityCrlServiceFactoryDep) => { + /** + * Return CRL with id [crlId] + */ + const getCrlById = async (crlId: TGetCrlById) => { + const caCrl = await certificateAuthorityCrlDAL.findById(crlId); + if (!caCrl) throw new NotFoundError({ message: `CRL with ID '${crlId}' not found` }); + + const ca = await certificateAuthorityDAL.findById(caCrl.caId); + + const keyId = await getProjectKmsCertificateKeyId({ + projectId: ca.projectId, + projectDAL, + kmsService + }); + + const kmsDecryptor = await kmsService.decryptWithKmsKey({ + kmsId: keyId + }); + + const decryptedCrl = await kmsDecryptor({ cipherTextBlob: caCrl.encryptedCrl }); + + const crl = new x509.X509Crl(decryptedCrl); + + return { + ca, + caCrl, + crl: crl.rawData + }; + }; + + /** + * Returns a list of CRL ids for CA with id [caId] + */ + const getCaCrls = async ({ caId, actorId, actorAuthMethod, actor, actorOrgId }: TGetCaCrlsDTO) => { + const ca = await certificateAuthorityDAL.findById(caId); + if (!ca) throw new NotFoundError({ message: `CA with ID '${caId}' not found` }); + + const { permission } = await permissionService.getProjectPermission( + actor, + actorId, + ca.projectId, + actorAuthMethod, + actorOrgId + ); + + ForbiddenError.from(permission).throwUnlessCan( + ProjectPermissionActions.Read, + ProjectPermissionSub.CertificateAuthorities + ); + + const caCrls = await certificateAuthorityCrlDAL.find({ caId: ca.id }, { sort: [["createdAt", "desc"]] }); + + const keyId = await getProjectKmsCertificateKeyId({ + projectId: ca.projectId, + projectDAL, + kmsService + }); + + const kmsDecryptor = await kmsService.decryptWithKmsKey({ + kmsId: keyId + }); + + const decryptedCrls = await Promise.all( + caCrls.map(async (caCrl) => { + const decryptedCrl = await kmsDecryptor({ cipherTextBlob: caCrl.encryptedCrl }); + const crl = new x509.X509Crl(decryptedCrl); + + const base64crl = crl.toString("base64"); + const crlPem = `-----BEGIN X509 CRL-----\n${base64crl.match(/.{1,64}/g)?.join("\n")}\n-----END X509 CRL-----`; + return { + id: caCrl.id, + crl: crlPem + }; + }) + ); + + return { + ca, + crls: decryptedCrls + }; + }; + + // const rotateCaCrl = async ({ caId, actorId, actorAuthMethod, actor, actorOrgId }: TRotateCrlDTO) => { + // const ca = await certificateAuthorityDAL.findById(caId); + // if (!ca) throw new BadRequestError({ message: "CA not found" }); + + // const { permission } = await permissionService.getProjectPermission( + // actor, + // actorId, + // ca.projectId, + // actorAuthMethod, + // actorOrgId + // ); + + // ForbiddenError.from(permission).throwUnlessCan( + // ProjectPermissionActions.Read, + // ProjectPermissionSub.CertificateAuthorities + // ); + + // const caSecret = await certificateAuthoritySecretDAL.findOne({ caId: ca.id }); + + // const alg = keyAlgorithmToAlgCfg(ca.keyAlgorithm as CertKeyAlgorithm); + + // const keyId = await getProjectKmsCertificateKeyId({ + // projectId: ca.projectId, + // projectDAL, + // kmsService + // }); + + // const privateKey = await kmsService.decrypt({ + // kmsId: keyId, + // cipherTextBlob: caSecret.encryptedPrivateKey + // }); + + // const skObj = crypto.createPrivateKey({ key: privateKey, format: "der", type: "pkcs8" }); + // const sk = await 
crypto.subtle.importKey("pkcs8", skObj.export({ format: "der", type: "pkcs8" }), alg, true, [ + // "sign" + // ]); + + // const revokedCerts = await certificateDAL.find({ + // caId: ca.id, + // status: CertStatus.REVOKED + // }); + + // const crl = await x509.X509CrlGenerator.create({ + // issuer: ca.dn, + // thisUpdate: new Date(), + // nextUpdate: new Date("2025/12/12"), + // entries: revokedCerts.map((revokedCert) => { + // return { + // serialNumber: revokedCert.serialNumber, + // revocationDate: new Date(revokedCert.revokedAt as Date), + // reason: revokedCert.revocationReason as number, + // invalidity: new Date("2022/01/01"), + // issuer: ca.dn + // }; + // }), + // signingAlgorithm: alg, + // signingKey: sk + // }); + + // const { cipherTextBlob: encryptedCrl } = await kmsService.encrypt({ + // kmsId: keyId, + // plainText: Buffer.from(new Uint8Array(crl.rawData)) + // }); + + // await certificateAuthorityCrlDAL.update( + // { + // caId: ca.id + // }, + // { + // encryptedCrl + // } + // ); + + // const base64crl = crl.toString("base64"); + // const crlPem = `-----BEGIN X509 CRL-----\n${base64crl.match(/.{1,64}/g)?.join("\n")}\n-----END X509 CRL-----`; + + // return { + // crl: crlPem + // }; + // }; + + return { + getCrlById, + getCaCrls + // rotateCaCrl + }; +}; diff --git a/backend/src/ee/services/certificate-authority-crl/certificate-authority-crl-types.ts b/backend/src/ee/services/certificate-authority-crl/certificate-authority-crl-types.ts new file mode 100644 index 0000000000..9b82727e91 --- /dev/null +++ b/backend/src/ee/services/certificate-authority-crl/certificate-authority-crl-types.ts @@ -0,0 +1,7 @@ +import { TProjectPermission } from "@app/lib/types"; + +export type TGetCrlById = string; + +export type TGetCaCrlsDTO = { + caId: string; +} & Omit; diff --git a/backend/src/ee/services/certificate-est/certificate-est-fns.ts b/backend/src/ee/services/certificate-est/certificate-est-fns.ts new file mode 100644 index 0000000000..a3973ae893 --- /dev/null +++ b/backend/src/ee/services/certificate-est/certificate-est-fns.ts @@ -0,0 +1,24 @@ +import { Certificate, ContentInfo, EncapsulatedContentInfo, SignedData } from "pkijs"; + +export const convertRawCertsToPkcs7 = (rawCertificate: ArrayBuffer[]) => { + const certs = rawCertificate.map((rawCert) => Certificate.fromBER(rawCert)); + const cmsSigned = new SignedData({ + encapContentInfo: new EncapsulatedContentInfo({ + eContentType: "1.2.840.113549.1.7.1" // not encrypted and not compressed data + }), + certificates: certs + }); + + const cmsContent = new ContentInfo({ + contentType: "1.2.840.113549.1.7.2", // SignedData + // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment + content: cmsSigned.toSchema() + }); + + const derBuffer = cmsContent.toSchema().toBER(false); + const base64Pkcs7 = Buffer.from(derBuffer) + .toString("base64") + .replace(/(.{64})/g, "$1\n"); // we add a linebreak for CURL clients + + return base64Pkcs7; +}; diff --git a/backend/src/ee/services/certificate-est/certificate-est-service.ts b/backend/src/ee/services/certificate-est/certificate-est-service.ts new file mode 100644 index 0000000000..5790c8d5a6 --- /dev/null +++ b/backend/src/ee/services/certificate-est/certificate-est-service.ts @@ -0,0 +1,270 @@ +import * as x509 from "@peculiar/x509"; + +import { BadRequestError, NotFoundError, UnauthorizedError } from "@app/lib/errors"; +import { isCertChainValid } from "@app/services/certificate/certificate-fns"; +import { TCertificateAuthorityCertDALFactory } from 
"@app/services/certificate-authority/certificate-authority-cert-dal"; +import { TCertificateAuthorityDALFactory } from "@app/services/certificate-authority/certificate-authority-dal"; +import { getCaCertChain, getCaCertChains } from "@app/services/certificate-authority/certificate-authority-fns"; +import { TCertificateAuthorityServiceFactory } from "@app/services/certificate-authority/certificate-authority-service"; +import { TCertificateTemplateDALFactory } from "@app/services/certificate-template/certificate-template-dal"; +import { TCertificateTemplateServiceFactory } from "@app/services/certificate-template/certificate-template-service"; +import { TKmsServiceFactory } from "@app/services/kms/kms-service"; +import { TProjectDALFactory } from "@app/services/project/project-dal"; + +import { TLicenseServiceFactory } from "../license/license-service"; +import { convertRawCertsToPkcs7 } from "./certificate-est-fns"; + +type TCertificateEstServiceFactoryDep = { + certificateAuthorityService: Pick; + certificateTemplateService: Pick; + certificateTemplateDAL: Pick; + certificateAuthorityDAL: Pick; + certificateAuthorityCertDAL: Pick; + projectDAL: Pick; + kmsService: Pick; + licenseService: Pick; +}; + +export type TCertificateEstServiceFactory = ReturnType; + +export const certificateEstServiceFactory = ({ + certificateAuthorityService, + certificateTemplateService, + certificateTemplateDAL, + certificateAuthorityCertDAL, + certificateAuthorityDAL, + projectDAL, + kmsService, + licenseService +}: TCertificateEstServiceFactoryDep) => { + const simpleReenroll = async ({ + csr, + certificateTemplateId, + sslClientCert + }: { + csr: string; + certificateTemplateId: string; + sslClientCert: string; + }) => { + const estConfig = await certificateTemplateService.getEstConfiguration({ + isInternal: true, + certificateTemplateId + }); + + const plan = await licenseService.getPlan(estConfig.orgId); + if (!plan.pkiEst) { + throw new BadRequestError({ + message: + "Failed to perform EST operation - simpleReenroll due to plan restriction. Upgrade to the Enterprise plan." 
+ }); + } + + if (!estConfig.isEnabled) { + throw new BadRequestError({ + message: "EST is disabled" + }); + } + + const certTemplate = await certificateTemplateDAL.findById(certificateTemplateId); + + const leafCertificate = decodeURIComponent(sslClientCert).match( + /-----BEGIN CERTIFICATE-----[\s\S]+?-----END CERTIFICATE-----/g + )?.[0]; + + if (!leafCertificate) { + throw new UnauthorizedError({ message: "Missing client certificate" }); + } + + const cert = new x509.X509Certificate(leafCertificate); + // We have to assert that the client certificate provided can be traced back to the Root CA + const caCertChains = await getCaCertChains({ + caId: certTemplate.caId, + certificateAuthorityCertDAL, + certificateAuthorityDAL, + projectDAL, + kmsService + }); + + const verifiedChains = await Promise.all( + caCertChains.map((chain) => { + const caCert = new x509.X509Certificate(chain.certificate); + const caChain = + chain.certificateChain + .match(/-----BEGIN CERTIFICATE-----[\s\S]+?-----END CERTIFICATE-----/g) + ?.map((c) => new x509.X509Certificate(c)) || []; + + return isCertChainValid([cert, caCert, ...caChain]); + }) + ); + + if (!verifiedChains.some(Boolean)) { + throw new BadRequestError({ + message: "Invalid client certificate: unable to build a valid certificate chain" + }); + } + + // We ensure that the Subject and SubjectAltNames of the CSR and the existing certificate are exactly the same + const csrObj = new x509.Pkcs10CertificateRequest(csr); + if (csrObj.subject !== cert.subject) { + throw new BadRequestError({ + message: "Subject mismatch" + }); + } + + let csrSanSet: Set<string> = new Set(); + const csrSanExtension = csrObj.extensions.find((ext) => ext.type === "2.5.29.17"); + if (csrSanExtension) { + const sanNames = new x509.GeneralNames(csrSanExtension.value); + csrSanSet = new Set([...sanNames.items.map((name) => `${name.type}-${name.value}`)]); + } + + let certSanSet: Set<string> = new Set(); + const certSanExtension = cert.extensions.find((ext) => ext.type === "2.5.29.17"); + if (certSanExtension) { + const sanNames = new x509.GeneralNames(certSanExtension.value); + certSanSet = new Set([...sanNames.items.map((name) => `${name.type}-${name.value}`)]); + } + + if (csrSanSet.size !== certSanSet.size || ![...csrSanSet].every((element) => certSanSet.has(element))) { + throw new BadRequestError({ + message: "Subject alternative names mismatch" + }); + } + + const { certificate } = await certificateAuthorityService.signCertFromCa({ + isInternal: true, + certificateTemplateId, + csr + }); + + return convertRawCertsToPkcs7([certificate.rawData]); + }; + + const simpleEnroll = async ({ + csr, + certificateTemplateId, + sslClientCert + }: { + csr: string; + certificateTemplateId: string; + sslClientCert: string; + }) => { + /* We first have to assert that the client certificate provided can be traced back to the attached + CA chain in the EST configuration + */ + const estConfig = await certificateTemplateService.getEstConfiguration({ + isInternal: true, + certificateTemplateId + }); + + const plan = await licenseService.getPlan(estConfig.orgId); + if (!plan.pkiEst) { + throw new BadRequestError({ + message: + "Failed to perform EST operation - simpleEnroll due to plan restriction. Upgrade to the Enterprise plan."
+ }); + } + + if (!estConfig.isEnabled) { + throw new BadRequestError({ + message: "EST is disabled" + }); + } + + if (!estConfig.disableBootstrapCertValidation) { + const caCerts = estConfig.caChain + .match(/-----BEGIN CERTIFICATE-----[\s\S]+?-----END CERTIFICATE-----/g) + ?.map((cert) => { + return new x509.X509Certificate(cert); + }); + + if (!caCerts) { + throw new BadRequestError({ message: "Failed to parse certificate chain" }); + } + + const leafCertificate = decodeURIComponent(sslClientCert).match( + /-----BEGIN CERTIFICATE-----[\s\S]+?-----END CERTIFICATE-----/g + )?.[0]; + + if (!leafCertificate) { + throw new BadRequestError({ message: "Missing client certificate" }); + } + + const certObj = new x509.X509Certificate(leafCertificate); + if (!(await isCertChainValid([certObj, ...caCerts]))) { + throw new BadRequestError({ message: "Invalid certificate chain" }); + } + } + + const { certificate } = await certificateAuthorityService.signCertFromCa({ + isInternal: true, + certificateTemplateId, + csr + }); + + return convertRawCertsToPkcs7([certificate.rawData]); + }; + + /** + * Return the CA certificate and CA certificate chain for the CA bound to + * the certificate template with id [certificateTemplateId] as part of EST protocol + */ + const getCaCerts = async ({ certificateTemplateId }: { certificateTemplateId: string }) => { + const certTemplate = await certificateTemplateDAL.findById(certificateTemplateId); + if (!certTemplate) { + throw new NotFoundError({ + message: `Certificate template with ID '${certificateTemplateId}' not found` + }); + } + + const estConfig = await certificateTemplateService.getEstConfiguration({ + isInternal: true, + certificateTemplateId + }); + + const plan = await licenseService.getPlan(estConfig.orgId); + if (!plan.pkiEst) { + throw new BadRequestError({ + message: "Failed to perform EST operation - caCerts due to plan restriction. Upgrade to the Enterprise plan." 
+ }); + } + + if (!estConfig.isEnabled) { + throw new BadRequestError({ + message: "EST is disabled" + }); + } + + const ca = await certificateAuthorityDAL.findById(certTemplate.caId); + if (!ca) { + throw new NotFoundError({ + message: `Certificate Authority with ID '${certTemplate.caId}' not found` + }); + } + + const { caCert, caCertChain } = await getCaCertChain({ + caCertId: ca.activeCaCertId as string, + certificateAuthorityDAL, + certificateAuthorityCertDAL, + projectDAL, + kmsService + }); + + const certificates = caCertChain + .match(/-----BEGIN CERTIFICATE-----[\s\S]+?-----END CERTIFICATE-----/g) + ?.map((cert) => new x509.X509Certificate(cert)); + + if (!certificates) { + throw new BadRequestError({ message: "Failed to parse certificate chain" }); + } + + const caCertificate = new x509.X509Certificate(caCert); + return convertRawCertsToPkcs7([caCertificate.rawData, ...certificates.map((cert) => cert.rawData)]); + }; + + return { + simpleEnroll, + simpleReenroll, + getCaCerts + }; +}; diff --git a/backend/src/ee/services/dynamic-secret-lease/dynamic-secret-lease-service.ts b/backend/src/ee/services/dynamic-secret-lease/dynamic-secret-lease-service.ts index 1e5487d225..38d7d1abdb 100644 --- a/backend/src/ee/services/dynamic-secret-lease/dynamic-secret-lease-service.ts +++ b/backend/src/ee/services/dynamic-secret-lease/dynamic-secret-lease-service.ts @@ -4,10 +4,13 @@ import ms from "ms"; import { SecretKeyEncoding } from "@app/db/schemas"; import { TLicenseServiceFactory } from "@app/ee/services/license/license-service"; import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service"; -import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission"; +import { + ProjectPermissionDynamicSecretActions, + ProjectPermissionSub +} from "@app/ee/services/permission/project-permission"; import { getConfig } from "@app/lib/config/env"; import { infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption"; -import { BadRequestError } from "@app/lib/errors"; +import { BadRequestError, NotFoundError } from "@app/lib/errors"; import { logger } from "@app/lib/logger"; import { TProjectDALFactory } from "@app/services/project/project-dal"; import { TSecretFolderDALFactory } from "@app/services/secret-folder/secret-folder-dal"; @@ -61,7 +64,7 @@ export const dynamicSecretLeaseServiceFactory = ({ }: TCreateDynamicSecretLeaseDTO) => { const appCfg = getConfig(); const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId); - if (!project) throw new BadRequestError({ message: "Project not found" }); + if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` }); const projectId = project.id; const { permission } = await permissionService.getProjectPermission( @@ -72,8 +75,8 @@ export const dynamicSecretLeaseServiceFactory = ({ actorOrgId ); ForbiddenError.from(permission).throwUnlessCan( - ProjectPermissionActions.Read, - subject(ProjectPermissionSub.Secrets, { environment: environmentSlug, secretPath: path }) + ProjectPermissionDynamicSecretActions.Lease, + subject(ProjectPermissionSub.DynamicSecrets, { environment: environmentSlug, secretPath: path }) ); const plan = await licenseService.getPlan(actorOrgId); @@ -84,10 +87,16 @@ export const dynamicSecretLeaseServiceFactory = ({ } const folder = await folderDAL.findBySecretPath(projectId, environmentSlug, path); - if (!folder) throw new BadRequestError({ message: "Folder not found" }); + if (!folder) + throw new 
NotFoundError({ + message: `Folder with path '${path}' in environment with slug '${environmentSlug}' not found` + }); const dynamicSecretCfg = await dynamicSecretDAL.findOne({ name, folderId: folder.id }); - if (!dynamicSecretCfg) throw new BadRequestError({ message: "Dynamic secret not found" }); + if (!dynamicSecretCfg) + throw new NotFoundError({ + message: `Dynamic secret with name '${name}' in folder with path '${path}' not found` + }); const totalLeasesTaken = await dynamicSecretLeaseDAL.countLeasesForDynamicSecret(dynamicSecretCfg.id); if (totalLeasesTaken >= appCfg.MAX_LEASE_LIMIT) @@ -134,7 +143,7 @@ export const dynamicSecretLeaseServiceFactory = ({ leaseId }: TRenewDynamicSecretLeaseDTO) => { const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId); - if (!project) throw new BadRequestError({ message: "Project not found" }); + if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` }); const projectId = project.id; const { permission } = await permissionService.getProjectPermission( @@ -145,8 +154,8 @@ export const dynamicSecretLeaseServiceFactory = ({ actorOrgId ); ForbiddenError.from(permission).throwUnlessCan( - ProjectPermissionActions.Edit, - subject(ProjectPermissionSub.Secrets, { environment: environmentSlug, secretPath: path }) + ProjectPermissionDynamicSecretActions.Lease, + subject(ProjectPermissionSub.DynamicSecrets, { environment: environmentSlug, secretPath: path }) ); const plan = await licenseService.getPlan(actorOrgId); @@ -157,10 +166,15 @@ export const dynamicSecretLeaseServiceFactory = ({ } const folder = await folderDAL.findBySecretPath(projectId, environmentSlug, path); - if (!folder) throw new BadRequestError({ message: "Folder not found" }); + if (!folder) + throw new NotFoundError({ + message: `Folder with path '${path}' in environment with slug '${environmentSlug}' not found` + }); const dynamicSecretLease = await dynamicSecretLeaseDAL.findById(leaseId); - if (!dynamicSecretLease) throw new BadRequestError({ message: "Dynamic secret lease not found" }); + if (!dynamicSecretLease) { + throw new NotFoundError({ message: `Dynamic secret lease with ID '${leaseId}' not found` }); + } const dynamicSecretCfg = dynamicSecretLease.dynamicSecret; const selectedProvider = dynamicSecretProviders[dynamicSecretCfg.type as DynamicSecretProviders]; @@ -208,7 +222,7 @@ export const dynamicSecretLeaseServiceFactory = ({ isForced }: TDeleteDynamicSecretLeaseDTO) => { const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId); - if (!project) throw new BadRequestError({ message: "Project not found" }); + if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` }); const projectId = project.id; const { permission } = await permissionService.getProjectPermission( @@ -219,15 +233,19 @@ export const dynamicSecretLeaseServiceFactory = ({ actorOrgId ); ForbiddenError.from(permission).throwUnlessCan( - ProjectPermissionActions.Delete, - subject(ProjectPermissionSub.Secrets, { environment: environmentSlug, secretPath: path }) + ProjectPermissionDynamicSecretActions.Lease, + subject(ProjectPermissionSub.DynamicSecrets, { environment: environmentSlug, secretPath: path }) ); const folder = await folderDAL.findBySecretPath(projectId, environmentSlug, path); - if (!folder) throw new BadRequestError({ message: "Folder not found" }); + if (!folder) + throw new NotFoundError({ + message: `Folder with path '${path}' in environment with slug '${environmentSlug}' not found` + }); 
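+ // Note: every lease operation in this service (create, renew, delete, list, details)
+ // is now gated by the single ProjectPermissionDynamicSecretActions.Lease action on the
+ // DynamicSecrets subject, replacing the previous Read/Edit/Delete checks on Secrets.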
const dynamicSecretLease = await dynamicSecretLeaseDAL.findById(leaseId); - if (!dynamicSecretLease) throw new BadRequestError({ message: "Dynamic secret lease not found" }); + if (!dynamicSecretLease) + throw new NotFoundError({ message: `Dynamic secret lease with ID '${leaseId}' not found` }); const dynamicSecretCfg = dynamicSecretLease.dynamicSecret; const selectedProvider = dynamicSecretProviders[dynamicSecretCfg.type as DynamicSecretProviders]; @@ -273,7 +291,7 @@ export const dynamicSecretLeaseServiceFactory = ({ actorAuthMethod }: TListDynamicSecretLeasesDTO) => { const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId); - if (!project) throw new BadRequestError({ message: "Project not found" }); + if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` }); const projectId = project.id; const { permission } = await permissionService.getProjectPermission( @@ -284,15 +302,21 @@ export const dynamicSecretLeaseServiceFactory = ({ actorOrgId ); ForbiddenError.from(permission).throwUnlessCan( - ProjectPermissionActions.Read, - subject(ProjectPermissionSub.Secrets, { environment: environmentSlug, secretPath: path }) + ProjectPermissionDynamicSecretActions.Lease, + subject(ProjectPermissionSub.DynamicSecrets, { environment: environmentSlug, secretPath: path }) ); const folder = await folderDAL.findBySecretPath(projectId, environmentSlug, path); - if (!folder) throw new BadRequestError({ message: "Folder not found" }); + if (!folder) + throw new NotFoundError({ + message: `Folder with path '${path}' in environment with slug '${environmentSlug}' not found` + }); const dynamicSecretCfg = await dynamicSecretDAL.findOne({ name, folderId: folder.id }); - if (!dynamicSecretCfg) throw new BadRequestError({ message: "Dynamic secret not found" }); + if (!dynamicSecretCfg) + throw new NotFoundError({ + message: `Dynamic secret with name '${name}' in folder with path '${path}' not found` + }); const dynamicSecretLeases = await dynamicSecretLeaseDAL.find({ dynamicSecretId: dynamicSecretCfg.id }); return dynamicSecretLeases; @@ -309,7 +333,7 @@ export const dynamicSecretLeaseServiceFactory = ({ actorAuthMethod }: TDetailsDynamicSecretLeaseDTO) => { const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId); - if (!project) throw new BadRequestError({ message: "Project not found" }); + if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` }); const projectId = project.id; const { permission } = await permissionService.getProjectPermission( @@ -320,15 +344,16 @@ export const dynamicSecretLeaseServiceFactory = ({ actorOrgId ); ForbiddenError.from(permission).throwUnlessCan( - ProjectPermissionActions.Read, - subject(ProjectPermissionSub.Secrets, { environment: environmentSlug, secretPath: path }) + ProjectPermissionDynamicSecretActions.Lease, + subject(ProjectPermissionSub.DynamicSecrets, { environment: environmentSlug, secretPath: path }) ); const folder = await folderDAL.findBySecretPath(projectId, environmentSlug, path); - if (!folder) throw new BadRequestError({ message: "Folder not found" }); + if (!folder) throw new NotFoundError({ message: `Folder with path '${path}' not found` }); const dynamicSecretLease = await dynamicSecretLeaseDAL.findById(leaseId); - if (!dynamicSecretLease) throw new BadRequestError({ message: "Dynamic secret lease not found" }); + if (!dynamicSecretLease) + throw new NotFoundError({ message: `Dynamic secret lease with ID '${leaseId}' not found` }); return 
dynamicSecretLease; }; diff --git a/backend/src/ee/services/dynamic-secret/dynamic-secret-dal.ts b/backend/src/ee/services/dynamic-secret/dynamic-secret-dal.ts index 0cc4aca2f1..e47d9102da 100644 --- a/backend/src/ee/services/dynamic-secret/dynamic-secret-dal.ts +++ b/backend/src/ee/services/dynamic-secret/dynamic-secret-dal.ts @@ -1,10 +1,70 @@ +import { Knex } from "knex"; + import { TDbClient } from "@app/db"; import { TableName } from "@app/db/schemas"; -import { ormify } from "@app/lib/knex"; +import { DatabaseError } from "@app/lib/errors"; +import { ormify, selectAllTableCols } from "@app/lib/knex"; +import { OrderByDirection } from "@app/lib/types"; +import { SecretsOrderBy } from "@app/services/secret/secret-types"; export type TDynamicSecretDALFactory = ReturnType<typeof dynamicSecretDALFactory>; export const dynamicSecretDALFactory = (db: TDbClient) => { const orm = ormify(db, TableName.DynamicSecret); - return orm; + + // find dynamic secrets for multiple environments (folder IDs are cross env, thus need to rank for pagination) + const listDynamicSecretsByFolderIds = async ( + { + folderIds, + search, + limit, + offset = 0, + orderBy = SecretsOrderBy.Name, + orderDirection = OrderByDirection.ASC + }: { + folderIds: string[]; + search?: string; + limit?: number; + offset?: number; + orderBy?: SecretsOrderBy; + orderDirection?: OrderByDirection; + }, + tx?: Knex + ) => { + try { + const query = (tx || db.replicaNode())(TableName.DynamicSecret) + .whereIn("folderId", folderIds) + .where((bd) => { + if (search) { + void bd.whereILike(`${TableName.DynamicSecret}.name`, `%${search}%`); + } + }) + .leftJoin(TableName.SecretFolder, `${TableName.SecretFolder}.id`, `${TableName.DynamicSecret}.folderId`) + .leftJoin(TableName.Environment, `${TableName.SecretFolder}.envId`, `${TableName.Environment}.id`) + .select( + selectAllTableCols(TableName.DynamicSecret), + db.ref("slug").withSchema(TableName.Environment).as("environment"), + db.raw(`DENSE_RANK() OVER (ORDER BY ${TableName.DynamicSecret}."name" ${orderDirection}) as rank`) + ) + .orderBy(`${TableName.DynamicSecret}.${orderBy}`, orderDirection); + + if (limit) { + const rankOffset = offset + 1; + return await (tx || db) + .with("w", query) + .select("*") + .from<Awaited<typeof query>[number]>("w") + .where("w.rank", ">=", rankOffset) + .andWhere("w.rank", "<", rankOffset + limit); + } + + const dynamicSecrets = await query; + + return dynamicSecrets; + } catch (error) { + throw new DatabaseError({ error, name: "List dynamic secret multi env" }); + } + }; + + return { ...orm, listDynamicSecretsByFolderIds }; }; diff --git a/backend/src/ee/services/dynamic-secret/dynamic-secret-fns.ts b/backend/src/ee/services/dynamic-secret/dynamic-secret-fns.ts new file mode 100644 index 0000000000..04aeb3950d --- /dev/null +++ b/backend/src/ee/services/dynamic-secret/dynamic-secret-fns.ts @@ -0,0 +1,20 @@ +import { getConfig } from "@app/lib/config/env"; +import { BadRequestError } from "@app/lib/errors"; +import { getDbConnectionHost } from "@app/lib/knex"; + +export const verifyHostInputValidity = (host: string) => { + const appCfg = getConfig(); + const dbHost = appCfg.DB_HOST || getDbConnectionHost(appCfg.DB_CONNECTION_URI); + + if ( + appCfg.isCloud && + // localhost + // internal ips + (host === "host.docker.internal" || host.match(/^10\.\d+\.\d+\.\d+/) || host.match(/^192\.168\.\d+\.\d+/)) + ) + throw new BadRequestError({ message: "Invalid db host" }); + + if (host === "localhost" || host === "127.0.0.1" || dbHost === host) { + throw new BadRequestError({ message: "Invalid db host" }); + } +};
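A minimal usage sketch (not part of the patch) of the new verifyHostInputValidity guard; the hostnames below are hypothetical:

import { verifyHostInputValidity } from "@app/ee/services/dynamic-secret/dynamic-secret-fns";

// Accepted, assuming the host differs from the backend's own database host
verifyHostInputValidity("redis.example.com");

// Always rejected, in cloud and self-hosted deployments alike
try {
  verifyHostInputValidity("localhost");
} catch (err) {
  // BadRequestError: "Invalid db host"
}

// Rejected only when appCfg.isCloud is true: "host.docker.internal" and
// private 10.x.x.x / 192.168.x.x addresses.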
diff --git a/backend/src/ee/services/dynamic-secret/dynamic-secret-service.ts b/backend/src/ee/services/dynamic-secret/dynamic-secret-service.ts index 1aef3cc864..5eff1cdcff 100644 --- a/backend/src/ee/services/dynamic-secret/dynamic-secret-service.ts +++ b/backend/src/ee/services/dynamic-secret/dynamic-secret-service.ts @@ -3,9 +3,13 @@ import { ForbiddenError, subject } from "@casl/ability"; import { SecretKeyEncoding } from "@app/db/schemas"; import { TLicenseServiceFactory } from "@app/ee/services/license/license-service"; import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service"; -import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission"; +import { + ProjectPermissionDynamicSecretActions, + ProjectPermissionSub +} from "@app/ee/services/permission/project-permission"; import { infisicalSymmetricDecrypt, infisicalSymmetricEncypt } from "@app/lib/crypto/encryption"; -import { BadRequestError } from "@app/lib/errors"; +import { BadRequestError, NotFoundError } from "@app/lib/errors"; +import { OrderByDirection, OrgServiceActor } from "@app/lib/types"; import { TProjectDALFactory } from "@app/services/project/project-dal"; import { TSecretFolderDALFactory } from "@app/services/secret-folder/secret-folder-dal"; @@ -17,9 +21,13 @@ import { TCreateDynamicSecretDTO, TDeleteDynamicSecretDTO, TDetailsDynamicSecretDTO, + TGetDynamicSecretsCountDTO, + TListDynamicSecretsByFolderMappingsDTO, TListDynamicSecretsDTO, + TListDynamicSecretsMultiEnvDTO, TUpdateDynamicSecretDTO } from "./dynamic-secret-types"; +import { AzureEntraIDProvider } from "./providers/azure-entra-id"; import { DynamicSecretProviders, TDynamicProviderFns } from "./providers/models"; type TDynamicSecretServiceFactoryDep = { @@ -31,7 +39,7 @@ type TDynamicSecretServiceFactoryDep = { "pruneDynamicSecret" | "unsetLeaseRevocation" >; licenseService: Pick; - folderDAL: Pick; + folderDAL: Pick; projectDAL: Pick; permissionService: Pick; }; @@ -62,7 +70,7 @@ export const dynamicSecretServiceFactory = ({ actorAuthMethod }: TCreateDynamicSecretDTO) => { const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId); - if (!project) throw new BadRequestError({ message: "Project not found" }); + if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` }); const projectId = project.id; const { permission } = await permissionService.getProjectPermission( @@ -73,8 +81,8 @@ export const dynamicSecretServiceFactory = ({ actorOrgId ); ForbiddenError.from(permission).throwUnlessCan( - ProjectPermissionActions.Create, - subject(ProjectPermissionSub.Secrets, { environment: environmentSlug, secretPath: path }) + ProjectPermissionDynamicSecretActions.CreateRootCredential, + subject(ProjectPermissionSub.DynamicSecrets, { environment: environmentSlug, secretPath: path }) ); const plan = await licenseService.getPlan(actorOrgId); @@ -85,7 +93,9 @@ export const dynamicSecretServiceFactory = ({ } const folder = await folderDAL.findBySecretPath(projectId, environmentSlug, path); - if (!folder) throw new BadRequestError({ message: "Folder not found" }); + if (!folder) { + throw new NotFoundError({ message: `Folder with path '${path}' in environment '${environmentSlug}' not found` }); + } const existingDynamicSecret = await dynamicSecretDAL.findOne({ name, folderId: folder.id }); if (existingDynamicSecret) @@ -98,6 +108,7 @@ export const dynamicSecretServiceFactory = ({ if (!isConnected) throw new BadRequestError({ message: 
"Provider connection failed" }); const encryptedInput = infisicalSymmetricEncypt(JSON.stringify(inputs)); + const dynamicSecretCfg = await dynamicSecretDAL.create({ type: provider.type, version: 1, @@ -129,7 +140,7 @@ export const dynamicSecretServiceFactory = ({ actorAuthMethod }: TUpdateDynamicSecretDTO) => { const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId); - if (!project) throw new BadRequestError({ message: "Project not found" }); + if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` }); const projectId = project.id; @@ -141,8 +152,8 @@ export const dynamicSecretServiceFactory = ({ actorOrgId ); ForbiddenError.from(permission).throwUnlessCan( - ProjectPermissionActions.Edit, - subject(ProjectPermissionSub.Secrets, { environment: environmentSlug, secretPath: path }) + ProjectPermissionDynamicSecretActions.EditRootCredential, + subject(ProjectPermissionSub.DynamicSecrets, { environment: environmentSlug, secretPath: path }) ); const plan = await licenseService.getPlan(actorOrgId); @@ -153,11 +164,15 @@ export const dynamicSecretServiceFactory = ({ } const folder = await folderDAL.findBySecretPath(projectId, environmentSlug, path); - if (!folder) throw new BadRequestError({ message: "Folder not found" }); + if (!folder) + throw new NotFoundError({ message: `Folder with path '${path}' in environment '${environmentSlug}' not found` }); const dynamicSecretCfg = await dynamicSecretDAL.findOne({ name, folderId: folder.id }); - if (!dynamicSecretCfg) throw new BadRequestError({ message: "Dynamic secret not found" }); - + if (!dynamicSecretCfg) { + throw new NotFoundError({ + message: `Dynamic secret with name '${name}' in folder '${folder.path}' not found` + }); + } if (newName) { const existingDynamicSecret = await dynamicSecretDAL.findOne({ name: newName, folderId: folder.id }); if (existingDynamicSecret) @@ -208,7 +223,7 @@ export const dynamicSecretServiceFactory = ({ isForced }: TDeleteDynamicSecretDTO) => { const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId); - if (!project) throw new BadRequestError({ message: "Project not found" }); + if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` }); const projectId = project.id; @@ -220,15 +235,18 @@ export const dynamicSecretServiceFactory = ({ actorOrgId ); ForbiddenError.from(permission).throwUnlessCan( - ProjectPermissionActions.Edit, - subject(ProjectPermissionSub.Secrets, { environment: environmentSlug, secretPath: path }) + ProjectPermissionDynamicSecretActions.DeleteRootCredential, + subject(ProjectPermissionSub.DynamicSecrets, { environment: environmentSlug, secretPath: path }) ); const folder = await folderDAL.findBySecretPath(projectId, environmentSlug, path); - if (!folder) throw new BadRequestError({ message: "Folder not found" }); + if (!folder) + throw new NotFoundError({ message: `Folder with path '${path}' in environment '${environmentSlug}' not found` }); const dynamicSecretCfg = await dynamicSecretDAL.findOne({ name, folderId: folder.id }); - if (!dynamicSecretCfg) throw new BadRequestError({ message: "Dynamic secret not found" }); + if (!dynamicSecretCfg) { + throw new NotFoundError({ message: `Dynamic secret with name '${name}' in folder '${folder.path}' not found` }); + } const leases = await dynamicSecretLeaseDAL.find({ dynamicSecretId: dynamicSecretCfg.id }); // when not forced we check with the external system to first remove the things @@ -266,7 +284,7 @@ export const 
dynamicSecretServiceFactory = ({ actor }: TDetailsDynamicSecretDTO) => { const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId); - if (!project) throw new BadRequestError({ message: "Project not found" }); + if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` }); const projectId = project.id; const { permission } = await permissionService.getProjectPermission( @@ -277,15 +295,22 @@ export const dynamicSecretServiceFactory = ({ actorOrgId ); ForbiddenError.from(permission).throwUnlessCan( - ProjectPermissionActions.Edit, - subject(ProjectPermissionSub.Secrets, { environment: environmentSlug, secretPath: path }) + ProjectPermissionDynamicSecretActions.ReadRootCredential, + subject(ProjectPermissionSub.DynamicSecrets, { environment: environmentSlug, secretPath: path }) + ); + ForbiddenError.from(permission).throwUnlessCan( + ProjectPermissionDynamicSecretActions.EditRootCredential, + subject(ProjectPermissionSub.DynamicSecrets, { environment: environmentSlug, secretPath: path }) ); const folder = await folderDAL.findBySecretPath(projectId, environmentSlug, path); - if (!folder) throw new BadRequestError({ message: "Folder not found" }); + if (!folder) + throw new NotFoundError({ message: `Folder with path '${path}' in environment '${environmentSlug}' not found` }); const dynamicSecretCfg = await dynamicSecretDAL.findOne({ name, folderId: folder.id }); - if (!dynamicSecretCfg) throw new BadRequestError({ message: "Dynamic secret not found" }); + if (!dynamicSecretCfg) { + throw new NotFoundError({ message: `Dynamic secret with name '${name}' in folder '${path}' not found` }); + } const decryptedStoredInput = JSON.parse( infisicalSymmetricDecrypt({ keyEncoding: dynamicSecretCfg.keyEncoding as SecretKeyEncoding, @@ -299,19 +324,62 @@ export const dynamicSecretServiceFactory = ({ return { ...dynamicSecretCfg, inputs: providerInputs }; }; - const list = async ({ + // get unique dynamic secret count across multiple envs + const getCountMultiEnv = async ({ actorAuthMethod, actorOrgId, actorId, actor, - projectSlug, + projectId, path, - environmentSlug + environmentSlugs, + search, + isInternal + }: TListDynamicSecretsMultiEnvDTO) => { + if (!isInternal) { + const { permission } = await permissionService.getProjectPermission( + actor, + actorId, + projectId, + actorAuthMethod, + actorOrgId + ); - const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId); - if (!project) throw new BadRequestError({ message: "Project not found" }); + // verify user has access to each env in request + environmentSlugs.forEach((environmentSlug) => + ForbiddenError.from(permission).throwUnlessCan( + ProjectPermissionDynamicSecretActions.ReadRootCredential, + subject(ProjectPermissionSub.DynamicSecrets, { environment: environmentSlug, secretPath: path }) + ) + ); + } - const projectId = project.id; + + const folders = await folderDAL.findBySecretPathMultiEnv(projectId, environmentSlugs, path); + if (!folders.length) { + throw new NotFoundError({ + message: `Folders with path '${path}' in environments with slugs '${environmentSlugs.join(", ")}' not found` + }); + } + + const dynamicSecretCfg = await dynamicSecretDAL.find( + { $in: { folderId: folders.map((folder) => folder.id) }, $search: search ? { name: `%${search}%` } : undefined }, + { countDistinct: "name" } + ); + + return Number(dynamicSecretCfg[0]?.count ??
0); + }; + + // get dynamic secret count for a single env + const getDynamicSecretCount = async ({ + actorAuthMethod, + actorOrgId, + actorId, + actor, + path, + environmentSlug, + search, + projectId + }: TGetDynamicSecretsCountDTO) => { const { permission } = await permissionService.getProjectPermission( actor, actorId, @@ -320,22 +388,180 @@ export const dynamicSecretServiceFactory = ({ actorOrgId ); ForbiddenError.from(permission).throwUnlessCan( - ProjectPermissionActions.Read, - subject(ProjectPermissionSub.Secrets, { environment: environmentSlug, secretPath: path }) + ProjectPermissionDynamicSecretActions.ReadRootCredential, + subject(ProjectPermissionSub.DynamicSecrets, { environment: environmentSlug, secretPath: path }) ); const folder = await folderDAL.findBySecretPath(projectId, environmentSlug, path); - if (!folder) throw new BadRequestError({ message: "Folder not found" }); + if (!folder) { + throw new NotFoundError({ message: `Folder with path '${path}' in environment '${environmentSlug}' not found` }); + } - const dynamicSecretCfg = await dynamicSecretDAL.find({ folderId: folder.id }); + const dynamicSecretCfg = await dynamicSecretDAL.find( + { folderId: folder.id, $search: search ? { name: `%${search}%` } : undefined }, + { count: true } + ); + return Number(dynamicSecretCfg[0]?.count ?? 0); + }; + + const listDynamicSecretsByEnv = async ({ + actorAuthMethod, + actorOrgId, + actorId, + actor, + projectSlug, + path, + environmentSlug, + limit, + offset, + orderBy, + orderDirection = OrderByDirection.ASC, + search, + ...params + }: TListDynamicSecretsDTO) => { + let { projectId } = params; + + if (!projectId) { + if (!projectSlug) throw new BadRequestError({ message: "Project ID or slug required" }); + const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId); + if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` }); + projectId = project.id; + } + + const { permission } = await permissionService.getProjectPermission( + actor, + actorId, + projectId, + actorAuthMethod, + actorOrgId + ); + ForbiddenError.from(permission).throwUnlessCan( + ProjectPermissionDynamicSecretActions.ReadRootCredential, + subject(ProjectPermissionSub.DynamicSecrets, { environment: environmentSlug, secretPath: path }) + ); + + const folder = await folderDAL.findBySecretPath(projectId, environmentSlug, path); + if (!folder) + throw new NotFoundError({ message: `Folder with path '${path}' in environment '${environmentSlug}' not found` }); + + const dynamicSecretCfg = await dynamicSecretDAL.find( + { folderId: folder.id, $search: search ? { name: `%${search}%` } : undefined }, + { + limit, + offset, + sort: orderBy ? 
[[orderBy, orderDirection]] : undefined + } + ); return dynamicSecretCfg; }; + const listDynamicSecretsByFolderIds = async ( + { folderMappings, filters, projectId }: TListDynamicSecretsByFolderMappingsDTO, + actor: OrgServiceActor + ) => { + const { permission } = await permissionService.getProjectPermission( + actor.type, + actor.id, + projectId, + actor.authMethod, + actor.orgId + ); + + const userAccessibleFolderMappings = folderMappings.filter(({ path, environment }) => + permission.can( + ProjectPermissionDynamicSecretActions.ReadRootCredential, + subject(ProjectPermissionSub.DynamicSecrets, { environment, secretPath: path }) + ) + ); + + const groupedFolderMappings = new Map(userAccessibleFolderMappings.map((path) => [path.folderId, path])); + + const dynamicSecrets = await dynamicSecretDAL.listDynamicSecretsByFolderIds({ + folderIds: userAccessibleFolderMappings.map(({ folderId }) => folderId), + ...filters + }); + + return dynamicSecrets.map((dynamicSecret) => { + const { environment, path } = groupedFolderMappings.get(dynamicSecret.folderId)!; + return { + ...dynamicSecret, + environment, + path + }; + }); + }; + + // get dynamic secrets for multiple envs + const listDynamicSecretsByEnvs = async ({ + actorAuthMethod, + actorOrgId, + actorId, + actor, + path, + environmentSlugs, + projectId, + isInternal, + ...params + }: TListDynamicSecretsMultiEnvDTO) => { + if (!isInternal) { + const { permission } = await permissionService.getProjectPermission( + actor, + actorId, + projectId, + actorAuthMethod, + actorOrgId + ); + + // verify user has access to each env in request + environmentSlugs.forEach((environmentSlug) => + ForbiddenError.from(permission).throwUnlessCan( + ProjectPermissionDynamicSecretActions.ReadRootCredential, + subject(ProjectPermissionSub.DynamicSecrets, { environment: environmentSlug, secretPath: path }) + ) + ); + } + + const folders = await folderDAL.findBySecretPathMultiEnv(projectId, environmentSlugs, path); + if (!folders.length) + throw new NotFoundError({ + message: `Folders with path '${path}' in environments with slugs '${environmentSlugs.join(", ")}' not found` + }); + + const dynamicSecretCfg = await dynamicSecretDAL.listDynamicSecretsByFolderIds({ + folderIds: folders.map((folder) => folder.id), + ...params + }); + + return dynamicSecretCfg; + }; + + const fetchAzureEntraIdUsers = async ({ + tenantId, + applicationId, + clientSecret + }: { + tenantId: string; + applicationId: string; + clientSecret: string; + }) => { + const azureEntraIdUsers = await AzureEntraIDProvider().fetchAzureEntraIdUsers( + tenantId, + applicationId, + clientSecret + ); + return azureEntraIdUsers; + }; + return { create, updateByName, deleteByName, getDetails, - list + listDynamicSecretsByEnv, + listDynamicSecretsByEnvs, + getDynamicSecretCount, + getCountMultiEnv, + fetchAzureEntraIdUsers, + listDynamicSecretsByFolderIds }; }; diff --git a/backend/src/ee/services/dynamic-secret/dynamic-secret-types.ts b/backend/src/ee/services/dynamic-secret/dynamic-secret-types.ts index 02f2cbb863..957d884c8d 100644 --- a/backend/src/ee/services/dynamic-secret/dynamic-secret-types.ts +++ b/backend/src/ee/services/dynamic-secret/dynamic-secret-types.ts @@ -1,6 +1,7 @@ import { z } from "zod"; -import { TProjectPermission } from "@app/lib/types"; +import { OrderByDirection, TProjectPermission } from "@app/lib/types"; +import { SecretsOrderBy } from "@app/services/secret/secret-types"; import { DynamicSecretProviderSchema } from "./models"; @@ -47,8 +48,33 @@ export type
TDetailsDynamicSecretDTO = { projectSlug: string; } & Omit; +export type ListDynamicSecretsFilters = { + offset?: number; + limit?: number; + orderBy?: SecretsOrderBy; + orderDirection?: OrderByDirection; + search?: string; +}; + export type TListDynamicSecretsDTO = { path: string; environmentSlug: string; - projectSlug: string; -} & Omit; + projectSlug?: string; + projectId?: string; +} & ListDynamicSecretsFilters & + Omit; + +export type TListDynamicSecretsByFolderMappingsDTO = { + projectId: string; + folderMappings: { folderId: string; path: string; environment: string }[]; + filters: ListDynamicSecretsFilters; +}; + +export type TListDynamicSecretsMultiEnvDTO = Omit< + TListDynamicSecretsDTO, + "projectId" | "environmentSlug" | "projectSlug" +> & { projectId: string; environmentSlugs: string[]; isInternal?: boolean }; + +export type TGetDynamicSecretsCountDTO = Omit & { + projectId: string; +}; diff --git a/backend/src/ee/services/dynamic-secret/providers/aws-elasticache.ts b/backend/src/ee/services/dynamic-secret/providers/aws-elasticache.ts new file mode 100644 index 0000000000..2cb8620296 --- /dev/null +++ b/backend/src/ee/services/dynamic-secret/providers/aws-elasticache.ts @@ -0,0 +1,226 @@ +import { + CreateUserCommand, + CreateUserGroupCommand, + DeleteUserCommand, + DescribeReplicationGroupsCommand, + DescribeUserGroupsCommand, + ElastiCache, + ModifyReplicationGroupCommand, + ModifyUserGroupCommand +} from "@aws-sdk/client-elasticache"; +import handlebars from "handlebars"; +import { customAlphabet } from "nanoid"; +import { z } from "zod"; + +import { BadRequestError } from "@app/lib/errors"; + +import { DynamicSecretAwsElastiCacheSchema, TDynamicProviderFns } from "./models"; + +const CreateElastiCacheUserSchema = z.object({ + UserId: z.string().trim().min(1), + UserName: z.string().trim().min(1), + Engine: z.string().default("redis"), + Passwords: z.array(z.string().trim().min(1)).min(1).max(1), // Minimum password length is 16 characters, required by AWS. 
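+ // Illustrative only: a creation statement template that parses against this schema
+ // once handlebars substitutes {{username}} and {{password}} (see create() below):
+ //   { "UserId": "{{username}}", "UserName": "{{username}}", "Engine": "redis",
+ //     "Passwords": ["{{password}}"], "AccessString": "on ~* +@all" }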
+ AccessString: z.string().trim().min(1) // Example: "on ~* +@all" +}); + +const DeleteElasticCacheUserSchema = z.object({ + UserId: z.string().trim().min(1) +}); + +type TElastiCacheRedisUser = { userId: string; password: string }; +type TBasicAWSCredentials = { accessKeyId: string; secretAccessKey: string }; + +type TCreateElastiCacheUserInput = z.infer; +type TDeleteElastiCacheUserInput = z.infer; + +const ElastiCacheUserManager = (credentials: TBasicAWSCredentials, region: string) => { + const elastiCache = new ElastiCache({ + region, + credentials + }); + const infisicalGroup = "infisical-managed-group-elasticache"; + + const ensureInfisicalGroupExists = async (clusterName: string) => { + const replicationGroups = await elastiCache.send(new DescribeUserGroupsCommand()); + + const existingGroup = replicationGroups.UserGroups?.find((group) => group.UserGroupId === infisicalGroup); + + let newlyCreatedGroup = false; + if (!existingGroup) { + const createGroupCommand = new CreateUserGroupCommand({ + UserGroupId: infisicalGroup, + UserIds: ["default"], + Engine: "redis" + }); + + await elastiCache.send(createGroupCommand); + newlyCreatedGroup = true; + } + + if (existingGroup || newlyCreatedGroup) { + const replicationGroup = ( + await elastiCache.send( + new DescribeReplicationGroupsCommand({ + ReplicationGroupId: clusterName + }) + ) + ).ReplicationGroups?.[0]; + + if (!replicationGroup?.UserGroupIds?.includes(infisicalGroup)) { + // If the replication group doesn't have the infisical user group, we need to associate it + const modifyGroupCommand = new ModifyReplicationGroupCommand({ + UserGroupIdsToAdd: [infisicalGroup], + UserGroupIdsToRemove: [], + ApplyImmediately: true, + ReplicationGroupId: clusterName + }); + await elastiCache.send(modifyGroupCommand); + } + } + }; + + const addUserToInfisicalGroup = async (userId: string) => { + // figure out if the default user is already in the group, if it is, then we shouldn't add it again + + const addUserToGroupCommand = new ModifyUserGroupCommand({ + UserGroupId: infisicalGroup, + UserIdsToAdd: [userId], + UserIdsToRemove: [] + }); + + await elastiCache.send(addUserToGroupCommand); + }; + + const createUser = async (creationInput: TCreateElastiCacheUserInput, clusterName: string) => { + await ensureInfisicalGroupExists(clusterName); + + await elastiCache.send(new CreateUserCommand(creationInput)); // First create the user + await addUserToInfisicalGroup(creationInput.UserId); // Then add the user to the group. 
We know the group is already a part of the cluster because of ensureInfisicalGroupExists() + + return { + userId: creationInput.UserId, + password: creationInput.Passwords[0] + }; + }; + + const deleteUser = async ( + deletionInput: TDeleteElastiCacheUserInput + ): Promise<Pick<TElastiCacheRedisUser, "userId">> => { + await elastiCache.send(new DeleteUserCommand(deletionInput)); + return { userId: deletionInput.UserId }; + }; + + const verifyCredentials = async (clusterName: string) => { + await elastiCache.send( + new DescribeReplicationGroupsCommand({ + ReplicationGroupId: clusterName + }) + ); + }; + + return { + createUser, + deleteUser, + verifyCredentials + }; +}; + +const generatePassword = () => { + const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#"; + return customAlphabet(charset, 64)(); +}; + +const generateUsername = () => { + const charset = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-"; + return `inf-${customAlphabet(charset, 32)()}`; // Username must start with an ASCII letter, so we prepend the username with "inf-" +}; + +export const AwsElastiCacheDatabaseProvider = (): TDynamicProviderFns => { + const validateProviderInputs = async (inputs: unknown) => { + const providerInputs = DynamicSecretAwsElastiCacheSchema.parse(inputs); + + // We need to ensure that the creation & revocation statements are valid and can be used to create and revoke users. + // We can't return the parsed statements here because we need to use the handlebars template to generate the username and password, before we can use the parsed statements. + CreateElastiCacheUserSchema.parse(JSON.parse(providerInputs.creationStatement)); + DeleteElasticCacheUserSchema.parse(JSON.parse(providerInputs.revocationStatement)); + + return providerInputs; + }; + const validateConnection = async (inputs: unknown) => { + const providerInputs = await validateProviderInputs(inputs); + await ElastiCacheUserManager( + {
accessKeyId: providerInputs.accessKeyId, + secretAccessKey: providerInputs.secretAccessKey + }, + providerInputs.region + ).deleteUser(parsedStatement); + + return { entityId }; + }; + + const renew = async (inputs: unknown, entityId: string) => { + // Do nothing + return { entityId }; + }; + + return { + validateProviderInputs, + validateConnection, + create, + revoke, + renew + }; +}; diff --git a/backend/src/ee/services/dynamic-secret/providers/azure-entra-id.ts b/backend/src/ee/services/dynamic-secret/providers/azure-entra-id.ts new file mode 100644 index 0000000000..e2dfe2d4b5 --- /dev/null +++ b/backend/src/ee/services/dynamic-secret/providers/azure-entra-id.ts @@ -0,0 +1,138 @@ +import axios from "axios"; +import { customAlphabet } from "nanoid"; + +import { BadRequestError } from "@app/lib/errors"; + +import { AzureEntraIDSchema, TDynamicProviderFns } from "./models"; + +const MSFT_GRAPH_API_URL = "https://graph.microsoft.com/v1.0/"; +const MSFT_LOGIN_URL = "https://login.microsoftonline.com"; + +const generatePassword = () => { + const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#"; + return customAlphabet(charset, 64)(); +}; + +type User = { name: string; id: string; email: string }; + +export const AzureEntraIDProvider = (): TDynamicProviderFns & { + fetchAzureEntraIdUsers: (tenantId: string, applicationId: string, clientSecret: string) => Promise<User[]>; +} => { + const validateProviderInputs = async (inputs: unknown) => { + const providerInputs = await AzureEntraIDSchema.parseAsync(inputs); + return providerInputs; + }; + + const getToken = async ( + tenantId: string, + applicationId: string, + clientSecret: string + ): Promise<{ token?: string; success: boolean }> => { + const response = await axios.post<{ access_token: string }>( + `${MSFT_LOGIN_URL}/${tenantId}/oauth2/v2.0/token`, + { + grant_type: "client_credentials", + client_id: applicationId, + client_secret: clientSecret, + scope: "https://graph.microsoft.com/.default" + }, + { + headers: { + "Content-Type": "application/x-www-form-urlencoded" + } + } + ); + + if (response.status === 200) { + return { token: response.data.access_token, success: true }; + } + return { success: false }; + }; + + const validateConnection = async (inputs: unknown) => { + const providerInputs = await validateProviderInputs(inputs); + const data = await getToken(providerInputs.tenantId, providerInputs.applicationId, providerInputs.clientSecret); + return data.success; + }; + + const renew = async (inputs: unknown, entityId: string) => { + // Do nothing + return { entityId }; + }; + + const create = async (inputs: unknown) => { + const providerInputs = await validateProviderInputs(inputs); + const data = await getToken(providerInputs.tenantId, providerInputs.applicationId, providerInputs.clientSecret); + if (!data.success) { + throw new BadRequestError({ message: "Failed to authorize to Microsoft Entra ID" }); + } + + const password = generatePassword(); + + const response = await axios.patch( + `${MSFT_GRAPH_API_URL}/users/${providerInputs.userId}`, + { + passwordProfile: { + forceChangePasswordNextSignIn: false, + password + } + }, + { + headers: { + "Content-Type": "application/json", + Authorization: `Bearer ${data.token}` + } + } + ); + if (response.status !== 204) { + throw new BadRequestError({ message: "Failed to update password" }); + } + + return { entityId: providerInputs.userId, data: { email: providerInputs.email, password } }; + }; + + const revoke = async (inputs: unknown, entityId: string) => {
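+ // Note: revoking an Entra ID lease does not delete anything; it rotates the user's
+ // password by calling create() again, which invalidates the previously issued credential.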
// Creates a new password + await create(inputs); + return { entityId }; + }; + + const fetchAzureEntraIdUsers = async (tenantId: string, applicationId: string, clientSecret: string) => { + const data = await getToken(tenantId, applicationId, clientSecret); + if (!data.success) { + throw new BadRequestError({ message: "Failed to authorize to Microsoft Entra ID" }); + } + + const response = await axios.get<{ value: [{ id: string; displayName: string; userPrincipalName: string }] }>( + `${MSFT_GRAPH_API_URL}/users`, + { + headers: { + "Content-Type": "application/x-www-form-urlencoded", + Authorization: `Bearer ${data.token}` + } + } + ); + + if (response.status !== 200) { + throw new BadRequestError({ message: "Failed to fetch users" }); + } + + const users = response.data.value.map((user) => { + return { + name: user.displayName, + id: user.id, + email: user.userPrincipalName + }; + }); + return users; + }; + + return { + validateProviderInputs, + validateConnection, + create, + revoke, + renew, + fetchAzureEntraIdUsers + }; +}; diff --git a/backend/src/ee/services/dynamic-secret/providers/elastic-search.ts b/backend/src/ee/services/dynamic-secret/providers/elastic-search.ts new file mode 100644 index 0000000000..bfe0ac4435 --- /dev/null +++ b/backend/src/ee/services/dynamic-secret/providers/elastic-search.ts @@ -0,0 +1,110 @@ +import { Client as ElasticSearchClient } from "@elastic/elasticsearch"; +import { customAlphabet } from "nanoid"; +import { z } from "zod"; + +import { alphaNumericNanoId } from "@app/lib/nanoid"; + +import { verifyHostInputValidity } from "../dynamic-secret-fns"; +import { DynamicSecretElasticSearchSchema, ElasticSearchAuthTypes, TDynamicProviderFns } from "./models"; + +const generatePassword = () => { + const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#"; + return customAlphabet(charset, 64)(); +}; + +const generateUsername = () => { + return alphaNumericNanoId(32); +}; + +export const ElasticSearchProvider = (): TDynamicProviderFns => { + const validateProviderInputs = async (inputs: unknown) => { + const providerInputs = await DynamicSecretElasticSearchSchema.parseAsync(inputs); + verifyHostInputValidity(providerInputs.host); + + return providerInputs; + }; + + const getClient = async (providerInputs: z.infer<typeof DynamicSecretElasticSearchSchema>) => { + const connection = new ElasticSearchClient({ + node: { + url: new URL(`${providerInputs.host}:${providerInputs.port}`), + ...(providerInputs.ca && { + ssl: { + rejectUnauthorized: false, + ca: providerInputs.ca + } + }) + }, + auth: { + ...(providerInputs.auth.type === ElasticSearchAuthTypes.ApiKey + ?
{ + apiKey: { + api_key: providerInputs.auth.apiKey, + id: providerInputs.auth.apiKeyId + } + } + : { + username: providerInputs.auth.username, + password: providerInputs.auth.password + }) + } + }); + + return connection; + }; + + const validateConnection = async (inputs: unknown) => { + const providerInputs = await validateProviderInputs(inputs); + const connection = await getClient(providerInputs); + + const infoResponse = await connection + .info() + .then(() => true) + .catch(() => false); + + return infoResponse; + }; + + const create = async (inputs: unknown) => { + const providerInputs = await validateProviderInputs(inputs); + const connection = await getClient(providerInputs); + + const username = generateUsername(); + const password = generatePassword(); + + await connection.security.putUser({ + username, + password, + full_name: "Managed by Infisical.com", + roles: providerInputs.roles + }); + + await connection.close(); + return { entityId: username, data: { DB_USERNAME: username, DB_PASSWORD: password } }; + }; + + const revoke = async (inputs: unknown, entityId: string) => { + const providerInputs = await validateProviderInputs(inputs); + const connection = await getClient(providerInputs); + + await connection.security.deleteUser({ + username: entityId + }); + + await connection.close(); + return { entityId }; + }; + + const renew = async (inputs: unknown, entityId: string) => { + // Do nothing + return { entityId }; + }; + + return { + validateProviderInputs, + validateConnection, + create, + revoke, + renew + }; +}; diff --git a/backend/src/ee/services/dynamic-secret/providers/index.ts b/backend/src/ee/services/dynamic-secret/providers/index.ts index beb6c428e3..f709853796 100644 --- a/backend/src/ee/services/dynamic-secret/providers/index.ts +++ b/backend/src/ee/services/dynamic-secret/providers/index.ts @@ -1,10 +1,31 @@ +import { SnowflakeProvider } from "@app/ee/services/dynamic-secret/providers/snowflake"; + +import { AwsElastiCacheDatabaseProvider } from "./aws-elasticache"; import { AwsIamProvider } from "./aws-iam"; +import { AzureEntraIDProvider } from "./azure-entra-id"; import { CassandraProvider } from "./cassandra"; +import { ElasticSearchProvider } from "./elastic-search"; +import { LdapProvider } from "./ldap"; import { DynamicSecretProviders } from "./models"; +import { MongoAtlasProvider } from "./mongo-atlas"; +import { MongoDBProvider } from "./mongo-db"; +import { RabbitMqProvider } from "./rabbit-mq"; +import { RedisDatabaseProvider } from "./redis"; +import { SapHanaProvider } from "./sap-hana"; import { SqlDatabaseProvider } from "./sql-database"; export const buildDynamicSecretProviders = () => ({ [DynamicSecretProviders.SqlDatabase]: SqlDatabaseProvider(), [DynamicSecretProviders.Cassandra]: CassandraProvider(), - [DynamicSecretProviders.AwsIam]: AwsIamProvider() + [DynamicSecretProviders.AwsIam]: AwsIamProvider(), + [DynamicSecretProviders.Redis]: RedisDatabaseProvider(), + [DynamicSecretProviders.AwsElastiCache]: AwsElastiCacheDatabaseProvider(), + [DynamicSecretProviders.MongoAtlas]: MongoAtlasProvider(), + [DynamicSecretProviders.MongoDB]: MongoDBProvider(), + [DynamicSecretProviders.ElasticSearch]: ElasticSearchProvider(), + [DynamicSecretProviders.RabbitMq]: RabbitMqProvider(), + [DynamicSecretProviders.AzureEntraID]: AzureEntraIDProvider(), + [DynamicSecretProviders.Ldap]: LdapProvider(), + [DynamicSecretProviders.SapHana]: SapHanaProvider(), + [DynamicSecretProviders.Snowflake]: SnowflakeProvider() }); diff --git 
a/backend/src/ee/services/dynamic-secret/providers/ldap.ts b/backend/src/ee/services/dynamic-secret/providers/ldap.ts new file mode 100644 index 0000000000..f94e616292 --- /dev/null +++ b/backend/src/ee/services/dynamic-secret/providers/ldap.ts @@ -0,0 +1,282 @@ +import handlebars from "handlebars"; +import ldapjs from "ldapjs"; +import ldif from "ldif"; +import { customAlphabet } from "nanoid"; +import { z } from "zod"; + +import { BadRequestError } from "@app/lib/errors"; +import { alphaNumericNanoId } from "@app/lib/nanoid"; + +import { LdapCredentialType, LdapSchema, TDynamicProviderFns } from "./models"; + +const generatePassword = () => { + const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#"; + return customAlphabet(charset, 64)(); +}; + +const encodePassword = (password?: string) => { + const quotedPassword = `"${password}"`; + const utf16lePassword = Buffer.from(quotedPassword, "utf16le"); + const base64Password = utf16lePassword.toString("base64"); + return base64Password; +}; + +const generateUsername = () => { + return alphaNumericNanoId(20); +}; + +const generateLDIF = ({ + username, + password, + ldifTemplate +}: { + username: string; + password?: string; + ldifTemplate: string; +}): string => { + const data = { + Username: username, + Password: password, + EncodedPassword: encodePassword(password) + }; + + const renderTemplate = handlebars.compile(ldifTemplate); + const renderedLdif = renderTemplate(data); + + return renderedLdif; +}; + +export const LdapProvider = (): TDynamicProviderFns => { + const validateProviderInputs = async (inputs: unknown) => { + const providerInputs = await LdapSchema.parseAsync(inputs); + return providerInputs; + }; + + const getClient = async (providerInputs: z.infer<typeof LdapSchema>): Promise<ldapjs.Client> => { + return new Promise((resolve, reject) => { + const client = ldapjs.createClient({ + url: providerInputs.url, + tlsOptions: { + ca: providerInputs.ca ? providerInputs.ca : null, + rejectUnauthorized: !!providerInputs.ca + }, + reconnect: true, + bindDN: providerInputs.binddn, + bindCredentials: providerInputs.bindpass + }); + + client.on("error", (err: Error) => { + client.unbind(); + reject(new BadRequestError({ message: err.message })); + }); + + client.bind(providerInputs.binddn, providerInputs.bindpass, (err) => { + if (err) { + client.unbind(); + reject(new BadRequestError({ message: err.message })); + } else { + resolve(client); + } + }); + }); + }; + + const validateConnection = async (inputs: unknown) => { + const providerInputs = await validateProviderInputs(inputs); + const client = await getClient(providerInputs); + return client.connected; + }; + + const executeLdif = async (client: ldapjs.Client, ldif_file: string) => { + type TEntry = { + dn: string; + type: string; + + changes: { + operation?: string; + attribute: { + attribute: string; + }; + value: { + value: string; + }; + values: { + // eslint-disable-next-line @typescript-eslint/no-explicit-any -- Untyped, can be any for ldapjs.Change.modification.values + value: any; + }[]; + }[]; + }; + + let parsedEntries: TEntry[]; + + try { + // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access + parsedEntries = ldif.parse(ldif_file).entries as TEntry[]; + } catch (err) { + throw new BadRequestError({ + message: "Invalid LDIF format, refer to the documentation at Dynamic secrets > LDAP > LDIF Entries."
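+ // For reference, a minimal LDIF entry this parser accepts might look like the
+ // following (DN and attributes are illustrative; {{Username}} and {{Password}} are
+ // the handlebars variables supplied by generateLDIF):
+ //   dn: uid={{Username}},ou=users,dc=example,dc=com
+ //   changetype: add
+ //   objectClass: inetOrgPerson
+ //   cn: {{Username}}
+ //   sn: {{Username}}
+ //   uid: {{Username}}
+ //   userPassword: {{Password}}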
+
+    const dnArray: string[] = [];
+
+    for await (const entry of parsedEntries) {
+      const { dn } = entry;
+      let responseDn: string;
+
+      if (entry.type === "add") {
+        const attributes: Record<string, string | string[]> = {};
+
+        entry.changes.forEach((change) => {
+          const attrName = change.attribute.attribute;
+          const attrValue = change.value.value;
+
+          attributes[attrName] = Array.isArray(attrValue) ? attrValue : [attrValue];
+        });
+
+        responseDn = await new Promise<string>((resolve, reject) => {
+          client.add(dn, attributes, (err) => {
+            if (err) {
+              reject(new BadRequestError({ message: err.message }));
+            } else {
+              resolve(dn);
+            }
+          });
+        });
+      } else if (entry.type === "modify") {
+        const changes: ldapjs.Change[] = [];
+
+        entry.changes.forEach((change) => {
+          changes.push(
+            new ldapjs.Change({
+              operation: change.operation || "replace",
+              modification: {
+                type: change.attribute.attribute,
+                // eslint-disable-next-line @typescript-eslint/no-unsafe-return
+                values: change.values.map((value) => value.value)
+              }
+            })
+          );
+        });
+
+        responseDn = await new Promise<string>((resolve, reject) => {
+          client.modify(dn, changes, (err) => {
+            if (err) {
+              reject(new BadRequestError({ message: err.message }));
+            } else {
+              resolve(dn);
+            }
+          });
+        });
+      } else if (entry.type === "delete") {
+        responseDn = await new Promise<string>((resolve, reject) => {
+          client.del(dn, (err) => {
+            if (err) {
+              reject(new BadRequestError({ message: err.message }));
+            } else {
+              resolve(dn);
+            }
+          });
+        });
+      } else {
+        client.unbind();
+        throw new BadRequestError({ message: `Unsupported operation type ${entry.type}` });
+      }
+
+      dnArray.push(responseDn);
+    }
+    client.unbind();
+    return dnArray;
+  };
+
+  const create = async (inputs: unknown) => {
+    const providerInputs = await validateProviderInputs(inputs);
+    const client = await getClient(providerInputs);
+
+    if (providerInputs.credentialType === LdapCredentialType.Static) {
+      const dnMatch = providerInputs.rotationLdif.match(/^dn:\s*(.+)/m);
+
+      if (dnMatch) {
+        const username = dnMatch[1];
+        const password = generatePassword();
+
+        const generatedLdif = generateLDIF({ username, password, ldifTemplate: providerInputs.rotationLdif });
+
+        try {
+          const dnArray = await executeLdif(client, generatedLdif);
+
+          return { entityId: username, data: { DN_ARRAY: dnArray, USERNAME: username, PASSWORD: password } };
+        } catch (err) {
+          throw new BadRequestError({ message: (err as Error).message });
+        }
+      } else {
+        throw new BadRequestError({
+          message: "Invalid rotation LDIF, missing DN."
+ }); + } + } else { + const username = generateUsername(); + const password = generatePassword(); + const generatedLdif = generateLDIF({ username, password, ldifTemplate: providerInputs.creationLdif }); + + try { + const dnArray = await executeLdif(client, generatedLdif); + + return { entityId: username, data: { DN_ARRAY: dnArray, USERNAME: username, PASSWORD: password } }; + } catch (err) { + if (providerInputs.rollbackLdif) { + const rollbackLdif = generateLDIF({ username, password, ldifTemplate: providerInputs.rollbackLdif }); + await executeLdif(client, rollbackLdif); + } + throw new BadRequestError({ message: (err as Error).message }); + } + } + }; + + const revoke = async (inputs: unknown, entityId: string) => { + const providerInputs = await validateProviderInputs(inputs); + const client = await getClient(providerInputs); + + if (providerInputs.credentialType === LdapCredentialType.Static) { + const dnMatch = providerInputs.rotationLdif.match(/^dn:\s*(.+)/m); + + if (dnMatch) { + const username = dnMatch[1]; + const password = generatePassword(); + + const generatedLdif = generateLDIF({ username, password, ldifTemplate: providerInputs.rotationLdif }); + + try { + const dnArray = await executeLdif(client, generatedLdif); + + return { entityId: username, data: { DN_ARRAY: dnArray, USERNAME: username, PASSWORD: password } }; + } catch (err) { + throw new BadRequestError({ message: (err as Error).message }); + } + } else { + throw new BadRequestError({ + message: "Invalid rotation LDIF, missing DN." + }); + } + } + + const revocationLdif = generateLDIF({ username: entityId, ldifTemplate: providerInputs.revocationLdif }); + + await executeLdif(client, revocationLdif); + + return { entityId }; + }; + + const renew = async (inputs: unknown, entityId: string) => { + // Do nothing + return { entityId }; + }; + + return { + validateProviderInputs, + validateConnection, + create, + revoke, + renew + }; +}; diff --git a/backend/src/ee/services/dynamic-secret/providers/models.ts b/backend/src/ee/services/dynamic-secret/providers/models.ts index c11f6ddfb3..d98215fd49 100644 --- a/backend/src/ee/services/dynamic-secret/providers/models.ts +++ b/backend/src/ee/services/dynamic-secret/providers/models.ts @@ -3,9 +3,84 @@ import { z } from "zod"; export enum SqlProviders { Postgres = "postgres", MySQL = "mysql2", - Oracle = "oracledb" + Oracle = "oracledb", + MsSQL = "mssql" } +export enum ElasticSearchAuthTypes { + User = "user", + ApiKey = "api-key" +} + +export enum LdapCredentialType { + Dynamic = "dynamic", + Static = "static" +} + +export const DynamicSecretRedisDBSchema = z.object({ + host: z.string().trim().toLowerCase(), + port: z.number(), + username: z.string().trim(), // this is often "default". 
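+  // The statements below are Handlebars templates rendered with {{username}},
+  // {{password}} and {{expiration}} at lease time, then executed as a
+  // ";"-separated list of Redis commands (see redis.ts).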
+ password: z.string().trim().optional(), + creationStatement: z.string().trim(), + revocationStatement: z.string().trim(), + renewStatement: z.string().trim().optional(), + ca: z.string().optional() +}); + +export const DynamicSecretAwsElastiCacheSchema = z.object({ + clusterName: z.string().trim().min(1), + accessKeyId: z.string().trim().min(1), + secretAccessKey: z.string().trim().min(1), + + region: z.string().trim(), + creationStatement: z.string().trim(), + revocationStatement: z.string().trim(), + ca: z.string().optional() +}); + +export const DynamicSecretElasticSearchSchema = z.object({ + host: z.string().trim().min(1), + port: z.number(), + roles: z.array(z.string().trim().min(1)).min(1), + + // two auth types "user, apikey" + auth: z.discriminatedUnion("type", [ + z.object({ + type: z.literal(ElasticSearchAuthTypes.User), + username: z.string().trim(), + password: z.string().trim() + }), + z.object({ + type: z.literal(ElasticSearchAuthTypes.ApiKey), + apiKey: z.string().trim(), + apiKeyId: z.string().trim() + }) + ]), + + ca: z.string().optional() +}); + +export const DynamicSecretRabbitMqSchema = z.object({ + host: z.string().trim().min(1), + port: z.number(), + tags: z.array(z.string().trim()).default([]), + + username: z.string().trim().min(1), + password: z.string().trim().min(1), + + ca: z.string().optional(), + + virtualHost: z.object({ + name: z.string().trim().min(1), + permissions: z.object({ + read: z.string().trim().min(1), + write: z.string().trim().min(1), + configure: z.string().trim().min(1) + }) + }) +}); + export const DynamicSecretSqlDBSchema = z.object({ client: z.nativeEnum(SqlProviders), host: z.string().trim().toLowerCase(), @@ -43,16 +118,139 @@ export const DynamicSecretAwsIamSchema = z.object({ policyArns: z.string().trim().optional() }); +export const DynamicSecretMongoAtlasSchema = z.object({ + adminPublicKey: z.string().trim().min(1).describe("Admin user public api key"), + adminPrivateKey: z.string().trim().min(1).describe("Admin user private api key"), + groupId: z + .string() + .trim() + .min(1) + .describe("Unique 24-hexadecimal digit string that identifies your project. This is same as project id"), + roles: z + .object({ + collectionName: z.string().optional().describe("Collection on which this role applies."), + databaseName: z.string().min(1).describe("Database to which the user is granted access privileges."), + roleName: z + .string() + .min(1) + .describe( + ' Enum: "atlasAdmin" "backup" "clusterMonitor" "dbAdmin" "dbAdminAnyDatabase" "enableSharding" "read" "readAnyDatabase" "readWrite" "readWriteAnyDatabase" "".Human-readable label that identifies a group of privileges assigned to a database user. This value can either be a built-in role or a custom role.' + ) + }) + .array() + .min(1), + scopes: z + .object({ + name: z + .string() + .min(1) + .describe( + "Human-readable label that identifies the cluster or MongoDB Atlas Data Lake that this database user can access." + ), + type: z + .string() + .min(1) + .describe("Category of resource that this database user can access. 
Enum: CLUSTER, DATA_LAKE, STREAM") + }) + .array() +}); + +export const DynamicSecretMongoDBSchema = z.object({ + host: z.string().min(1).trim().toLowerCase(), + port: z.number().optional(), + username: z.string().min(1).trim(), + password: z.string().min(1).trim(), + database: z.string().min(1).trim(), + ca: z.string().min(1).optional(), + roles: z + .string() + .array() + .min(1) + .describe( + 'Enum: "atlasAdmin" "backup" "clusterMonitor" "dbAdmin" "dbAdminAnyDatabase" "enableSharding" "read" "readAnyDatabase" "readWrite" "readWriteAnyDatabase" "".Human-readable label that identifies a group of privileges assigned to a database user. This value can either be a built-in role or a custom role.' + ) +}); + +export const DynamicSecretSapHanaSchema = z.object({ + host: z.string().trim().toLowerCase(), + port: z.number(), + username: z.string().trim(), + password: z.string().trim(), + creationStatement: z.string().trim(), + revocationStatement: z.string().trim(), + renewStatement: z.string().trim().optional(), + ca: z.string().optional() +}); + +export const DynamicSecretSnowflakeSchema = z.object({ + accountId: z.string().trim().min(1), + orgId: z.string().trim().min(1), + username: z.string().trim().min(1), + password: z.string().trim().min(1), + creationStatement: z.string().trim().min(1), + revocationStatement: z.string().trim().min(1), + renewStatement: z.string().trim().optional() +}); + +export const AzureEntraIDSchema = z.object({ + tenantId: z.string().trim().min(1), + userId: z.string().trim().min(1), + email: z.string().trim().min(1), + applicationId: z.string().trim().min(1), + clientSecret: z.string().trim().min(1) +}); + +export const LdapSchema = z.union([ + z.object({ + url: z.string().trim().min(1), + binddn: z.string().trim().min(1), + bindpass: z.string().trim().min(1), + ca: z.string().optional(), + credentialType: z.literal(LdapCredentialType.Dynamic).optional().default(LdapCredentialType.Dynamic), + creationLdif: z.string().min(1), + revocationLdif: z.string().min(1), + rollbackLdif: z.string().optional() + }), + z.object({ + url: z.string().trim().min(1), + binddn: z.string().trim().min(1), + bindpass: z.string().trim().min(1), + ca: z.string().optional(), + credentialType: z.literal(LdapCredentialType.Static), + rotationLdif: z.string().min(1) + }) +]); + export enum DynamicSecretProviders { SqlDatabase = "sql-database", Cassandra = "cassandra", - AwsIam = "aws-iam" + AwsIam = "aws-iam", + Redis = "redis", + AwsElastiCache = "aws-elasticache", + MongoAtlas = "mongo-db-atlas", + ElasticSearch = "elastic-search", + MongoDB = "mongo-db", + RabbitMq = "rabbit-mq", + AzureEntraID = "azure-entra-id", + Ldap = "ldap", + SapHana = "sap-hana", + Snowflake = "snowflake" } export const DynamicSecretProviderSchema = z.discriminatedUnion("type", [ z.object({ type: z.literal(DynamicSecretProviders.SqlDatabase), inputs: DynamicSecretSqlDBSchema }), z.object({ type: z.literal(DynamicSecretProviders.Cassandra), inputs: DynamicSecretCassandraSchema }), - z.object({ type: z.literal(DynamicSecretProviders.AwsIam), inputs: DynamicSecretAwsIamSchema }) + z.object({ type: z.literal(DynamicSecretProviders.AwsIam), inputs: DynamicSecretAwsIamSchema }), + z.object({ type: z.literal(DynamicSecretProviders.Redis), inputs: DynamicSecretRedisDBSchema }), + z.object({ type: z.literal(DynamicSecretProviders.SapHana), inputs: DynamicSecretSapHanaSchema }), + z.object({ type: z.literal(DynamicSecretProviders.AwsElastiCache), inputs: DynamicSecretAwsElastiCacheSchema }), + z.object({ type: 
z.literal(DynamicSecretProviders.MongoAtlas), inputs: DynamicSecretMongoAtlasSchema }),
+  z.object({ type: z.literal(DynamicSecretProviders.ElasticSearch), inputs: DynamicSecretElasticSearchSchema }),
+  z.object({ type: z.literal(DynamicSecretProviders.MongoDB), inputs: DynamicSecretMongoDBSchema }),
+  z.object({ type: z.literal(DynamicSecretProviders.RabbitMq), inputs: DynamicSecretRabbitMqSchema }),
+  z.object({ type: z.literal(DynamicSecretProviders.AzureEntraID), inputs: AzureEntraIDSchema }),
+  z.object({ type: z.literal(DynamicSecretProviders.Ldap), inputs: LdapSchema }),
+  z.object({ type: z.literal(DynamicSecretProviders.Snowflake), inputs: DynamicSecretSnowflakeSchema })
 ]);

 export type TDynamicProviderFns = {
diff --git a/backend/src/ee/services/dynamic-secret/providers/mongo-atlas.ts b/backend/src/ee/services/dynamic-secret/providers/mongo-atlas.ts
new file mode 100644
index 0000000000..69f54ce773
--- /dev/null
+++ b/backend/src/ee/services/dynamic-secret/providers/mongo-atlas.ts
@@ -0,0 +1,146 @@
+import axios, { AxiosError } from "axios";
+import { customAlphabet } from "nanoid";
+import { z } from "zod";
+
+import { createDigestAuthRequestInterceptor } from "@app/lib/axios/digest-auth";
+import { alphaNumericNanoId } from "@app/lib/nanoid";
+
+import { DynamicSecretMongoAtlasSchema, TDynamicProviderFns } from "./models";
+
+const generatePassword = (size = 48) => {
+  const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
+  return customAlphabet(charset, 48)(size);
+};
+
+const generateUsername = () => {
+  return alphaNumericNanoId(32);
+};
+
+export const MongoAtlasProvider = (): TDynamicProviderFns => {
+  const validateProviderInputs = async (inputs: unknown) => {
+    const providerInputs = await DynamicSecretMongoAtlasSchema.parseAsync(inputs);
+    return providerInputs;
+  };
+
+  const getClient = async (providerInputs: z.infer<typeof DynamicSecretMongoAtlasSchema>) => {
+    const client = axios.create({
+      baseURL: "https://cloud.mongodb.com/api/atlas",
+      headers: {
+        Accept: "application/vnd.atlas.2023-02-01+json",
+        "Content-Type": "application/json"
+      }
+    });
+    const digestAuth = createDigestAuthRequestInterceptor(
+      client,
+      providerInputs.adminPublicKey,
+      providerInputs.adminPrivateKey
+    );
+    return digestAuth;
+  };
+
+  const validateConnection = async (inputs: unknown) => {
+    const providerInputs = await validateProviderInputs(inputs);
+    const client = await getClient(providerInputs);
+
+    const isConnected = await client({
+      method: "GET",
+      url: `v2/groups/${providerInputs.groupId}/databaseUsers`,
+      params: { itemsPerPage: 1 }
+    })
+      .then(() => true)
+      .catch((error) => {
+        if ((error as AxiosError).response) {
+          throw new Error(JSON.stringify((error as AxiosError).response?.data));
+        }
+        throw error;
+      });
+    return isConnected;
+  };
+
+  const create = async (inputs: unknown, expireAt: number) => {
+    const providerInputs = await validateProviderInputs(inputs);
+    const client = await getClient(providerInputs);
+
+    const username = generateUsername();
+    const password = generatePassword();
+    const expiration = new Date(expireAt).toISOString();
+    await client({
+      method: "POST",
+      url: `/v2/groups/${providerInputs.groupId}/databaseUsers`,
+      data: {
+        roles: providerInputs.roles,
+        scopes: providerInputs.scopes,
+        deleteAfterDate: expiration,
+        username,
+        password,
+        databaseName: "admin",
+        groupId: providerInputs.groupId
+      }
+    }).catch((error) => {
+      if ((error as AxiosError).response) {
+        throw new Error(JSON.stringify((error as AxiosError).response?.data));
+      }
+      throw error;
+    });
+    return { entityId: username, data: { DB_USERNAME: username, DB_PASSWORD: password } };
+  };
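+
+  // Illustrative role/scope inputs (example values; shapes follow
+  // DynamicSecretMongoAtlasSchema in models.ts):
+  //   roles:  [{ databaseName: "admin", roleName: "readWriteAnyDatabase" }]
+  //   scopes: [{ name: "Cluster0", type: "CLUSTER" }]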
+
+  const revoke = async (inputs: unknown, entityId: string) => {
+    const providerInputs = await validateProviderInputs(inputs);
+    const client = await getClient(providerInputs);
+
+    const username = entityId;
+    const isExisting = await client({
+      method: "GET",
+      url: `/v2/groups/${providerInputs.groupId}/databaseUsers/admin/${username}`
+    }).catch((err) => {
+      if ((err as AxiosError).response?.status === 404) return false;
+      throw err;
+    });
+    if (isExisting) {
+      await client({
+        method: "DELETE",
+        url: `/v2/groups/${providerInputs.groupId}/databaseUsers/admin/${username}`
+      }).catch((error) => {
+        if ((error as AxiosError).response) {
+          throw new Error(JSON.stringify((error as AxiosError).response?.data));
+        }
+        throw error;
+      });
+    }
+
+    return { entityId: username };
+  };
+
+  const renew = async (inputs: unknown, entityId: string, expireAt: number) => {
+    const providerInputs = await validateProviderInputs(inputs);
+    const client = await getClient(providerInputs);
+
+    const username = entityId;
+    const expiration = new Date(expireAt).toISOString();
+
+    await client({
+      method: "PATCH",
+      url: `/v2/groups/${providerInputs.groupId}/databaseUsers/admin/${username}`,
+      data: {
+        deleteAfterDate: expiration,
+        databaseName: "admin",
+        groupId: providerInputs.groupId
+      }
+    }).catch((error) => {
+      if ((error as AxiosError).response) {
+        throw new Error(JSON.stringify((error as AxiosError).response?.data));
+      }
+      throw error;
+    });
+    return { entityId: username };
+  };
+
+  return {
+    validateProviderInputs,
+    validateConnection,
+    create,
+    revoke,
+    renew
+  };
+};
diff --git a/backend/src/ee/services/dynamic-secret/providers/mongo-db.ts b/backend/src/ee/services/dynamic-secret/providers/mongo-db.ts
new file mode 100644
index 0000000000..b824f5aa86
--- /dev/null
+++ b/backend/src/ee/services/dynamic-secret/providers/mongo-db.ts
@@ -0,0 +1,101 @@
+import { MongoClient } from "mongodb";
+import { customAlphabet } from "nanoid";
+import { z } from "zod";
+
+import { alphaNumericNanoId } from "@app/lib/nanoid";
+
+import { verifyHostInputValidity } from "../dynamic-secret-fns";
+import { DynamicSecretMongoDBSchema, TDynamicProviderFns } from "./models";
+
+const generatePassword = (size = 48) => {
+  const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
+  return customAlphabet(charset, 48)(size);
+};
+
+const generateUsername = () => {
+  return alphaNumericNanoId(32);
+};
+
+export const MongoDBProvider = (): TDynamicProviderFns => {
+  const validateProviderInputs = async (inputs: unknown) => {
+    const providerInputs = await DynamicSecretMongoDBSchema.parseAsync(inputs);
+    verifyHostInputValidity(providerInputs.host);
+    return providerInputs;
+  };
+
+  const getClient = async (providerInputs: z.infer<typeof DynamicSecretMongoDBSchema>) => {
+    const isSrv = !providerInputs.port;
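+    // A host given without a port is treated as a DNS SRV seed list
+    // (mongodb+srv://), e.g. an Atlas-style cluster address; otherwise a
+    // direct host:port connection is made.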
+    const uri = isSrv ? `mongodb+srv://${providerInputs.host}` : `mongodb://${providerInputs.host}:${providerInputs.port}`;
+
+    const client = new MongoClient(uri, {
+      auth: {
+        username: providerInputs.username,
+        password: providerInputs.password
+      },
+      directConnection: !isSrv,
+      ca: providerInputs.ca
+    });
+    return client;
+  };
+
+  const validateConnection = async (inputs: unknown) => {
+    const providerInputs = await validateProviderInputs(inputs);
+    const client = await getClient(providerInputs);
+
+    const isConnected = await client
+      .db(providerInputs.database)
+      .command({ ping: 1 })
+      .then(() => true);
+
+    await client.close();
+    return isConnected;
+  };
+
+  const create = async (inputs: unknown) => {
+    const providerInputs = await validateProviderInputs(inputs);
+    const client = await getClient(providerInputs);
+
+    const username = generateUsername();
+    const password = generatePassword();
+
+    const db = client.db(providerInputs.database);
+
+    await db.command({
+      createUser: username,
+      pwd: password,
+      roles: providerInputs.roles
+    });
+    await client.close();
+
+    return { entityId: username, data: { DB_USERNAME: username, DB_PASSWORD: password } };
+  };
+
+  const revoke = async (inputs: unknown, entityId: string) => {
+    const providerInputs = await validateProviderInputs(inputs);
+    const client = await getClient(providerInputs);
+
+    const username = entityId;
+
+    const db = client.db(providerInputs.database);
+    await db.command({
+      dropUser: username
+    });
+    await client.close();
+
+    return { entityId: username };
+  };
+
+  const renew = async (_inputs: unknown, entityId: string) => {
+    return { entityId };
+  };
+
+  return {
+    validateProviderInputs,
+    validateConnection,
+    create,
+    revoke,
+    renew
+  };
+};
diff --git a/backend/src/ee/services/dynamic-secret/providers/rabbit-mq.ts b/backend/src/ee/services/dynamic-secret/providers/rabbit-mq.ts
new file mode 100644
index 0000000000..00d3b538fc
--- /dev/null
+++ b/backend/src/ee/services/dynamic-secret/providers/rabbit-mq.ts
@@ -0,0 +1,156 @@
+import axios, { Axios } from "axios";
+import https from "https";
+import { customAlphabet } from "nanoid";
+import { z } from "zod";
+
+import { removeTrailingSlash } from "@app/lib/fn";
+import { logger } from "@app/lib/logger";
+import { alphaNumericNanoId } from "@app/lib/nanoid";
+
+import { verifyHostInputValidity } from "../dynamic-secret-fns";
+import { DynamicSecretRabbitMqSchema, TDynamicProviderFns } from "./models";
+
+const generatePassword = () => {
+  const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
+  return customAlphabet(charset, 64)();
+};
+
+const generateUsername = () => {
+  return alphaNumericNanoId(32);
+};
+
+type TCreateRabbitMQUser = {
+  axiosInstance: Axios;
+  createUser: {
+    username: string;
+    password: string;
+    tags: string[];
+  };
+  virtualHost: {
+    name: string;
+    permissions: {
+      read: string;
+      write: string;
+      configure: string;
+    };
+  };
+};
+
+type TDeleteRabbitMqUser = {
+  axiosInstance: Axios;
+  usernameToDelete: string;
+};
+
+async function createRabbitMqUser({ axiosInstance, createUser, virtualHost }: TCreateRabbitMQUser): Promise<void> {
+  try {
+    // Create user
+    const userUrl = `/users/${createUser.username}`;
+    const userData = {
+      password: createUser.password,
+      tags: createUser.tags.join(",")
+    };
+
+    await axiosInstance.put(userUrl, userData);
+
+    // Set permissions for the virtual host
+    if (virtualHost) {
+      const permissionData = {
+        configure: virtualHost.permissions.configure,
+        write: virtualHost.permissions.write,
+        read: virtualHost.permissions.read
+      };
+
+      await axiosInstance.put(
+        `/permissions/${encodeURIComponent(virtualHost.name)}/${createUser.username}`,
+        permissionData
+      );
+    }
+  } catch (error) {
+    logger.error(error, "Error creating RabbitMQ user");
+    throw error;
+  }
+}
+
+async function deleteRabbitMqUser({ axiosInstance, usernameToDelete }: TDeleteRabbitMqUser) {
+  await axiosInstance.delete(`users/${usernameToDelete}`);
+  return { username: usernameToDelete };
+}
+
+export const RabbitMqProvider = (): TDynamicProviderFns => {
+  const validateProviderInputs = async (inputs: unknown) => {
+    const providerInputs = await DynamicSecretRabbitMqSchema.parseAsync(inputs);
+    verifyHostInputValidity(providerInputs.host);
+
+    return providerInputs;
+  };
+
+  const getClient = async (providerInputs: z.infer<typeof DynamicSecretRabbitMqSchema>) => {
+    const axiosInstance = axios.create({
+      baseURL: `${removeTrailingSlash(providerInputs.host)}:${providerInputs.port}/api`,
+      auth: {
+        username: providerInputs.username,
+        password: providerInputs.password
+      },
+      headers: {
+        "Content-Type": "application/json"
+      },
+
+      ...(providerInputs.ca && {
+        httpsAgent: new https.Agent({ ca: providerInputs.ca, rejectUnauthorized: false })
+      })
+    });
+
+    return axiosInstance;
+  };
+
+  const validateConnection = async (inputs: unknown) => {
+    const providerInputs = await validateProviderInputs(inputs);
+    const connection = await getClient(providerInputs);
+
+    const infoResponse = await connection.get("/whoami").then(() => true);
+
+    return infoResponse;
+  };
+
+  const create = async (inputs: unknown) => {
+    const providerInputs = await validateProviderInputs(inputs);
+    const connection = await getClient(providerInputs);
+
+    const username = generateUsername();
+    const password = generatePassword();
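+    // PUT /users/{name} on the RabbitMQ management API is create-or-update,
+    // so this call can be retried safely; the extra "infisical-user" tag
+    // marks accounts managed by this provider.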
+
+    await createRabbitMqUser({
+      axiosInstance: connection,
+      virtualHost: providerInputs.virtualHost,
+      createUser: {
+        password,
+        username,
+        tags: [...(providerInputs.tags ?? []), "infisical-user"]
+      }
+    });
+
+    return { entityId: username, data: { DB_USERNAME: username, DB_PASSWORD: password } };
+  };
+
+  const revoke = async (inputs: unknown, entityId: string) => {
+    const providerInputs = await validateProviderInputs(inputs);
+    const connection = await getClient(providerInputs);
+
+    await deleteRabbitMqUser({ axiosInstance: connection, usernameToDelete: entityId });
+
+    return { entityId };
+  };
+
+  const renew = async (inputs: unknown, entityId: string) => {
+    // Do nothing
+    return { entityId };
+  };
+
+  return {
+    validateProviderInputs,
+    validateConnection,
+    create,
+    revoke,
+    renew
+  };
+};
diff --git a/backend/src/ee/services/dynamic-secret/providers/redis.ts b/backend/src/ee/services/dynamic-secret/providers/redis.ts
new file mode 100644
index 0000000000..0e7ae99a0d
--- /dev/null
+++ b/backend/src/ee/services/dynamic-secret/providers/redis.ts
@@ -0,0 +1,167 @@
+import handlebars from "handlebars";
+import { Redis } from "ioredis";
+import { customAlphabet } from "nanoid";
+import { z } from "zod";
+
+import { BadRequestError } from "@app/lib/errors";
+import { alphaNumericNanoId } from "@app/lib/nanoid";
+
+import { verifyHostInputValidity } from "../dynamic-secret-fns";
+import { DynamicSecretRedisDBSchema, TDynamicProviderFns } from "./models";
+
+const generatePassword = () => {
+  const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
+  return customAlphabet(charset, 64)();
+};
+
+const generateUsername = () => {
+  return alphaNumericNanoId(32);
+};
+
+const executeTransactions = async (connection: Redis, commands: string[]): Promise<(string | null)[] | null> => {
+  // Initiate a transaction
+  const pipeline = connection.multi();
+
+  // Add all commands to the pipeline
+  for (const command of commands) {
+    const args = command
+      .split(" ")
+      .map((arg) => arg.trim())
+      .filter((arg) => arg.length > 0);
+    pipeline.call(args[0], ...args.slice(1));
+  }
+
+  // Execute the transaction
+  const results = await pipeline.exec();
+
+  if (!results) {
+    throw new BadRequestError({ message: "Redis transaction failed: No results returned" });
+  }
+
+  // Check for errors in the results
+  const errors = results.filter(([err]) => err !== null);
+  if (errors.length > 0) {
+    throw new BadRequestError({ message: "Redis transaction failed with errors" });
+  }
+
+  // eslint-disable-next-line @typescript-eslint/no-unused-vars
+  return results.map(([_, result]) => result as string | null);
+};
+
+export const RedisDatabaseProvider = (): TDynamicProviderFns => {
+  const validateProviderInputs = async (inputs: unknown) => {
+    const providerInputs = await DynamicSecretRedisDBSchema.parseAsync(inputs);
+    verifyHostInputValidity(providerInputs.host);
+    return providerInputs;
+  };
+
+  const getClient = async (providerInputs: z.infer<typeof DynamicSecretRedisDBSchema>) => {
+    let connection: Redis | null = null;
+    try {
+      connection = new Redis({
+        username: providerInputs.username,
+        host: providerInputs.host,
+        port: providerInputs.port,
+        password: providerInputs.password,
+        ...(providerInputs.ca && {
+          tls: {
+            rejectUnauthorized: false,
+            ca: providerInputs.ca
+          }
+        })
+      });
+
+      let result: string;
+      if (providerInputs.password) {
+        result = await connection.auth(providerInputs.username, providerInputs.password, () => {});
+      } else {
+        result = await connection.auth(providerInputs.username, () => {});
+      }
+
+      if (result !== "OK") {
+        throw new BadRequestError({ message: `Invalid credentials, Redis returned ${result} status` });
+      }
+
+      return connection;
+    } catch (err)
{ + if (connection) await connection.quit(); + + throw err; + } + }; + + const validateConnection = async (inputs: unknown) => { + const providerInputs = await validateProviderInputs(inputs); + const connection = await getClient(providerInputs); + + const pingResponse = await connection + .ping() + .then(() => true) + .catch(() => false); + + return pingResponse; + }; + + const create = async (inputs: unknown, expireAt: number) => { + const providerInputs = await validateProviderInputs(inputs); + const connection = await getClient(providerInputs); + + const username = generateUsername(); + const password = generatePassword(); + const expiration = new Date(expireAt).toISOString(); + + const creationStatement = handlebars.compile(providerInputs.creationStatement, { noEscape: true })({ + username, + password, + expiration + }); + + const queries = creationStatement.toString().split(";").filter(Boolean); + + await executeTransactions(connection, queries); + + await connection.quit(); + return { entityId: username, data: { DB_USERNAME: username, DB_PASSWORD: password } }; + }; + + const revoke = async (inputs: unknown, entityId: string) => { + const providerInputs = await validateProviderInputs(inputs); + const connection = await getClient(providerInputs); + + const username = entityId; + + const revokeStatement = handlebars.compile(providerInputs.revocationStatement)({ username }); + const queries = revokeStatement.toString().split(";").filter(Boolean); + + await executeTransactions(connection, queries); + + await connection.quit(); + return { entityId: username }; + }; + + const renew = async (inputs: unknown, entityId: string, expireAt: number) => { + const providerInputs = await validateProviderInputs(inputs); + const connection = await getClient(providerInputs); + + const username = entityId; + const expiration = new Date(expireAt).toISOString(); + + const renewStatement = handlebars.compile(providerInputs.renewStatement)({ username, expiration }); + + if (renewStatement) { + const queries = renewStatement.toString().split(";").filter(Boolean); + await executeTransactions(connection, queries); + } + + await connection.quit(); + return { entityId: username }; + }; + + return { + validateProviderInputs, + validateConnection, + create, + revoke, + renew + }; +}; diff --git a/backend/src/ee/services/dynamic-secret/providers/sap-hana.ts b/backend/src/ee/services/dynamic-secret/providers/sap-hana.ts new file mode 100644 index 0000000000..d120cf4fef --- /dev/null +++ b/backend/src/ee/services/dynamic-secret/providers/sap-hana.ts @@ -0,0 +1,174 @@ +/* eslint-disable @typescript-eslint/no-explicit-any */ +/* eslint-disable @typescript-eslint/no-unsafe-member-access */ +/* eslint-disable @typescript-eslint/no-unsafe-call */ +/* eslint-disable @typescript-eslint/no-unsafe-return */ +/* eslint-disable @typescript-eslint/no-unsafe-assignment */ + +import handlebars from "handlebars"; +import hdb from "hdb"; +import { customAlphabet } from "nanoid"; +import { z } from "zod"; + +import { BadRequestError } from "@app/lib/errors"; +import { alphaNumericNanoId } from "@app/lib/nanoid"; + +import { verifyHostInputValidity } from "../dynamic-secret-fns"; +import { DynamicSecretSapHanaSchema, TDynamicProviderFns } from "./models"; + +const generatePassword = (size = 48) => { + const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789"; + return customAlphabet(charset, 48)(size); +}; + +const generateUsername = () => { + return alphaNumericNanoId(32); +}; + +export const SapHanaProvider = 
(): TDynamicProviderFns => {
+  const validateProviderInputs = async (inputs: unknown) => {
+    const providerInputs = await DynamicSecretSapHanaSchema.parseAsync(inputs);
+
+    verifyHostInputValidity(providerInputs.host);
+    return providerInputs;
+  };
+
+  const getClient = async (providerInputs: z.infer<typeof DynamicSecretSapHanaSchema>) => {
+    const client = hdb.createClient({
+      host: providerInputs.host,
+      port: providerInputs.port,
+      user: providerInputs.username,
+      password: providerInputs.password,
+      ...(providerInputs.ca
+        ? {
+            ca: providerInputs.ca
+          }
+        : {})
+    });
+
+    await new Promise((resolve, reject) => {
+      client.connect((err: any) => {
+        if (err) {
+          return reject(err);
+        }
+
+        if (client.readyState) {
+          return resolve(true);
+        }
+
+        reject(new Error("SAP HANA client not ready"));
+      });
+    });
+
+    return client;
+  };
+
+  const validateConnection = async (inputs: unknown) => {
+    const providerInputs = await validateProviderInputs(inputs);
+    const client = await getClient(providerInputs);
+
+    const testResult: boolean = await new Promise<boolean>((resolve, reject) => {
+      client.exec("SELECT 1 FROM DUMMY;", (err: any) => {
+        if (err) {
+          reject();
+        }
+
+        resolve(true);
+      });
+    });
+
+    return testResult;
+  };
+
+  const create = async (inputs: unknown, expireAt: number) => {
+    const providerInputs = await validateProviderInputs(inputs);
+
+    const username = generateUsername();
+    const password = generatePassword();
+    const expiration = new Date(expireAt).toISOString();
+
+    const client = await getClient(providerInputs);
+    const creationStatement = handlebars.compile(providerInputs.creationStatement, { noEscape: true })({
+      username,
+      password,
+      expiration
+    });
+
+    const queries = creationStatement.toString().split(";").filter(Boolean);
+    for await (const query of queries) {
+      await new Promise((resolve, reject) => {
+        client.exec(query, (err: any) => {
+          if (err) {
+            reject(
+              new BadRequestError({
+                message: err.message
+              })
+            );
+          }
+          resolve(true);
+        });
+      });
+    }
+
+    return { entityId: username, data: { DB_USERNAME: username, DB_PASSWORD: password } };
+  };
+
+  const revoke = async (inputs: unknown, username: string) => {
+    const providerInputs = await validateProviderInputs(inputs);
+    const client = await getClient(providerInputs);
+    const revokeStatement = handlebars.compile(providerInputs.revocationStatement)({ username });
+    const queries = revokeStatement.toString().split(";").filter(Boolean);
+    for await (const query of queries) {
+      await new Promise((resolve, reject) => {
+        client.exec(query, (err: any) => {
+          if (err) {
+            reject(
+              new BadRequestError({
+                message: err.message
+              })
+            );
+          }
+          resolve(true);
+        });
+      });
+    }
+
+    return { entityId: username };
+  };
+
+  const renew = async (inputs: unknown, username: string, expireAt: number) => {
+    const providerInputs = await validateProviderInputs(inputs);
+    const client = await getClient(providerInputs);
+    try {
+      const expiration = new Date(expireAt).toISOString();
+
+      const renewStatement = handlebars.compile(providerInputs.renewStatement)({ username, expiration });
+      const queries = renewStatement.toString().split(";").filter(Boolean);
+      for await (const query of queries) {
+        await new Promise((resolve, reject) => {
+          client.exec(query, (err: any) => {
+            if (err) {
+              reject(
+                new BadRequestError({
+                  message: err.message
+                })
+              );
+            }
+            resolve(true);
+          });
+        });
+      }
+    } finally {
+      client.disconnect();
+    }
+
+    return { entityId: username };
+  };
+
+  return {
+    validateProviderInputs,
+    validateConnection,
+    create,
+    revoke,
+    renew
+  };
+};
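+
+// Illustrative creationStatement template (actual statements are user-supplied):
+//   CREATE USER {{username}} PASSWORD "{{password}}" NO FORCE_FIRST_PASSWORD_CHANGE
+//   VALID UNTIL '{{expiration}}';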
diff --git a/backend/src/ee/services/dynamic-secret/providers/snowflake.ts b/backend/src/ee/services/dynamic-secret/providers/snowflake.ts
new file mode 100644
index 0000000000..27ac3f49ca
--- /dev/null
+++ b/backend/src/ee/services/dynamic-secret/providers/snowflake.ts
@@ -0,0 +1,174 @@
+import handlebars from "handlebars";
+import { customAlphabet } from "nanoid";
+import snowflake from "snowflake-sdk";
+import { z } from "zod";
+
+import { BadRequestError } from "@app/lib/errors";
+import { alphaNumericNanoId } from "@app/lib/nanoid";
+
+import { DynamicSecretSnowflakeSchema, TDynamicProviderFns } from "./models";
+
+// destroy client requires callback...
+const noop = () => {};
+
+const generatePassword = (size = 48) => {
+  const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
+  return customAlphabet(charset, 48)(size);
+};
+
+const generateUsername = () => {
+  return `infisical_${alphaNumericNanoId(32)}`; // username must start with alpha character, hence prefix
+};
+
+const getDaysToExpiry = (expiryDate: Date) => {
+  const start = new Date().getTime();
+  const end = new Date(expiryDate).getTime();
+  const diffTime = Math.abs(end - start);
+
+  return Math.ceil(diffTime / (1000 * 60 * 60 * 24));
+};
+
+export const SnowflakeProvider = (): TDynamicProviderFns => {
+  const validateProviderInputs = async (inputs: unknown) => {
+    const providerInputs = await DynamicSecretSnowflakeSchema.parseAsync(inputs);
+    return providerInputs;
+  };
+
+  const getClient = async (providerInputs: z.infer<typeof DynamicSecretSnowflakeSchema>) => {
+    const client = snowflake.createConnection({
+      account: `${providerInputs.orgId}-${providerInputs.accountId}`,
+      username: providerInputs.username,
+      password: providerInputs.password,
+      application: "Infisical"
+    });
+
+    await client.connectAsync(noop);
+
+    return client;
+  };
+
+  const validateConnection = async (inputs: unknown) => {
+    const providerInputs = await validateProviderInputs(inputs);
+    const client = await getClient(providerInputs);
+
+    let isValidConnection: boolean;
+
+    try {
+      isValidConnection = await Promise.race([
+        client.isValidAsync(),
+        new Promise((resolve) => {
+          setTimeout(resolve, 10000);
+        }).then(() => {
+          throw new BadRequestError({ message: "Unable to establish connection - verify credentials" });
+        })
+      ]);
+    } finally {
+      client.destroy(noop);
+    }
+
+    return isValidConnection;
+  };
+
+  const create = async (inputs: unknown, expireAt: number) => {
+    const providerInputs = await validateProviderInputs(inputs);
+
+    const client = await getClient(providerInputs);
+
+    const username = generateUsername();
+    const password = generatePassword();
+
+    try {
+      const expiration = getDaysToExpiry(new Date(expireAt));
+      const creationStatement = handlebars.compile(providerInputs.creationStatement, { noEscape: true })({
+        username,
+        password,
+        expiration
+      });
+
+      await new Promise((resolve, reject) => {
+        client.execute({
+          sqlText: creationStatement,
+          complete(err) {
+            if (err) {
+              return reject(new BadRequestError({ name: "CreateLease", message: err.message }));
+            }
+
+            return resolve(true);
+          }
+        });
+      });
+    } finally {
+      client.destroy(noop);
+    }
+
+    return { entityId: username, data: { DB_USERNAME: username, DB_PASSWORD: password } };
+  };
+
+  const revoke = async (inputs: unknown, username: string) => {
+    const providerInputs = await validateProviderInputs(inputs);
+
+    const client = await getClient(providerInputs);
+
+    try {
+      const revokeStatement = handlebars.compile(providerInputs.revocationStatement)({ username });
+
+      await new
Promise((resolve, reject) => { + client.execute({ + sqlText: revokeStatement, + complete(err) { + if (err) { + return reject(new BadRequestError({ name: "RevokeLease", message: err.message })); + } + + return resolve(true); + } + }); + }); + } finally { + client.destroy(noop); + } + + return { entityId: username }; + }; + + const renew = async (inputs: unknown, username: string, expireAt: number) => { + const providerInputs = await validateProviderInputs(inputs); + + if (!providerInputs.renewStatement) return { entityId: username }; + + const client = await getClient(providerInputs); + + try { + const expiration = getDaysToExpiry(new Date(expireAt)); + const renewStatement = handlebars.compile(providerInputs.renewStatement)({ + username, + expiration + }); + + await new Promise((resolve, reject) => { + client.execute({ + sqlText: renewStatement, + complete(err) { + if (err) { + return reject(new BadRequestError({ name: "RenewLease", message: err.message })); + } + + return resolve(true); + } + }); + }); + } finally { + client.destroy(noop); + } + + return { entityId: username }; + }; + + return { + validateProviderInputs, + validateConnection, + create, + revoke, + renew + }; +}; diff --git a/backend/src/ee/services/dynamic-secret/providers/sql-database.ts b/backend/src/ee/services/dynamic-secret/providers/sql-database.ts index 6745f573b7..6acf23b065 100644 --- a/backend/src/ee/services/dynamic-secret/providers/sql-database.ts +++ b/backend/src/ee/services/dynamic-secret/providers/sql-database.ts @@ -3,11 +3,9 @@ import knex from "knex"; import { customAlphabet } from "nanoid"; import { z } from "zod"; -import { getConfig } from "@app/lib/config/env"; -import { BadRequestError } from "@app/lib/errors"; -import { getDbConnectionHost } from "@app/lib/knex"; import { alphaNumericNanoId } from "@app/lib/nanoid"; +import { verifyHostInputValidity } from "../dynamic-secret-fns"; import { DynamicSecretSqlDBSchema, SqlProviders, TDynamicProviderFns } from "./models"; const EXTERNAL_REQUEST_TIMEOUT = 10 * 1000; @@ -29,27 +27,8 @@ const generateUsername = (provider: SqlProviders) => { export const SqlDatabaseProvider = (): TDynamicProviderFns => { const validateProviderInputs = async (inputs: unknown) => { - const appCfg = getConfig(); - const isCloud = Boolean(appCfg.LICENSE_SERVER_KEY); // quick and dirty way to check if its cloud or not - const dbHost = appCfg.DB_HOST || getDbConnectionHost(appCfg.DB_CONNECTION_URI); - const providerInputs = await DynamicSecretSqlDBSchema.parseAsync(inputs); - if ( - isCloud && - // localhost - // internal ips - (providerInputs.host === "host.docker.internal" || - providerInputs.host.match(/^10\.\d+\.\d+\.\d+/) || - providerInputs.host.match(/^192\.168\.\d+\.\d+/)) - ) - throw new BadRequestError({ message: "Invalid db host" }); - if ( - providerInputs.host === "localhost" || - providerInputs.host === "127.0.0.1" || - // database infisical uses - dbHost === providerInputs.host - ) - throw new BadRequestError({ message: "Invalid db host" }); + verifyHostInputValidity(providerInputs.host); return providerInputs; }; diff --git a/backend/src/ee/services/external-kms/external-kms-dal.ts b/backend/src/ee/services/external-kms/external-kms-dal.ts new file mode 100644 index 0000000000..595ccb8109 --- /dev/null +++ b/backend/src/ee/services/external-kms/external-kms-dal.ts @@ -0,0 +1,49 @@ +import { Knex } from "knex"; + +import { TDbClient } from "@app/db"; +import { TableName, TKmsKeys } from "@app/db/schemas"; +import { DatabaseError } from "@app/lib/errors"; +import { 
ormify, selectAllTableCols } from "@app/lib/knex";
+
+export type TExternalKmsDALFactory = ReturnType<typeof externalKmsDALFactory>;
+
+export const externalKmsDALFactory = (db: TDbClient) => {
+  const externalKmsOrm = ormify(db, TableName.ExternalKms);
+
+  const find = async (filter: Partial<TKmsKeys>, tx?: Knex) => {
+    try {
+      const result = await (tx || db.replicaNode())(TableName.ExternalKms)
+        .join(TableName.KmsKey, `${TableName.KmsKey}.id`, `${TableName.ExternalKms}.kmsKeyId`)
+        .where(filter)
+        .select(selectAllTableCols(TableName.KmsKey))
+        .select(
+          db.ref("id").withSchema(TableName.ExternalKms).as("externalKmsId"),
+          db.ref("provider").withSchema(TableName.ExternalKms).as("externalKmsProvider"),
+          db.ref("encryptedProviderInputs").withSchema(TableName.ExternalKms).as("externalKmsEncryptedProviderInput"),
+          db.ref("status").withSchema(TableName.ExternalKms).as("externalKmsStatus"),
+          db.ref("statusDetails").withSchema(TableName.ExternalKms).as("externalKmsStatusDetails")
+        );
+
+      return result.map((el) => ({
+        id: el.id,
+        description: el.description,
+        isDisabled: el.isDisabled,
+        isReserved: el.isReserved,
+        orgId: el.orgId,
+        name: el.name,
+        createdAt: el.createdAt,
+        updatedAt: el.updatedAt,
+        externalKms: {
+          id: el.externalKmsId,
+          provider: el.externalKmsProvider,
+          status: el.externalKmsStatus,
+          statusDetails: el.externalKmsStatusDetails
+        }
+      }));
+    } catch (error) {
+      throw new DatabaseError({ error, name: "Find" });
+    }
+  };
+
+  return { ...externalKmsOrm, find };
+};
diff --git a/backend/src/ee/services/external-kms/external-kms-service.ts b/backend/src/ee/services/external-kms/external-kms-service.ts
new file mode 100644
index 0000000000..c3b774afd4
--- /dev/null
+++ b/backend/src/ee/services/external-kms/external-kms-service.ts
@@ -0,0 +1,332 @@
+import { ForbiddenError } from "@casl/ability";
+import slugify from "@sindresorhus/slugify";
+
+import { BadRequestError, NotFoundError } from "@app/lib/errors";
+import { alphaNumericNanoId } from "@app/lib/nanoid";
+import { TKmsKeyDALFactory } from "@app/services/kms/kms-key-dal";
+import { TKmsServiceFactory } from "@app/services/kms/kms-service";
+import { KmsDataKey } from "@app/services/kms/kms-types";
+
+import { TLicenseServiceFactory } from "../license/license-service";
+import { OrgPermissionActions, OrgPermissionSubjects } from "../permission/org-permission";
+import { TPermissionServiceFactory } from "../permission/permission-service";
+import { TExternalKmsDALFactory } from "./external-kms-dal";
+import {
+  TCreateExternalKmsDTO,
+  TDeleteExternalKmsDTO,
+  TGetExternalKmsByIdDTO,
+  TGetExternalKmsBySlugDTO,
+  TListExternalKmsDTO,
+  TUpdateExternalKmsDTO
+} from "./external-kms-types";
+import { AwsKmsProviderFactory } from "./providers/aws-kms";
+import { ExternalKmsAwsSchema, KmsProviders } from "./providers/model";
+
+type TExternalKmsServiceFactoryDep = {
+  externalKmsDAL: TExternalKmsDALFactory;
+  kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
+  kmsDAL: Pick<TKmsKeyDALFactory, "create" | "updateById" | "findById" | "deleteById" | "findOne">;
+  permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
+  licenseService: Pick<TLicenseServiceFactory, "getPlan">;
+};
+
+export type TExternalKmsServiceFactory = ReturnType<typeof externalKmsServiceFactory>;
+
+export const externalKmsServiceFactory = ({
+  externalKmsDAL,
+  permissionService,
+  licenseService,
+  kmsService,
+  kmsDAL
+}: TExternalKmsServiceFactoryDep) => {
+  const create = async ({
+    provider,
+    description,
+    actor,
+    name,
+    actorId,
+    actorOrgId,
+    actorAuthMethod
+  }: TCreateExternalKmsDTO) => {
+    const { permission } = await permissionService.getOrgPermission(
+      actor,
+      actorId,
+      actorOrgId,
+      actorAuthMethod,
+      actorOrgId
+    );
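+    // Creating an external KMS is org-scoped: the actor needs Create permission
+    // on the org's Kms subject, and the org's plan must have externalKms enabled.
+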
+    ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Create, OrgPermissionSubjects.Kms);
+    const plan = await licenseService.getPlan(actorOrgId);
+    if (!plan.externalKms) {
+      throw new BadRequestError({
+        message: "Failed to create external KMS due to plan restriction. Upgrade to the Enterprise plan."
+      });
+    }
+
+    const kmsName = name ? slugify(name) : slugify(alphaNumericNanoId(8).toLowerCase());
+
+    let sanitizedProviderInput = "";
+    switch (provider.type) {
+      case KmsProviders.Aws:
+        {
+          const externalKms = await AwsKmsProviderFactory({ inputs: provider.inputs });
+          // if the KMS key id is missing, this generates a new KMS key and returns the new provider input
+          const newProviderInput = await externalKms.generateInputKmsKey();
+          sanitizedProviderInput = JSON.stringify(newProviderInput);
+
+          await externalKms.validateConnection();
+        }
+        break;
+      default:
+        throw new BadRequestError({ message: "external kms provided is invalid" });
+    }
+
+    const { encryptor: orgDataKeyEncryptor } = await kmsService.createCipherPairWithDataKey({
+      type: KmsDataKey.Organization,
+      orgId: actorOrgId
+    });
+
+    const { cipherTextBlob: encryptedProviderInputs } = orgDataKeyEncryptor({
+      plainText: Buffer.from(sanitizedProviderInput, "utf8")
+    });
+
+    const externalKms = await externalKmsDAL.transaction(async (tx) => {
+      const kms = await kmsDAL.create(
+        {
+          isReserved: false,
+          description,
+          name: kmsName,
+          orgId: actorOrgId
+        },
+        tx
+      );
+      const externalKmsCfg = await externalKmsDAL.create(
+        {
+          provider: provider.type,
+          encryptedProviderInputs,
+          kmsKeyId: kms.id
+        },
+        tx
+      );
+      return { ...kms, external: externalKmsCfg };
+    });
+
+    return externalKms;
+  };
+
+  const updateById = async ({
+    provider,
+    description,
+    actor,
+    id: kmsId,
+    name,
+    actorId,
+    actorOrgId,
+    actorAuthMethod
+  }: TUpdateExternalKmsDTO) => {
+    const kmsDoc = await kmsDAL.findById(kmsId);
+    const { permission } = await permissionService.getOrgPermission(
+      actor,
+      actorId,
+      kmsDoc.orgId,
+      actorAuthMethod,
+      actorOrgId
+    );
+    ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Edit, OrgPermissionSubjects.Kms);
+
+    const plan = await licenseService.getPlan(kmsDoc.orgId);
+    if (!plan.externalKms) {
+      throw new BadRequestError({
+        message: "Failed to update external KMS due to plan restriction. Upgrade to the Enterprise plan."
+      });
+    }
+
+    const kmsName = name ?
slugify(name) : undefined; + + const externalKmsDoc = await externalKmsDAL.findOne({ kmsKeyId: kmsDoc.id }); + if (!externalKmsDoc) throw new NotFoundError({ message: `External KMS with ID '${kmsId}' not found` }); + + let sanitizedProviderInput = ""; + const { encryptor: orgDataKeyEncryptor, decryptor: orgDataKeyDecryptor } = + await kmsService.createCipherPairWithDataKey({ + type: KmsDataKey.Organization, + orgId: actorOrgId + }); + if (provider) { + const decryptedProviderInputBlob = orgDataKeyDecryptor({ + cipherTextBlob: externalKmsDoc.encryptedProviderInputs + }); + + switch (provider.type) { + case KmsProviders.Aws: + { + const decryptedProviderInput = await ExternalKmsAwsSchema.parseAsync( + JSON.parse(decryptedProviderInputBlob.toString("utf8")) + ); + const updatedProviderInput = { ...decryptedProviderInput, ...provider.inputs }; + const externalKms = await AwsKmsProviderFactory({ inputs: updatedProviderInput }); + await externalKms.validateConnection(); + sanitizedProviderInput = JSON.stringify(updatedProviderInput); + } + break; + default: + throw new BadRequestError({ message: "external kms provided is invalid" }); + } + } + + let encryptedProviderInputs: Buffer | undefined; + if (sanitizedProviderInput) { + const { cipherTextBlob } = orgDataKeyEncryptor({ + plainText: Buffer.from(sanitizedProviderInput, "utf8") + }); + encryptedProviderInputs = cipherTextBlob; + } + + const externalKms = await externalKmsDAL.transaction(async (tx) => { + const kms = await kmsDAL.updateById( + kmsDoc.id, + { + description, + name: kmsName + }, + tx + ); + if (encryptedProviderInputs) { + const externalKmsCfg = await externalKmsDAL.updateById( + externalKmsDoc.id, + { + encryptedProviderInputs + }, + tx + ); + return { ...kms, external: externalKmsCfg }; + } + return { ...kms, external: externalKmsDoc }; + }); + + return externalKms; + }; + + const deleteById = async ({ actor, id: kmsId, actorId, actorOrgId, actorAuthMethod }: TDeleteExternalKmsDTO) => { + const kmsDoc = await kmsDAL.findById(kmsId); + const { permission } = await permissionService.getOrgPermission( + actor, + actorId, + kmsDoc.orgId, + actorAuthMethod, + actorOrgId + ); + ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Delete, OrgPermissionSubjects.Kms); + + const externalKmsDoc = await externalKmsDAL.findOne({ kmsKeyId: kmsDoc.id }); + if (!externalKmsDoc) throw new NotFoundError({ message: `External KMS with ID '${kmsId}' not found` }); + + const externalKms = await externalKmsDAL.transaction(async (tx) => { + const kms = await kmsDAL.deleteById(kmsDoc.id, tx); + return { ...kms, external: externalKmsDoc }; + }); + + return externalKms; + }; + + const list = async ({ actor, actorId, actorOrgId, actorAuthMethod }: TListExternalKmsDTO) => { + const { permission } = await permissionService.getOrgPermission( + actor, + actorId, + actorOrgId, + actorAuthMethod, + actorOrgId + ); + ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Read, OrgPermissionSubjects.Kms); + + const externalKmsDocs = await externalKmsDAL.find({ orgId: actorOrgId }); + + return externalKmsDocs; + }; + + const findById = async ({ actor, actorId, actorOrgId, actorAuthMethod, id: kmsId }: TGetExternalKmsByIdDTO) => { + const kmsDoc = await kmsDAL.findById(kmsId); + const { permission } = await permissionService.getOrgPermission( + actor, + actorId, + kmsDoc.orgId, + actorAuthMethod, + actorOrgId + ); + + ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Read, OrgPermissionSubjects.Kms); + + const 
externalKmsDoc = await externalKmsDAL.findOne({ kmsKeyId: kmsDoc.id });
+    if (!externalKmsDoc) throw new NotFoundError({ message: `External KMS with ID '${kmsId}' not found` });
+
+    const { decryptor: orgDataKeyDecryptor } = await kmsService.createCipherPairWithDataKey({
+      type: KmsDataKey.Organization,
+      orgId: actorOrgId
+    });
+
+    const decryptedProviderInputBlob = orgDataKeyDecryptor({
+      cipherTextBlob: externalKmsDoc.encryptedProviderInputs
+    });
+    switch (externalKmsDoc.provider) {
+      case KmsProviders.Aws: {
+        const decryptedProviderInput = await ExternalKmsAwsSchema.parseAsync(
+          JSON.parse(decryptedProviderInputBlob.toString("utf8"))
+        );
+        return { ...kmsDoc, external: { ...externalKmsDoc, providerInput: decryptedProviderInput } };
+      }
+      default:
+        throw new BadRequestError({ message: "external kms provided is invalid" });
+    }
+  };
+
+  const findByName = async ({
+    actor,
+    actorId,
+    actorOrgId,
+    actorAuthMethod,
+    name: kmsName
+  }: TGetExternalKmsBySlugDTO) => {
+    const kmsDoc = await kmsDAL.findOne({ name: kmsName, orgId: actorOrgId });
+    const { permission } = await permissionService.getOrgPermission(
+      actor,
+      actorId,
+      kmsDoc.orgId,
+      actorAuthMethod,
+      actorOrgId
+    );
+    ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Read, OrgPermissionSubjects.Kms);
+
+    const externalKmsDoc = await externalKmsDAL.findOne({ kmsKeyId: kmsDoc.id });
+    if (!externalKmsDoc) throw new NotFoundError({ message: `External KMS with ID '${kmsDoc.id}' not found` });
+
+    const { decryptor: orgDataKeyDecryptor } = await kmsService.createCipherPairWithDataKey({
+      type: KmsDataKey.Organization,
+      orgId: actorOrgId
+    });
+
+    const decryptedProviderInputBlob = orgDataKeyDecryptor({
+      cipherTextBlob: externalKmsDoc.encryptedProviderInputs
+    });
+
+    switch (externalKmsDoc.provider) {
+      case KmsProviders.Aws: {
+        const decryptedProviderInput = await ExternalKmsAwsSchema.parseAsync(
+          JSON.parse(decryptedProviderInputBlob.toString("utf8"))
+        );
+        return { ...kmsDoc, external: { ...externalKmsDoc, providerInput: decryptedProviderInput } };
+      }
+      default:
+        throw new BadRequestError({ message: "external kms provided is invalid" });
+    }
+  };
+
+  return {
+    create,
+    updateById,
+    deleteById,
+    list,
+    findById,
+    findByName
+  };
+};
diff --git a/backend/src/ee/services/external-kms/external-kms-types.ts b/backend/src/ee/services/external-kms/external-kms-types.ts
new file mode 100644
index 0000000000..850108ac47
--- /dev/null
+++ b/backend/src/ee/services/external-kms/external-kms-types.ts
@@ -0,0 +1,30 @@
+import { TOrgPermission } from "@app/lib/types";
+
+import { TExternalKmsInputSchema, TExternalKmsInputUpdateSchema } from "./providers/model";
+
+export type TCreateExternalKmsDTO = {
+  name?: string;
+  description?: string;
+  provider: TExternalKmsInputSchema;
+} & Omit<TOrgPermission, "orgId">;
+
+export type TUpdateExternalKmsDTO = {
+  id: string;
+  name?: string;
+  description?: string;
+  provider?: TExternalKmsInputUpdateSchema;
+} & Omit<TOrgPermission, "orgId">;
+
+export type TDeleteExternalKmsDTO = {
+  id: string;
+} & Omit<TOrgPermission, "orgId">;
+
+export type TListExternalKmsDTO = Omit<TOrgPermission, "orgId">;
+
+export type TGetExternalKmsByIdDTO = {
+  id: string;
+} & Omit<TOrgPermission, "orgId">;
+
+export type TGetExternalKmsBySlugDTO = {
+  name: string;
+} & Omit<TOrgPermission, "orgId">;
diff --git a/backend/src/ee/services/external-kms/providers/aws-kms.ts b/backend/src/ee/services/external-kms/providers/aws-kms.ts
new file mode 100644
index 0000000000..6d9166a3a9
--- /dev/null
+++ b/backend/src/ee/services/external-kms/providers/aws-kms.ts
@@ -0,0 +1,111 @@
+import { CreateKeyCommand, DecryptCommand, DescribeKeyCommand, EncryptCommand, KMSClient } from "@aws-sdk/client-kms";
+import { AssumeRoleCommand, STSClient } from "@aws-sdk/client-sts";
+import { randomUUID } from "crypto";
+
+import { ExternalKmsAwsSchema, KmsAwsCredentialType, TExternalKmsAwsSchema, TExternalKmsProviderFns } from "./model";
+
+const getAwsKmsClient = async (providerInputs: TExternalKmsAwsSchema) => {
+  if (providerInputs.credential.type === KmsAwsCredentialType.AssumeRole) {
+    const awsCredential = providerInputs.credential.data;
+    const stsClient = new STSClient({
+      region: providerInputs.awsRegion
+    });
+    const command = new AssumeRoleCommand({
+      RoleArn: awsCredential.assumeRoleArn,
+      RoleSessionName: `infisical-kms-${randomUUID()}`,
+      DurationSeconds: 900, // 15mins
+      ExternalId: awsCredential.externalId
+    });
+    const response = await stsClient.send(command);
+    if (!response.Credentials?.AccessKeyId || !response.Credentials?.SecretAccessKey)
+      throw new Error("Failed to assume role");
+
+    const kmsClient = new KMSClient({
+      region: providerInputs.awsRegion,
+      credentials: {
+        accessKeyId: response.Credentials.AccessKeyId,
+        secretAccessKey: response.Credentials.SecretAccessKey,
+        sessionToken: response.Credentials.SessionToken,
+        expiration: response.Credentials.Expiration
+      }
+    });
+    return kmsClient;
+  }
+  const awsCredential = providerInputs.credential.data;
+  const kmsClient = new KMSClient({
+    region: providerInputs.awsRegion,
+    credentials: {
+      accessKeyId: awsCredential.accessKey,
+      secretAccessKey: awsCredential.secretKey
+    }
+  });
+  return kmsClient;
+};
+
+type AwsKmsProviderArgs = {
+  inputs: unknown;
+};
+type TAwsKmsProviderFactoryReturn = TExternalKmsProviderFns & {
+  generateInputKmsKey: () => Promise<TExternalKmsAwsSchema>;
+};
+
+export const AwsKmsProviderFactory = async ({ inputs }: AwsKmsProviderArgs): Promise<TAwsKmsProviderFactoryReturn> => {
+  let providerInputs = await ExternalKmsAwsSchema.parseAsync(inputs);
+  let awsClient = await getAwsKmsClient(providerInputs);
+
+  const generateInputKmsKey = async () => {
+    if (providerInputs.kmsKeyId) return providerInputs;
+
+    const command = new CreateKeyCommand({ Tags: [{ TagKey: "author", TagValue: "infisical" }] });
+    const kmsKey = await awsClient.send(command);
+
+    if (!kmsKey.KeyMetadata?.KeyId) throw new Error("Failed to generate kms key");
+
+    const updatedProviderInputs = await ExternalKmsAwsSchema.parseAsync({
+      ...providerInputs,
+      kmsKeyId: kmsKey.KeyMetadata?.KeyId
+    });
+
+    providerInputs = updatedProviderInputs;
+    awsClient = await getAwsKmsClient(providerInputs);
+
+    return updatedProviderInputs;
+  };
+
+  const validateConnection = async () => {
+    const command = new DescribeKeyCommand({
+      KeyId: providerInputs.kmsKeyId
+    });
+    const isConnected = await awsClient.send(command).then(() => true);
+    return isConnected;
+  };
+
+  const encrypt = async (data: Buffer) => {
+    const command = new EncryptCommand({
+      KeyId: providerInputs.kmsKeyId,
+      Plaintext: data
+    });
+    const encryptionCommand = await awsClient.send(command);
+    if (!encryptionCommand.CiphertextBlob) throw new Error("encryption failed");
+
+    return { encryptedBlob: Buffer.from(encryptionCommand.CiphertextBlob) };
+  };
+
+  const decrypt = async (encryptedBlob: Buffer) => {
+    const command = new DecryptCommand({
+      KeyId: providerInputs.kmsKeyId,
+      CiphertextBlob: encryptedBlob
+    });
+    const decryptionCommand = await awsClient.send(command);
+    if (!decryptionCommand.Plaintext) throw new Error("decryption failed");
+
+    return { data: Buffer.from(decryptionCommand.Plaintext) };
+  };
+
+  return {
+    generateInputKmsKey,
+    validateConnection,
+    encrypt,
+    decrypt
+  };
+};
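+
+// Round-trip sketch (illustrative usage of the returned provider):
+//   const kms = await AwsKmsProviderFactory({ inputs });
+//   const { encryptedBlob } = await kms.encrypt(Buffer.from("hello"));
+//   const { data } = await kms.decrypt(encryptedBlob); // data.equals(Buffer.from("hello"))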
validateConnection, + encrypt, + decrypt + }; +}; diff --git a/backend/src/ee/services/external-kms/providers/model.ts b/backend/src/ee/services/external-kms/providers/model.ts new file mode 100644 index 0000000000..5a87e0c982 --- /dev/null +++ b/backend/src/ee/services/external-kms/providers/model.ts @@ -0,0 +1,61 @@ +import { z } from "zod"; + +export enum KmsProviders { + Aws = "aws" +} + +export enum KmsAwsCredentialType { + AssumeRole = "assume-role", + AccessKey = "access-key" +} + +export const ExternalKmsAwsSchema = z.object({ + credential: z + .discriminatedUnion("type", [ + z.object({ + type: z.literal(KmsAwsCredentialType.AccessKey), + data: z.object({ + accessKey: z.string().trim().min(1).describe("AWS user account access key"), + secretKey: z.string().trim().min(1).describe("AWS user account secret key") + }) + }), + z.object({ + type: z.literal(KmsAwsCredentialType.AssumeRole), + data: z.object({ + assumeRoleArn: z.string().trim().min(1).describe("AWS user role to be assumed by Infisical"), + externalId: z + .string() + .trim() + .min(1) + .optional() + .describe("AWS assume role external id for further security in authentication") + }) + }) + ]) + .describe("AWS credential information to connect"), + awsRegion: z.string().min(1).trim().describe("AWS region to connect"), + kmsKeyId: z + .string() + .trim() + .optional() + .describe("A pre-existing AWS KMS key ID to be used for encryption. If not provided, a KMS key will be generated.") +}); +export type TExternalKmsAwsSchema = z.infer<typeof ExternalKmsAwsSchema>; + +// The root schema of the JSON +export const ExternalKmsInputSchema = z.discriminatedUnion("type", [ + z.object({ type: z.literal(KmsProviders.Aws), inputs: ExternalKmsAwsSchema }) +]); +export type TExternalKmsInputSchema = z.infer<typeof ExternalKmsInputSchema>; + +export const ExternalKmsInputUpdateSchema = z.discriminatedUnion("type", [ + z.object({ type: z.literal(KmsProviders.Aws), inputs: ExternalKmsAwsSchema.partial() }) +]); +export type TExternalKmsInputUpdateSchema = z.infer<typeof ExternalKmsInputUpdateSchema>; + +// generic functions shared by all providers +export type TExternalKmsProviderFns = { + validateConnection: () => Promise<boolean>; + encrypt: (data: Buffer) => Promise<{ encryptedBlob: Buffer }>; + decrypt: (encryptedBlob: Buffer) => Promise<{ data: Buffer }>; +}; diff --git a/backend/src/ee/services/group/group-dal.ts b/backend/src/ee/services/group/group-dal.ts index 3da1f242c3..5e25f61138 100644 --- a/backend/src/ee/services/group/group-dal.ts +++ b/backend/src/ee/services/group/group-dal.ts @@ -12,7 +12,7 @@ export const groupDALFactory = (db: TDbClient) => { const findGroups = async (filter: TFindFilter<TGroups>, { offset, limit, sort, tx }: TFindOpt<TGroups> = {}) => { try { - const query = (tx || db)(TableName.Groups) + const query = (tx || db.replicaNode())(TableName.Groups) // eslint-disable-next-line .where(buildFindFilter(filter)) .select(selectAllTableCols(TableName.Groups)); @@ -32,7 +32,7 @@ export const groupDALFactory = (db: TDbClient) => { const findByOrgId = async (orgId: string, tx?: Knex) => { try { - const docs = await (tx || db)(TableName.Groups) + const docs = await (tx || db.replicaNode())(TableName.Groups) .where(`${TableName.Groups}.orgId`, orgId) .leftJoin(TableName.OrgRoles, `${TableName.Groups}.roleId`, `${TableName.OrgRoles}.id`) .select(selectAllTableCols(TableName.Groups)) @@ -60,25 +60,28 @@ export const groupDALFactory = (db: TDbClient) => { }; // special query - const findAllGroupMembers = async ({ + const findAllGroupPossibleMembers = async ({ orgId, groupId, offset = 0, limit, - username + username, // deprecated in favor of
search + search }: { orgId: string; groupId: string; offset?: number; limit?: number; username?: string; + search?: string; }) => { try { - let query = db(TableName.OrgMembership) + const query = db + .replicaNode()(TableName.OrgMembership) + .where(`${TableName.OrgMembership}.orgId`, orgId) + .join(TableName.Users, `${TableName.OrgMembership}.userId`, `${TableName.Users}.id`) - .leftJoin(TableName.UserGroupMembership, function () { - this.on(`${TableName.UserGroupMembership}.userId`, "=", `${TableName.Users}.id`).andOn( + .leftJoin(TableName.UserGroupMembership, (bd) => { + bd.on(`${TableName.UserGroupMembership}.userId`, "=", `${TableName.Users}.id`).andOn( `${TableName.UserGroupMembership}.groupId`, "=", db.raw("?", [groupId]) @@ -91,31 +94,39 @@ export const groupDALFactory = (db: TDbClient) => { db.ref("username").withSchema(TableName.Users), db.ref("firstName").withSchema(TableName.Users), db.ref("lastName").withSchema(TableName.Users), - db.ref("id").withSchema(TableName.Users).as("userId") + db.ref("id").withSchema(TableName.Users).as("userId"), + db.raw(`count(*) OVER() as total_count`) ) .where({ isGhost: false }) - .offset(offset); + .offset(offset) + .orderBy("firstName", "asc"); if (limit) { - query = query.limit(limit); + void query.limit(limit); } - if (username) { - query = query.andWhere(`${TableName.Users}.username`, "ilike", `%${username}%`); + if (search) { + // bind the search term as a parameter instead of interpolating it into raw SQL + void query.andWhereRaw(`CONCAT_WS(' ', "firstName", "lastName", "username") ilike ?`, [`%${search}%`]); + } else if (username) { + void query.andWhere(`${TableName.Users}.username`, "ilike", `%${username}%`); } const members = await query; - return members.map( - ({ email, username: memberUsername, firstName, lastName, userId, groupId: memberGroupId }) => ({ - id: userId, - email, - username: memberUsername, - firstName, - lastName, - isPartOfGroup: !!memberGroupId - }) - ); + return { + members: members.map( + ({ email, username: memberUsername, firstName, lastName, userId, groupId: memberGroupId }) => ({ + id: userId, + email, + username: memberUsername, + firstName, + lastName, + isPartOfGroup: !!memberGroupId + }) + ), + // @ts-expect-error col select is raw and not strongly typed + totalCount: Number(members?.[0]?.total_count ??
0) + }; } catch (error) { throw new DatabaseError({ error, name: "Find all org members" }); } @@ -124,7 +135,7 @@ export const groupDALFactory = (db: TDbClient) => { return { findGroups, findByOrgId, - findAllGroupMembers, + findAllGroupPossibleMembers, ...groupOrm }; }; diff --git a/backend/src/ee/services/group/group-fns.ts b/backend/src/ee/services/group/group-fns.ts index 4f96ddbf09..72d052b298 100644 --- a/backend/src/ee/services/group/group-fns.ts +++ b/backend/src/ee/services/group/group-fns.ts @@ -2,7 +2,7 @@ import { Knex } from "knex"; import { SecretKeyEncoding, TableName, TUsers } from "@app/db/schemas"; import { decryptAsymmetric, encryptAsymmetric, infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption"; -import { BadRequestError, ScimRequestError } from "@app/lib/errors"; +import { BadRequestError, ForbiddenRequestError, NotFoundError, ScimRequestError } from "@app/lib/errors"; import { TAddUsersToGroup, @@ -73,24 +73,24 @@ const addAcceptedUsersToGroup = async ({ const ghostUser = await projectDAL.findProjectGhostUser(projectId, tx); if (!ghostUser) { - throw new BadRequestError({ - message: "Failed to find sudo user" + throw new NotFoundError({ + message: `Failed to find project owner of project with ID '${projectId}'` }); } const ghostUserLatestKey = await projectKeyDAL.findLatestProjectKey(ghostUser.id, projectId, tx); if (!ghostUserLatestKey) { - throw new BadRequestError({ - message: "Failed to find sudo user latest key" + throw new NotFoundError({ + message: `Failed to find project owner's latest key in project with ID '${projectId}'` }); } const bot = await projectBotDAL.findOne({ projectId }, tx); if (!bot) { - throw new BadRequestError({ - message: "Failed to find bot" + throw new NotFoundError({ + message: `Failed to find project bot in project with ID '${projectId}'` }); } @@ -200,7 +200,7 @@ export const addUsersToGroupByUserIds = async ({ userIds.forEach((userId) => { if (!existingUserOrgMembershipsUserIdsSet.has(userId)) - throw new BadRequestError({ + throw new ForbiddenRequestError({ message: `User with id ${userId} is not part of the organization` }); }); @@ -303,7 +303,7 @@ export const removeUsersFromGroupByUserIds = async ({ userIds.forEach((userId) => { if (!existingUserGroupMembershipsUserIdsSet.has(userId)) - throw new BadRequestError({ + throw new ForbiddenRequestError({ message: `User(s) are not part of the group ${group.slug}` }); }); @@ -336,31 +336,36 @@ export const removeUsersFromGroupByUserIds = async ({ ) ); - // TODO: this part can be optimized - for await (const userId of userIds) { - const t = await userGroupMembershipDAL.filterProjectsByUserMembership(userId, group.id, projectIds, tx); - const projectsToDeleteKeyFor = projectIds.filter((p) => !t.has(p)); + const promises: Array<Promise<void>> = []; + for (const userId of userIds) { + promises.push( + (async () => { + const t = await userGroupMembershipDAL.filterProjectsByUserMembership(userId, group.id, projectIds, tx); + const projectsToDeleteKeyFor = projectIds.filter((p) => !t.has(p)); - if (projectsToDeleteKeyFor.length) { - await projectKeyDAL.delete( - { - receiverId: userId, - $in: { - projectId: projectsToDeleteKeyFor - } - }, - tx - ); - } + if (projectsToDeleteKeyFor.length) { + await projectKeyDAL.delete( + { + receiverId: userId, + $in: { + projectId: projectsToDeleteKeyFor + } + }, + tx + ); + } - await userGroupMembershipDAL.delete( - { - groupId: group.id, - userId - }, - tx + await userGroupMembershipDAL.delete( + { + groupId: group.id, + userId + }, + tx + ); + })() ); } + await
Promise.all(promises); } if (membersToRemoveFromGroupPending.length) { @@ -410,7 +415,7 @@ export const convertPendingGroupAdditionsToGroupMemberships = async ({ const usersUserIdsSet = new Set(users.map((u) => u.id)); userIds.forEach((userId) => { if (!usersUserIdsSet.has(userId)) { - throw new BadRequestError({ + throw new NotFoundError({ message: `Failed to find user with id ${userId}` }); } diff --git a/backend/src/ee/services/group/group-service.ts b/backend/src/ee/services/group/group-service.ts index e6a151bf75..7e7139a6b0 100644 --- a/backend/src/ee/services/group/group-service.ts +++ b/backend/src/ee/services/group/group-service.ts @@ -3,7 +3,7 @@ import slugify from "@sindresorhus/slugify"; import { OrgMembershipRole, TOrgRoles } from "@app/db/schemas"; import { isAtLeastAsPrivileged } from "@app/lib/casl"; -import { BadRequestError, ForbiddenRequestError } from "@app/lib/errors"; +import { BadRequestError, ForbiddenRequestError, NotFoundError, UnauthorizedError } from "@app/lib/errors"; import { alphaNumericNanoId } from "@app/lib/nanoid"; import { TGroupProjectDALFactory } from "@app/services/group-project/group-project-dal"; import { TOrgDALFactory } from "@app/services/org/org-dal"; @@ -21,6 +21,7 @@ import { TAddUserToGroupDTO, TCreateGroupDTO, TDeleteGroupDTO, + TGetGroupByIdDTO, TListGroupUsersDTO, TRemoveUserFromGroupDTO, TUpdateGroupDTO @@ -29,7 +30,10 @@ import { TUserGroupMembershipDALFactory } from "./user-group-membership-dal"; type TGroupServiceFactoryDep = { userDAL: Pick; - groupDAL: Pick; + groupDAL: Pick< + TGroupDALFactory, + "create" | "findOne" | "update" | "delete" | "findAllGroupPossibleMembers" | "findById" + >; groupProjectDAL: Pick; orgDAL: Pick; userGroupMembershipDAL: Pick< @@ -58,7 +62,7 @@ export const groupServiceFactory = ({ licenseService }: TGroupServiceFactoryDep) => { const createGroup = async ({ name, slug, role, actor, actorId, actorAuthMethod, actorOrgId }: TCreateGroupDTO) => { - if (!actorOrgId) throw new BadRequestError({ message: "Failed to create group without organization" }); + if (!actorOrgId) throw new UnauthorizedError({ message: "No organization ID provided in request" }); const { permission } = await permissionService.getOrgPermission( actor, @@ -81,7 +85,8 @@ export const groupServiceFactory = ({ ); const isCustomRole = Boolean(customRole); const hasRequiredPriviledges = isAtLeastAsPrivileged(permission, rolePermission); - if (!hasRequiredPriviledges) throw new BadRequestError({ message: "Failed to create a more privileged group" }); + if (!hasRequiredPriviledges) + throw new ForbiddenRequestError({ message: "Failed to create a more privileged group" }); const group = await groupDAL.create({ name, @@ -95,7 +100,7 @@ export const groupServiceFactory = ({ }; const updateGroup = async ({ - currentSlug, + id, name, slug, role, @@ -104,7 +109,7 @@ export const groupServiceFactory = ({ actorAuthMethod, actorOrgId }: TUpdateGroupDTO) => { - if (!actorOrgId) throw new BadRequestError({ message: "Failed to create group without organization" }); + if (!actorOrgId) throw new UnauthorizedError({ message: "No organization ID provided in request" }); const { permission } = await permissionService.getOrgPermission( actor, @@ -118,11 +123,13 @@ export const groupServiceFactory = ({ const plan = await licenseService.getPlan(actorOrgId); if (!plan.groups) throw new BadRequestError({ - message: "Failed to update group due to plan restrictio Upgrade plan to update group." 
+ message: "Failed to update group due to plan restriction. Upgrade plan to update group." }); - const group = await groupDAL.findOne({ orgId: actorOrgId, slug: currentSlug }); - if (!group) throw new BadRequestError({ message: `Failed to find group with slug ${currentSlug}` }); + const group = await groupDAL.findOne({ orgId: actorOrgId, id }); + if (!group) { + throw new NotFoundError({ message: `Failed to find group with ID ${id}` }); + } let customRole: TOrgRoles | undefined; if (role) { @@ -134,14 +141,13 @@ export const groupServiceFactory = ({ const isCustomRole = Boolean(customOrgRole); const hasRequiredNewRolePermission = isAtLeastAsPrivileged(permission, rolePermission); if (!hasRequiredNewRolePermission) - throw new BadRequestError({ message: "Failed to create a more privileged group" }); + throw new ForbiddenRequestError({ message: "Failed to create a more privileged group" }); if (isCustomRole) customRole = customOrgRole; } const [updatedGroup] = await groupDAL.update( { - orgId: actorOrgId, - slug: currentSlug + id: group.id }, { name, @@ -158,8 +164,8 @@ return updatedGroup; }; - const deleteGroup = async ({ groupSlug, actor, actorId, actorAuthMethod, actorOrgId }: TDeleteGroupDTO) => { - if (!actorOrgId) throw new BadRequestError({ message: "Failed to create group without organization" }); + const deleteGroup = async ({ id, actor, actorId, actorAuthMethod, actorOrgId }: TDeleteGroupDTO) => { + if (!actorOrgId) throw new UnauthorizedError({ message: "No organization ID provided in request" }); const { permission } = await permissionService.getOrgPermission( @@ -178,24 +184,47 @@ }); const [group] = await groupDAL.delete({ - orgId: actorOrgId, - slug: groupSlug + id, + orgId: actorOrgId }); return group; }; + const getGroupById = async ({ id, actor, actorId, actorAuthMethod, actorOrgId }: TGetGroupByIdDTO) => { + if (!actorOrgId) throw new UnauthorizedError({ message: "No organization ID provided in request" }); + + const { permission } = await permissionService.getOrgPermission( + actor, + actorId, + actorOrgId, + actorAuthMethod, + actorOrgId + ); + ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Read, OrgPermissionSubjects.Groups); + + const group = await groupDAL.findById(id); + if (!group) { + throw new NotFoundError({ + message: `Cannot find group with ID ${id}` + }); + } + + return group; + }; + const listGroupUsers = async ({ - groupSlug, + id, offset, limit, username, actor, actorId, actorAuthMethod, - actorOrgId + actorOrgId, + search }: TListGroupUsersDTO) => { - if (!actorOrgId) throw new BadRequestError({ message: "Failed to create group without organization" }); + if (!actorOrgId) throw new UnauthorizedError({ message: "No organization ID provided in request" }); const { permission } = await permissionService.getOrgPermission( actor, @@ -208,36 +237,28 @@ const group = await groupDAL.findOne({ orgId: actorOrgId, - slug: groupSlug + id }); if (!group) - throw new BadRequestError({ - message: `Failed to find group with slug ${groupSlug}` + throw new NotFoundError({ + message: `Failed to find group with ID ${id}` }); - const users = await groupDAL.findAllGroupMembers({ + const { members, totalCount } = await groupDAL.findAllGroupPossibleMembers({ orgId: group.orgId, groupId: group.id, offset, limit, - username + username, + search }); - const count = await orgDAL.countAllOrgMembers(group.orgId); - - return { users, totalCount:
count }; + return { users: members, totalCount }; }; - const addUserToGroup = async ({ - groupSlug, - username, - actor, - actorId, - actorAuthMethod, - actorOrgId - }: TAddUserToGroupDTO) => { - if (!actorOrgId) throw new BadRequestError({ message: "Failed to create group without organization" }); + const addUserToGroup = async ({ id, username, actor, actorId, actorAuthMethod, actorOrgId }: TAddUserToGroupDTO) => { + if (!actorOrgId) throw new UnauthorizedError({ message: "No organization ID provided in request" }); const { permission } = await permissionService.getOrgPermission( actor, @@ -251,12 +272,12 @@ export const groupServiceFactory = ({ - // check if group with slug exists + // check if group with ID exists const group = await groupDAL.findOne({ orgId: actorOrgId, - slug: groupSlug + id }); if (!group) - throw new BadRequestError({ - message: `Failed to find group with slug ${groupSlug}` + throw new NotFoundError({ + message: `Failed to find group with ID ${id}` }); const { permission: groupRolePermission } = await permissionService.getOrgPermissionByRole(group.role, actorOrgId); @@ -267,7 +288,7 @@ throw new ForbiddenRequestError({ message: "Failed to add user to more privileged group" }); const user = await userDAL.findOne({ username }); - if (!user) throw new BadRequestError({ message: `Failed to find user with username ${username}` }); + if (!user) throw new NotFoundError({ message: `Failed to find user with username ${username}` }); const users = await addUsersToGroupByUserIds({ group, @@ -285,14 +306,14 @@ }; const removeUserFromGroup = async ({ - groupSlug, + id, username, actor, actorId, actorAuthMethod, actorOrgId }: TRemoveUserFromGroupDTO) => { - if (!actorOrgId) throw new BadRequestError({ message: "Failed to create group without organization" }); + if (!actorOrgId) throw new UnauthorizedError({ message: "No organization ID provided in request" }); const { permission } = await permissionService.getOrgPermission( actor, @@ -306,12 +327,12 @@ - // check if group with slug exists + // check if group with ID exists const group = await groupDAL.findOne({ orgId: actorOrgId, - slug: groupSlug + id }); if (!group) - throw new BadRequestError({ - message: `Failed to find group with slug ${groupSlug}` + throw new NotFoundError({ + message: `Failed to find group with ID ${id}` }); const { permission: groupRolePermission } = await permissionService.getOrgPermissionByRole(group.role, actorOrgId); @@ -322,7 +343,7 @@ throw new ForbiddenRequestError({ message: "Failed to delete user from more privileged group" }); const user = await userDAL.findOne({ username }); - if (!user) throw new BadRequestError({ message: `Failed to find user with username ${username}` }); + if (!user) throw new NotFoundError({ message: `Failed to find user with username ${username}` }); const users = await removeUsersFromGroupByUserIds({ group, @@ -342,6 +363,7 @@ deleteGroup, listGroupUsers, addUserToGroup, - removeUserFromGroup + removeUserFromGroup, + getGroupById }; }; diff --git a/backend/src/ee/services/group/group-types.ts b/backend/src/ee/services/group/group-types.ts index ca9831ffbb..a6eb4782b3 100644 --- a/backend/src/ee/services/group/group-types.ts +++ b/backend/src/ee/services/group/group-types.ts @@ -17,7 +17,7 @@ export type TCreateGroupDTO = { } & TGenericPermission; export type TUpdateGroupDTO = { - currentSlug: string; + id: string; } & Partial<{ name: string; slug: string;
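
The member-listing change above pairs each page of rows with the total match count by selecting count(*) OVER() in the same query instead of issuing a second COUNT. A minimal Knex sketch of the same pattern, against a hypothetical users table (table and column names are illustrative, not taken from the patch):

import knex from "knex";

const db = knex({ client: "pg", connection: process.env.DB_CONNECTION_URI });

// One round trip returns both the page and the total match count:
// count(*) OVER() is evaluated over the filtered set before LIMIT/OFFSET
// apply, so every returned row carries the same total.
export const findUsersPage = async (search: string, offset = 0, limit = 20) => {
  const rows = await db("users")
    .select("id", "username", db.raw("count(*) OVER() as total_count"))
    // bind the search term as a parameter rather than interpolating it
    .whereRaw(`CONCAT_WS(' ', "firstName", "lastName", "username") ilike ?`, [`%${search}%`])
    .orderBy("firstName", "asc")
    .offset(offset)
    .limit(limit);

  return {
    users: rows,
    // pg drivers return bigint counts as strings; coerce, defaulting to 0 on an empty page
    totalCount: Number(rows[0]?.total_count ?? 0)
  };
};
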
@@ -26,23 +26,28 @@ export type TUpdateGroupDTO = { TGenericPermission; export type TDeleteGroupDTO = { - groupSlug: string; + id: string; +} & TGenericPermission; + +export type TGetGroupByIdDTO = { + id: string; } & TGenericPermission; export type TListGroupUsersDTO = { - groupSlug: string; + id: string; offset: number; limit: number; username?: string; + search?: string; } & TGenericPermission; export type TAddUserToGroupDTO = { - groupSlug: string; + id: string; username: string; } & TGenericPermission; export type TRemoveUserFromGroupDTO = { - groupSlug: string; + id: string; username: string; } & TGenericPermission; diff --git a/backend/src/ee/services/group/user-group-membership-dal.ts b/backend/src/ee/services/group/user-group-membership-dal.ts index 1ab1839c55..be654b338c 100644 --- a/backend/src/ee/services/group/user-group-membership-dal.ts +++ b/backend/src/ee/services/group/user-group-membership-dal.ts @@ -18,7 +18,7 @@ export const userGroupMembershipDALFactory = (db: TDbClient) => { */ const filterProjectsByUserMembership = async (userId: string, groupId: string, projectIds: string[], tx?: Knex) => { try { - const userProjectMemberships: string[] = await (tx || db)(TableName.ProjectMembership) + const userProjectMemberships: string[] = await (tx || db.replicaNode())(TableName.ProjectMembership) .where(`${TableName.ProjectMembership}.userId`, userId) .whereIn(`${TableName.ProjectMembership}.projectId`, projectIds) .pluck(`${TableName.ProjectMembership}.projectId`); @@ -41,9 +41,9 @@ export const userGroupMembershipDALFactory = (db: TDbClient) => { }; // special query - const findUserGroupMembershipsInProject = async (usernames: string[], projectId: string) => { + const findUserGroupMembershipsInProject = async (usernames: string[], projectId: string, tx?: Knex) => { try { - const usernameDocs: string[] = await db(TableName.UserGroupMembership) + const usernameDocs: string[] = await (tx || db.replicaNode())(TableName.UserGroupMembership) .join( TableName.GroupProjectMembership, `${TableName.UserGroupMembership}.groupId`, @@ -73,7 +73,7 @@ export const userGroupMembershipDALFactory = (db: TDbClient) => { try { // get list of groups in the project with id [projectId] - // that that are not the group with id [groupId] + // that are not the group with id [groupId] - const groups: string[] = await (tx || db)(TableName.GroupProjectMembership) + const groups: string[] = await (tx || db.replicaNode())(TableName.GroupProjectMembership) .where(`${TableName.GroupProjectMembership}.projectId`, projectId) .whereNot(`${TableName.GroupProjectMembership}.groupId`, groupId) .pluck(`${TableName.GroupProjectMembership}.groupId`); @@ -83,8 +83,8 @@ export const userGroupMembershipDALFactory = (db: TDbClient) => { .where(`${TableName.UserGroupMembership}.groupId`, groupId) .where(`${TableName.UserGroupMembership}.isPending`, false) .join(TableName.Users, `${TableName.UserGroupMembership}.userId`, `${TableName.Users}.id`) - .leftJoin(TableName.ProjectMembership, function () { - this.on(`${TableName.Users}.id`, "=", `${TableName.ProjectMembership}.userId`).andOn( + .leftJoin(TableName.ProjectMembership, (bd) => { + bd.on(`${TableName.Users}.id`, "=", `${TableName.ProjectMembership}.userId`).andOn( `${TableName.ProjectMembership}.projectId`, "=", db.raw("?", [projectId]) @@ -107,9 +107,9 @@ export const userGroupMembershipDALFactory = (db: TDbClient) => { db.ref("publicKey").withSchema(TableName.UserEncryptionKey) ) .where({ isGhost: false }) // MAKE SURE USER IS NOT A GHOST USER - .whereNotIn(`${TableName.UserGroupMembership}.userId`, function ()
{ + .whereNotIn(`${TableName.UserGroupMembership}.userId`, (bd) => { // eslint-disable-next-line @typescript-eslint/no-floating-promises - this.select(`${TableName.UserGroupMembership}.userId`) + bd.select(`${TableName.UserGroupMembership}.userId`) .from(TableName.UserGroupMembership) .whereIn(`${TableName.UserGroupMembership}.groupId`, groups); }); @@ -161,11 +161,60 @@ export const userGroupMembershipDALFactory = (db: TDbClient) => { } }; + const findGroupMembershipsByUserIdInOrg = async (userId: string, orgId: string) => { + try { + const docs = await db + .replicaNode()(TableName.UserGroupMembership) + .join(TableName.Groups, `${TableName.UserGroupMembership}.groupId`, `${TableName.Groups}.id`) + .join(TableName.OrgMembership, `${TableName.UserGroupMembership}.userId`, `${TableName.OrgMembership}.userId`) + .join(TableName.Users, `${TableName.UserGroupMembership}.userId`, `${TableName.Users}.id`) + .where(`${TableName.UserGroupMembership}.userId`, userId) + .where(`${TableName.Groups}.orgId`, orgId) + .select( + db.ref("id").withSchema(TableName.UserGroupMembership), + db.ref("groupId").withSchema(TableName.UserGroupMembership), + db.ref("name").withSchema(TableName.Groups).as("groupName"), + db.ref("id").withSchema(TableName.OrgMembership).as("orgMembershipId"), + db.ref("firstName").withSchema(TableName.Users).as("firstName"), + db.ref("lastName").withSchema(TableName.Users).as("lastName") + ); + + return docs; + } catch (error) { + throw new DatabaseError({ error, name: "Find group memberships by user id in org" }); + } + }; + + const findGroupMembershipsByGroupIdInOrg = async (groupId: string, orgId: string) => { + try { + const docs = await db + .replicaNode()(TableName.UserGroupMembership) + .join(TableName.Groups, `${TableName.UserGroupMembership}.groupId`, `${TableName.Groups}.id`) + .join(TableName.OrgMembership, `${TableName.UserGroupMembership}.userId`, `${TableName.OrgMembership}.userId`) + .join(TableName.Users, `${TableName.UserGroupMembership}.userId`, `${TableName.Users}.id`) + .where(`${TableName.Groups}.id`, groupId) + .where(`${TableName.Groups}.orgId`, orgId) + .select( + db.ref("id").withSchema(TableName.UserGroupMembership), + db.ref("groupId").withSchema(TableName.UserGroupMembership), + db.ref("name").withSchema(TableName.Groups).as("groupName"), + db.ref("id").withSchema(TableName.OrgMembership).as("orgMembershipId"), + db.ref("firstName").withSchema(TableName.Users).as("firstName"), + db.ref("lastName").withSchema(TableName.Users).as("lastName") + ); + return docs; + } catch (error) { + throw new DatabaseError({ error, name: "Find group memberships by group id in org" }); + } + }; + return { ...userGroupMembershipOrm, filterProjectsByUserMembership, findUserGroupMembershipsInProject, findGroupMembersNotInProject, - deletePendingUserGroupMembershipsByUserIds + deletePendingUserGroupMembershipsByUserIds, + findGroupMembershipsByUserIdInOrg, + findGroupMembershipsByGroupIdInOrg }; }; diff --git a/backend/src/ee/services/hsm/hsm-fns.ts b/backend/src/ee/services/hsm/hsm-fns.ts new file mode 100644 index 0000000000..f91f9a0042 --- /dev/null +++ b/backend/src/ee/services/hsm/hsm-fns.ts @@ -0,0 +1,58 @@ +import * as pkcs11js from "pkcs11js"; + +import { getConfig } from "@app/lib/config/env"; +import { logger } from "@app/lib/logger"; + +import { HsmModule } from "./hsm-types"; + +export const initializeHsmModule = () => { + const appCfg = getConfig(); + + // Create a new instance of PKCS11 module + const pkcs11 = new pkcs11js.PKCS11(); + let isInitialized = false; + 
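
// Usage sketch (illustrative wiring only; the factory below and the service in
// hsm-service.ts are real, but the boot sequence itself is an assumption):
//
//   const hsmModule = initializeHsmModule();
//   hsmModule.initialize(); // loads HSM_LIB_PATH and calls C_Initialize()
//   const hsmService = hsmServiceFactory({ hsmModule: hsmModule.getModule() });
//   await hsmService.startService(); // creates the AES/HMAC master keys if missing
//   // ... serve traffic ...
//   hsmModule.finalize(); // call C_Finalize() on shutdown to release the slot
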
+ const initialize = () => { + if (!appCfg.isHsmConfigured) { + return; + } + + try { + // Load the PKCS#11 module + pkcs11.load(appCfg.HSM_LIB_PATH!); + + // Initialize the module + pkcs11.C_Initialize(); + isInitialized = true; + + logger.info("PKCS#11 module initialized"); + } catch (err) { + logger.error("Failed to initialize PKCS#11 module:", err); + throw err; + } + }; + + const finalize = () => { + if (isInitialized) { + try { + pkcs11.C_Finalize(); + isInitialized = false; + logger.info("PKCS#11 module finalized"); + } catch (err) { + logger.error("Failed to finalize PKCS#11 module:", err); + throw err; + } + } + }; + + const getModule = (): HsmModule => ({ + pkcs11, + isInitialized + }); + + return { + initialize, + finalize, + getModule + }; +}; diff --git a/backend/src/ee/services/hsm/hsm-service.ts b/backend/src/ee/services/hsm/hsm-service.ts new file mode 100644 index 0000000000..a1a0773fc4 --- /dev/null +++ b/backend/src/ee/services/hsm/hsm-service.ts @@ -0,0 +1,470 @@ +import pkcs11js from "pkcs11js"; + +import { getConfig } from "@app/lib/config/env"; +import { logger } from "@app/lib/logger"; + +import { HsmKeyType, HsmModule } from "./hsm-types"; + +type THsmServiceFactoryDep = { + hsmModule: HsmModule; +}; + +export type THsmServiceFactory = ReturnType<typeof hsmServiceFactory>; + +type SyncOrAsync<T> = T | Promise<T>; +type SessionCallback<T> = (session: pkcs11js.Handle) => SyncOrAsync<T>; + +// eslint-disable-next-line no-empty-pattern +export const hsmServiceFactory = ({ hsmModule: { isInitialized, pkcs11 } }: THsmServiceFactoryDep) => { + const appCfg = getConfig(); + + // Constants for buffer structures + const IV_LENGTH = 16; // Luna HSM typically expects 16-byte IV for cbc + const BLOCK_SIZE = 16; + const HMAC_SIZE = 32; + + const AES_KEY_SIZE = 256; + const HMAC_KEY_SIZE = 256; + + const $withSession = async <T>(callbackWithSession: SessionCallback<T>): Promise<T> => { + const RETRY_INTERVAL = 200; // 200ms between attempts + const MAX_TIMEOUT = 90_000; // 90 seconds maximum total time + + let sessionHandle: pkcs11js.Handle | null = null; + + const removeSession = () => { + if (sessionHandle !== null) { + try { + pkcs11.C_Logout(sessionHandle); + pkcs11.C_CloseSession(sessionHandle); + logger.info("HSM: Terminated session successfully"); + } catch (error) { + logger.error(error, "HSM: Failed to terminate session"); + } finally { + sessionHandle = null; + } + } + }; + + try { + if (!pkcs11 || !isInitialized) { + throw new Error("PKCS#11 module is not initialized"); + } + + // Get slot list + let slots: pkcs11js.Handle[]; + try { + slots = pkcs11.C_GetSlotList(false); // false to get all slots + } catch (error) { + throw new Error(`Failed to get slot list: ${(error as Error)?.message}`); + } + + if (slots.length === 0) { + throw new Error("No slots available"); + } + + if (appCfg.HSM_SLOT >= slots.length) { + throw new Error(`HSM slot ${appCfg.HSM_SLOT} not found or not initialized`); + } + + const slotId = slots[appCfg.HSM_SLOT]; + + const startTime = Date.now(); + while (Date.now() - startTime < MAX_TIMEOUT) { + try { + // Open session + // eslint-disable-next-line no-bitwise + sessionHandle = pkcs11.C_OpenSession(slotId, pkcs11js.CKF_SERIAL_SESSION | pkcs11js.CKF_RW_SESSION); + + // Login + try { + pkcs11.C_Login(sessionHandle, pkcs11js.CKU_USER, appCfg.HSM_PIN); + logger.info("HSM: Successfully authenticated"); + break; + } catch (error) { + // Handle specific error cases + if (error instanceof pkcs11js.Pkcs11Error) { + if (error.code === pkcs11js.CKR_PIN_INCORRECT) { + // We throw instantly here to prevent
further attempts, because if too many attempts are made, the HSM will potentially wipe all key material + logger.error(error, `HSM: Incorrect PIN detected for HSM slot ${appCfg.HSM_SLOT}`); + throw new Error("HSM: Incorrect HSM Pin detected. Please check the HSM configuration."); + } + if (error.code === pkcs11js.CKR_USER_ALREADY_LOGGED_IN) { + logger.warn("HSM: Session already logged in"); + } + } + throw error; // Re-throw other errors + } + } catch (error) { + logger.warn(`HSM: Session creation failed. Retrying... Error: ${(error as Error)?.message}`); + + if (sessionHandle !== null) { + try { + pkcs11.C_CloseSession(sessionHandle); + } catch (closeError) { + logger.error(closeError, "HSM: Failed to close session"); + } + sessionHandle = null; + } + + // Wait before retrying + // eslint-disable-next-line no-await-in-loop + await new Promise((resolve) => { + setTimeout(resolve, RETRY_INTERVAL); + }); + } + } + + if (sessionHandle === null) { + throw new Error("HSM: Failed to open session after maximum retries"); + } + + // Execute callback with session handle + const result = await callbackWithSession(sessionHandle); + removeSession(); + return result; + } catch (error) { + logger.error(error, "HSM: Failed to open session"); + throw error; + } finally { + // Ensure cleanup + removeSession(); + } + }; + + const $findKey = (sessionHandle: pkcs11js.Handle, type: HsmKeyType) => { + const label = type === HsmKeyType.HMAC ? `${appCfg.HSM_KEY_LABEL}_HMAC` : appCfg.HSM_KEY_LABEL; + const keyType = type === HsmKeyType.HMAC ? pkcs11js.CKK_GENERIC_SECRET : pkcs11js.CKK_AES; + + const template = [ + { type: pkcs11js.CKA_CLASS, value: pkcs11js.CKO_SECRET_KEY }, + { type: pkcs11js.CKA_KEY_TYPE, value: keyType }, + { type: pkcs11js.CKA_LABEL, value: label } + ]; + + try { + // Initialize search + pkcs11.C_FindObjectsInit(sessionHandle, template); + + try { + // Find first matching object + const handles = pkcs11.C_FindObjects(sessionHandle, 1); + + if (handles.length === 0) { + throw new Error("Failed to find master key"); + } + + return handles[0]; // Return the key handle + } finally { + // Always finalize the search operation + pkcs11.C_FindObjectsFinal(sessionHandle); + } + } catch (error) { + return null; + } + }; + + const $keyExists = (session: pkcs11js.Handle, type: HsmKeyType): boolean => { + try { + const key = $findKey(session, type); + // items(0) will throw an error if no items are found + // Return true only if we got a valid object with handle + return !!key && key.length > 0; + } catch (error) { + // If items(0) throws, it means no key was found + // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access, @typescript-eslint/no-explicit-any, @typescript-eslint/no-unsafe-call + logger.error(error, "HSM: Failed while checking for HSM key presence"); + + if (error instanceof pkcs11js.Pkcs11Error) { + if (error.code === pkcs11js.CKR_OBJECT_HANDLE_INVALID) { + return false; + } + } + + return false; + } + }; + + const encrypt: { + (data: Buffer, providedSession: pkcs11js.Handle): Promise<Buffer>; + (data: Buffer): Promise<Buffer>; + } = async (data: Buffer, providedSession?: pkcs11js.Handle) => { + if (!pkcs11 || !isInitialized) { + throw new Error("PKCS#11 module is not initialized"); + } + + const $performEncryption = (sessionHandle: pkcs11js.Handle) => { + try { + const aesKey = $findKey(sessionHandle, HsmKeyType.AES); + if (!aesKey) { + throw new Error("HSM: Encryption failed, AES key not found"); + } + + const hmacKey = $findKey(sessionHandle, HsmKeyType.HMAC); + if (!hmacKey) { + throw new
Error("HSM: Encryption failed, HMAC key not found"); + } + + const iv = Buffer.alloc(IV_LENGTH); + pkcs11.C_GenerateRandom(sessionHandle, iv); + + const encryptMechanism = { + mechanism: pkcs11js.CKM_AES_CBC_PAD, + parameter: iv + }; + + pkcs11.C_EncryptInit(sessionHandle, encryptMechanism, aesKey); + + // Calculate max buffer size (input length + potential full block of padding) + const maxEncryptedLength = Math.ceil(data.length / BLOCK_SIZE) * BLOCK_SIZE + BLOCK_SIZE; + + // Encrypt the data - this returns the encrypted data directly + const encryptedData = pkcs11.C_Encrypt(sessionHandle, data, Buffer.alloc(maxEncryptedLength)); + + // Initialize HMAC + const hmacMechanism = { + mechanism: pkcs11js.CKM_SHA256_HMAC + }; + + pkcs11.C_SignInit(sessionHandle, hmacMechanism, hmacKey); + + // Sign the IV and encrypted data + pkcs11.C_SignUpdate(sessionHandle, iv); + pkcs11.C_SignUpdate(sessionHandle, encryptedData); + + // Get the HMAC + const hmac = Buffer.alloc(HMAC_SIZE); + pkcs11.C_SignFinal(sessionHandle, hmac); + + // Combine encrypted data and HMAC [Encrypted Data | HMAC] + const finalBuffer = Buffer.alloc(encryptedData.length + hmac.length); + encryptedData.copy(finalBuffer); + hmac.copy(finalBuffer, encryptedData.length); + + return Buffer.concat([iv, finalBuffer]); + } catch (error) { + logger.error(error, "HSM: Failed to perform encryption"); + throw new Error(`HSM: Encryption failed: ${(error as Error)?.message}`); + } + }; + + if (providedSession) { + return $performEncryption(providedSession); + } + + const result = await $withSession($performEncryption); + return result; + }; + + const decrypt: { + (encryptedBlob: Buffer, providedSession: pkcs11js.Handle): Promise; + (encryptedBlob: Buffer): Promise; + } = async (encryptedBlob: Buffer, providedSession?: pkcs11js.Handle) => { + if (!pkcs11 || !isInitialized) { + throw new Error("PKCS#11 module is not initialized"); + } + + const $performDecryption = (sessionHandle: pkcs11js.Handle) => { + try { + // structure is: [IV (16 bytes) | Encrypted Data (N bytes) | HMAC (32 bytes)] + const iv = encryptedBlob.subarray(0, IV_LENGTH); + const encryptedDataWithHmac = encryptedBlob.subarray(IV_LENGTH); + + // Split encrypted data and HMAC + const hmac = encryptedDataWithHmac.subarray(-HMAC_SIZE); // Last 32 bytes are HMAC + + const encryptedData = encryptedDataWithHmac.subarray(0, -HMAC_SIZE); // Everything except last 32 bytes + + // Find the keys + const aesKey = $findKey(sessionHandle, HsmKeyType.AES); + if (!aesKey) { + throw new Error("HSM: Decryption failed, AES key not found"); + } + + const hmacKey = $findKey(sessionHandle, HsmKeyType.HMAC); + if (!hmacKey) { + throw new Error("HSM: Decryption failed, HMAC key not found"); + } + + // Verify HMAC first + const hmacMechanism = { + mechanism: pkcs11js.CKM_SHA256_HMAC + }; + + pkcs11.C_VerifyInit(sessionHandle, hmacMechanism, hmacKey); + pkcs11.C_VerifyUpdate(sessionHandle, iv); + pkcs11.C_VerifyUpdate(sessionHandle, encryptedData); + + try { + pkcs11.C_VerifyFinal(sessionHandle, hmac); + } catch (error) { + logger.error(error, "HSM: HMAC verification failed"); + throw new Error("HSM: Decryption failed"); // Generic error for failed verification + } + + // Only decrypt if verification passed + const decryptMechanism = { + mechanism: pkcs11js.CKM_AES_CBC_PAD, + parameter: iv + }; + + pkcs11.C_DecryptInit(sessionHandle, decryptMechanism, aesKey); + + const tempBuffer = Buffer.alloc(encryptedData.length); + const decryptedData = pkcs11.C_Decrypt(sessionHandle, encryptedData, tempBuffer); + + 
// Create a new buffer from the decrypted data + return Buffer.from(decryptedData); + } catch (error) { + logger.error(error, "HSM: Failed to perform decryption"); + throw new Error("HSM: Decryption failed"); // Generic error for failed decryption, to avoid leaking details about why it failed (such as padding related errors) + } + }; + + if (providedSession) { + return $performDecryption(providedSession); + } + + const result = await $withSession($performDecryption); + return result; + }; + + // We test the core functionality of the PKCS#11 module that we are using throughout Infisical. This is to ensure that the user doesn't configure a faulty or unsupported HSM device. + const $testPkcs11Module = async (session: pkcs11js.Handle) => { + try { + if (!pkcs11 || !isInitialized) { + throw new Error("PKCS#11 module is not initialized"); + } + + if (!session) { + throw new Error("HSM: Attempted to run test without a valid session"); + } + + const randomData = pkcs11.C_GenerateRandom(session, Buffer.alloc(500)); + + const encryptedData = await encrypt(randomData, session); + const decryptedData = await decrypt(encryptedData, session); + + const randomDataHex = randomData.toString("hex"); + const decryptedDataHex = decryptedData.toString("hex"); + + if (randomDataHex !== decryptedDataHex && Buffer.compare(randomData, decryptedData)) { + throw new Error("HSM: Startup test failed. Decrypted data does not match original data"); + } + + return true; + } catch (error) { + logger.error(error, "HSM: Error testing PKCS#11 module"); + return false; + } + }; + + const isActive = async () => { + if (!isInitialized || !appCfg.isHsmConfigured) { + return false; + } + + let pkcs11TestPassed = false; + + try { + pkcs11TestPassed = await $withSession($testPkcs11Module); + } catch (err) { + logger.error(err, "HSM: Error testing PKCS#11 module"); + } + + return appCfg.isHsmConfigured && isInitialized && pkcs11TestPassed; + }; + + const startService = async () => { + if (!appCfg.isHsmConfigured || !pkcs11 || !isInitialized) return; + + try { + await $withSession(async (sessionHandle) => { + // Check if master key exists, create if not + + const genericAttributes = [ + { type: pkcs11js.CKA_TOKEN, value: true }, // Persistent storage + { type: pkcs11js.CKA_EXTRACTABLE, value: false }, // Cannot be extracted + { type: pkcs11js.CKA_SENSITIVE, value: true }, // Sensitive value + { type: pkcs11js.CKA_PRIVATE, value: true } // Requires authentication + ]; + + if (!$keyExists(sessionHandle, HsmKeyType.AES)) { + // Template for generating 256-bit AES master key + const keyTemplate = [ + { type: pkcs11js.CKA_CLASS, value: pkcs11js.CKO_SECRET_KEY }, + { type: pkcs11js.CKA_KEY_TYPE, value: pkcs11js.CKK_AES }, + { type: pkcs11js.CKA_VALUE_LEN, value: AES_KEY_SIZE / 8 }, + { type: pkcs11js.CKA_LABEL, value: appCfg.HSM_KEY_LABEL! 
}, + { type: pkcs11js.CKA_ENCRYPT, value: true }, // Allow encryption + { type: pkcs11js.CKA_DECRYPT, value: true }, // Allow decryption + ...genericAttributes + ]; + + // Generate the key + pkcs11.C_GenerateKey( + sessionHandle, + { + mechanism: pkcs11js.CKM_AES_KEY_GEN + }, + keyTemplate + ); + + logger.info(`HSM: Master key created successfully with label: ${appCfg.HSM_KEY_LABEL}`); + } + + // Check if HMAC key exists, create if not + if (!$keyExists(sessionHandle, HsmKeyType.HMAC)) { + const hmacKeyTemplate = [ + { type: pkcs11js.CKA_CLASS, value: pkcs11js.CKO_SECRET_KEY }, + { type: pkcs11js.CKA_KEY_TYPE, value: pkcs11js.CKK_GENERIC_SECRET }, + { type: pkcs11js.CKA_VALUE_LEN, value: HMAC_KEY_SIZE / 8 }, // 256-bit key + { type: pkcs11js.CKA_LABEL, value: `${appCfg.HSM_KEY_LABEL!}_HMAC` }, + { type: pkcs11js.CKA_SIGN, value: true }, // Allow signing + { type: pkcs11js.CKA_VERIFY, value: true }, // Allow verification + ...genericAttributes + ]; + + // Generate the HMAC key + pkcs11.C_GenerateKey( + sessionHandle, + { + mechanism: pkcs11js.CKM_GENERIC_SECRET_KEY_GEN + }, + hmacKeyTemplate + ); + + logger.info(`HSM: HMAC key created successfully with label: ${appCfg.HSM_KEY_LABEL}_HMAC`); + } + + // Get slot info to check supported mechanisms + const slotId = pkcs11.C_GetSessionInfo(sessionHandle).slotID; + const mechanisms = pkcs11.C_GetMechanismList(slotId); + + // Check for AES CBC PAD support + const hasAesCbc = mechanisms.includes(pkcs11js.CKM_AES_CBC_PAD); + + if (!hasAesCbc) { + throw new Error(`Required mechanism CKM_AES_CBC_PAD not supported by HSM`); + } + + // Run test encryption/decryption + const testPassed = await $testPkcs11Module(sessionHandle); + + if (!testPassed) { + throw new Error("PKCS#11 module test failed. Please ensure that the HSM is correctly configured."); + } + }); + } catch (error) { + logger.error(error, "HSM: Error initializing HSM service:"); + throw error; + } + }; + + return { + encrypt, + startService, + isActive, + decrypt + }; +}; diff --git a/backend/src/ee/services/hsm/hsm-types.ts b/backend/src/ee/services/hsm/hsm-types.ts new file mode 100644 index 0000000000..b688147f58 --- /dev/null +++ b/backend/src/ee/services/hsm/hsm-types.ts @@ -0,0 +1,11 @@ +import pkcs11js from "pkcs11js"; + +export type HsmModule = { + pkcs11: pkcs11js.PKCS11; + isInitialized: boolean; +}; + +export enum HsmKeyType { + AES = "AES", + HMAC = "hmac" +} diff --git a/backend/src/ee/services/identity-project-additional-privilege-v2/identity-project-additional-privilege-v2-dal.ts b/backend/src/ee/services/identity-project-additional-privilege-v2/identity-project-additional-privilege-v2-dal.ts new file mode 100644 index 0000000000..a7d8794a4d --- /dev/null +++ b/backend/src/ee/services/identity-project-additional-privilege-v2/identity-project-additional-privilege-v2-dal.ts @@ -0,0 +1,12 @@ +import { TDbClient } from "@app/db"; +import { TableName } from "@app/db/schemas"; +import { ormify } from "@app/lib/knex"; + +export type TIdentityProjectAdditionalPrivilegeV2DALFactory = ReturnType< + typeof identityProjectAdditionalPrivilegeV2DALFactory +>; + +export const identityProjectAdditionalPrivilegeV2DALFactory = (db: TDbClient) => { + const orm = ormify(db, TableName.IdentityProjectAdditionalPrivilege); + return orm; +}; diff --git a/backend/src/ee/services/identity-project-additional-privilege-v2/identity-project-additional-privilege-v2-service.ts b/backend/src/ee/services/identity-project-additional-privilege-v2/identity-project-additional-privilege-v2-service.ts new file mode
100644 index 0000000000..26a694a4a5 --- /dev/null +++ b/backend/src/ee/services/identity-project-additional-privilege-v2/identity-project-additional-privilege-v2-service.ts @@ -0,0 +1,343 @@ +import { ForbiddenError } from "@casl/ability"; +import { packRules } from "@casl/ability/extra"; +import ms from "ms"; + +import { TableName } from "@app/db/schemas"; +import { isAtLeastAsPrivileged } from "@app/lib/casl"; +import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors"; +import { unpackPermissions } from "@app/server/routes/santizedSchemas/permission"; +import { ActorType } from "@app/services/auth/auth-type"; +import { TIdentityProjectDALFactory } from "@app/services/identity-project/identity-project-dal"; +import { TProjectDALFactory } from "@app/services/project/project-dal"; + +import { TPermissionServiceFactory } from "../permission/permission-service"; +import { ProjectPermissionActions, ProjectPermissionSub } from "../permission/project-permission"; +import { TIdentityProjectAdditionalPrivilegeV2DALFactory } from "./identity-project-additional-privilege-v2-dal"; +import { + IdentityProjectAdditionalPrivilegeTemporaryMode, + TCreateIdentityPrivilegeDTO, + TDeleteIdentityPrivilegeByIdDTO, + TGetIdentityPrivilegeDetailsByIdDTO, + TGetIdentityPrivilegeDetailsBySlugDTO, + TListIdentityPrivilegesDTO, + TUpdateIdentityPrivilegeByIdDTO +} from "./identity-project-additional-privilege-v2-types"; + +type TIdentityProjectAdditionalPrivilegeV2ServiceFactoryDep = { + identityProjectAdditionalPrivilegeDAL: TIdentityProjectAdditionalPrivilegeV2DALFactory; + identityProjectDAL: Pick; + projectDAL: Pick; + permissionService: Pick; +}; + +export type TIdentityProjectAdditionalPrivilegeV2ServiceFactory = ReturnType< + typeof identityProjectAdditionalPrivilegeV2ServiceFactory +>; + +export const identityProjectAdditionalPrivilegeV2ServiceFactory = ({ + identityProjectAdditionalPrivilegeDAL, + identityProjectDAL, + projectDAL, + permissionService +}: TIdentityProjectAdditionalPrivilegeV2ServiceFactoryDep) => { + const create = async ({ + slug, + actor, + actorId, + projectId, + actorOrgId, + identityId, + permissions: customPermission, + actorAuthMethod, + ...dto + }: TCreateIdentityPrivilegeDTO) => { + const identityProjectMembership = await identityProjectDAL.findOne({ identityId, projectId }); + if (!identityProjectMembership) + throw new NotFoundError({ message: `Failed to find identity with id ${identityId}` }); + + const { permission } = await permissionService.getProjectPermission( + actor, + actorId, + identityProjectMembership.projectId, + actorAuthMethod, + actorOrgId + ); + ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Edit, ProjectPermissionSub.Identity); + const { permission: targetIdentityPermission } = await permissionService.getProjectPermission( + ActorType.IDENTITY, + identityId, + identityProjectMembership.projectId, + actorAuthMethod, + actorOrgId + ); + + // we need to validate that the privilege given is not higher than the assigning users permission + // @ts-expect-error this is expected error because of one being really accurate rule definition other being a bit more broader. 
Both are valid casl rules + targetIdentityPermission.update(targetIdentityPermission.rules.concat(customPermission)); + const hasRequiredPriviledges = isAtLeastAsPrivileged(permission, targetIdentityPermission); + if (!hasRequiredPriviledges) + throw new ForbiddenRequestError({ message: "Failed to update more privileged identity" }); + + const existingSlug = await identityProjectAdditionalPrivilegeDAL.findOne({ + slug, + projectMembershipId: identityProjectMembership.id + }); + if (existingSlug) throw new BadRequestError({ message: "Additional privilege with provided slug already exists" }); + + const packedPermission = JSON.stringify(packRules(customPermission)); + if (!dto.isTemporary) { + const additionalPrivilege = await identityProjectAdditionalPrivilegeDAL.create({ + projectMembershipId: identityProjectMembership.id, + slug, + permissions: packedPermission + }); + + return { + ...additionalPrivilege, + permissions: unpackPermissions(additionalPrivilege.permissions) + }; + } + + const relativeTempAllocatedTimeInMs = ms(dto.temporaryRange); + const additionalPrivilege = await identityProjectAdditionalPrivilegeDAL.create({ + projectMembershipId: identityProjectMembership.id, + slug, + permissions: packedPermission, + isTemporary: true, + temporaryMode: IdentityProjectAdditionalPrivilegeTemporaryMode.Relative, + temporaryRange: dto.temporaryRange, + temporaryAccessStartTime: new Date(dto.temporaryAccessStartTime), + temporaryAccessEndTime: new Date(new Date(dto.temporaryAccessStartTime).getTime() + relativeTempAllocatedTimeInMs) + }); + return { + ...additionalPrivilege, + permissions: unpackPermissions(additionalPrivilege.permissions) + }; + }; + + const updateById = async ({ + id, + data, + actorOrgId, + actor, + actorId, + actorAuthMethod + }: TUpdateIdentityPrivilegeByIdDTO) => { + const identityPrivilege = await identityProjectAdditionalPrivilegeDAL.findById(id); + if (!identityPrivilege) throw new NotFoundError({ message: `Identity privilege with ${id} not found` }); + + const identityProjectMembership = await identityProjectDAL.findOne({ id: identityPrivilege.projectMembershipId }); + if (!identityProjectMembership) + throw new NotFoundError({ + message: `Failed to find identity with membership ${identityPrivilege.projectMembershipId}` + }); + + const { permission } = await permissionService.getProjectPermission( + actor, + actorId, + identityProjectMembership.projectId, + actorAuthMethod, + actorOrgId + ); + ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Edit, ProjectPermissionSub.Identity); + const { permission: targetIdentityPermission } = await permissionService.getProjectPermission( + ActorType.IDENTITY, + identityProjectMembership.identityId, + identityProjectMembership.projectId, + actorAuthMethod, + actorOrgId + ); + + // we need to validate that the privilege given is not higher than the assigning users permission + // @ts-expect-error this is expected error because of one being really accurate rule definition other being a bit more broader. 
Both are valid casl rules + targetIdentityPermission.update(targetIdentityPermission.rules.concat(data.permissions || [])); + const hasRequiredPriviledges = isAtLeastAsPrivileged(permission, targetIdentityPermission); + if (!hasRequiredPriviledges) + throw new ForbiddenRequestError({ message: "Failed to update more privileged identity" }); + + if (data?.slug) { + const existingSlug = await identityProjectAdditionalPrivilegeDAL.findOne({ + slug: data.slug, + projectMembershipId: identityProjectMembership.id + }); + if (existingSlug && existingSlug.id !== identityPrivilege.id) + throw new BadRequestError({ message: "Additional privilege with provided slug already exists" }); + } + + const isTemporary = typeof data?.isTemporary !== "undefined" ? data.isTemporary : identityPrivilege.isTemporary; + const packedPermission = data.permissions ? JSON.stringify(packRules(data.permissions)) : undefined; + if (isTemporary) { + const temporaryAccessStartTime = data?.temporaryAccessStartTime || identityPrivilege?.temporaryAccessStartTime; + const temporaryRange = data?.temporaryRange || identityPrivilege?.temporaryRange; + const additionalPrivilege = await identityProjectAdditionalPrivilegeDAL.updateById(identityPrivilege.id, { + slug: data.slug, + permissions: packedPermission, + isTemporary: data.isTemporary, + temporaryRange: data.temporaryRange, + temporaryMode: data.temporaryMode, + temporaryAccessStartTime: new Date(temporaryAccessStartTime || ""), + temporaryAccessEndTime: new Date(new Date(temporaryAccessStartTime || "").getTime() + ms(temporaryRange || "")) + }); + return { + ...additionalPrivilege, + permissions: unpackPermissions(additionalPrivilege.permissions) + }; + } + + const additionalPrivilege = await identityProjectAdditionalPrivilegeDAL.updateById(identityPrivilege.id, { + slug: data.slug, + permissions: packedPermission, + isTemporary: false, + temporaryAccessStartTime: null, + temporaryAccessEndTime: null, + temporaryRange: null, + temporaryMode: null + }); + return { + ...additionalPrivilege, + permissions: unpackPermissions(additionalPrivilege.permissions) + }; + }; + + const deleteById = async ({ actorId, id, actor, actorOrgId, actorAuthMethod }: TDeleteIdentityPrivilegeByIdDTO) => { + const identityPrivilege = await identityProjectAdditionalPrivilegeDAL.findById(id); + if (!identityPrivilege) throw new NotFoundError({ message: `Identity privilege with ${id} not found` }); + + const identityProjectMembership = await identityProjectDAL.findOne({ id: identityPrivilege.projectMembershipId }); + if (!identityProjectMembership) + throw new NotFoundError({ + message: `Failed to find identity with membership ${identityPrivilege.projectMembershipId}` + }); + + const { permission } = await permissionService.getProjectPermission( + actor, + actorId, + identityProjectMembership.projectId, + actorAuthMethod, + actorOrgId + ); + ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Delete, ProjectPermissionSub.Identity); + const { permission: identityRolePermission } = await permissionService.getProjectPermission( + ActorType.IDENTITY, + identityProjectMembership.identityId, + identityProjectMembership.projectId, + actorAuthMethod, + actorOrgId + ); + const hasRequiredPriviledges = isAtLeastAsPrivileged(permission, identityRolePermission); + if (!hasRequiredPriviledges) + throw new ForbiddenRequestError({ message: "Failed to update more privileged identity" }); + + const deletedPrivilege = await identityProjectAdditionalPrivilegeDAL.deleteById(identityPrivilege.id); + 
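
// Permissions round-trip used throughout this service (a recap of the calls
// above, not new logic; `row` is an illustrative stand-in for a DAL record):
//
//   const packed = JSON.stringify(packRules(customPermission)); // write path
//   const permissions = unpackPermissions(row.permissions);     // read path
//
// which is why each return spreads the stored row and replaces its
// `permissions` column with the unpacked form.
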
return { + ...deletedPrivilege, + permissions: unpackPermissions(deletedPrivilege.permissions) + }; + }; + + const getPrivilegeDetailsById = async ({ + id, + actorOrgId, + actor, + actorId, + actorAuthMethod + }: TGetIdentityPrivilegeDetailsByIdDTO) => { + const identityPrivilege = await identityProjectAdditionalPrivilegeDAL.findById(id); + if (!identityPrivilege) throw new NotFoundError({ message: `Identity privilege with ${id} not found` }); + + const identityProjectMembership = await identityProjectDAL.findOne({ id: identityPrivilege.projectMembershipId }); + if (!identityProjectMembership) + throw new NotFoundError({ + message: `Failed to find identity with membership ${identityPrivilege.projectMembershipId}` + }); + + const { permission } = await permissionService.getProjectPermission( + actor, + actorId, + identityProjectMembership.projectId, + actorAuthMethod, + actorOrgId + ); + ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Identity); + + return { + ...identityPrivilege, + permissions: unpackPermissions(identityPrivilege.permissions) + }; + }; + + const getPrivilegeDetailsBySlug = async ({ + identityId, + slug, + projectSlug, + actorOrgId, + actor, + actorId, + actorAuthMethod + }: TGetIdentityPrivilegeDetailsBySlugDTO) => { + const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId); + if (!project) throw new NotFoundError({ message: `Project with slug ${projectSlug} not found` }); + const projectId = project.id; + + const identityProjectMembership = await identityProjectDAL.findOne({ identityId, projectId }); + if (!identityProjectMembership) + throw new NotFoundError({ message: `Failed to find identity with id ${identityId}` }); + const { permission } = await permissionService.getProjectPermission( + actor, + actorId, + identityProjectMembership.projectId, + actorAuthMethod, + actorOrgId + ); + ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Identity); + + const identityPrivilege = await identityProjectAdditionalPrivilegeDAL.findOne({ + slug, + projectMembershipId: identityProjectMembership.id + }); + if (!identityPrivilege) throw new NotFoundError({ message: "Identity additional privilege not found" }); + + return { + ...identityPrivilege, + permissions: unpackPermissions(identityPrivilege.permissions) + }; + }; + + const listIdentityProjectPrivileges = async ({ + identityId, + actorOrgId, + actor, + actorId, + actorAuthMethod, + projectId + }: TListIdentityPrivilegesDTO) => { + const identityProjectMembership = await identityProjectDAL.findOne({ identityId, projectId }); + if (!identityProjectMembership) + throw new NotFoundError({ message: `Failed to find identity with id ${identityId}` }); + const { permission } = await permissionService.getProjectPermission( + actor, + actorId, + identityProjectMembership.projectId, + actorAuthMethod, + actorOrgId + ); + ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Edit, ProjectPermissionSub.Identity); + + const identityPrivileges = await identityProjectAdditionalPrivilegeDAL.find( + { + projectMembershipId: identityProjectMembership.id + }, + { sort: [[`${TableName.IdentityProjectAdditionalPrivilege}.slug` as "slug", "asc"]] } + ); + return identityPrivileges; + }; + + return { + getPrivilegeDetailsById, + getPrivilegeDetailsBySlug, + listIdentityProjectPrivileges, + create, + updateById, + deleteById + }; +}; diff --git
a/backend/src/ee/services/identity-project-additional-privilege-v2/identity-project-additional-privilege-v2-types.ts b/backend/src/ee/services/identity-project-additional-privilege-v2/identity-project-additional-privilege-v2-types.ts new file mode 100644 index 0000000000..aab6b85101 --- /dev/null +++ b/backend/src/ee/services/identity-project-additional-privilege-v2/identity-project-additional-privilege-v2-types.ts @@ -0,0 +1,55 @@ +import { TProjectPermission } from "@app/lib/types"; + +import { TProjectPermissionV2Schema } from "../permission/project-permission"; + +export enum IdentityProjectAdditionalPrivilegeTemporaryMode { + Relative = "relative" +} + +export type TCreateIdentityPrivilegeDTO = { + permissions: TProjectPermissionV2Schema[]; + identityId: string; + projectId: string; + slug: string; +} & ( + | { + isTemporary: false; + } + | { + isTemporary: true; + temporaryMode: IdentityProjectAdditionalPrivilegeTemporaryMode.Relative; + temporaryRange: string; + temporaryAccessStartTime: string; + } +) & + Omit<TProjectPermission, "projectId">; + +export type TUpdateIdentityPrivilegeByIdDTO = { id: string } & Omit<TProjectPermission, "projectId"> & { + data: Partial<{ + permissions: TProjectPermissionV2Schema[]; + slug: string; + isTemporary: boolean; + temporaryMode: IdentityProjectAdditionalPrivilegeTemporaryMode.Relative; + temporaryRange: string; + temporaryAccessStartTime: string; + }>; + }; + +export type TDeleteIdentityPrivilegeByIdDTO = Omit<TProjectPermission, "projectId"> & { + id: string; +}; + +export type TGetIdentityPrivilegeDetailsByIdDTO = Omit<TProjectPermission, "projectId"> & { + id: string; +}; + +export type TListIdentityPrivilegesDTO = Omit<TProjectPermission, "projectId"> & { + identityId: string; + projectId: string; +}; + +export type TGetIdentityPrivilegeDetailsBySlugDTO = Omit<TProjectPermission, "projectId"> & { + slug: string; + identityId: string; + projectSlug: string; +}; diff --git a/backend/src/ee/services/identity-project-additional-privilege/identity-project-additional-privilege-service.ts b/backend/src/ee/services/identity-project-additional-privilege/identity-project-additional-privilege-service.ts index 70753ee094..4811eb52ae 100644 --- a/backend/src/ee/services/identity-project-additional-privilege/identity-project-additional-privilege-service.ts +++ b/backend/src/ee/services/identity-project-additional-privilege/identity-project-additional-privilege-service.ts @@ -1,10 +1,10 @@ import { ForbiddenError, MongoAbility, RawRuleOf } from "@casl/ability"; -import { PackRule, unpackRules } from "@casl/ability/extra"; +import { PackRule, packRules, unpackRules } from "@casl/ability/extra"; import ms from "ms"; -import { z } from "zod"; import { isAtLeastAsPrivileged } from "@app/lib/casl"; -import { BadRequestError, ForbiddenRequestError } from "@app/lib/errors"; +import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors"; +import { UnpackedPermissionSchema } from "@app/server/routes/santizedSchemas/permission"; import { ActorType } from "@app/services/auth/auth-type"; import { TIdentityProjectDALFactory } from "@app/services/identity-project/identity-project-dal"; import { TProjectDALFactory } from "@app/services/project/project-dal"; @@ -32,22 +32,6 @@ export type TIdentityProjectAdditionalPrivilegeServiceFactory = ReturnType< typeof identityProjectAdditionalPrivilegeServiceFactory >; -// TODO(akhilmhdh): move this to more centralized -export const UnpackedPermissionSchema = z.object({ - subject: z.union([z.string().min(1), z.string().array()]).optional(), - action: z.union([z.string().min(1), z.string().array()]), - conditions: z - .object({ - environment: z.string().optional(), - secretPath: z - .object({ -
$glob: z.string().min(1) - }) - .optional() - }) - .optional() -}); - const unpackPermissions = (permissions: unknown) => UnpackedPermissionSchema.array().parse( unpackRules((permissions || []) as PackRule<RawRuleOf<MongoAbility<ProjectPermissionSet>>>[]) @@ -71,12 +55,12 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({ ...dto }: TCreateIdentityPrivilegeDTO) => { const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId); - if (!project) throw new BadRequestError({ message: "Project not found" }); + if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` }); const projectId = project.id; const identityProjectMembership = await identityProjectDAL.findOne({ identityId, projectId }); if (!identityProjectMembership) - throw new BadRequestError({ message: `Failed to find identity with id ${identityId}` }); + throw new NotFoundError({ message: `Failed to find identity with id ${identityId}` }); const { permission } = await permissionService.getProjectPermission( actor, @@ -86,14 +70,18 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({ actorOrgId ); ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Edit, ProjectPermissionSub.Identity); - const { permission: identityRolePermission } = await permissionService.getProjectPermission( + const { permission: targetIdentityPermission } = await permissionService.getProjectPermission( ActorType.IDENTITY, identityId, identityProjectMembership.projectId, actorAuthMethod, actorOrgId ); - const hasRequiredPriviledges = isAtLeastAsPrivileged(permission, identityRolePermission); + + // we need to validate that the privilege being granted is not higher than the assigning user's permission + // @ts-expect-error this is an expected error because one rule definition is very precise while the other is a bit broader.
Both are valid casl rules + targetIdentityPermission.update(targetIdentityPermission.rules.concat(customPermission)); + const hasRequiredPriviledges = isAtLeastAsPrivileged(permission, targetIdentityPermission); if (!hasRequiredPriviledges) throw new ForbiddenRequestError({ message: "Failed to update more privileged identity" }); @@ -103,11 +91,12 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({ }); if (existingSlug) throw new BadRequestError({ message: "Additional privilege with provided slug already exists" }); + const packedPermission = JSON.stringify(packRules(customPermission)); if (!dto.isTemporary) { const additionalPrivilege = await identityProjectAdditionalPrivilegeDAL.create({ projectMembershipId: identityProjectMembership.id, slug, - permissions: customPermission + permissions: packedPermission }); return { ...additionalPrivilege, @@ -119,7 +108,7 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({ const additionalPrivilege = await identityProjectAdditionalPrivilegeDAL.create({ projectMembershipId: identityProjectMembership.id, slug, - permissions: customPermission, + permissions: packedPermission, isTemporary: true, temporaryMode: IdentityProjectAdditionalPrivilegeTemporaryMode.Relative, temporaryRange: dto.temporaryRange, @@ -143,12 +132,12 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({ actorAuthMethod }: TUpdateIdentityPrivilegeDTO) => { const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId); - if (!project) throw new BadRequestError({ message: "Project not found" }); + if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` }); const projectId = project.id; const identityProjectMembership = await identityProjectDAL.findOne({ identityId, projectId }); if (!identityProjectMembership) - throw new BadRequestError({ message: `Failed to find identity with id ${identityId}` }); + throw new NotFoundError({ message: `Failed to find identity with id ${identityId}` }); const { permission } = await permissionService.getProjectPermission( actor, @@ -158,14 +147,19 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({ actorOrgId ); ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Edit, ProjectPermissionSub.Identity); - const { permission: identityRolePermission } = await permissionService.getProjectPermission( + + const { permission: targetIdentityPermission } = await permissionService.getProjectPermission( ActorType.IDENTITY, identityProjectMembership.identityId, identityProjectMembership.projectId, actorAuthMethod, actorOrgId ); - const hasRequiredPriviledges = isAtLeastAsPrivileged(permission, identityRolePermission); + + // we need to validate that the privilege being granted is not higher than the assigning user's permission + // @ts-expect-error this is an expected error because one rule definition is very precise while the other is a bit broader.
Both are valid casl rules + targetIdentityPermission.update(targetIdentityPermission.rules.concat(data.permissions || [])); + const hasRequiredPriviledges = isAtLeastAsPrivileged(permission, targetIdentityPermission); if (!hasRequiredPriviledges) throw new ForbiddenRequestError({ message: "Failed to update more privileged identity" }); @@ -173,7 +167,11 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({ slug, projectMembershipId: identityProjectMembership.id }); - if (!identityPrivilege) throw new BadRequestError({ message: "Identity additional privilege not found" }); + if (!identityPrivilege) { + throw new NotFoundError({ + message: `Identity additional privilege with slug '${slug}' not found for the specified identity with ID '${identityProjectMembership.identityId}'` + }); + } if (data?.slug) { const existingSlug = await identityProjectAdditionalPrivilegeDAL.findOne({ slug: data.slug, @@ -184,23 +182,29 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({ } const isTemporary = typeof data?.isTemporary !== "undefined" ? data.isTemporary : identityPrivilege.isTemporary; + + const packedPermission = data.permissions ? JSON.stringify(packRules(data.permissions)) : undefined; if (isTemporary) { const temporaryAccessStartTime = data?.temporaryAccessStartTime || identityPrivilege?.temporaryAccessStartTime; const temporaryRange = data?.temporaryRange || identityPrivilege?.temporaryRange; const additionalPrivilege = await identityProjectAdditionalPrivilegeDAL.updateById(identityPrivilege.id, { - ...data, + slug: data.slug, + permissions: packedPermission, + isTemporary: data.isTemporary, + temporaryRange: data.temporaryRange, + temporaryMode: data.temporaryMode, temporaryAccessStartTime: new Date(temporaryAccessStartTime || ""), temporaryAccessEndTime: new Date(new Date(temporaryAccessStartTime || "").getTime() + ms(temporaryRange || "")) }); return { ...additionalPrivilege, - permissions: unpackPermissions(additionalPrivilege.permissions) }; } const additionalPrivilege = await identityProjectAdditionalPrivilegeDAL.updateById(identityPrivilege.id, { - ...data, + slug: data.slug, + permissions: packedPermission, isTemporary: false, temporaryAccessStartTime: null, temporaryAccessEndTime: null, @@ -209,7 +213,6 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({ }); return { ...additionalPrivilege, - permissions: unpackPermissions(additionalPrivilege.permissions) }; }; @@ -224,12 +227,12 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({ actorAuthMethod }: TDeleteIdentityPrivilegeDTO) => { const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId); - if (!project) throw new BadRequestError({ message: "Project not found" }); + if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` }); const projectId = project.id; const identityProjectMembership = await identityProjectDAL.findOne({ identityId, projectId }); if (!identityProjectMembership) - throw new BadRequestError({ message: `Failed to find identity with id ${identityId}` }); + throw new NotFoundError({ message: `Failed to find identity with id ${identityId}` }); const { permission } = await permissionService.getProjectPermission( actor, @@ -254,7 +257,11 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({ slug, projectMembershipId: identityProjectMembership.id }); - if (!identityPrivilege) throw new BadRequestError({ message: "Identity additional privilege not found" }); + if (!identityPrivilege) { 
+ throw new NotFoundError({ + message: `Identity additional privilege with slug '${slug}' not found for the specified identity with ID '${identityProjectMembership.identityId}'` + }); + } const deletedPrivilege = await identityProjectAdditionalPrivilegeDAL.deleteById(identityPrivilege.id); return { @@ -274,12 +281,12 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({ actorAuthMethod }: TGetIdentityPrivilegeDetailsDTO) => { const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId); - if (!project) throw new BadRequestError({ message: "Project not found" }); + if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` }); const projectId = project.id; const identityProjectMembership = await identityProjectDAL.findOne({ identityId, projectId }); if (!identityProjectMembership) - throw new BadRequestError({ message: `Failed to find identity with id ${identityId}` }); + throw new NotFoundError({ message: `Failed to find identity with id ${identityId}` }); const { permission } = await permissionService.getProjectPermission( actor, actorId, @@ -287,14 +294,17 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({ actorAuthMethod, actorOrgId ); - ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Edit, ProjectPermissionSub.Identity); + ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Identity); const identityPrivilege = await identityProjectAdditionalPrivilegeDAL.findOne({ slug, projectMembershipId: identityProjectMembership.id }); - if (!identityPrivilege) throw new BadRequestError({ message: "Identity additional privilege not found" }); - + if (!identityPrivilege) { + throw new NotFoundError({ + message: `Identity additional privilege with slug '${slug}' not found for the specified identity with ID '${identityProjectMembership.identityId}'` + }); + } return { ...identityPrivilege, permissions: unpackPermissions(identityPrivilege.permissions) @@ -310,12 +320,12 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({ projectSlug }: TListIdentityPrivilegesDTO) => { const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId); - if (!project) throw new BadRequestError({ message: "Project not found" }); + if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` }); const projectId = project.id; const identityProjectMembership = await identityProjectDAL.findOne({ identityId, projectId }); if (!identityProjectMembership) - throw new BadRequestError({ message: `Failed to find identity with id ${identityId}` }); + throw new NotFoundError({ message: `Failed to find identity with id ${identityId}` }); const { permission } = await permissionService.getProjectPermission( actor, actorId, @@ -330,7 +340,6 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({ }); return identityPrivileges.map((el) => ({ ...el, - permissions: unpackPermissions(el.permissions) })); }; diff --git a/backend/src/ee/services/identity-project-additional-privilege/identity-project-additional-privilege-types.ts b/backend/src/ee/services/identity-project-additional-privilege/identity-project-additional-privilege-types.ts index 88ff01d7da..6a0ecee5fd 100644 --- a/backend/src/ee/services/identity-project-additional-privilege/identity-project-additional-privilege-types.ts +++ b/backend/src/ee/services/identity-project-additional-privilege/identity-project-additional-privilege-types.ts @@ -1,11 +1,13 
@@ import { TProjectPermission } from "@app/lib/types"; +import { TProjectPermissionV2Schema } from "../permission/project-permission"; + export enum IdentityProjectAdditionalPrivilegeTemporaryMode { Relative = "relative" } export type TCreateIdentityPrivilegeDTO = { - permissions: unknown; + permissions: TProjectPermissionV2Schema[]; identityId: string; projectSlug: string; slug: string; @@ -27,7 +29,7 @@ export type TUpdateIdentityPrivilegeDTO = { slug: string; identityId: string; projectSlug: string } & Omit< TProjectPermission, "projectId" > & { data: Partial<{ - permissions: unknown; + permissions: TProjectPermissionV2Schema[]; slug: string; isTemporary: boolean; temporaryMode: IdentityProjectAdditionalPrivilegeTemporaryMode.Relative; diff --git a/backend/src/ee/services/ldap-config/ldap-config-service.ts b/backend/src/ee/services/ldap-config/ldap-config-service.ts index dd49bd0aeb..0cbab8c32a 100644 --- a/backend/src/ee/services/ldap-config/ldap-config-service.ts +++ b/backend/src/ee/services/ldap-config/ldap-config-service.ts @@ -1,14 +1,7 @@ import { ForbiddenError } from "@casl/ability"; import jwt from "jsonwebtoken"; -import { - OrgMembershipRole, - OrgMembershipStatus, - SecretKeyEncoding, - TableName, - TLdapConfigsUpdate, - TUsers -} from "@app/db/schemas"; +import { OrgMembershipStatus, SecretKeyEncoding, TableName, TLdapConfigsUpdate, TUsers } from "@app/db/schemas"; import { TGroupDALFactory } from "@app/ee/services/group/group-dal"; import { addUsersToGroupByUserIds, removeUsersFromGroupByUserIds } from "@app/ee/services/group/group-fns"; import { TUserGroupMembershipDALFactory } from "@app/ee/services/group/user-group-membership-dal"; @@ -21,16 +14,21 @@ import { infisicalSymmetricDecrypt, infisicalSymmetricEncypt } from "@app/lib/crypto/encryption"; -import { BadRequestError } from "@app/lib/errors"; +import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors"; import { AuthMethod, AuthTokenType } from "@app/services/auth/auth-type"; +import { TAuthTokenServiceFactory } from "@app/services/auth-token/auth-token-service"; +import { TokenType } from "@app/services/auth-token/auth-token-types"; import { TGroupProjectDALFactory } from "@app/services/group-project/group-project-dal"; import { TOrgBotDALFactory } from "@app/services/org/org-bot-dal"; import { TOrgDALFactory } from "@app/services/org/org-dal"; +import { getDefaultOrgMembershipRole } from "@app/services/org/org-role-fns"; import { TOrgMembershipDALFactory } from "@app/services/org-membership/org-membership-dal"; import { TProjectDALFactory } from "@app/services/project/project-dal"; import { TProjectBotDALFactory } from "@app/services/project-bot/project-bot-dal"; import { TProjectKeyDALFactory } from "@app/services/project-key/project-key-dal"; +import { SmtpTemplates, TSmtpService } from "@app/services/smtp/smtp-service"; import { getServerCfg } from "@app/services/super-admin/super-admin-service"; +import { LoginMethod } from "@app/services/super-admin/super-admin-types"; import { TUserDALFactory } from "@app/services/user/user-dal"; import { normalizeUsername } from "@app/services/user/user-fns"; import { TUserAliasDALFactory } from "@app/services/user-alias/user-alias-dal"; @@ -50,7 +48,7 @@ import { TTestLdapConnectionDTO, TUpdateLdapCfgDTO } from "./ldap-config-types"; -import { testLDAPConfig } from "./ldap-fns"; +import { searchGroups, testLDAPConfig } from "./ldap-fns"; import { TLdapGroupMapDALFactory } from "./ldap-group-map-dal"; type TLdapConfigServiceFactoryDep = { @@ -73,11 +71,19 @@ type
TLdapConfigServiceFactoryDep = { >; userDAL: Pick< TUserDALFactory, - "create" | "findOne" | "transaction" | "updateById" | "findUserEncKeyByUserIdsBatch" | "find" + | "create" + | "findOne" + | "transaction" + | "updateById" + | "findUserEncKeyByUserIdsBatch" + | "find" + | "findUserEncKeyByUserId" >; userAliasDAL: Pick<TUserAliasDALFactory, "create" | "findOne">; permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">; licenseService: Pick<TLicenseServiceFactory, "getPlan" | "updateSubscriptionOrgMemberCount">; + tokenService: Pick<TAuthTokenServiceFactory, "createTokenForUser">; + smtpService: Pick<TSmtpService, "sendMail">; }; export type TLdapConfigServiceFactory = ReturnType<typeof ldapConfigServiceFactory>; @@ -97,7 +103,9 @@ export const ldapConfigServiceFactory = ({ userDAL, userAliasDAL, permissionService, - licenseService + licenseService, + tokenService, + smtpService }: TLdapConfigServiceFactoryDep) => { const createLdapCfg = async ({ actor, @@ -109,6 +117,7 @@ url, bindDN, bindPass, + uniqueUserAttribute, searchBase, searchFilter, groupSearchBase, @@ -187,6 +196,7 @@ encryptedBindPass, bindPassIV, bindPassTag, + uniqueUserAttribute, searchBase, searchFilter, groupSearchBase, @@ -209,6 +219,7 @@ url, bindDN, bindPass, + uniqueUserAttribute, searchBase, searchFilter, groupSearchBase, @@ -231,11 +242,16 @@ searchBase, searchFilter, groupSearchBase, - groupSearchFilter + groupSearchFilter, + uniqueUserAttribute }; const orgBot = await orgBotDAL.findOne({ orgId }); - if (!orgBot) throw new BadRequestError({ message: "Org bot not found", name: "OrgBotNotFound" }); + if (!orgBot) + throw new NotFoundError({ + message: `Organization bot in organization with ID '${orgId}' not found`, + name: "OrgBotNotFound" + }); const key = infisicalSymmetricDecrypt({ ciphertext: orgBot.encryptedSymmetricKey, iv: orgBot.symmetricKeyIV, @@ -269,12 +285,21 @@ return ldapConfig; }; - const getLdapCfg = async (filter: { orgId: string; isActive?: boolean }) => { + const getLdapCfg = async (filter: { orgId: string; isActive?: boolean; id?: string }) => { const ldapConfig = await ldapConfigDAL.findOne(filter); - if (!ldapConfig) throw new BadRequestError({ message: "Failed to find organization LDAP data" }); + if (!ldapConfig) { + throw new NotFoundError({ + message: `Failed to find organization LDAP data in organization with ID '${filter.orgId}'` + }); + } const orgBot = await orgBotDAL.findOne({ orgId: ldapConfig.orgId }); - if (!orgBot) throw new BadRequestError({ message: "Org bot not found", name: "OrgBotNotFound" }); + if (!orgBot) { + throw new NotFoundError({ + message: `Organization bot not found in organization with ID '${ldapConfig.orgId}'`, + name: "OrgBotNotFound" + }); + } const key = infisicalSymmetricDecrypt({ ciphertext: orgBot.encryptedSymmetricKey, @@ -332,6 +357,7 @@ url: ldapConfig.url, bindDN, bindPass, + uniqueUserAttribute: ldapConfig.uniqueUserAttribute, searchBase: ldapConfig.searchBase, searchFilter: ldapConfig.searchFilter, groupSearchBase: ldapConfig.groupSearchBase, @@ -356,7 +382,7 @@ const bootLdap = async (organizationSlug: string) => { const organization = await orgDAL.findOne({ slug: organizationSlug }); - if (!organization) throw new BadRequestError({ message: "Org not found" }); + if (!organization) throw new NotFoundError({ message: `Organization with slug '${organizationSlug}' not found` }); const ldapConfig = await getLdapCfg({ orgId: organization.id, @@ -368,6 +394,7 @@ url:
ldapConfig.url, bindDN: ldapConfig.bindDN, bindCredentials: ldapConfig.bindPass, + uniqueUserAttribute: ldapConfig.uniqueUserAttribute, searchBase: ldapConfig.searchBase, searchFilter: ldapConfig.searchFilter || "(uid={{username}})", // searchAttributes: ["uid", "uidNumber", "givenName", "sn", "mail"], @@ -398,6 +425,13 @@ }: TLdapLoginDTO) => { const appCfg = getConfig(); const serverCfg = await getServerCfg(); + + if (serverCfg.enabledLoginMethods && !serverCfg.enabledLoginMethods.includes(LoginMethod.LDAP)) { + throw new ForbiddenRequestError({ + message: "Login with LDAP is disabled by administrator." + }); + } + let userAlias = await userAliasDAL.findOne({ externalId, orgId, @@ -405,7 +439,7 @@ }); const organization = await orgDAL.findOrgById(orgId); - if (!organization) throw new BadRequestError({ message: "Org not found" }); + if (!organization) throw new NotFoundError({ message: `Organization with ID '${orgId}' not found` }); if (userAlias) { await userDAL.transaction(async (tx) => { @@ -417,12 +451,16 @@ { tx } ); if (!orgMembership) { + const { role, roleId } = await getDefaultOrgMembershipRole(organization.defaultMembershipRole); + await orgDAL.createMembership( { userId: userAlias.userId, orgId, - role: OrgMembershipRole.Member, - status: OrgMembershipStatus.Accepted + role, + roleId, + status: OrgMembershipStatus.Accepted, + isActive: true }, tx ); @@ -437,9 +475,24 @@ } }); } else { + const plan = await licenseService.getPlan(orgId); + if (plan?.memberLimit && plan.membersUsed >= plan.memberLimit) { + // the plan imposes a member limit and the number of members used has reached it + throw new BadRequestError({ + message: "Failed to create new member via LDAP due to member limit reached. Upgrade plan to add more members." + }); + } + + if (plan?.identityLimit && plan.identitiesUsed >= plan.identityLimit) { + // the plan imposes an identity limit and the number of identities used has reached it + throw new BadRequestError({ + message: "Failed to create new member via LDAP due to identity limit reached. Upgrade plan to add more identities." + }); + } + userAlias = await userDAL.transaction(async (tx) => { let newUser: TUsers | undefined; - if (serverCfg.trustSamlEmails) { + if (serverCfg.trustLdapEmails) { newUser = await userDAL.findOne( { email, @@ -486,13 +539,17 @@ ); if (!orgMembership) { + const { role, roleId } = await getDefaultOrgMembershipRole(organization.defaultMembershipRole); + await orgMembershipDAL.create( { - userId: userAlias.userId, + userId: newUser.id, inviteEmail: email, orgId, - role: OrgMembershipRole.Member, - status: newUser.isAccepted ? OrgMembershipStatus.Accepted : OrgMembershipStatus.Invited // if user is fully completed, then set status to accepted, otherwise set it to invited so we can update it later + role, + roleId, + status: newUser.isAccepted ?
OrgMembershipStatus.Accepted : OrgMembershipStatus.Invited, // if user is fully completed, then set status to accepted, otherwise set it to invited so we can update it later + isActive: true }, tx ); @@ -592,12 +649,14 @@ export const ldapConfigServiceFactory = ({ }); const isUserCompleted = Boolean(user.isAccepted); + const userEnc = await userDAL.findUserEncKeyByUserId(user.id); const providerAuthToken = jwt.sign( { authTokenType: AuthTokenType.PROVIDER_TOKEN, userId: user.id, username: user.username, + hasExchangedPrivateKey: Boolean(userEnc?.serverEncryptedPrivateKey), ...(user.email && { email: user.email, isEmailVerified: user.isEmailVerified }), firstName, lastName, @@ -619,6 +678,22 @@ export const ldapConfigServiceFactory = ({ } ); + if (user.email && !user.isEmailVerified) { + const token = await tokenService.createTokenForUser({ + type: TokenType.TOKEN_EMAIL_VERIFICATION, + userId: user.id + }); + + await smtpService.sendMail({ + template: SmtpTemplates.EmailVerification, + subjectLine: "Infisical confirmation code", + recipients: [user.email], + substitutions: { + code: token + } + }); + } + return { isUserCompleted, providerAuthToken }; }; @@ -638,7 +713,11 @@ export const ldapConfigServiceFactory = ({ orgId }); - if (!ldapConfig) throw new BadRequestError({ message: "Failed to find organization LDAP data" }); + if (!ldapConfig) { + throw new NotFoundError({ + message: `Failed to find organization LDAP data with ID '${ldapConfigId}' in organization with ID ${orgId}` + }); + } const groupMaps = await ldapGroupMapDAL.findLdapGroupMapsByLdapConfigId(ldapConfigId); @@ -664,14 +743,32 @@ export const ldapConfigServiceFactory = ({ message: "Failed to create LDAP group map due to plan restriction. Upgrade plan to create LDAP group map." }); - const ldapConfig = await ldapConfigDAL.findOne({ - id: ldapConfigId, - orgId + const ldapConfig = await getLdapCfg({ + orgId, + id: ldapConfigId }); - if (!ldapConfig) throw new BadRequestError({ message: "Failed to find organization LDAP data" }); + + if (!ldapConfig.groupSearchBase) { + throw new BadRequestError({ + message: "Configure a group search base in your LDAP configuration in order to proceed." 
+ }); + } + + const groupSearchFilter = `(cn=${ldapGroupCN})`; + const groups = await searchGroups(ldapConfig, groupSearchFilter, ldapConfig.groupSearchBase); + + if (!groups.some((g) => g.cn === ldapGroupCN)) { + throw new NotFoundError({ + message: "Failed to find LDAP Group CN" + }); + } const group = await groupDAL.findOne({ slug: groupSlug, orgId }); - if (!group) throw new BadRequestError({ message: "Failed to find group" }); + if (!group) { + throw new NotFoundError({ + message: `Failed to find group with slug '${groupSlug}' in organization with ID '${orgId}'` + }); + } const groupMap = await ldapGroupMapDAL.create({ ldapConfigId, @@ -705,7 +802,11 @@ export const ldapConfigServiceFactory = ({ orgId }); - if (!ldapConfig) throw new BadRequestError({ message: "Failed to find organization LDAP data" }); + if (!ldapConfig) { + throw new NotFoundError({ + message: `Failed to find organization LDAP data with ID '${ldapConfigId}' in organization with ID ${orgId}` + }); + } const [deletedGroupMap] = await ldapGroupMapDAL.delete({ ldapConfigId: ldapConfig.id, diff --git a/backend/src/ee/services/ldap-config/ldap-config-types.ts b/backend/src/ee/services/ldap-config/ldap-config-types.ts index aa4aa8da70..86f4bf0d56 100644 --- a/backend/src/ee/services/ldap-config/ldap-config-types.ts +++ b/backend/src/ee/services/ldap-config/ldap-config-types.ts @@ -7,6 +7,7 @@ export type TLDAPConfig = { url: string; bindDN: string; bindPass: string; + uniqueUserAttribute: string; searchBase: string; groupSearchBase: string; groupSearchFilter: string; @@ -19,6 +20,7 @@ export type TCreateLdapCfgDTO = { url: string; bindDN: string; bindPass: string; + uniqueUserAttribute: string; searchBase: string; searchFilter: string; groupSearchBase: string; @@ -33,6 +35,7 @@ export type TUpdateLdapCfgDTO = { url: string; bindDN: string; bindPass: string; + uniqueUserAttribute: string; searchBase: string; searchFilter: string; groupSearchBase: string; diff --git a/backend/src/ee/services/ldap-config/ldap-group-map-dal.ts b/backend/src/ee/services/ldap-config/ldap-group-map-dal.ts index 2264efa758..a08522e8d7 100644 --- a/backend/src/ee/services/ldap-config/ldap-group-map-dal.ts +++ b/backend/src/ee/services/ldap-config/ldap-group-map-dal.ts @@ -10,7 +10,8 @@ export const ldapGroupMapDALFactory = (db: TDbClient) => { const findLdapGroupMapsByLdapConfigId = async (ldapConfigId: string) => { try { - const docs = await db(TableName.LdapGroupMap) + const docs = await db + .replicaNode()(TableName.LdapGroupMap) .where(`${TableName.LdapGroupMap}.ldapConfigId`, ldapConfigId) .join(TableName.Groups, `${TableName.LdapGroupMap}.groupId`, `${TableName.Groups}.id`) .select(selectAllTableCols(TableName.LdapGroupMap)) diff --git a/backend/src/ee/services/license/__mocks__/licence-fns.ts b/backend/src/ee/services/license/__mocks__/license-fns.ts similarity index 77% rename from backend/src/ee/services/license/__mocks__/licence-fns.ts rename to backend/src/ee/services/license/__mocks__/license-fns.ts index b5cbf103ee..360b39f281 100644 --- a/backend/src/ee/services/license/__mocks__/licence-fns.ts +++ b/backend/src/ee/services/license/__mocks__/license-fns.ts @@ -7,6 +7,8 @@ export const getDefaultOnPremFeatures = () => { workspacesUsed: 0, memberLimit: null, membersUsed: 0, + identityLimit: null, + identitiesUsed: 0, environmentLimit: null, environmentsUsed: 0, secretVersioning: true, @@ -24,7 +26,10 @@ export const getDefaultOnPremFeatures = () => { status: null, trial_end: null, has_used_trial: true, - secretApproval: false, - 
secretRotation: true + secretApproval: true, + secretRotation: true, + caCrl: false }; }; + +export const setupLicenseRequestWithStore = () => {}; diff --git a/backend/src/ee/services/license/license-dal.ts b/backend/src/ee/services/license/license-dal.ts index cf70488019..cab428e86f 100644 --- a/backend/src/ee/services/license/license-dal.ts +++ b/backend/src/ee/services/license/license-dal.ts @@ -9,7 +9,7 @@ export type TLicenseDALFactory = ReturnType<typeof licenseDALFactory>; export const licenseDALFactory = (db: TDbClient) => { const countOfOrgMembers = async (orgId: string | null, tx?: Knex) => { try { - const doc = await (tx || db)(TableName.OrgMembership) + const doc = await (tx || db.replicaNode())(TableName.OrgMembership) .where({ status: OrgMembershipStatus.Accepted }) .andWhere((bd) => { if (orgId) { @@ -19,11 +19,44 @@ export const licenseDALFactory = (db: TDbClient) => { .join(TableName.Users, `${TableName.OrgMembership}.userId`, `${TableName.Users}.id`) .where(`${TableName.Users}.isGhost`, false) .count(); - return doc?.[0].count; + return Number(doc?.[0].count); } catch (error) { throw new DatabaseError({ error, name: "Count of Org Members" }); } }; - return { countOfOrgMembers }; + const countOrgUsersAndIdentities = async (orgId: string | null, tx?: Knex) => { + try { + // count org users + const userDoc = await (tx || db)(TableName.OrgMembership) + .where({ status: OrgMembershipStatus.Accepted }) + .andWhere((bd) => { + if (orgId) { + void bd.where({ orgId }); + } + }) + .join(TableName.Users, `${TableName.OrgMembership}.userId`, `${TableName.Users}.id`) + .where(`${TableName.Users}.isGhost`, false) + .count(); + + const userCount = Number(userDoc?.[0].count); + + // count org identities + const identityDoc = await (tx || db)(TableName.IdentityOrgMembership) + .where((bd) => { + if (orgId) { + void bd.where({ orgId }); + } + }) + .count(); + + const identityCount = Number(identityDoc?.[0].count); + + return userCount + identityCount; + } catch (error) { + throw new DatabaseError({ error, name: "Count of Org Users + Identities" }); + } + }; + + return { countOfOrgMembers, countOrgUsersAndIdentities }; }; diff --git a/backend/src/ee/services/license/licence-fns.ts b/backend/src/ee/services/license/license-fns.ts similarity index 73% rename from backend/src/ee/services/license/licence-fns.ts rename to backend/src/ee/services/license/license-fns.ts index 189a3c4e06..70c2995641 100644 --- a/backend/src/ee/services/license/licence-fns.ts +++ b/backend/src/ee/services/license/license-fns.ts @@ -15,6 +15,8 @@ export const getDefaultOnPremFeatures = (): TFeatureSet => ({ membersUsed: 0, environmentLimit: null, environmentsUsed: 0, + identityLimit: null, + identitiesUsed: 0, dynamicSecret: false, secretVersioning: true, pitRecovery: false, @@ -27,6 +29,8 @@ export const getDefaultOnPremFeatures = (): TFeatureSet => ({ auditLogStreams: false, auditLogStreamLimit: 3, samlSSO: false, + hsm: false, + oidcSSO: false, scim: false, ldap: false, groups: false, @@ -34,18 +38,29 @@ export const getDefaultOnPremFeatures = (): TFeatureSet => ({ trial_end: null, has_used_trial: true, secretApproval: false, - secretRotation: true + secretRotation: true, + caCrl: false, + instanceUserManagement: false, + externalKms: false, + rateLimits: { + readLimit: 60, + writeLimit: 200, + secretsLimit: 40 + }, + pkiEst: false, + enforceMfa: false, + projectTemplates: false });
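The new countOrgUsersAndIdentities above bills one seat per accepted, non-ghost user membership plus one per machine identity, summed into a single figure. A condensed sketch of the same counting, assuming a Postgres-backed knex instance; the raw table names below are illustrative stand-ins for the TableName constants the DAL actually uses:

```ts
import knex from "knex";

const db = knex({ client: "pg", connection: process.env.DB_CONNECTION_URI });

// Mirrors countOrgUsersAndIdentities: accepted, non-ghost user memberships
// plus identity memberships, combined into one seat count.
const countSeats = async (orgId: string) => {
  const [users] = await db("org_memberships")
    .where({ orgId, status: "accepted" })
    .join("users", "org_memberships.userId", "users.id")
    .where("users.isGhost", false)
    .count();
  const [identities] = await db("identity_org_memberships").where({ orgId }).count();
  // knex returns counts as strings on Postgres, hence the Number() casts
  return Number(users.count) + Number(identities.count);
};
```

-export const setupLicenceRequestWithStore = (baseURL: string, refreshUrl: string, licenseKey: string) => { +export const setupLicenseRequestWithStore =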
(baseURL: string, refreshUrl: string, licenseKey: string) => { let token: string; - const licenceReq = axios.create({ + const licenseReq = axios.create({ baseURL, timeout: 35 * 1000 // signal: AbortSignal.timeout(60 * 1000) }); - const refreshLicence = async () => { + const refreshLicense = async () => { const appCfg = getConfig(); const { data: { token: authToken } @@ -63,7 +78,7 @@ export const setupLicenceRequestWithStore = (baseURL: string, refreshUrl: string return token; }; - licenceReq.interceptors.request.use( + licenseReq.interceptors.request.use( (config) => { if (token && config.headers) { // eslint-disable-next-line no-param-reassign @@ -74,7 +89,7 @@ export const setupLicenceRequestWithStore = (baseURL: string, refreshUrl: string (err) => Promise.reject(err) ); - licenceReq.interceptors.response.use( + licenseReq.interceptors.response.use( (response) => response, async (err) => { const originalRequest = (err as AxiosError).config; @@ -85,15 +100,15 @@ export const setupLicenceRequestWithStore = (baseURL: string, refreshUrl: string (originalRequest as any)._retry = true; // injected // refresh - await refreshLicence(); + await refreshLicense(); - licenceReq.defaults.headers.common.Authorization = `Bearer ${token}`; - return licenceReq(originalRequest!); + licenseReq.defaults.headers.common.Authorization = `Bearer ${token}`; + return licenseReq(originalRequest!); } return Promise.reject(err); } ); - return { request: licenceReq, refreshLicence }; + return { request: licenseReq, refreshLicense }; }; diff --git a/backend/src/ee/services/license/license-service.ts b/backend/src/ee/services/license/license-service.ts index 47b46d0100..dc56e7bc3b 100644 --- a/backend/src/ee/services/license/license-service.ts +++ b/backend/src/ee/services/license/license-service.ts @@ -5,18 +5,19 @@ // TODO(akhilmhdh): With tony find out the api structure and fill it here import { ForbiddenError } from "@casl/ability"; +import { Knex } from "knex"; import { TKeyStoreFactory } from "@app/keystore/keystore"; import { getConfig } from "@app/lib/config/env"; import { verifyOfflineLicense } from "@app/lib/crypto"; -import { BadRequestError } from "@app/lib/errors"; +import { NotFoundError } from "@app/lib/errors"; import { logger } from "@app/lib/logger"; import { TOrgDALFactory } from "@app/services/org/org-dal"; import { OrgPermissionActions, OrgPermissionSubjects } from "../permission/org-permission"; import { TPermissionServiceFactory } from "../permission/permission-service"; -import { getDefaultOnPremFeatures, setupLicenceRequestWithStore } from "./licence-fns"; import { TLicenseDALFactory } from "./license-dal"; +import { getDefaultOnPremFeatures, setupLicenseRequestWithStore } from "./license-fns"; import { InstanceType, TAddOrgPmtMethodDTO, @@ -63,13 +64,13 @@ export const licenseServiceFactory = ({ let onPremFeatures: TFeatureSet = getDefaultOnPremFeatures(); const appCfg = getConfig(); - const licenseServerCloudApi = setupLicenceRequestWithStore( + const licenseServerCloudApi = setupLicenseRequestWithStore( appCfg.LICENSE_SERVER_URL || "", LICENSE_SERVER_CLOUD_LOGIN, appCfg.LICENSE_SERVER_KEY || "" ); - const licenseServerOnPremApi = setupLicenceRequestWithStore( + const licenseServerOnPremApi = setupLicenseRequestWithStore( appCfg.LICENSE_SERVER_URL || "", LICENSE_SERVER_ON_PREM_LOGIN, appCfg.LICENSE_KEY || "" @@ -78,7 +79,7 @@ export const licenseServiceFactory = ({ const init = async () => { try { if (appCfg.LICENSE_SERVER_KEY) { - const token = await licenseServerCloudApi.refreshLicence(); 
+ const token = await licenseServerCloudApi.refreshLicense(); if (token) instanceType = InstanceType.Cloud; logger.info(`Instance type: ${InstanceType.Cloud}`); isValidLicense = true; @@ -86,7 +87,7 @@ export const licenseServiceFactory = ({ } if (appCfg.LICENSE_KEY) { - const token = await licenseServerOnPremApi.refreshLicence(); + const token = await licenseServerOnPremApi.refreshLicense(); if (token) { const { data: { currentPlan } @@ -128,7 +129,7 @@ export const licenseServiceFactory = ({ } } - // this means this is self hosted oss version + // this means this is the self-hosted oss version // else it would reach catch statement isValidLicense = true; } catch (error) { @@ -144,7 +145,7 @@ export const licenseServiceFactory = ({ if (cachedPlan) return JSON.parse(cachedPlan) as TFeatureSet; const org = await orgDAL.findOrgById(orgId); - if (!org) throw new BadRequestError({ message: "Org not found" }); + if (!org) throw new NotFoundError({ message: `Organization with ID '${orgId}' not found` }); const { data: { currentPlan } } = await licenseServerCloudApi.request.get<{ currentPlan: TFeatureSet }>( @@ -155,6 +156,7 @@ export const licenseServiceFactory = ({ LICENSE_SERVER_CLOUD_PLAN_TTL, JSON.stringify(currentPlan) ); + return currentPlan; } } catch (error) { @@ -199,21 +201,29 @@ export const licenseServiceFactory = ({ await licenseServerCloudApi.request.delete(`/api/license-server/v1/customers/${customerId}`); }; - const updateSubscriptionOrgMemberCount = async (orgId: string) => { + const updateSubscriptionOrgMemberCount = async (orgId: string, tx?: Knex) => { if (instanceType === InstanceType.Cloud) { const org = await orgDAL.findOrgById(orgId); - if (!org) throw new BadRequestError({ message: "Org not found" }); + if (!org) throw new NotFoundError({ message: `Organization with ID '${orgId}' not found` }); - const count = await licenseDAL.countOfOrgMembers(orgId); + const quantity = await licenseDAL.countOfOrgMembers(orgId, tx); + const quantityIdentities = await licenseDAL.countOrgUsersAndIdentities(orgId, tx); if (org?.customerId) { await licenseServerCloudApi.request.patch(`/api/license-server/v1/customers/${org.customerId}/cloud-plan`, { - quantity: count + quantity, + quantityIdentities }); } await keyStore.deleteItem(FEATURE_CACHE_KEY(orgId)); } else if (instanceType === InstanceType.EnterpriseOnPrem) { - const usedSeats = await licenseDAL.countOfOrgMembers(null); - await licenseServerOnPremApi.request.patch(`/api/license/v1/license`, { usedSeats }); + const usedSeats = await licenseDAL.countOfOrgMembers(null, tx); + const usedIdentitySeats = await licenseDAL.countOrgUsersAndIdentities(null, tx); + onPremFeatures.membersUsed = usedSeats; + onPremFeatures.identitiesUsed = usedIdentitySeats; + await licenseServerOnPremApi.request.patch(`/api/license/v1/license`, { + usedSeats, + usedIdentitySeats + }); } await refreshPlan(orgId); }; @@ -256,8 +266,8 @@ export const licenseServiceFactory = ({ const organization = await orgDAL.findOrgById(orgId); if (!organization) { - throw new BadRequestError({ - message: "Failed to find organization" + throw new NotFoundError({ + message: `Organization with ID '${orgId}' not found` }); } @@ -284,8 +294,8 @@ export const licenseServiceFactory = ({ const organization = await orgDAL.findOrgById(orgId); if (!organization) { - throw new BadRequestError({ - message: "Failed to find organization" + throw new NotFoundError({ + message: "Organization not found" }); } @@ -330,8 +340,8 @@ export const licenseServiceFactory = ({ const organization = await 
orgDAL.findOrgById(orgId); if (!organization) { - throw new BadRequestError({ - message: "Failed to find organization" + throw new NotFoundError({ + message: `Organization with ID '${orgId}' not found` }); } const { data } = await licenseServerCloudApi.request.get( @@ -347,8 +357,8 @@ export const licenseServiceFactory = ({ const organization = await orgDAL.findOrgById(orgId); if (!organization) { - throw new BadRequestError({ - message: "Failed to find organization" + throw new NotFoundError({ + message: `Organization with ID '${orgId}' not found` }); } const { data } = await licenseServerCloudApi.request.get( @@ -363,8 +373,8 @@ export const licenseServiceFactory = ({ const organization = await orgDAL.findOrgById(orgId); if (!organization) { - throw new BadRequestError({ - message: "Failed to find organization" + throw new NotFoundError({ + message: `Organization with ID '${orgId}' not found` }); } @@ -388,8 +398,8 @@ export const licenseServiceFactory = ({ const organization = await orgDAL.findOrgById(orgId); if (!organization) { - throw new BadRequestError({ - message: "Failed to find organization" + throw new NotFoundError({ + message: `Organization with ID '${orgId}' not found` }); } const { data } = await licenseServerCloudApi.request.patch( @@ -408,8 +418,8 @@ export const licenseServiceFactory = ({ const organization = await orgDAL.findOrgById(orgId); if (!organization) { - throw new BadRequestError({ - message: "Failed to find organization" + throw new NotFoundError({ + message: `Organization with ID '${orgId}' not found` }); } @@ -435,8 +445,8 @@ export const licenseServiceFactory = ({ const organization = await orgDAL.findOrgById(orgId); if (!organization) { - throw new BadRequestError({ - message: "Failed to find organization" + throw new NotFoundError({ + message: `Organization with ID '${orgId}' not found` }); } const { @@ -464,8 +474,8 @@ export const licenseServiceFactory = ({ const organization = await orgDAL.findOrgById(orgId); if (!organization) { - throw new BadRequestError({ - message: "Failed to find organization" + throw new NotFoundError({ + message: `Organization with ID '${orgId}' not found` }); } @@ -481,8 +491,8 @@ export const licenseServiceFactory = ({ const organization = await orgDAL.findOrgById(orgId); if (!organization) { - throw new BadRequestError({ - message: "Failed to find organization" + throw new NotFoundError({ + message: `Organization with ID '${orgId}' not found` }); } const { @@ -499,8 +509,8 @@ export const licenseServiceFactory = ({ const organization = await orgDAL.findOrgById(orgId); if (!organization) { - throw new BadRequestError({ - message: "Failed to find organization" + throw new NotFoundError({ + message: `Organization with ID '${orgId}' not found` }); } @@ -520,8 +530,8 @@ export const licenseServiceFactory = ({ const organization = await orgDAL.findOrgById(orgId); if (!organization) { - throw new BadRequestError({ - message: "Failed to find organization" + throw new NotFoundError({ + message: `Organization with ID '${orgId}' not found` }); } @@ -537,8 +547,8 @@ export const licenseServiceFactory = ({ const organization = await orgDAL.findOrgById(orgId); if (!organization) { - throw new BadRequestError({ - message: "Failed to find organization" + throw new NotFoundError({ + message: `Organization with ID '${orgId}' not found` }); } @@ -554,8 +564,8 @@ export const licenseServiceFactory = ({ const organization = await orgDAL.findOrgById(orgId); if (!organization) { - throw new BadRequestError({ - message: "Failed to find 
organization" + throw new NotFoundError({ + message: `Organization with ID '${orgId}' not found` }); } @@ -575,6 +585,9 @@ export const licenseServiceFactory = ({ getInstanceType() { return instanceType; }, + get onPremFeatures() { + return onPremFeatures; + }, getPlan, updateSubscriptionOrgMemberCount, refreshPlan, diff --git a/backend/src/ee/services/license/license-types.ts b/backend/src/ee/services/license/license-types.ts index 0c8fdc197f..622b0e06b0 100644 --- a/backend/src/ee/services/license/license-types.ts +++ b/backend/src/ee/services/license/license-types.ts @@ -30,7 +30,9 @@ export type TFeatureSet = { workspacesUsed: 0; dynamicSecret: false; memberLimit: null; - membersUsed: 0; + membersUsed: number; + identityLimit: null; + identitiesUsed: number; environmentLimit: null; environmentsUsed: 0; secretVersioning: true; @@ -44,6 +46,8 @@ export type TFeatureSet = { auditLogStreams: false; auditLogStreamLimit: 3; samlSSO: false; + hsm: false; + oidcSSO: false; scim: false; ldap: false; groups: false; @@ -52,6 +56,17 @@ export type TFeatureSet = { has_used_trial: true; secretApproval: false; secretRotation: true; + caCrl: false; + instanceUserManagement: false; + externalKms: false; + rateLimits: { + readLimit: number; + writeLimit: number; + secretsLimit: number; + }; + pkiEst: boolean; + enforceMfa: boolean; + projectTemplates: false; }; export type TOrgPlansTableDTO = { diff --git a/backend/src/ee/services/oidc/oidc-config-dal.ts b/backend/src/ee/services/oidc/oidc-config-dal.ts new file mode 100644 index 0000000000..ffdba2cf76 --- /dev/null +++ b/backend/src/ee/services/oidc/oidc-config-dal.ts @@ -0,0 +1,29 @@ +import { TDbClient } from "@app/db"; +import { TableName } from "@app/db/schemas"; +import { DatabaseError } from "@app/lib/errors"; +import { ormify } from "@app/lib/knex"; + +export type TOidcConfigDALFactory = ReturnType; + +export const oidcConfigDALFactory = (db: TDbClient) => { + const oidcCfgOrm = ormify(db, TableName.OidcConfig); + + const findEnforceableOidcCfg = async (orgId: string) => { + try { + const oidcCfg = await db + .replicaNode()(TableName.OidcConfig) + .where({ + orgId, + isActive: true + }) + .whereNotNull("lastUsed") + .first(); + + return oidcCfg; + } catch (error) { + throw new DatabaseError({ error, name: "Find org by id" }); + } + }; + + return { ...oidcCfgOrm, findEnforceableOidcCfg }; +}; diff --git a/backend/src/ee/services/oidc/oidc-config-service.ts b/backend/src/ee/services/oidc/oidc-config-service.ts new file mode 100644 index 0000000000..17c1ddaaf2 --- /dev/null +++ b/backend/src/ee/services/oidc/oidc-config-service.ts @@ -0,0 +1,665 @@ +/* eslint-disable @typescript-eslint/no-unsafe-call */ +import { ForbiddenError } from "@casl/ability"; +import jwt from "jsonwebtoken"; +import { Issuer, Issuer as OpenIdIssuer, Strategy as OpenIdStrategy, TokenSet } from "openid-client"; + +import { OrgMembershipStatus, SecretKeyEncoding, TableName, TUsers } from "@app/db/schemas"; +import { TOidcConfigsUpdate } from "@app/db/schemas/oidc-configs"; +import { TLicenseServiceFactory } from "@app/ee/services/license/license-service"; +import { OrgPermissionActions, OrgPermissionSubjects } from "@app/ee/services/permission/org-permission"; +import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service"; +import { getConfig } from "@app/lib/config/env"; +import { + decryptSymmetric, + encryptSymmetric, + generateAsymmetricKeyPair, + generateSymmetricKey, + infisicalSymmetricDecrypt, + infisicalSymmetricEncypt +} from 
"@app/lib/crypto/encryption"; +import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors"; +import { AuthMethod, AuthTokenType } from "@app/services/auth/auth-type"; +import { TAuthTokenServiceFactory } from "@app/services/auth-token/auth-token-service"; +import { TokenType } from "@app/services/auth-token/auth-token-types"; +import { TOrgBotDALFactory } from "@app/services/org/org-bot-dal"; +import { TOrgDALFactory } from "@app/services/org/org-dal"; +import { getDefaultOrgMembershipRole } from "@app/services/org/org-role-fns"; +import { TOrgMembershipDALFactory } from "@app/services/org-membership/org-membership-dal"; +import { SmtpTemplates, TSmtpService } from "@app/services/smtp/smtp-service"; +import { getServerCfg } from "@app/services/super-admin/super-admin-service"; +import { LoginMethod } from "@app/services/super-admin/super-admin-types"; +import { TUserDALFactory } from "@app/services/user/user-dal"; +import { normalizeUsername } from "@app/services/user/user-fns"; +import { TUserAliasDALFactory } from "@app/services/user-alias/user-alias-dal"; +import { UserAliasType } from "@app/services/user-alias/user-alias-types"; + +import { TOidcConfigDALFactory } from "./oidc-config-dal"; +import { + OIDCConfigurationType, + TCreateOidcCfgDTO, + TGetOidcCfgDTO, + TOidcLoginDTO, + TUpdateOidcCfgDTO +} from "./oidc-config-types"; + +type TOidcConfigServiceFactoryDep = { + userDAL: Pick< + TUserDALFactory, + "create" | "findOne" | "transaction" | "updateById" | "findById" | "findUserEncKeyByUserId" + >; + userAliasDAL: Pick; + orgDAL: Pick< + TOrgDALFactory, + "createMembership" | "updateMembershipById" | "findMembership" | "findOrgById" | "findOne" | "updateById" + >; + orgMembershipDAL: Pick; + orgBotDAL: Pick; + licenseService: Pick; + tokenService: Pick; + smtpService: Pick; + permissionService: Pick; + oidcConfigDAL: Pick; +}; + +export type TOidcConfigServiceFactory = ReturnType; + +export const oidcConfigServiceFactory = ({ + orgDAL, + orgMembershipDAL, + userDAL, + userAliasDAL, + licenseService, + permissionService, + tokenService, + orgBotDAL, + smtpService, + oidcConfigDAL +}: TOidcConfigServiceFactoryDep) => { + const getOidc = async (dto: TGetOidcCfgDTO) => { + const org = await orgDAL.findOne({ slug: dto.orgSlug }); + if (!org) { + throw new NotFoundError({ + message: `Organization with slug '${dto.orgSlug}' not found`, + name: "OrgNotFound" + }); + } + if (dto.type === "external") { + const { permission } = await permissionService.getOrgPermission( + dto.actor, + dto.actorId, + org.id, + dto.actorAuthMethod, + dto.actorOrgId + ); + ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Read, OrgPermissionSubjects.Sso); + } + + const oidcCfg = await oidcConfigDAL.findOne({ + orgId: org.id + }); + + if (!oidcCfg) { + throw new NotFoundError({ + message: `OIDC configuration for organization with slug '${dto.orgSlug}' not found` + }); + } + + // decrypt and return cfg + const orgBot = await orgBotDAL.findOne({ orgId: oidcCfg.orgId }); + if (!orgBot) { + throw new NotFoundError({ + message: `Organization bot for organization with ID '${oidcCfg.orgId}' not found`, + name: "OrgBotNotFound" + }); + } + + const key = infisicalSymmetricDecrypt({ + ciphertext: orgBot.encryptedSymmetricKey, + iv: orgBot.symmetricKeyIV, + tag: orgBot.symmetricKeyTag, + keyEncoding: orgBot.symmetricKeyKeyEncoding as SecretKeyEncoding + }); + + const { encryptedClientId, clientIdIV, clientIdTag, encryptedClientSecret, clientSecretIV, clientSecretTag } = + oidcCfg; 
+ + let clientId = ""; + if (encryptedClientId && clientIdIV && clientIdTag) { + clientId = decryptSymmetric({ + ciphertext: encryptedClientId, + key, + tag: clientIdTag, + iv: clientIdIV + }); + } + + let clientSecret = ""; + if (encryptedClientSecret && clientSecretIV && clientSecretTag) { + clientSecret = decryptSymmetric({ + key, + tag: clientSecretTag, + iv: clientSecretIV, + ciphertext: encryptedClientSecret + }); + } + + return { + id: oidcCfg.id, + issuer: oidcCfg.issuer, + authorizationEndpoint: oidcCfg.authorizationEndpoint, + configurationType: oidcCfg.configurationType, + discoveryURL: oidcCfg.discoveryURL, + jwksUri: oidcCfg.jwksUri, + tokenEndpoint: oidcCfg.tokenEndpoint, + userinfoEndpoint: oidcCfg.userinfoEndpoint, + orgId: oidcCfg.orgId, + isActive: oidcCfg.isActive, + allowedEmailDomains: oidcCfg.allowedEmailDomains, + clientId, + clientSecret + }; + }; + + const oidcLogin = async ({ externalId, email, firstName, lastName, orgId, callbackPort }: TOidcLoginDTO) => { + const serverCfg = await getServerCfg(); + + if (serverCfg.enabledLoginMethods && !serverCfg.enabledLoginMethods.includes(LoginMethod.OIDC)) { + throw new ForbiddenRequestError({ + message: "Login with OIDC is disabled by administrator." + }); + } + + const appCfg = getConfig(); + const userAlias = await userAliasDAL.findOne({ + externalId, + orgId, + aliasType: UserAliasType.OIDC + }); + + const organization = await orgDAL.findOrgById(orgId); + if (!organization) throw new NotFoundError({ message: `Organization with ID '${orgId}' not found` }); + + let user: TUsers; + if (userAlias) { + user = await userDAL.transaction(async (tx) => { + const foundUser = await userDAL.findById(userAlias.userId, tx); + const [orgMembership] = await orgDAL.findMembership( + { + [`${TableName.OrgMembership}.userId` as "userId"]: foundUser.id, + [`${TableName.OrgMembership}.orgId` as "id"]: orgId + }, + { tx } + ); + if (!orgMembership) { + const { role, roleId } = await getDefaultOrgMembershipRole(organization.defaultMembershipRole); + + await orgMembershipDAL.create( + { + userId: userAlias.userId, + inviteEmail: email, + orgId, + role, + roleId, + status: foundUser.isAccepted ? OrgMembershipStatus.Accepted : OrgMembershipStatus.Invited, // if user is fully completed, then set status to accepted, otherwise set it to invited so we can update it later + isActive: true + }, + tx + ); + // Only update the membership to Accepted if the user account is already completed. + } else if (orgMembership.status === OrgMembershipStatus.Invited && foundUser.isAccepted) { + await orgDAL.updateMembershipById( + orgMembership.id, + { + status: OrgMembershipStatus.Accepted + }, + tx + ); + } + + return foundUser; + }); + } else { + user = await userDAL.transaction(async (tx) => { + let newUser: TUsers | undefined; + + if (serverCfg.trustOidcEmails) { + newUser = await userDAL.findOne( + { + email, + isEmailVerified: true + }, + tx + ); + } + + if (!newUser) { + const uniqueUsername = await normalizeUsername(externalId, userDAL); + newUser = await userDAL.create( + { + email, + firstName, + isEmailVerified: serverCfg.trustOidcEmails, + username: serverCfg.trustOidcEmails ? email : uniqueUsername, + lastName, + authMethods: [], + isGhost: false + }, + tx + ); + } + + await userAliasDAL.create( + { + userId: newUser.id, + aliasType: UserAliasType.OIDC, + externalId, + emails: email ? 
[email] : [], + orgId + }, + tx + ); + + const [orgMembership] = await orgDAL.findMembership( + { + [`${TableName.OrgMembership}.userId` as "userId"]: newUser.id, + [`${TableName.OrgMembership}.orgId` as "id"]: orgId + }, + { tx } + ); + + if (!orgMembership) { + const { role, roleId } = await getDefaultOrgMembershipRole(organization.defaultMembershipRole); + + await orgMembershipDAL.create( + { + userId: newUser.id, + inviteEmail: email, + orgId, + role, + roleId, + status: newUser.isAccepted ? OrgMembershipStatus.Accepted : OrgMembershipStatus.Invited, // if user is fully completed, then set status to accepted, otherwise set it to invited so we can update it later + isActive: true + }, + tx + ); + // Only update the membership to Accepted if the user account is already completed. + } else if (orgMembership.status === OrgMembershipStatus.Invited && newUser.isAccepted) { + await orgDAL.updateMembershipById( + orgMembership.id, + { + status: OrgMembershipStatus.Accepted + }, + tx + ); + } + + return newUser; + }); + } + + await licenseService.updateSubscriptionOrgMemberCount(organization.id); + + const userEnc = await userDAL.findUserEncKeyByUserId(user.id); + const isUserCompleted = Boolean(user.isAccepted); + const providerAuthToken = jwt.sign( + { + authTokenType: AuthTokenType.PROVIDER_TOKEN, + userId: user.id, + username: user.username, + ...(user.email && { email: user.email, isEmailVerified: user.isEmailVerified }), + firstName, + lastName, + organizationName: organization.name, + organizationId: organization.id, + organizationSlug: organization.slug, + hasExchangedPrivateKey: Boolean(userEnc?.serverEncryptedPrivateKey), + authMethod: AuthMethod.OIDC, + authType: UserAliasType.OIDC, + isUserCompleted, + ...(callbackPort && { callbackPort }) + }, + appCfg.AUTH_SECRET, + { + expiresIn: appCfg.JWT_PROVIDER_AUTH_LIFETIME + } + ); + + await oidcConfigDAL.update({ orgId }, { lastUsed: new Date() }); + + if (user.email && !user.isEmailVerified) { + const token = await tokenService.createTokenForUser({ + type: TokenType.TOKEN_EMAIL_VERIFICATION, + userId: user.id + }); + + await smtpService.sendMail({ + template: SmtpTemplates.EmailVerification, + subjectLine: "Infisical confirmation code", + recipients: [user.email], + substitutions: { + code: token + } + }); + } + + return { isUserCompleted, providerAuthToken }; + }; + + const updateOidcCfg = async ({ + orgSlug, + allowedEmailDomains, + configurationType, + discoveryURL, + actor, + actorOrgId, + actorAuthMethod, + actorId, + issuer, + isActive, + authorizationEndpoint, + jwksUri, + tokenEndpoint, + userinfoEndpoint, + clientId, + clientSecret + }: TUpdateOidcCfgDTO) => { + const org = await orgDAL.findOne({ + slug: orgSlug + }); + + if (!org) { + throw new NotFoundError({ + message: `Organization with slug '${orgSlug}' not found` + }); + } + + const plan = await licenseService.getPlan(org.id); + if (!plan.oidcSSO) + throw new BadRequestError({ + message: + "Failed to update OIDC SSO configuration due to plan restriction. Upgrade plan to update SSO configuration." 
+ }); + + const { permission } = await permissionService.getOrgPermission( + actor, + actorId, + org.id, + actorAuthMethod, + actorOrgId + ); + ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Edit, OrgPermissionSubjects.Sso); + + const orgBot = await orgBotDAL.findOne({ orgId: org.id }); + if (!orgBot) + throw new NotFoundError({ + message: `Organization bot for organization with ID '${org.id}' not found`, + name: "OrgBotNotFound" + }); + const key = infisicalSymmetricDecrypt({ + ciphertext: orgBot.encryptedSymmetricKey, + iv: orgBot.symmetricKeyIV, + tag: orgBot.symmetricKeyTag, + keyEncoding: orgBot.symmetricKeyKeyEncoding as SecretKeyEncoding + }); + + const updateQuery: TOidcConfigsUpdate = { + allowedEmailDomains, + configurationType, + discoveryURL, + issuer, + authorizationEndpoint, + tokenEndpoint, + userinfoEndpoint, + jwksUri, + isActive, + lastUsed: null + }; + + if (clientId !== undefined) { + const { ciphertext: encryptedClientId, iv: clientIdIV, tag: clientIdTag } = encryptSymmetric(clientId, key); + updateQuery.encryptedClientId = encryptedClientId; + updateQuery.clientIdIV = clientIdIV; + updateQuery.clientIdTag = clientIdTag; + } + + if (clientSecret !== undefined) { + const { + ciphertext: encryptedClientSecret, + iv: clientSecretIV, + tag: clientSecretTag + } = encryptSymmetric(clientSecret, key); + + updateQuery.encryptedClientSecret = encryptedClientSecret; + updateQuery.clientSecretIV = clientSecretIV; + updateQuery.clientSecretTag = clientSecretTag; + } + + const [ssoConfig] = await oidcConfigDAL.update({ orgId: org.id }, updateQuery); + await orgDAL.updateById(org.id, { authEnforced: false, scimEnabled: false }); + return ssoConfig; + }; + + const createOidcCfg = async ({ + orgSlug, + allowedEmailDomains, + configurationType, + discoveryURL, + actor, + actorOrgId, + actorAuthMethod, + actorId, + issuer, + isActive, + authorizationEndpoint, + jwksUri, + tokenEndpoint, + userinfoEndpoint, + clientId, + clientSecret + }: TCreateOidcCfgDTO) => { + const org = await orgDAL.findOne({ + slug: orgSlug + }); + if (!org) { + throw new NotFoundError({ + message: `Organization with slug '${orgSlug}' not found` + }); + } + + const plan = await licenseService.getPlan(org.id); + if (!plan.oidcSSO) + throw new BadRequestError({ + message: + "Failed to create OIDC SSO configuration due to plan restriction. Upgrade plan to update SSO configuration." 
+ }); + + const { permission } = await permissionService.getOrgPermission( + actor, + actorId, + org.id, + actorAuthMethod, + actorOrgId + ); + ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Create, OrgPermissionSubjects.Sso); + + const orgBot = await orgBotDAL.transaction(async (tx) => { + const doc = await orgBotDAL.findOne({ orgId: org.id }, tx); + if (doc) return doc; + + const { privateKey, publicKey } = generateAsymmetricKeyPair(); + const key = generateSymmetricKey(); + const { + ciphertext: encryptedPrivateKey, + iv: privateKeyIV, + tag: privateKeyTag, + encoding: privateKeyKeyEncoding, + algorithm: privateKeyAlgorithm + } = infisicalSymmetricEncypt(privateKey); + const { + ciphertext: encryptedSymmetricKey, + iv: symmetricKeyIV, + tag: symmetricKeyTag, + encoding: symmetricKeyKeyEncoding, + algorithm: symmetricKeyAlgorithm + } = infisicalSymmetricEncypt(key); + + return orgBotDAL.create( + { + name: "Infisical org bot", + publicKey, + privateKeyIV, + encryptedPrivateKey, + symmetricKeyIV, + symmetricKeyTag, + encryptedSymmetricKey, + symmetricKeyAlgorithm, + orgId: org.id, + privateKeyTag, + privateKeyAlgorithm, + privateKeyKeyEncoding, + symmetricKeyKeyEncoding + }, + tx + ); + }); + + const key = infisicalSymmetricDecrypt({ + ciphertext: orgBot.encryptedSymmetricKey, + iv: orgBot.symmetricKeyIV, + tag: orgBot.symmetricKeyTag, + keyEncoding: orgBot.symmetricKeyKeyEncoding as SecretKeyEncoding + }); + + const { ciphertext: encryptedClientId, iv: clientIdIV, tag: clientIdTag } = encryptSymmetric(clientId, key); + const { + ciphertext: encryptedClientSecret, + iv: clientSecretIV, + tag: clientSecretTag + } = encryptSymmetric(clientSecret, key); + + const oidcCfg = await oidcConfigDAL.create({ + issuer, + isActive, + configurationType, + discoveryURL, + authorizationEndpoint, + allowedEmailDomains, + jwksUri, + tokenEndpoint, + userinfoEndpoint, + orgId: org.id, + encryptedClientId, + clientIdIV, + clientIdTag, + encryptedClientSecret, + clientSecretIV, + clientSecretTag + }); + + return oidcCfg; + }; + + const getOrgAuthStrategy = async (orgSlug: string, callbackPort?: string) => { + const appCfg = getConfig(); + + const org = await orgDAL.findOne({ + slug: orgSlug + }); + + if (!org) { + throw new NotFoundError({ + message: `Organization with slug '${orgSlug}' not found` + }); + } + + const oidcCfg = await getOidc({ + type: "internal", + orgSlug + }); + + if (!oidcCfg || !oidcCfg.isActive) { + throw new ForbiddenRequestError({ + message: "Failed to authenticate with OIDC SSO" + }); + } + + let issuer: Issuer; + if (oidcCfg.configurationType === OIDCConfigurationType.DISCOVERY_URL) { + if (!oidcCfg.discoveryURL) { + throw new BadRequestError({ + message: "OIDC not configured correctly" + }); + } + issuer = await Issuer.discover(oidcCfg.discoveryURL); + } else { + if ( + !oidcCfg.issuer || + !oidcCfg.authorizationEndpoint || + !oidcCfg.jwksUri || + !oidcCfg.tokenEndpoint || + !oidcCfg.userinfoEndpoint + ) { + throw new BadRequestError({ + message: "OIDC not configured correctly" + }); + } + issuer = new OpenIdIssuer({ + issuer: oidcCfg.issuer, + authorization_endpoint: oidcCfg.authorizationEndpoint, + jwks_uri: oidcCfg.jwksUri, + token_endpoint: oidcCfg.tokenEndpoint, + userinfo_endpoint: oidcCfg.userinfoEndpoint + }); + } + + const client = new issuer.Client({ + client_id: oidcCfg.clientId, + client_secret: oidcCfg.clientSecret, + redirect_uris: [`${appCfg.SITE_URL}/api/v1/sso/oidc/callback`] + }); + + const strategy = new OpenIdStrategy( + { + client, + 
passReqToCallback: true + }, + // eslint-disable-next-line @typescript-eslint/no-explicit-any + (_req: any, tokenSet: TokenSet, cb: any) => { + const claims = tokenSet.claims(); + if (!claims.email || !claims.given_name) { + throw new BadRequestError({ + message: "Invalid request. Missing email or first name" + }); + } + + if (oidcCfg.allowedEmailDomains) { + const allowedDomains = oidcCfg.allowedEmailDomains.split(", "); + if (!allowedDomains.includes(claims.email.split("@")[1])) { + throw new ForbiddenRequestError({ + message: "Email not allowed." + }); + } + } + + oidcLogin({ + email: claims.email, + externalId: claims.sub, + firstName: claims.given_name ?? "", + lastName: claims.family_name ?? "", + orgId: org.id, + callbackPort + }) + .then(({ isUserCompleted, providerAuthToken }) => { + cb(null, { isUserCompleted, providerAuthToken }); + }) + .catch((error) => { + cb(error); + }); + } + ); + + return strategy; + }; + + return { oidcLogin, getOrgAuthStrategy, getOidc, updateOidcCfg, createOidcCfg }; +}; diff --git a/backend/src/ee/services/oidc/oidc-config-types.ts b/backend/src/ee/services/oidc/oidc-config-types.ts new file mode 100644 index 0000000000..6e36b796b3 --- /dev/null +++ b/backend/src/ee/services/oidc/oidc-config-types.ts @@ -0,0 +1,56 @@ +import { TGenericPermission } from "@app/lib/types"; + +export enum OIDCConfigurationType { + CUSTOM = "custom", + DISCOVERY_URL = "discoveryURL" +} + +export type TOidcLoginDTO = { + externalId: string; + email: string; + firstName: string; + lastName?: string; + orgId: string; + callbackPort?: string; +}; + +export type TGetOidcCfgDTO = + | ({ + type: "external"; + orgSlug: string; + } & TGenericPermission) + | { + type: "internal"; + orgSlug: string; + }; + +export type TCreateOidcCfgDTO = { + issuer?: string; + authorizationEndpoint?: string; + discoveryURL?: string; + configurationType: OIDCConfigurationType; + allowedEmailDomains?: string; + jwksUri?: string; + tokenEndpoint?: string; + userinfoEndpoint?: string; + clientId: string; + clientSecret: string; + isActive: boolean; + orgSlug: string; +} & TGenericPermission; + +export type TUpdateOidcCfgDTO = Partial<{ + issuer: string; + authorizationEndpoint: string; + allowedEmailDomains: string; + discoveryURL: string; + jwksUri: string; + configurationType: OIDCConfigurationType; + tokenEndpoint: string; + userinfoEndpoint: string; + clientId: string; + clientSecret: string; + isActive: boolean; + orgSlug: string; +}> & + TGenericPermission; diff --git a/backend/src/ee/services/permission/org-permission.ts b/backend/src/ee/services/permission/org-permission.ts index 9fece040bc..e0da494c6b 100644 --- a/backend/src/ee/services/permission/org-permission.ts +++ b/backend/src/ee/services/permission/org-permission.ts @@ -1,7 +1,5 @@ import { AbilityBuilder, createMongoAbility, MongoAbility } from "@casl/ability"; -import { conditionsMatcher } from "@app/lib/casl"; - export enum OrgPermissionActions { Read = "read", Create = "create", @@ -9,6 +7,10 @@ export enum OrgPermissionActions { Delete = "delete" } +export enum OrgPermissionAdminConsoleAction { + AccessAllProjects = "access-all-projects" +} + export enum OrgPermissionSubjects { Workspace = "workspace", Role = "role", @@ -21,7 +23,11 @@ export enum OrgPermissionSubjects { Groups = "groups", Billing = "billing", SecretScanning = "secret-scanning", - Identity = "identity" + Identity = "identity", + Kms = "kms", + AdminConsole = "organization-admin-console", + AuditLogs = "audit-logs", + ProjectTemplates = "project-templates" } export 
type OrgPermissionSet =
@@ -37,10 +43,14 @@ export type OrgPermissionSet =
   | [OrgPermissionActions, OrgPermissionSubjects.Groups]
   | [OrgPermissionActions, OrgPermissionSubjects.SecretScanning]
   | [OrgPermissionActions, OrgPermissionSubjects.Billing]
-  | [OrgPermissionActions, OrgPermissionSubjects.Identity];
+  | [OrgPermissionActions, OrgPermissionSubjects.Identity]
+  | [OrgPermissionActions, OrgPermissionSubjects.Kms]
+  | [OrgPermissionActions, OrgPermissionSubjects.AuditLogs]
+  | [OrgPermissionActions, OrgPermissionSubjects.ProjectTemplates]
+  | [OrgPermissionAdminConsoleAction, OrgPermissionSubjects.AdminConsole];
 
 const buildAdminPermission = () => {
-  const { can, build } = new AbilityBuilder<MongoAbility<OrgPermissionSet>>(createMongoAbility);
+  const { can, rules } = new AbilityBuilder<MongoAbility<OrgPermissionSet>>(createMongoAbility);
   // ws permissions
   can(OrgPermissionActions.Read, OrgPermissionSubjects.Workspace);
   can(OrgPermissionActions.Create, OrgPermissionSubjects.Workspace);
@@ -100,23 +110,38 @@ const buildAdminPermission = () => {
   can(OrgPermissionActions.Edit, OrgPermissionSubjects.Identity);
   can(OrgPermissionActions.Delete, OrgPermissionSubjects.Identity);
 
-  return build({ conditionsMatcher });
+  can(OrgPermissionActions.Read, OrgPermissionSubjects.Kms);
+  can(OrgPermissionActions.Create, OrgPermissionSubjects.Kms);
+  can(OrgPermissionActions.Edit, OrgPermissionSubjects.Kms);
+  can(OrgPermissionActions.Delete, OrgPermissionSubjects.Kms);
+
+  can(OrgPermissionActions.Read, OrgPermissionSubjects.AuditLogs);
+  can(OrgPermissionActions.Create, OrgPermissionSubjects.AuditLogs);
+  can(OrgPermissionActions.Edit, OrgPermissionSubjects.AuditLogs);
+  can(OrgPermissionActions.Delete, OrgPermissionSubjects.AuditLogs);
+
+  can(OrgPermissionActions.Read, OrgPermissionSubjects.ProjectTemplates);
+  can(OrgPermissionActions.Create, OrgPermissionSubjects.ProjectTemplates);
+  can(OrgPermissionActions.Edit, OrgPermissionSubjects.ProjectTemplates);
+  can(OrgPermissionActions.Delete, OrgPermissionSubjects.ProjectTemplates);
+
+  can(OrgPermissionAdminConsoleAction.AccessAllProjects, OrgPermissionSubjects.AdminConsole);
+
+  return rules;
 };
 
 export const orgAdminPermissions = buildAdminPermission();
 
 const buildMemberPermission = () => {
-  const { can, build } = new AbilityBuilder<MongoAbility<OrgPermissionSet>>(createMongoAbility);
+  const { can, rules } = new AbilityBuilder<MongoAbility<OrgPermissionSet>>(createMongoAbility);
 
   can(OrgPermissionActions.Read, OrgPermissionSubjects.Workspace);
   can(OrgPermissionActions.Create, OrgPermissionSubjects.Workspace);
   can(OrgPermissionActions.Read, OrgPermissionSubjects.Member);
-  can(OrgPermissionActions.Create, OrgPermissionSubjects.Member);
   can(OrgPermissionActions.Read, OrgPermissionSubjects.Groups);
   can(OrgPermissionActions.Read, OrgPermissionSubjects.Role);
   can(OrgPermissionActions.Read, OrgPermissionSubjects.Settings);
   can(OrgPermissionActions.Read, OrgPermissionSubjects.Billing);
-  can(OrgPermissionActions.Read, OrgPermissionSubjects.Sso);
   can(OrgPermissionActions.Read, OrgPermissionSubjects.IncidentAccount);
   can(OrgPermissionActions.Read, OrgPermissionSubjects.SecretScanning);
@@ -129,14 +154,16 @@ const buildMemberPermission = () => {
   can(OrgPermissionActions.Edit, OrgPermissionSubjects.Identity);
   can(OrgPermissionActions.Delete, OrgPermissionSubjects.Identity);
 
-  return build({ conditionsMatcher });
+  can(OrgPermissionActions.Read, OrgPermissionSubjects.AuditLogs);
+
+  return rules;
 };
 
 export const orgMemberPermissions = buildMemberPermission();
 
 const buildNoAccessPermission = () => {
-  const { build } = new AbilityBuilder<MongoAbility<OrgPermissionSet>>(createMongoAbility);
-  return build({
conditionsMatcher }); + const { rules } = new AbilityBuilder>(createMongoAbility); + return rules; }; export const orgNoAccessPermissions = buildNoAccessPermission(); diff --git a/backend/src/ee/services/permission/permission-dal.ts b/backend/src/ee/services/permission/permission-dal.ts index d8114388e7..8ad58f5288 100644 --- a/backend/src/ee/services/permission/permission-dal.ts +++ b/backend/src/ee/services/permission/permission-dal.ts @@ -1,7 +1,13 @@ import { z } from "zod"; import { TDbClient } from "@app/db"; -import { IdentityProjectMembershipRoleSchema, ProjectUserMembershipRolesSchema, TableName } from "@app/db/schemas"; +import { + IdentityProjectMembershipRoleSchema, + OrgMembershipsSchema, + TableName, + TProjectRoles, + TProjects +} from "@app/db/schemas"; import { DatabaseError } from "@app/lib/errors"; import { selectAllTableCols, sqlNestRelationships } from "@app/lib/knex"; @@ -10,17 +16,92 @@ export type TPermissionDALFactory = ReturnType; export const permissionDALFactory = (db: TDbClient) => { const getOrgPermission = async (userId: string, orgId: string) => { try { - const membership = await db(TableName.OrgMembership) - .leftJoin(TableName.OrgRoles, `${TableName.OrgMembership}.roleId`, `${TableName.OrgRoles}.id`) - .join(TableName.Organization, `${TableName.OrgMembership}.orgId`, `${TableName.Organization}.id`) - .where("userId", userId) - .where(`${TableName.OrgMembership}.orgId`, orgId) - .select(db.ref("authEnforced").withSchema(TableName.Organization).as("orgAuthEnforced")) - .select("permissions") - .select(selectAllTableCols(TableName.OrgMembership)) - .first(); + const groupSubQuery = db(TableName.Groups) + .where(`${TableName.Groups}.orgId`, orgId) + .join(TableName.UserGroupMembership, (queryBuilder) => { + queryBuilder + .on(`${TableName.UserGroupMembership}.groupId`, `${TableName.Groups}.id`) + .andOn(`${TableName.UserGroupMembership}.userId`, db.raw("?", [userId])); + }) + .leftJoin(TableName.OrgRoles, `${TableName.Groups}.roleId`, `${TableName.OrgRoles}.id`) + .select( + db.ref("id").withSchema(TableName.Groups).as("groupId"), + db.ref("orgId").withSchema(TableName.Groups).as("groupOrgId"), + db.ref("name").withSchema(TableName.Groups).as("groupName"), + db.ref("slug").withSchema(TableName.Groups).as("groupSlug"), + db.ref("role").withSchema(TableName.Groups).as("groupRole"), + db.ref("roleId").withSchema(TableName.Groups).as("groupRoleId"), + db.ref("createdAt").withSchema(TableName.Groups).as("groupCreatedAt"), + db.ref("updatedAt").withSchema(TableName.Groups).as("groupUpdatedAt"), + db.ref("permissions").withSchema(TableName.OrgRoles).as("groupCustomRolePermission") + ); - return membership; + const membership = await db + .replicaNode()(TableName.OrgMembership) + .where(`${TableName.OrgMembership}.orgId`, orgId) + .where(`${TableName.OrgMembership}.userId`, userId) + .leftJoin(TableName.OrgRoles, `${TableName.OrgRoles}.id`, `${TableName.OrgMembership}.roleId`) + .leftJoin[0]>( + groupSubQuery.as("userGroups"), + "userGroups.groupOrgId", + db.raw("?", [orgId]) + ) + .join(TableName.Organization, `${TableName.Organization}.id`, `${TableName.OrgMembership}.orgId`) + .select( + selectAllTableCols(TableName.OrgMembership), + db.ref("slug").withSchema(TableName.OrgRoles).withSchema(TableName.OrgRoles).as("customRoleSlug"), + db.ref("permissions").withSchema(TableName.OrgRoles), + db.ref("authEnforced").withSchema(TableName.Organization).as("orgAuthEnforced"), + db.ref("groupId").withSchema("userGroups"), + db.ref("groupOrgId").withSchema("userGroups"), + 
db.ref("groupName").withSchema("userGroups"), + db.ref("groupSlug").withSchema("userGroups"), + db.ref("groupRole").withSchema("userGroups"), + db.ref("groupRoleId").withSchema("userGroups"), + db.ref("groupCreatedAt").withSchema("userGroups"), + db.ref("groupUpdatedAt").withSchema("userGroups"), + db.ref("groupCustomRolePermission").withSchema("userGroups") + ); + + const [formatedDoc] = sqlNestRelationships({ + data: membership, + key: "id", + parentMapper: (el) => + OrgMembershipsSchema.extend({ + permissions: z.unknown(), + orgAuthEnforced: z.boolean().optional().nullable(), + customRoleSlug: z.string().optional().nullable() + }).parse(el), + childrenMapper: [ + { + key: "groupId", + label: "groups" as const, + mapper: ({ + groupId, + groupUpdatedAt, + groupCreatedAt, + groupRole, + groupRoleId, + groupCustomRolePermission, + groupName, + groupSlug, + groupOrgId + }) => ({ + id: groupId, + updatedAt: groupUpdatedAt, + createdAt: groupCreatedAt, + role: groupRole, + roleId: groupRoleId, + customRolePermission: groupCustomRolePermission, + name: groupName, + slug: groupSlug, + orgId: groupOrgId + }) + } + ] + }); + + return formatedDoc; } catch (error) { throw new DatabaseError({ error, name: "GetOrgPermission" }); } @@ -28,7 +109,8 @@ export const permissionDALFactory = (db: TDbClient) => { const getOrgIdentityPermission = async (identityId: string, orgId: string) => { try { - const membership = await db(TableName.IdentityOrgMembership) + const membership = await db + .replicaNode()(TableName.IdentityOrgMembership) .leftJoin(TableName.OrgRoles, `${TableName.IdentityOrgMembership}.roleId`, `${TableName.OrgRoles}.id`) .join(TableName.Organization, `${TableName.IdentityOrgMembership}.orgId`, `${TableName.Organization}.id`) .where("identityId", identityId) @@ -45,44 +127,31 @@ export const permissionDALFactory = (db: TDbClient) => { const getProjectPermission = async (userId: string, projectId: string) => { try { - const groups: string[] = await db(TableName.GroupProjectMembership) - .where(`${TableName.GroupProjectMembership}.projectId`, projectId) - .pluck(`${TableName.GroupProjectMembership}.groupId`); - - const groupDocs = await db(TableName.UserGroupMembership) - .where(`${TableName.UserGroupMembership}.userId`, userId) - .whereIn(`${TableName.UserGroupMembership}.groupId`, groups) - .join( - TableName.GroupProjectMembership, - `${TableName.GroupProjectMembership}.groupId`, - `${TableName.UserGroupMembership}.groupId` - ) - .join( + const docs = await db + .replicaNode()(TableName.Users) + .where(`${TableName.Users}.id`, userId) + .leftJoin(TableName.UserGroupMembership, `${TableName.UserGroupMembership}.userId`, `${TableName.Users}.id`) + .leftJoin(TableName.GroupProjectMembership, (queryBuilder) => { + void queryBuilder + .on(`${TableName.GroupProjectMembership}.projectId`, db.raw("?", [projectId])) + .andOn(`${TableName.GroupProjectMembership}.groupId`, `${TableName.UserGroupMembership}.groupId`); + }) + .leftJoin( TableName.GroupProjectMembershipRole, `${TableName.GroupProjectMembershipRole}.projectMembershipId`, `${TableName.GroupProjectMembership}.id` ) - .leftJoin( - TableName.ProjectRoles, + .leftJoin( + { groupCustomRoles: TableName.ProjectRoles }, `${TableName.GroupProjectMembershipRole}.customRoleId`, - `${TableName.ProjectRoles}.id` + `groupCustomRoles.id` ) - .join(TableName.Project, `${TableName.GroupProjectMembership}.projectId`, `${TableName.Project}.id`) - .join(TableName.Organization, `${TableName.Project}.orgId`, `${TableName.Organization}.id`) - 
.select(selectAllTableCols(TableName.GroupProjectMembershipRole)) - .select( - db.ref("id").withSchema(TableName.GroupProjectMembership).as("membershipId"), - db.ref("createdAt").withSchema(TableName.GroupProjectMembership).as("membershipCreatedAt"), - db.ref("updatedAt").withSchema(TableName.GroupProjectMembership).as("membershipUpdatedAt"), - db.ref("projectId").withSchema(TableName.GroupProjectMembership), - db.ref("authEnforced").withSchema(TableName.Organization).as("orgAuthEnforced"), - db.ref("orgId").withSchema(TableName.Project), - db.ref("slug").withSchema(TableName.ProjectRoles).as("customRoleSlug") - ) - .select("permissions"); - - const docs = await db(TableName.ProjectMembership) - .join( + .leftJoin(TableName.ProjectMembership, (queryBuilder) => { + void queryBuilder + .on(`${TableName.ProjectMembership}.projectId`, db.raw("?", [projectId])) + .andOn(`${TableName.ProjectMembership}.userId`, `${TableName.Users}.id`); + }) + .leftJoin( TableName.ProjectUserMembershipRole, `${TableName.ProjectUserMembershipRole}.projectMembershipId`, `${TableName.ProjectMembership}.id` @@ -92,135 +161,247 @@ export const permissionDALFactory = (db: TDbClient) => { `${TableName.ProjectUserMembershipRole}.customRoleId`, `${TableName.ProjectRoles}.id` ) - .leftJoin( - TableName.ProjectUserAdditionalPrivilege, - `${TableName.ProjectUserAdditionalPrivilege}.projectMembershipId`, - `${TableName.ProjectMembership}.id` - ) - .join(TableName.Project, `${TableName.ProjectMembership}.projectId`, `${TableName.Project}.id`) + .leftJoin(TableName.ProjectUserAdditionalPrivilege, (queryBuilder) => { + void queryBuilder + .on(`${TableName.ProjectUserAdditionalPrivilege}.projectId`, db.raw("?", [projectId])) + .andOn(`${TableName.ProjectUserAdditionalPrivilege}.userId`, `${TableName.Users}.id`); + }) + .join(TableName.Project, `${TableName.Project}.id`, db.raw("?", [projectId])) .join(TableName.Organization, `${TableName.Project}.orgId`, `${TableName.Organization}.id`) - .where("userId", userId) - .where(`${TableName.ProjectMembership}.projectId`, projectId) - .select(selectAllTableCols(TableName.ProjectUserMembershipRole)) + .leftJoin(TableName.IdentityMetadata, (queryBuilder) => { + void queryBuilder + .on(`${TableName.Users}.id`, `${TableName.IdentityMetadata}.userId`) + .andOn(`${TableName.Organization}.id`, `${TableName.IdentityMetadata}.orgId`); + }) .select( + db.ref("id").withSchema(TableName.Users).as("userId"), + db.ref("username").withSchema(TableName.Users).as("username"), + // groups specific + db.ref("id").withSchema(TableName.GroupProjectMembership).as("groupMembershipId"), + db.ref("createdAt").withSchema(TableName.GroupProjectMembership).as("groupMembershipCreatedAt"), + db.ref("updatedAt").withSchema(TableName.GroupProjectMembership).as("groupMembershipUpdatedAt"), + db.ref("slug").withSchema("groupCustomRoles").as("userGroupProjectMembershipRoleCustomRoleSlug"), + db.ref("permissions").withSchema("groupCustomRoles").as("userGroupProjectMembershipRolePermission"), + db.ref("id").withSchema(TableName.GroupProjectMembershipRole).as("userGroupProjectMembershipRoleId"), + db.ref("role").withSchema(TableName.GroupProjectMembershipRole).as("userGroupProjectMembershipRole"), + db + .ref("customRoleId") + .withSchema(TableName.GroupProjectMembershipRole) + .as("userGroupProjectMembershipRoleCustomRoleId"), + db + .ref("isTemporary") + .withSchema(TableName.GroupProjectMembershipRole) + .as("userGroupProjectMembershipRoleIsTemporary"), + db + .ref("temporaryMode") + 
.withSchema(TableName.GroupProjectMembershipRole) + .as("userGroupProjectMembershipRoleTemporaryMode"), + db + .ref("temporaryRange") + .withSchema(TableName.GroupProjectMembershipRole) + .as("userGroupProjectMembershipRoleTemporaryRange"), + db + .ref("temporaryAccessStartTime") + .withSchema(TableName.GroupProjectMembershipRole) + .as("userGroupProjectMembershipRoleTemporaryAccessStartTime"), + db + .ref("temporaryAccessEndTime") + .withSchema(TableName.GroupProjectMembershipRole) + .as("userGroupProjectMembershipRoleTemporaryAccessEndTime"), + // user specific db.ref("id").withSchema(TableName.ProjectMembership).as("membershipId"), db.ref("createdAt").withSchema(TableName.ProjectMembership).as("membershipCreatedAt"), db.ref("updatedAt").withSchema(TableName.ProjectMembership).as("membershipUpdatedAt"), - db.ref("projectId").withSchema(TableName.ProjectMembership), - db.ref("authEnforced").withSchema(TableName.Organization).as("orgAuthEnforced"), - db.ref("orgId").withSchema(TableName.Project), - db.ref("slug").withSchema(TableName.ProjectRoles).as("customRoleSlug"), - db.ref("permissions").withSchema(TableName.ProjectRoles), - db.ref("id").withSchema(TableName.ProjectUserAdditionalPrivilege).as("userApId"), - db.ref("permissions").withSchema(TableName.ProjectUserAdditionalPrivilege).as("userApPermissions"), - db.ref("temporaryMode").withSchema(TableName.ProjectUserAdditionalPrivilege).as("userApTemporaryMode"), - db.ref("isTemporary").withSchema(TableName.ProjectUserAdditionalPrivilege).as("userApIsTemporary"), - db.ref("temporaryRange").withSchema(TableName.ProjectUserAdditionalPrivilege).as("userApTemporaryRange"), + db.ref("slug").withSchema(TableName.ProjectRoles).as("userProjectMembershipRoleCustomRoleSlug"), + db.ref("permissions").withSchema(TableName.ProjectRoles).as("userProjectCustomRolePermission"), + db.ref("id").withSchema(TableName.ProjectUserMembershipRole).as("userProjectMembershipRoleId"), + db.ref("role").withSchema(TableName.ProjectUserMembershipRole).as("userProjectMembershipRole"), + db + .ref("temporaryMode") + .withSchema(TableName.ProjectUserMembershipRole) + .as("userProjectMembershipRoleTemporaryMode"), + db + .ref("isTemporary") + .withSchema(TableName.ProjectUserMembershipRole) + .as("userProjectMembershipRoleIsTemporary"), + db + .ref("temporaryRange") + .withSchema(TableName.ProjectUserMembershipRole) + .as("userProjectMembershipRoleTemporaryRange"), + db + .ref("temporaryAccessStartTime") + .withSchema(TableName.ProjectUserMembershipRole) + .as("userProjectMembershipRoleTemporaryAccessStartTime"), + db + .ref("temporaryAccessEndTime") + .withSchema(TableName.ProjectUserMembershipRole) + .as("userProjectMembershipRoleTemporaryAccessEndTime"), + db.ref("id").withSchema(TableName.ProjectUserAdditionalPrivilege).as("userAdditionalPrivilegesId"), + db + .ref("permissions") + .withSchema(TableName.ProjectUserAdditionalPrivilege) + .as("userAdditionalPrivilegesPermissions"), + db + .ref("temporaryMode") + .withSchema(TableName.ProjectUserAdditionalPrivilege) + .as("userAdditionalPrivilegesTemporaryMode"), + db + .ref("isTemporary") + .withSchema(TableName.ProjectUserAdditionalPrivilege) + .as("userAdditionalPrivilegesIsTemporary"), + db + .ref("temporaryRange") + .withSchema(TableName.ProjectUserAdditionalPrivilege) + .as("userAdditionalPrivilegesTemporaryRange"), + db.ref("userId").withSchema(TableName.ProjectUserAdditionalPrivilege).as("userAdditionalPrivilegesUserId"), db .ref("temporaryAccessStartTime") .withSchema(TableName.ProjectUserAdditionalPrivilege) - 
.as("userApTemporaryAccessStartTime"), + .as("userAdditionalPrivilegesTemporaryAccessStartTime"), db .ref("temporaryAccessEndTime") .withSchema(TableName.ProjectUserAdditionalPrivilege) - .as("userApTemporaryAccessEndTime") + .as("userAdditionalPrivilegesTemporaryAccessEndTime"), + // general + db.ref("id").withSchema(TableName.IdentityMetadata).as("metadataId"), + db.ref("key").withSchema(TableName.IdentityMetadata).as("metadataKey"), + db.ref("value").withSchema(TableName.IdentityMetadata).as("metadataValue"), + db.ref("authEnforced").withSchema(TableName.Organization).as("orgAuthEnforced"), + db.ref("orgId").withSchema(TableName.Project), + db.ref("id").withSchema(TableName.Project).as("projectId") ); - const permission = sqlNestRelationships({ + const [userPermission] = sqlNestRelationships({ data: docs, key: "projectId", - parentMapper: ({ orgId, orgAuthEnforced, membershipId, membershipCreatedAt, membershipUpdatedAt }) => ({ + parentMapper: ({ + orgId, + username, + orgAuthEnforced, + membershipId, + groupMembershipId, + membershipCreatedAt, + groupMembershipCreatedAt, + groupMembershipUpdatedAt, + membershipUpdatedAt + }) => ({ orgId, orgAuthEnforced, userId, - id: membershipId, projectId, - createdAt: membershipCreatedAt, - updatedAt: membershipUpdatedAt + username, + id: membershipId || groupMembershipId, + createdAt: membershipCreatedAt || groupMembershipCreatedAt, + updatedAt: membershipUpdatedAt || groupMembershipUpdatedAt }), childrenMapper: [ { - key: "id", - label: "roles" as const, - mapper: (data) => - ProjectUserMembershipRolesSchema.extend({ - permissions: z.unknown(), - customRoleSlug: z.string().optional().nullable() - }).parse(data) + key: "userGroupProjectMembershipRoleId", + label: "userGroupRoles" as const, + mapper: ({ + userGroupProjectMembershipRoleId, + userGroupProjectMembershipRole, + userGroupProjectMembershipRolePermission, + userGroupProjectMembershipRoleCustomRoleSlug, + userGroupProjectMembershipRoleIsTemporary, + userGroupProjectMembershipRoleTemporaryMode, + userGroupProjectMembershipRoleTemporaryAccessEndTime, + userGroupProjectMembershipRoleTemporaryAccessStartTime, + userGroupProjectMembershipRoleTemporaryRange + }) => ({ + id: userGroupProjectMembershipRoleId, + role: userGroupProjectMembershipRole, + customRoleSlug: userGroupProjectMembershipRoleCustomRoleSlug, + permissions: userGroupProjectMembershipRolePermission, + temporaryRange: userGroupProjectMembershipRoleTemporaryRange, + temporaryMode: userGroupProjectMembershipRoleTemporaryMode, + temporaryAccessStartTime: userGroupProjectMembershipRoleTemporaryAccessStartTime, + temporaryAccessEndTime: userGroupProjectMembershipRoleTemporaryAccessEndTime, + isTemporary: userGroupProjectMembershipRoleIsTemporary + }) }, { - key: "userApId", + key: "userProjectMembershipRoleId", + label: "projecMembershiptRoles" as const, + mapper: ({ + userProjectMembershipRoleId, + userProjectMembershipRole, + userProjectCustomRolePermission, + userProjectMembershipRoleIsTemporary, + userProjectMembershipRoleTemporaryMode, + userProjectMembershipRoleTemporaryRange, + userProjectMembershipRoleTemporaryAccessEndTime, + userProjectMembershipRoleTemporaryAccessStartTime, + userProjectMembershipRoleCustomRoleSlug + }) => ({ + id: userProjectMembershipRoleId, + role: userProjectMembershipRole, + customRoleSlug: userProjectMembershipRoleCustomRoleSlug, + permissions: userProjectCustomRolePermission, + temporaryRange: userProjectMembershipRoleTemporaryRange, + temporaryMode: userProjectMembershipRoleTemporaryMode, + 
temporaryAccessStartTime: userProjectMembershipRoleTemporaryAccessStartTime, + temporaryAccessEndTime: userProjectMembershipRoleTemporaryAccessEndTime, + isTemporary: userProjectMembershipRoleIsTemporary + }) + }, + { + key: "userAdditionalPrivilegesId", label: "additionalPrivileges" as const, mapper: ({ - userApId, - userApPermissions, - userApIsTemporary, - userApTemporaryMode, - userApTemporaryRange, - userApTemporaryAccessEndTime, - userApTemporaryAccessStartTime + userAdditionalPrivilegesId, + userAdditionalPrivilegesPermissions, + userAdditionalPrivilegesIsTemporary, + userAdditionalPrivilegesTemporaryMode, + userAdditionalPrivilegesTemporaryRange, + userAdditionalPrivilegesTemporaryAccessEndTime, + userAdditionalPrivilegesTemporaryAccessStartTime }) => ({ - id: userApId, - permissions: userApPermissions, - temporaryRange: userApTemporaryRange, - temporaryMode: userApTemporaryMode, - temporaryAccessEndTime: userApTemporaryAccessEndTime, - temporaryAccessStartTime: userApTemporaryAccessStartTime, - isTemporary: userApIsTemporary + id: userAdditionalPrivilegesId, + permissions: userAdditionalPrivilegesPermissions, + temporaryRange: userAdditionalPrivilegesTemporaryRange, + temporaryMode: userAdditionalPrivilegesTemporaryMode, + temporaryAccessStartTime: userAdditionalPrivilegesTemporaryAccessStartTime, + temporaryAccessEndTime: userAdditionalPrivilegesTemporaryAccessEndTime, + isTemporary: userAdditionalPrivilegesIsTemporary + }) + }, + { + key: "metadataId", + label: "metadata" as const, + mapper: ({ metadataKey, metadataValue, metadataId }) => ({ + id: metadataId, + key: metadataKey, + value: metadataValue }) } ] }); - const groupPermission = groupDocs.length - ? sqlNestRelationships({ - data: groupDocs, - key: "projectId", - parentMapper: ({ orgId, orgAuthEnforced, membershipId, membershipCreatedAt, membershipUpdatedAt }) => ({ - orgId, - orgAuthEnforced, - userId, - id: membershipId, - projectId, - createdAt: membershipCreatedAt, - updatedAt: membershipUpdatedAt - }), - childrenMapper: [ - { - key: "id", - label: "roles" as const, - mapper: (data) => - ProjectUserMembershipRolesSchema.extend({ - permissions: z.unknown(), - customRoleSlug: z.string().optional().nullable() - }).parse(data) - } - ] - }) - : []; - - if (!permission?.[0] && !groupPermission[0]) return undefined; + if (!userPermission) return undefined; + if (!userPermission?.userGroupRoles?.[0] && !userPermission?.projecMembershiptRoles?.[0]) return undefined; // when introducting cron mode change it here const activeRoles = - permission?.[0]?.roles?.filter( + userPermission?.projecMembershiptRoles?.filter( ({ isTemporary, temporaryAccessEndTime }) => !isTemporary || (isTemporary && temporaryAccessEndTime && new Date() < temporaryAccessEndTime) ) ?? []; const activeGroupRoles = - groupPermission?.[0]?.roles?.filter( + userPermission?.userGroupRoles?.filter( ({ isTemporary, temporaryAccessEndTime }) => !isTemporary || (isTemporary && temporaryAccessEndTime && new Date() < temporaryAccessEndTime) ) ?? []; - const activeAdditionalPrivileges = permission?.[0]?.additionalPrivileges?.filter( - ({ isTemporary, temporaryAccessEndTime }) => - !isTemporary || (isTemporary && temporaryAccessEndTime && new Date() < temporaryAccessEndTime) - ); + const activeAdditionalPrivileges = + userPermission?.additionalPrivileges?.filter( + ({ isTemporary, temporaryAccessEndTime }) => + !isTemporary || (isTemporary && temporaryAccessEndTime && new Date() < temporaryAccessEndTime) + ) ?? 
[]; return { - ...(permission[0] || groupPermission[0]), + ...userPermission, roles: [...activeRoles, ...activeGroupRoles], additionalPrivileges: activeAdditionalPrivileges }; @@ -231,12 +412,14 @@ export const permissionDALFactory = (db: TDbClient) => { const getProjectIdentityPermission = async (identityId: string, projectId: string) => { try { - const docs = await db(TableName.IdentityProjectMembership) + const docs = await db + .replicaNode()(TableName.IdentityProjectMembership) .join( TableName.IdentityProjectMembershipRole, `${TableName.IdentityProjectMembershipRole}.projectMembershipId`, `${TableName.IdentityProjectMembership}.id` ) + .join(TableName.Identity, `${TableName.Identity}.id`, `${TableName.IdentityProjectMembership}.identityId`) .leftJoin( TableName.ProjectRoles, `${TableName.IdentityProjectMembershipRole}.customRoleId`, @@ -253,11 +436,17 @@ export const permissionDALFactory = (db: TDbClient) => { `${TableName.IdentityProjectMembership}.projectId`, `${TableName.Project}.id` ) - .where("identityId", identityId) + .leftJoin(TableName.IdentityMetadata, (queryBuilder) => { + void queryBuilder + .on(`${TableName.Identity}.id`, `${TableName.IdentityMetadata}.identityId`) + .andOn(`${TableName.Project}.orgId`, `${TableName.IdentityMetadata}.orgId`); + }) + .where(`${TableName.IdentityProjectMembership}.identityId`, identityId) .where(`${TableName.IdentityProjectMembership}.projectId`, projectId) .select(selectAllTableCols(TableName.IdentityProjectMembershipRole)) .select( db.ref("id").withSchema(TableName.IdentityProjectMembership).as("membershipId"), + db.ref("name").withSchema(TableName.Identity).as("identityName"), db.ref("orgId").withSchema(TableName.Project).as("orgId"), // Now you can select orgId from Project db.ref("createdAt").withSchema(TableName.IdentityProjectMembership).as("membershipCreatedAt"), db.ref("updatedAt").withSchema(TableName.IdentityProjectMembership).as("membershipUpdatedAt"), @@ -281,15 +470,19 @@ export const permissionDALFactory = (db: TDbClient) => { db .ref("temporaryAccessEndTime") .withSchema(TableName.IdentityProjectAdditionalPrivilege) - .as("identityApTemporaryAccessEndTime") + .as("identityApTemporaryAccessEndTime"), + db.ref("id").withSchema(TableName.IdentityMetadata).as("metadataId"), + db.ref("key").withSchema(TableName.IdentityMetadata).as("metadataKey"), + db.ref("value").withSchema(TableName.IdentityMetadata).as("metadataValue") ); const permission = sqlNestRelationships({ data: docs, key: "membershipId", - parentMapper: ({ membershipId, membershipCreatedAt, membershipUpdatedAt, orgId }) => ({ + parentMapper: ({ membershipId, membershipCreatedAt, membershipUpdatedAt, orgId, identityName }) => ({ id: membershipId, identityId, + username: identityName, projectId, createdAt: membershipCreatedAt, updatedAt: membershipUpdatedAt, @@ -327,6 +520,15 @@ export const permissionDALFactory = (db: TDbClient) => { temporaryAccessStartTime: identityApTemporaryAccessStartTime, isTemporary: identityApIsTemporary }) + }, + { + key: "metadataId", + label: "metadata" as const, + mapper: ({ metadataKey, metadataValue, metadataId }) => ({ + id: metadataId, + key: metadataKey, + value: metadataValue + }) } ] }); diff --git a/backend/src/ee/services/permission/permission-fns.ts b/backend/src/ee/services/permission/permission-fns.ts index eda19c2150..80a58db0a5 100644 --- a/backend/src/ee/services/permission/permission-fns.ts +++ b/backend/src/ee/services/permission/permission-fns.ts @@ -1,5 +1,5 @@ import { TOrganizations } from "@app/db/schemas"; -import { 
UnauthorizedError } from "@app/lib/errors";
+import { ForbiddenRequestError, UnauthorizedError } from "@app/lib/errors";
 import { ActorAuthMethod, AuthMethod } from "@app/services/auth/auth-type";
 
 function isAuthMethodSaml(actorAuthMethod: ActorAuthMethod) {
@@ -14,14 +14,33 @@ function isAuthMethodSaml(actorAuthMethod: ActorAuthMethod) {
   ].includes(actorAuthMethod);
 }
 
-function validateOrgSAML(actorAuthMethod: ActorAuthMethod, isSamlEnforced: TOrganizations["authEnforced"]) {
+function validateOrgSSO(actorAuthMethod: ActorAuthMethod, isOrgSsoEnforced: TOrganizations["authEnforced"]) {
   if (actorAuthMethod === undefined) {
     throw new UnauthorizedError({ name: "No auth method defined" });
   }
 
-  if (isSamlEnforced && actorAuthMethod !== null && !isAuthMethodSaml(actorAuthMethod)) {
-    throw new UnauthorizedError({ name: "Cannot access org-scoped resource" });
+  if (
+    isOrgSsoEnforced &&
+    actorAuthMethod !== null &&
+    !isAuthMethodSaml(actorAuthMethod) &&
+    actorAuthMethod !== AuthMethod.OIDC
+  ) {
+    throw new ForbiddenRequestError({ name: "Org auth enforced. Cannot access org-scoped resource" });
   }
 }
 
-export { isAuthMethodSaml, validateOrgSAML };
+const escapeHandlebarsMissingMetadata = (obj: Record<string, string>) => {
+  const handler = {
+    get(target: Record<string, string>, prop: string) {
+      if (!(prop in target)) {
+        // eslint-disable-next-line no-param-reassign
+        target[prop] = `{{identity.metadata.${prop}}}`; // Add missing key as an "own" property
+      }
+      return target[prop];
+    }
+  };
+
+  return new Proxy(obj, handler);
+};
+
+export { escapeHandlebarsMissingMetadata, isAuthMethodSaml, validateOrgSSO };
diff --git a/backend/src/ee/services/permission/permission-service-types.ts b/backend/src/ee/services/permission/permission-service-types.ts
new file mode 100644
index 0000000000..620e7a61ce
--- /dev/null
+++ b/backend/src/ee/services/permission/permission-service-types.ts
@@ -0,0 +1,9 @@
+export type TBuildProjectPermissionDTO = {
+  permissions?: unknown;
+  role: string;
+}[];
+
+export type TBuildOrgPermissionDTO = {
+  permissions?: unknown;
+  role: string;
+}[];
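The permission service below relies on a templating round trip over CASL rules: rules are serialized to JSON, run through handlebars with the actor's identity metadata (wrapped in the Proxy helper above so an unset key survives as its literal placeholder instead of collapsing to an empty condition value), then parsed back and compiled into an ability. A minimal, self-contained sketch of that round trip, assuming only the handlebars and @casl/ability packages plus the escapeHandlebarsMissingMetadata helper above; the sample rule, ids, and metadata values are hypothetical:

import { createMongoAbility, MongoAbility, RawRuleOf } from "@casl/ability";
import handlebars from "handlebars";

import { escapeHandlebarsMissingMetadata } from "./permission-fns";

// A rule whose condition references identity metadata via a handlebars placeholder.
const rawRules = [
  { action: "read", subject: "secrets", conditions: { environment: "{{identity.metadata.env}}" } }
];

// Serialize the rules, interpolate the actor's metadata, then parse them back.
const template = handlebars.compile(JSON.stringify(rawRules), { data: false });
const interpolated = template(
  {
    identity: {
      id: "user-1", // hypothetical actor id
      username: "jane@acme.com", // hypothetical username
      // Proxy-wrapped: a key that is absent is re-emitted as the literal
      // "{{identity.metadata.<key>}}" placeholder rather than an empty string.
      metadata: escapeHandlebarsMissingMetadata({ env: "staging" })
    }
  },
  { data: false }
);

console.log(JSON.parse(interpolated)[0].conditions.environment); // -> "staging"

// The interpolated rules can then be compiled into an ability as usual.
const ability = createMongoAbility(JSON.parse(interpolated) as RawRuleOf<MongoAbility>[]);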
"./permission-types"; +import { escapeHandlebarsMissingMetadata, validateOrgSSO } from "./permission-fns"; +import { TBuildOrgPermissionDTO, TBuildProjectPermissionDTO } from "./permission-service-types"; import { buildServiceTokenProjectPermission, projectAdminPermissions, @@ -47,29 +49,32 @@ export const permissionServiceFactory = ({ serviceTokenDAL, projectDAL }: TPermissionServiceFactoryDep) => { - const buildOrgPermission = (role: string, permission?: unknown) => { - switch (role) { - case OrgMembershipRole.Admin: - return orgAdminPermissions; - case OrgMembershipRole.Member: - return orgMemberPermissions; - case OrgMembershipRole.NoAccess: - return orgNoAccessPermissions; - case OrgMembershipRole.Custom: - return createMongoAbility( - unpackRules>>( - permission as PackRule>>[] - ), - { - conditionsMatcher - } - ); - default: - throw new BadRequestError({ name: "OrgRoleInvalid", message: "Org role not found" }); - } + const buildOrgPermission = (orgUserRoles: TBuildOrgPermissionDTO) => { + const rules = orgUserRoles + .map(({ role, permissions }) => { + switch (role) { + case OrgMembershipRole.Admin: + return orgAdminPermissions; + case OrgMembershipRole.Member: + return orgMemberPermissions; + case OrgMembershipRole.NoAccess: + return orgNoAccessPermissions; + case OrgMembershipRole.Custom: + return unpackRules>>( + permissions as PackRule>>[] + ); + default: + throw new NotFoundError({ name: "OrgRoleInvalid", message: `Organization role '${role}' not found` }); + } + }) + .reduce((prev, curr) => prev.concat(curr), []); + + return createMongoAbility(rules, { + conditionsMatcher + }); }; - const buildProjectPermission = (projectUserRoles: TBuildProjectPermissionDTO) => { + const buildProjectPermissionRules = (projectUserRoles: TBuildProjectPermissionDTO) => { const rules = projectUserRoles .map(({ role, permissions }) => { switch (role) { @@ -87,17 +92,15 @@ export const permissionServiceFactory = ({ ); } default: - throw new BadRequestError({ + throw new NotFoundError({ name: "ProjectRoleInvalid", - message: "Project role not found" + message: `Project role '${role}' not found` }); } }) - .reduce((curr, prev) => prev.concat(curr), []); + .reduce((prev, curr) => prev.concat(curr), []); - return createMongoAbility(rules, { - conditionsMatcher - }); + return rules; }; /* @@ -109,10 +112,13 @@ export const permissionServiceFactory = ({ authMethod: ActorAuthMethod, userOrgId?: string ) => { + // when token is scoped, ensure the passed org id is same as user org id + if (userOrgId && userOrgId !== orgId) + throw new ForbiddenRequestError({ message: "Invalid user token. Scoped to different organization." }); const membership = await permissionDAL.getOrgPermission(userId, orgId); - if (!membership) throw new UnauthorizedError({ name: "User not in org" }); + if (!membership) throw new ForbiddenRequestError({ name: "You are not apart of this organization" }); if (membership.role === OrgMembershipRole.Custom && !membership.permissions) { - throw new BadRequestError({ name: "Custom permission not found" }); + throw new BadRequestError({ name: "Custom organization permission not found" }); } // If the org ID is API_KEY, the request is being made with an API Key. @@ -121,21 +127,30 @@ export const permissionServiceFactory = ({ // Extra: This means that when users are using API keys to make requests, they can't use slug-based routes. 
// Slug-based routes depend on the organization ID being present on the request, since project slugs aren't globally unique, and we need a way to filter by organization. if (userOrgId !== "API_KEY" && membership.orgId !== userOrgId) { - throw new UnauthorizedError({ name: "You are not logged into this organization" }); + throw new ForbiddenRequestError({ name: "You are not logged into this organization" }); } - validateOrgSAML(authMethod, membership.orgAuthEnforced); + validateOrgSSO(authMethod, membership.orgAuthEnforced); - return { permission: buildOrgPermission(membership.role, membership.permissions), membership }; + const finalPolicyRoles = [{ role: membership.role, permissions: membership.permissions }].concat( + membership?.groups?.map(({ role, customRolePermission }) => ({ + role, + permissions: customRolePermission + })) || [] + ); + return { permission: buildOrgPermission(finalPolicyRoles), membership }; }; const getIdentityOrgPermission = async (identityId: string, orgId: string) => { const membership = await permissionDAL.getOrgIdentityPermission(identityId, orgId); - if (!membership) throw new UnauthorizedError({ name: "Identity not in org" }); + if (!membership) throw new ForbiddenRequestError({ name: "Identity is not apart of this organization" }); if (membership.role === OrgMembershipRole.Custom && !membership.permissions) { - throw new BadRequestError({ name: "Custom permission not found" }); + throw new NotFoundError({ name: `Custom organization permission not found for identity ${identityId}` }); } - return { permission: buildOrgPermission(membership.role, membership.permissions), membership }; + return { + permission: buildOrgPermission([{ role: membership.role, permissions: membership.permissions }]), + membership + }; }; const getOrgPermission = async ( @@ -151,8 +166,8 @@ export const permissionServiceFactory = ({ case ActorType.IDENTITY: return getIdentityOrgPermission(id, orgId); default: - throw new UnauthorizedError({ - message: "Permission not defined", + throw new BadRequestError({ + message: "Invalid actor provided", name: "Get org permission" }); } @@ -164,13 +179,16 @@ export const permissionServiceFactory = ({ const isCustomRole = !Object.values(OrgMembershipRole).includes(role as OrgMembershipRole); if (isCustomRole) { const orgRole = await orgRoleDAL.findOne({ slug: role, orgId }); - if (!orgRole) throw new BadRequestError({ message: "Role not found" }); + if (!orgRole) + throw new NotFoundError({ + message: `Specified role '${role}' was not found in the organization with ID '${orgId}'` + }); return { - permission: buildOrgPermission(OrgMembershipRole.Custom, orgRole.permissions), + permission: buildOrgPermission([{ role: OrgMembershipRole.Custom, permissions: orgRole.permissions }]), role: orgRole }; } - return { permission: buildOrgPermission(role, []) }; + return { permission: buildOrgPermission([{ role, permissions: [] }]) }; }; // user permission for a project in an organization @@ -181,12 +199,12 @@ export const permissionServiceFactory = ({ userOrgId?: string ): Promise> => { const userProjectPermission = await permissionDAL.getProjectPermission(userId, projectId); - if (!userProjectPermission) throw new UnauthorizedError({ name: "User not in project" }); + if (!userProjectPermission) throw new ForbiddenRequestError({ name: "User not a part of the specified project" }); if ( userProjectPermission.roles.some(({ role, permissions }) => role === ProjectMembershipRole.Custom && !permissions) ) { - throw new BadRequestError({ name: "Custom permission 
not found" }); + throw new NotFoundError({ name: "The permission was not found" }); } // If the org ID is API_KEY, the request is being made with an API Key. @@ -195,10 +213,10 @@ export const permissionServiceFactory = ({ // Extra: This means that when users are using API keys to make requests, they can't use slug-based routes. // Slug-based routes depend on the organization ID being present on the request, since project slugs aren't globally unique, and we need a way to filter by organization. if (userOrgId !== "API_KEY" && userProjectPermission.orgId !== userOrgId) { - throw new UnauthorizedError({ name: "You are not logged into this organization" }); + throw new ForbiddenRequestError({ name: "You are not logged into this organization" }); } - validateOrgSAML(authMethod, userProjectPermission.orgAuthEnforced); + validateOrgSSO(authMethod, userProjectPermission.orgAuthEnforced); // join two permissions and pass to build the final permission set const rolePermissions = userProjectPermission.roles?.map(({ role, permissions }) => ({ role, permissions })) || []; @@ -208,8 +226,34 @@ export const permissionServiceFactory = ({ permissions })) || []; + const rules = buildProjectPermissionRules(rolePermissions.concat(additionalPrivileges)); + const templatedRules = handlebars.compile(JSON.stringify(rules), { data: false }); + const metadataKeyValuePair = escapeHandlebarsMissingMetadata( + objectify( + userProjectPermission.metadata, + (i) => i.key, + (i) => i.value + ) + ); + const interpolateRules = templatedRules( + { + identity: { + id: userProjectPermission.userId, + username: userProjectPermission.username, + metadata: metadataKeyValuePair + } + }, + { data: false } + ); + const permission = createMongoAbility( + JSON.parse(interpolateRules) as RawRuleOf>[], + { + conditionsMatcher + } + ); + return { - permission: buildProjectPermission(rolePermissions.concat(additionalPrivileges)), + permission, membership: userProjectPermission, hasRole: (role: string) => userProjectPermission.roles.findIndex( @@ -224,18 +268,21 @@ export const permissionServiceFactory = ({ identityOrgId: string | undefined ): Promise> => { const identityProjectPermission = await permissionDAL.getProjectIdentityPermission(identityId, projectId); - if (!identityProjectPermission) throw new UnauthorizedError({ name: "Identity not in project" }); + if (!identityProjectPermission) + throw new ForbiddenRequestError({ + name: `Identity is not a member of the specified project with ID '${projectId}'` + }); if ( identityProjectPermission.roles.some( ({ role, permissions }) => role === ProjectMembershipRole.Custom && !permissions ) ) { - throw new BadRequestError({ name: "Custom permission not found" }); + throw new NotFoundError({ name: "Custom permission not found" }); } if (identityProjectPermission.orgId !== identityOrgId) { - throw new UnauthorizedError({ name: "You are not a member of this organization" }); + throw new ForbiddenRequestError({ name: "Identity is not a member of the specified organization" }); } const rolePermissions = @@ -246,8 +293,35 @@ export const permissionServiceFactory = ({ permissions })) || []; + const rules = buildProjectPermissionRules(rolePermissions.concat(additionalPrivileges)); + const templatedRules = handlebars.compile(JSON.stringify(rules), { data: false }); + const metadataKeyValuePair = escapeHandlebarsMissingMetadata( + objectify( + identityProjectPermission.metadata, + (i) => i.key, + (i) => i.value + ) + ); + + const interpolateRules = templatedRules( + { + identity: { + id: 
identityProjectPermission.identityId, + username: identityProjectPermission.username, + metadata: metadataKeyValuePair + } + }, + { data: false } + ); + const permission = createMongoAbility( + JSON.parse(interpolateRules) as RawRuleOf>[], + { + conditionsMatcher + } + ); + return { - permission: buildProjectPermission(rolePermissions.concat(additionalPrivileges)), + permission, membership: identityProjectPermission, hasRole: (role: string) => identityProjectPermission.roles.findIndex( @@ -262,25 +336,27 @@ export const permissionServiceFactory = ({ actorOrgId: string | undefined ) => { const serviceToken = await serviceTokenDAL.findById(serviceTokenId); - if (!serviceToken) throw new BadRequestError({ message: "Service token not found" }); + if (!serviceToken) throw new NotFoundError({ message: `Service token with ID '${serviceTokenId}' not found` }); const serviceTokenProject = await projectDAL.findById(serviceToken.projectId); if (!serviceTokenProject) throw new BadRequestError({ message: "Service token not linked to a project" }); if (serviceTokenProject.orgId !== actorOrgId) { - throw new UnauthorizedError({ message: "Service token not a part of this organization" }); + throw new ForbiddenRequestError({ message: "Service token not a part of the specified organization" }); } - if (serviceToken.projectId !== projectId) - throw new UnauthorizedError({ - message: "Failed to find service authorization for given project" + if (serviceToken.projectId !== projectId) { + throw new ForbiddenRequestError({ + name: `Service token not a part of the specified project with ID ${projectId}` }); + } - if (serviceTokenProject.orgId !== actorOrgId) - throw new UnauthorizedError({ - message: "Failed to find service authorization for given project" + if (serviceTokenProject.orgId !== actorOrgId) { + throw new ForbiddenRequestError({ + message: `Service token not a part of the specified organization with ID ${actorOrgId}` }); + } const scopes = ServiceTokenScopes.parse(serviceToken.scopes || []); return { @@ -320,8 +396,8 @@ export const permissionServiceFactory = ({ case ActorType.IDENTITY: return getIdentityProjectPermission(id, projectId, actorOrgId) as Promise>; default: - throw new UnauthorizedError({ - message: "Permission not defined", + throw new BadRequestError({ + message: "Invalid actor provided", name: "Get project permission" }); } @@ -331,15 +407,23 @@ export const permissionServiceFactory = ({ const isCustomRole = !Object.values(ProjectMembershipRole).includes(role as ProjectMembershipRole); if (isCustomRole) { const projectRole = await projectRoleDAL.findOne({ slug: role, projectId }); - if (!projectRole) throw new BadRequestError({ message: "Role not found" }); + if (!projectRole) throw new NotFoundError({ message: `Specified role was not found: ${role}` }); + const rules = buildProjectPermissionRules([ + { role: ProjectMembershipRole.Custom, permissions: projectRole.permissions } + ]); return { - permission: buildProjectPermission([ - { role: ProjectMembershipRole.Custom, permissions: projectRole.permissions } - ]), + permission: createMongoAbility(rules, { + conditionsMatcher + }), role: projectRole }; } - return { permission: buildProjectPermission([{ role, permissions: [] }]) }; + + const rules = buildProjectPermissionRules([{ role, permissions: [] }]); + const permission = createMongoAbility(rules, { + conditionsMatcher + }); + return { permission }; }; return { @@ -350,6 +434,6 @@ export const permissionServiceFactory = ({ getOrgPermissionByRole, getProjectPermissionByRole, 
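    // note: buildProjectPermissionRules replaces the old buildProjectPermission export; it returns raw CASL rules rather than a built ability so callers can run the handlebars metadata interpolation before calling createMongoAbility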
buildOrgPermission, - buildProjectPermission + buildProjectPermissionRules }; }; diff --git a/backend/src/ee/services/permission/permission-types.ts b/backend/src/ee/services/permission/permission-types.ts index a35958ffd8..8df85054df 100644 --- a/backend/src/ee/services/permission/permission-types.ts +++ b/backend/src/ee/services/permission/permission-types.ts @@ -1,4 +1,47 @@ -export type TBuildProjectPermissionDTO = { - permissions?: unknown; - role: string; -}[]; +import picomatch from "picomatch"; +import { z } from "zod"; + +export enum PermissionConditionOperators { + $IN = "$in", + $ALL = "$all", + $REGEX = "$regex", + $EQ = "$eq", + $NEQ = "$ne", + $GLOB = "$glob" +} + +export const PermissionConditionSchema = { + [PermissionConditionOperators.$IN]: z.string().trim().min(1).array(), + [PermissionConditionOperators.$ALL]: z.string().trim().min(1).array(), + [PermissionConditionOperators.$REGEX]: z + .string() + .min(1) + .refine( + (el) => { + try { + // eslint-disable-next-line no-new + new RegExp(el); + return true; + } catch { + return false; + } + }, + { message: "Invalid regex pattern" } + ), + [PermissionConditionOperators.$EQ]: z.string().min(1), + [PermissionConditionOperators.$NEQ]: z.string().min(1), + [PermissionConditionOperators.$GLOB]: z + .string() + .min(1) + .refine( + (el) => { + try { + picomatch.parse([el]); + return true; + } catch { + return false; + } + }, + { message: "Invalid glob pattern" } + ) +}; diff --git a/backend/src/ee/services/permission/project-permission.ts b/backend/src/ee/services/permission/project-permission.ts index b24024bd4e..591cdd343f 100644 --- a/backend/src/ee/services/permission/project-permission.ts +++ b/backend/src/ee/services/permission/project-permission.ts @@ -1,6 +1,10 @@ import { AbilityBuilder, createMongoAbility, ForcedSubject, MongoAbility } from "@casl/ability"; +import { z } from "zod"; import { conditionsMatcher } from "@app/lib/casl"; +import { UnpackedPermissionSchema } from "@app/server/routes/santizedSchemas/permission"; + +import { PermissionConditionOperators, PermissionConditionSchema } from "./permission-types"; export enum ProjectPermissionActions { Read = "read", @@ -9,6 +13,23 @@ export enum ProjectPermissionActions { Delete = "delete" } +export enum ProjectPermissionCmekActions { + Read = "read", + Create = "create", + Edit = "edit", + Delete = "delete", + Encrypt = "encrypt", + Decrypt = "decrypt" +} + +export enum ProjectPermissionDynamicSecretActions { + ReadRootCredential = "read-root-credential", + CreateRootCredential = "create-root-credential", + EditRootCredential = "edit-root-credential", + DeleteRootCredential = "delete-root-credential", + Lease = "lease" +} + export enum ProjectPermissionSub { Role = "role", Member = "member", @@ -23,13 +44,40 @@ export enum ProjectPermissionSub { IpAllowList = "ip-allowlist", Project = "workspace", Secrets = "secrets", + SecretFolders = "secret-folders", + SecretImports = "secret-imports", + DynamicSecrets = "dynamic-secrets", SecretRollback = "secret-rollback", SecretApproval = "secret-approval", SecretRotation = "secret-rotation", - Identity = "identity" + Identity = "identity", + CertificateAuthorities = "certificate-authorities", + Certificates = "certificates", + CertificateTemplates = "certificate-templates", + PkiAlerts = "pki-alerts", + PkiCollections = "pki-collections", + Kms = "kms", + Cmek = "cmek" } -type SubjectFields = { +export type SecretSubjectFields = { + environment: string; + secretPath: string; + secretName?: string; + secretTags?: string[]; 
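+  // secretTags conditions are matched with the $in operator (see SecretConditionV2Schema below)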
+}; + +export type SecretFolderSubjectFields = { + environment: string; + secretPath: string; +}; + +export type DynamicSecretSubjectFields = { + environment: string; + secretPath: string; +}; + +export type SecretImportSubjectFields = { environment: string; secretPath: string; }; @@ -37,7 +85,28 @@ export type ProjectPermissionSet = | [ ProjectPermissionActions, - ProjectPermissionSub.Secrets | (ForcedSubject<ProjectPermissionSub.Secrets> & SubjectFields) + ProjectPermissionSub.Secrets | (ForcedSubject<ProjectPermissionSub.Secrets> & SecretSubjectFields) + ] + | [ + ProjectPermissionActions, + ( + | ProjectPermissionSub.SecretFolders + | (ForcedSubject<ProjectPermissionSub.SecretFolders> & SecretFolderSubjectFields) + ) + ] + | [ + ProjectPermissionDynamicSecretActions, + ( + | ProjectPermissionSub.DynamicSecrets + | (ForcedSubject<ProjectPermissionSub.DynamicSecrets> & DynamicSecretSubjectFields) + ) + ] + | [ + ProjectPermissionActions, + ( + | ProjectPermissionSub.SecretImports + | (ForcedSubject<ProjectPermissionSub.SecretImports> & SecretImportSubjectFields) + ) + ] | [ProjectPermissionActions, ProjectPermissionSub.Role] | [ProjectPermissionActions, ProjectPermissionSub.Tags] @@ -53,95 +122,370 @@ export type ProjectPermissionSet = | [ProjectPermissionActions, ProjectPermissionSub.SecretApproval] | [ProjectPermissionActions, ProjectPermissionSub.SecretRotation] | [ProjectPermissionActions, ProjectPermissionSub.Identity] + | [ProjectPermissionActions, ProjectPermissionSub.CertificateAuthorities] + | [ProjectPermissionActions, ProjectPermissionSub.Certificates] + | [ProjectPermissionActions, ProjectPermissionSub.CertificateTemplates] + | [ProjectPermissionActions, ProjectPermissionSub.PkiAlerts] + | [ProjectPermissionActions, ProjectPermissionSub.PkiCollections] + | [ProjectPermissionCmekActions, ProjectPermissionSub.Cmek] | [ProjectPermissionActions.Delete, ProjectPermissionSub.Project] | [ProjectPermissionActions.Edit, ProjectPermissionSub.Project] | [ProjectPermissionActions.Read, ProjectPermissionSub.SecretRollback] - | [ProjectPermissionActions.Create, ProjectPermissionSub.SecretRollback]; + | [ProjectPermissionActions.Create, ProjectPermissionSub.SecretRollback] + | [ProjectPermissionActions.Edit, ProjectPermissionSub.Kms]; + +const CASL_ACTION_SCHEMA_NATIVE_ENUM = <ACTION extends z.EnumLike>(actions: ACTION) => + z + .union([z.nativeEnum(actions), z.nativeEnum(actions).array().min(1)]) + .transform((el) => (typeof el === "string" ? [el] : el)); + +const CASL_ACTION_SCHEMA_ENUM = <ACTION extends z.EnumValues>(actions: ACTION) => + z.union([z.enum(actions), z.enum(actions).array().min(1)]).transform((el) => (typeof el === "string" ? 
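The ForcedSubject intersections above are how CASL ties a rule to per-subject fields such as environment and secretPath. A self-contained illustration of the underlying mechanism, using plain CASL with placeholder rule data (not code from this PR):

import { createMongoAbility, subject } from "@casl/ability";

// One field-scoped rule, mirroring how ProjectPermissionSet tuples carry SecretSubjectFields.
const ability = createMongoAbility([
  { action: "read", subject: "secrets", conditions: { environment: "dev" } }
]);

ability.can("read", subject("secrets", { environment: "dev", secretPath: "/" }));  // true
ability.can("read", subject("secrets", { environment: "prod", secretPath: "/" })); // false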
[el] : el)); + +// akhilmhdh: don't modify this for v2 +// if you want to update create a new schema +const SecretConditionV1Schema = z + .object({ + environment: z.union([ + z.string(), + z + .object({ + [PermissionConditionOperators.$EQ]: PermissionConditionSchema[PermissionConditionOperators.$EQ], + [PermissionConditionOperators.$NEQ]: PermissionConditionSchema[PermissionConditionOperators.$NEQ], + [PermissionConditionOperators.$IN]: PermissionConditionSchema[PermissionConditionOperators.$IN] + }) + .partial() + ]), + secretPath: z.union([ + z.string(), + z + .object({ + [PermissionConditionOperators.$EQ]: PermissionConditionSchema[PermissionConditionOperators.$EQ], + [PermissionConditionOperators.$NEQ]: PermissionConditionSchema[PermissionConditionOperators.$NEQ], + [PermissionConditionOperators.$IN]: PermissionConditionSchema[PermissionConditionOperators.$IN], + [PermissionConditionOperators.$GLOB]: PermissionConditionSchema[PermissionConditionOperators.$GLOB] + }) + .partial() + ]) + }) + .partial(); + +const SecretConditionV2Schema = z + .object({ + environment: z.union([ + z.string(), + z + .object({ + [PermissionConditionOperators.$EQ]: PermissionConditionSchema[PermissionConditionOperators.$EQ], + [PermissionConditionOperators.$NEQ]: PermissionConditionSchema[PermissionConditionOperators.$NEQ], + [PermissionConditionOperators.$IN]: PermissionConditionSchema[PermissionConditionOperators.$IN], + [PermissionConditionOperators.$GLOB]: PermissionConditionSchema[PermissionConditionOperators.$GLOB] + }) + .partial() + ]), + secretPath: z.union([ + z.string(), + z + .object({ + [PermissionConditionOperators.$EQ]: PermissionConditionSchema[PermissionConditionOperators.$EQ], + [PermissionConditionOperators.$NEQ]: PermissionConditionSchema[PermissionConditionOperators.$NEQ], + [PermissionConditionOperators.$IN]: PermissionConditionSchema[PermissionConditionOperators.$IN], + [PermissionConditionOperators.$GLOB]: PermissionConditionSchema[PermissionConditionOperators.$GLOB] + }) + .partial() + ]), + secretName: z.union([ + z.string(), + z + .object({ + [PermissionConditionOperators.$EQ]: PermissionConditionSchema[PermissionConditionOperators.$EQ], + [PermissionConditionOperators.$NEQ]: PermissionConditionSchema[PermissionConditionOperators.$NEQ], + [PermissionConditionOperators.$IN]: PermissionConditionSchema[PermissionConditionOperators.$IN], + [PermissionConditionOperators.$GLOB]: PermissionConditionSchema[PermissionConditionOperators.$GLOB] + }) + .partial() + ]), + secretTags: z + .object({ + [PermissionConditionOperators.$IN]: PermissionConditionSchema[PermissionConditionOperators.$IN] + }) + .partial() + }) + .partial(); + +const GeneralPermissionSchema = [ + z.object({ + subject: z.literal(ProjectPermissionSub.SecretApproval).describe("The entity this permission pertains to."), + action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionActions).describe( + "Describe what action an entity can take." + ) + }), + z.object({ + subject: z.literal(ProjectPermissionSub.SecretRotation).describe("The entity this permission pertains to."), + action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionActions).describe( + "Describe what action an entity can take." + ) + }), + z.object({ + subject: z.literal(ProjectPermissionSub.SecretRollback).describe("The entity this permission pertains to."), + action: CASL_ACTION_SCHEMA_ENUM([ProjectPermissionActions.Read, ProjectPermissionActions.Create]).describe( + "Describe what action an entity can take." 
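For reference, here is a condition document the V2 schema above accepts (hypothetical values; the operator keys are the PermissionConditionOperators string values):

const conditions = {
  environment: { $in: ["dev", "staging"] },
  secretPath: { $glob: "/api/**" },
  secretName: { $eq: "DB_PASSWORD" },
  secretTags: { $in: ["public"] }
};
// SecretConditionV2Schema.parse(conditions) succeeds; every field except
// secretTags also accepts a bare string (e.g. environment: "dev") via z.union.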
+ ) + }), + z.object({ + subject: z.literal(ProjectPermissionSub.Member).describe("The entity this permission pertains to."), + action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionActions).describe( + "Describe what action an entity can take." + ) + }), + z.object({ + subject: z.literal(ProjectPermissionSub.Groups).describe("The entity this permission pertains to."), + action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionActions).describe( + "Describe what action an entity can take." + ) + }), + z.object({ + subject: z.literal(ProjectPermissionSub.Role).describe("The entity this permission pertains to."), + action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionActions).describe( + "Describe what action an entity can take." + ) + }), + z.object({ + subject: z.literal(ProjectPermissionSub.Integrations).describe("The entity this permission pertains to."), + action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionActions).describe( + "Describe what action an entity can take." + ) + }), + z.object({ + subject: z.literal(ProjectPermissionSub.Webhooks).describe("The entity this permission pertains to."), + action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionActions).describe( + "Describe what action an entity can take." + ) + }), + z.object({ + subject: z.literal(ProjectPermissionSub.Identity).describe("The entity this permission pertains to."), + action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionActions).describe( + "Describe what action an entity can take." + ) + }), + z.object({ + subject: z.literal(ProjectPermissionSub.ServiceTokens).describe("The entity this permission pertains to."), + action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionActions).describe( + "Describe what action an entity can take." + ) + }), + z.object({ + subject: z.literal(ProjectPermissionSub.Settings).describe("The entity this permission pertains to."), + action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionActions).describe( + "Describe what action an entity can take." + ) + }), + z.object({ + subject: z.literal(ProjectPermissionSub.Environments).describe("The entity this permission pertains to."), + action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionActions).describe( + "Describe what action an entity can take." + ) + }), + z.object({ + subject: z.literal(ProjectPermissionSub.Tags).describe("The entity this permission pertains to."), + action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionActions).describe( + "Describe what action an entity can take." + ) + }), + z.object({ + subject: z.literal(ProjectPermissionSub.AuditLogs).describe("The entity this permission pertains to."), + action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionActions).describe( + "Describe what action an entity can take." + ) + }), + z.object({ + subject: z.literal(ProjectPermissionSub.IpAllowList).describe("The entity this permission pertains to."), + action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionActions).describe( + "Describe what action an entity can take." + ) + }), + z.object({ + subject: z.literal(ProjectPermissionSub.CertificateAuthorities).describe("The entity this permission pertains to."), + action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionActions).describe( + "Describe what action an entity can take." + ) + }), + z.object({ + subject: z.literal(ProjectPermissionSub.Certificates).describe("The entity this permission pertains to."), + action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionActions).describe( + "Describe what action an entity can take." 
+ ) + }), + z.object({ + subject: z.literal(ProjectPermissionSub.CertificateTemplates).describe("The entity this permission pertains to."), + action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionActions).describe( + "Describe what action an entity can take." + ) + }), + z.object({ + subject: z.literal(ProjectPermissionSub.PkiAlerts).describe("The entity this permission pertains to."), + action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionActions).describe( + "Describe what action an entity can take." + ) + }), + z.object({ + subject: z.literal(ProjectPermissionSub.PkiCollections).describe("The entity this permission pertains to."), + action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionActions).describe( + "Describe what action an entity can take." + ) + }), + z.object({ + subject: z.literal(ProjectPermissionSub.Project).describe("The entity this permission pertains to."), + action: CASL_ACTION_SCHEMA_ENUM([ProjectPermissionActions.Edit, ProjectPermissionActions.Delete]).describe( + "Describe what action an entity can take." + ) + }), + z.object({ + subject: z.literal(ProjectPermissionSub.Kms).describe("The entity this permission pertains to."), + action: CASL_ACTION_SCHEMA_ENUM([ProjectPermissionActions.Edit]).describe( + "Describe what action an entity can take." + ) + }), + z.object({ + subject: z.literal(ProjectPermissionSub.Cmek).describe("The entity this permission pertains to."), + inverted: z.boolean().optional().describe("Whether rule allows or forbids."), + action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionCmekActions).describe( + "Describe what action an entity can take." + ) + }) +]; + +export const ProjectPermissionV1Schema = z.discriminatedUnion("subject", [ + z.object({ + subject: z.literal(ProjectPermissionSub.Secrets).describe("The entity this permission pertains to."), + inverted: z.boolean().optional().describe("Whether rule allows or forbids."), + action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionActions).describe( + "Describe what action an entity can take." + ), + conditions: SecretConditionV1Schema.describe( + "When specified, only matching conditions will be allowed to access given resource." + ).optional() + }), + z.object({ + subject: z.literal(ProjectPermissionSub.SecretFolders).describe("The entity this permission pertains to."), + inverted: z.boolean().optional().describe("Whether rule allows or forbids."), + action: CASL_ACTION_SCHEMA_ENUM([ProjectPermissionActions.Read]).describe( + "Describe what action an entity can take." + ) + }), + ...GeneralPermissionSchema +]); + +export const ProjectPermissionV2Schema = z.discriminatedUnion("subject", [ + z.object({ + subject: z.literal(ProjectPermissionSub.Secrets).describe("The entity this permission pertains to."), + inverted: z.boolean().optional().describe("Whether rule allows or forbids."), + action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionActions).describe( + "Describe what action an entity can take." + ), + conditions: SecretConditionV2Schema.describe( + "When specified, only matching conditions will be allowed to access given resource." + ).optional() + }), + z.object({ + subject: z.literal(ProjectPermissionSub.SecretFolders).describe("The entity this permission pertains to."), + inverted: z.boolean().optional().describe("Whether rule allows or forbids."), + action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionActions).describe( + "Describe what action an entity can take." 
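Because every entry funnels its action field through the CASL_ACTION_SCHEMA helpers, a single action string is normalized to an array on parse. A hedged sketch against the V1 schema just defined (assumes it is importable from this module):

const rule = ProjectPermissionV1Schema.parse({
  subject: "secrets",
  action: "read", // normalized to ["read"] by the union + transform
  conditions: { secretPath: { $glob: "/app/**" } }
});
// rule.action => ["read"]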
+ ), + conditions: SecretConditionV1Schema.describe( + "When specified, only matching conditions will be allowed to access given resource." + ).optional() + }), + z.object({ + subject: z.literal(ProjectPermissionSub.SecretImports).describe("The entity this permission pertains to."), + inverted: z.boolean().optional().describe("Whether rule allows or forbids."), + action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionActions).describe( + "Describe what action an entity can take." + ), + conditions: SecretConditionV1Schema.describe( + "When specified, only matching conditions will be allowed to access given resource." + ).optional() + }), + z.object({ + subject: z.literal(ProjectPermissionSub.DynamicSecrets).describe("The entity this permission pertains to."), + inverted: z.boolean().optional().describe("Whether rule allows or forbids."), + action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionDynamicSecretActions).describe( + "Describe what action an entity can take." + ), + conditions: SecretConditionV1Schema.describe( + "When specified, only matching conditions will be allowed to access given resource." + ).optional() + }), + ...GeneralPermissionSchema +]); + +export type TProjectPermissionV2Schema = z.infer<typeof ProjectPermissionV2Schema>; const buildAdminPermissionRules = () => { const { can, rules } = new AbilityBuilder<MongoAbility<ProjectPermissionSet>>(createMongoAbility); - can(ProjectPermissionActions.Read, ProjectPermissionSub.Secrets); - can(ProjectPermissionActions.Create, ProjectPermissionSub.Secrets); - can(ProjectPermissionActions.Edit, ProjectPermissionSub.Secrets); - can(ProjectPermissionActions.Delete, ProjectPermissionSub.Secrets); + // Admins get full access to everything + [ + ProjectPermissionSub.Secrets, + ProjectPermissionSub.SecretFolders, + ProjectPermissionSub.SecretImports, + ProjectPermissionSub.SecretApproval, + ProjectPermissionSub.SecretRotation, + ProjectPermissionSub.Member, + ProjectPermissionSub.Groups, + ProjectPermissionSub.Role, + ProjectPermissionSub.Integrations, + ProjectPermissionSub.Webhooks, + ProjectPermissionSub.Identity, + ProjectPermissionSub.ServiceTokens, + ProjectPermissionSub.Settings, + ProjectPermissionSub.Environments, + ProjectPermissionSub.Tags, + ProjectPermissionSub.AuditLogs, + ProjectPermissionSub.IpAllowList, + ProjectPermissionSub.CertificateAuthorities, + ProjectPermissionSub.Certificates, + ProjectPermissionSub.CertificateTemplates, + ProjectPermissionSub.PkiAlerts, + ProjectPermissionSub.PkiCollections + ].forEach((el) => { + can( + [ + ProjectPermissionActions.Read, + ProjectPermissionActions.Edit, + ProjectPermissionActions.Create, + ProjectPermissionActions.Delete + ], + el as ProjectPermissionSub + ); + }); - can(ProjectPermissionActions.Read, ProjectPermissionSub.SecretApproval); - can(ProjectPermissionActions.Create, ProjectPermissionSub.SecretApproval); - can(ProjectPermissionActions.Edit, ProjectPermissionSub.SecretApproval); - can(ProjectPermissionActions.Delete, ProjectPermissionSub.SecretApproval); - - can(ProjectPermissionActions.Read, ProjectPermissionSub.SecretRotation); - can(ProjectPermissionActions.Create, ProjectPermissionSub.SecretRotation); - can(ProjectPermissionActions.Edit, ProjectPermissionSub.SecretRotation); - can(ProjectPermissionActions.Delete, ProjectPermissionSub.SecretRotation); - - can(ProjectPermissionActions.Read, ProjectPermissionSub.SecretRollback); - can(ProjectPermissionActions.Create, ProjectPermissionSub.SecretRollback); - - can(ProjectPermissionActions.Read, ProjectPermissionSub.Member); - can(ProjectPermissionActions.Create, 
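The inferred TProjectPermissionV2Schema type is what the role and privilege DTOs later in this PR use. Sketch of validating an incoming policy array with it (hypothetical payload):

const policies = ProjectPermissionV2Schema.array().parse([
  { subject: "dynamic-secrets", action: ["lease"] },
  { subject: "cmek", action: "encrypt", inverted: true } // a forbid rule
]);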
ProjectPermissionSub.Member); - can(ProjectPermissionActions.Edit, ProjectPermissionSub.Member); - can(ProjectPermissionActions.Delete, ProjectPermissionSub.Member); - - can(ProjectPermissionActions.Read, ProjectPermissionSub.Groups); - can(ProjectPermissionActions.Create, ProjectPermissionSub.Groups); - can(ProjectPermissionActions.Edit, ProjectPermissionSub.Groups); - can(ProjectPermissionActions.Delete, ProjectPermissionSub.Groups); - - can(ProjectPermissionActions.Read, ProjectPermissionSub.Role); - can(ProjectPermissionActions.Create, ProjectPermissionSub.Role); - can(ProjectPermissionActions.Edit, ProjectPermissionSub.Role); - can(ProjectPermissionActions.Delete, ProjectPermissionSub.Role); - - can(ProjectPermissionActions.Read, ProjectPermissionSub.Integrations); - can(ProjectPermissionActions.Create, ProjectPermissionSub.Integrations); - can(ProjectPermissionActions.Edit, ProjectPermissionSub.Integrations); - can(ProjectPermissionActions.Delete, ProjectPermissionSub.Integrations); - - can(ProjectPermissionActions.Read, ProjectPermissionSub.Webhooks); - can(ProjectPermissionActions.Create, ProjectPermissionSub.Webhooks); - can(ProjectPermissionActions.Edit, ProjectPermissionSub.Webhooks); - can(ProjectPermissionActions.Delete, ProjectPermissionSub.Webhooks); - - can(ProjectPermissionActions.Read, ProjectPermissionSub.Identity); - can(ProjectPermissionActions.Create, ProjectPermissionSub.Identity); - can(ProjectPermissionActions.Edit, ProjectPermissionSub.Identity); - can(ProjectPermissionActions.Delete, ProjectPermissionSub.Identity); - - can(ProjectPermissionActions.Read, ProjectPermissionSub.ServiceTokens); - can(ProjectPermissionActions.Create, ProjectPermissionSub.ServiceTokens); - can(ProjectPermissionActions.Edit, ProjectPermissionSub.ServiceTokens); - can(ProjectPermissionActions.Delete, ProjectPermissionSub.ServiceTokens); - - can(ProjectPermissionActions.Read, ProjectPermissionSub.Settings); - can(ProjectPermissionActions.Create, ProjectPermissionSub.Settings); - can(ProjectPermissionActions.Edit, ProjectPermissionSub.Settings); - can(ProjectPermissionActions.Delete, ProjectPermissionSub.Settings); - - can(ProjectPermissionActions.Read, ProjectPermissionSub.Environments); - can(ProjectPermissionActions.Create, ProjectPermissionSub.Environments); - can(ProjectPermissionActions.Edit, ProjectPermissionSub.Environments); - can(ProjectPermissionActions.Delete, ProjectPermissionSub.Environments); - - can(ProjectPermissionActions.Read, ProjectPermissionSub.Tags); - can(ProjectPermissionActions.Create, ProjectPermissionSub.Tags); - can(ProjectPermissionActions.Edit, ProjectPermissionSub.Tags); - can(ProjectPermissionActions.Delete, ProjectPermissionSub.Tags); - - can(ProjectPermissionActions.Read, ProjectPermissionSub.AuditLogs); - can(ProjectPermissionActions.Create, ProjectPermissionSub.AuditLogs); - can(ProjectPermissionActions.Edit, ProjectPermissionSub.AuditLogs); - can(ProjectPermissionActions.Delete, ProjectPermissionSub.AuditLogs); - - can(ProjectPermissionActions.Read, ProjectPermissionSub.IpAllowList); - can(ProjectPermissionActions.Create, ProjectPermissionSub.IpAllowList); - can(ProjectPermissionActions.Edit, ProjectPermissionSub.IpAllowList); - can(ProjectPermissionActions.Delete, ProjectPermissionSub.IpAllowList); - - can(ProjectPermissionActions.Edit, ProjectPermissionSub.Project); - can(ProjectPermissionActions.Delete, ProjectPermissionSub.Project); + can( + [ + ProjectPermissionDynamicSecretActions.ReadRootCredential, + 
ProjectPermissionDynamicSecretActions.EditRootCredential, + ProjectPermissionDynamicSecretActions.CreateRootCredential, + ProjectPermissionDynamicSecretActions.DeleteRootCredential, + ProjectPermissionDynamicSecretActions.Lease + ], + ProjectPermissionSub.DynamicSecrets + ); + can([ProjectPermissionActions.Edit, ProjectPermissionActions.Delete], ProjectPermissionSub.Project); + can([ProjectPermissionActions.Read, ProjectPermissionActions.Create], ProjectPermissionSub.SecretRollback); + can([ProjectPermissionActions.Edit], ProjectPermissionSub.Kms); + can( + [ + ProjectPermissionCmekActions.Create, + ProjectPermissionCmekActions.Edit, + ProjectPermissionCmekActions.Delete, + ProjectPermissionCmekActions.Read, + ProjectPermissionCmekActions.Encrypt, + ProjectPermissionCmekActions.Decrypt + ], + ProjectPermissionSub.Cmek + ); return rules; }; @@ -150,60 +494,156 @@ export const projectAdminPermissions = buildAdminPermissionRules(); const buildMemberPermissionRules = () => { const { can, rules } = new AbilityBuilder>(createMongoAbility); - can(ProjectPermissionActions.Read, ProjectPermissionSub.Secrets); - can(ProjectPermissionActions.Create, ProjectPermissionSub.Secrets); - can(ProjectPermissionActions.Edit, ProjectPermissionSub.Secrets); - can(ProjectPermissionActions.Delete, ProjectPermissionSub.Secrets); + can( + [ + ProjectPermissionActions.Read, + ProjectPermissionActions.Edit, + ProjectPermissionActions.Create, + ProjectPermissionActions.Delete + ], + ProjectPermissionSub.Secrets + ); + can( + [ + ProjectPermissionActions.Read, + ProjectPermissionActions.Edit, + ProjectPermissionActions.Create, + ProjectPermissionActions.Delete + ], + ProjectPermissionSub.SecretFolders + ); + can( + [ + ProjectPermissionDynamicSecretActions.ReadRootCredential, + ProjectPermissionDynamicSecretActions.EditRootCredential, + ProjectPermissionDynamicSecretActions.CreateRootCredential, + ProjectPermissionDynamicSecretActions.DeleteRootCredential, + ProjectPermissionDynamicSecretActions.Lease + ], + ProjectPermissionSub.DynamicSecrets + ); + can( + [ + ProjectPermissionActions.Read, + ProjectPermissionActions.Edit, + ProjectPermissionActions.Create, + ProjectPermissionActions.Delete + ], + ProjectPermissionSub.SecretImports + ); - can(ProjectPermissionActions.Read, ProjectPermissionSub.SecretApproval); - can(ProjectPermissionActions.Read, ProjectPermissionSub.SecretRotation); + can([ProjectPermissionActions.Read], ProjectPermissionSub.SecretApproval); + can([ProjectPermissionActions.Read], ProjectPermissionSub.SecretRotation); - can(ProjectPermissionActions.Read, ProjectPermissionSub.SecretRollback); - can(ProjectPermissionActions.Create, ProjectPermissionSub.SecretRollback); + can([ProjectPermissionActions.Read, ProjectPermissionActions.Create], ProjectPermissionSub.SecretRollback); - can(ProjectPermissionActions.Read, ProjectPermissionSub.Member); - can(ProjectPermissionActions.Create, ProjectPermissionSub.Member); + can([ProjectPermissionActions.Read, ProjectPermissionActions.Create], ProjectPermissionSub.Member); - can(ProjectPermissionActions.Read, ProjectPermissionSub.Groups); + can([ProjectPermissionActions.Read], ProjectPermissionSub.Groups); - can(ProjectPermissionActions.Read, ProjectPermissionSub.Integrations); - can(ProjectPermissionActions.Create, ProjectPermissionSub.Integrations); - can(ProjectPermissionActions.Edit, ProjectPermissionSub.Integrations); - can(ProjectPermissionActions.Delete, ProjectPermissionSub.Integrations); + can( + [ + ProjectPermissionActions.Read, + 
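To see the exported rule sets in use, they can be materialized into an ability with the same createMongoAbility/conditionsMatcher pair this file already imports; a sketch, not code from the PR:

import { createMongoAbility } from "@casl/ability";
import { conditionsMatcher } from "@app/lib/casl";

const admin = createMongoAbility<ProjectPermissionSet>(projectAdminPermissions, { conditionsMatcher });
admin.can(ProjectPermissionActions.Delete, ProjectPermissionSub.Project);    // true
admin.can(ProjectPermissionCmekActions.Decrypt, ProjectPermissionSub.Cmek);  // true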
ProjectPermissionActions.Edit, + ProjectPermissionActions.Create, + ProjectPermissionActions.Delete + ], + ProjectPermissionSub.Integrations + ); - can(ProjectPermissionActions.Read, ProjectPermissionSub.Webhooks); - can(ProjectPermissionActions.Create, ProjectPermissionSub.Webhooks); - can(ProjectPermissionActions.Edit, ProjectPermissionSub.Webhooks); - can(ProjectPermissionActions.Delete, ProjectPermissionSub.Webhooks); + can( + [ + ProjectPermissionActions.Read, + ProjectPermissionActions.Edit, + ProjectPermissionActions.Create, + ProjectPermissionActions.Delete + ], + ProjectPermissionSub.Webhooks + ); - can(ProjectPermissionActions.Read, ProjectPermissionSub.Identity); - can(ProjectPermissionActions.Create, ProjectPermissionSub.Identity); - can(ProjectPermissionActions.Edit, ProjectPermissionSub.Identity); - can(ProjectPermissionActions.Delete, ProjectPermissionSub.Identity); + can( + [ + ProjectPermissionActions.Read, + ProjectPermissionActions.Edit, + ProjectPermissionActions.Create, + ProjectPermissionActions.Delete + ], + ProjectPermissionSub.Identity + ); - can(ProjectPermissionActions.Read, ProjectPermissionSub.ServiceTokens); - can(ProjectPermissionActions.Create, ProjectPermissionSub.ServiceTokens); - can(ProjectPermissionActions.Edit, ProjectPermissionSub.ServiceTokens); - can(ProjectPermissionActions.Delete, ProjectPermissionSub.ServiceTokens); + can( + [ + ProjectPermissionActions.Read, + ProjectPermissionActions.Edit, + ProjectPermissionActions.Create, + ProjectPermissionActions.Delete + ], + ProjectPermissionSub.ServiceTokens + ); - can(ProjectPermissionActions.Read, ProjectPermissionSub.Settings); - can(ProjectPermissionActions.Create, ProjectPermissionSub.Settings); - can(ProjectPermissionActions.Edit, ProjectPermissionSub.Settings); - can(ProjectPermissionActions.Delete, ProjectPermissionSub.Settings); + can( + [ + ProjectPermissionActions.Read, + ProjectPermissionActions.Edit, + ProjectPermissionActions.Create, + ProjectPermissionActions.Delete + ], + ProjectPermissionSub.Settings + ); - can(ProjectPermissionActions.Read, ProjectPermissionSub.Environments); - can(ProjectPermissionActions.Create, ProjectPermissionSub.Environments); - can(ProjectPermissionActions.Edit, ProjectPermissionSub.Environments); - can(ProjectPermissionActions.Delete, ProjectPermissionSub.Environments); + can( + [ + ProjectPermissionActions.Read, + ProjectPermissionActions.Edit, + ProjectPermissionActions.Create, + ProjectPermissionActions.Delete + ], + ProjectPermissionSub.Environments + ); - can(ProjectPermissionActions.Read, ProjectPermissionSub.Tags); - can(ProjectPermissionActions.Create, ProjectPermissionSub.Tags); - can(ProjectPermissionActions.Edit, ProjectPermissionSub.Tags); - can(ProjectPermissionActions.Delete, ProjectPermissionSub.Tags); + can( + [ + ProjectPermissionActions.Read, + ProjectPermissionActions.Edit, + ProjectPermissionActions.Create, + ProjectPermissionActions.Delete + ], + ProjectPermissionSub.Tags + ); - can(ProjectPermissionActions.Read, ProjectPermissionSub.Role); - can(ProjectPermissionActions.Read, ProjectPermissionSub.AuditLogs); - can(ProjectPermissionActions.Read, ProjectPermissionSub.IpAllowList); + can([ProjectPermissionActions.Read], ProjectPermissionSub.Role); + can([ProjectPermissionActions.Read], ProjectPermissionSub.AuditLogs); + can([ProjectPermissionActions.Read], ProjectPermissionSub.IpAllowList); + + // double check if all CRUD are needed for CA and Certificates + can([ProjectPermissionActions.Read], ProjectPermissionSub.CertificateAuthorities); + + 
can( + [ + ProjectPermissionActions.Read, + ProjectPermissionActions.Edit, + ProjectPermissionActions.Create, + ProjectPermissionActions.Delete + ], + ProjectPermissionSub.Certificates + ); + + can([ProjectPermissionActions.Read], ProjectPermissionSub.CertificateTemplates); + + can([ProjectPermissionActions.Read], ProjectPermissionSub.PkiAlerts); + can([ProjectPermissionActions.Read], ProjectPermissionSub.PkiCollections); + + can( + [ + ProjectPermissionCmekActions.Create, + ProjectPermissionCmekActions.Edit, + ProjectPermissionCmekActions.Delete, + ProjectPermissionCmekActions.Read, + ProjectPermissionCmekActions.Encrypt, + ProjectPermissionCmekActions.Decrypt + ], + ProjectPermissionSub.Cmek + ); return rules; }; @@ -214,6 +654,9 @@ const buildViewerPermissionRules = () => { const { can, rules } = new AbilityBuilder>(createMongoAbility); can(ProjectPermissionActions.Read, ProjectPermissionSub.Secrets); + can(ProjectPermissionActions.Read, ProjectPermissionSub.SecretFolders); + can(ProjectPermissionDynamicSecretActions.ReadRootCredential, ProjectPermissionSub.DynamicSecrets); + can(ProjectPermissionActions.Read, ProjectPermissionSub.SecretImports); can(ProjectPermissionActions.Read, ProjectPermissionSub.SecretApproval); can(ProjectPermissionActions.Read, ProjectPermissionSub.SecretRollback); can(ProjectPermissionActions.Read, ProjectPermissionSub.SecretRotation); @@ -229,6 +672,9 @@ const buildViewerPermissionRules = () => { can(ProjectPermissionActions.Read, ProjectPermissionSub.Tags); can(ProjectPermissionActions.Read, ProjectPermissionSub.AuditLogs); can(ProjectPermissionActions.Read, ProjectPermissionSub.IpAllowList); + can(ProjectPermissionActions.Read, ProjectPermissionSub.CertificateAuthorities); + can(ProjectPermissionActions.Read, ProjectPermissionSub.Certificates); + can(ProjectPermissionCmekActions.Read, ProjectPermissionSub.Cmek); return rules; }; @@ -248,31 +694,35 @@ export const buildServiceTokenProjectPermission = ( const canRead = permission.includes("read"); const { can, build } = new AbilityBuilder>(createMongoAbility); scopes.forEach(({ secretPath, environment }) => { - if (canWrite) { - // TODO: @Akhi - // @ts-expect-error type - can(ProjectPermissionActions.Edit, ProjectPermissionSub.Secrets, { - secretPath: { $glob: secretPath }, - environment - }); - // @ts-expect-error type - can(ProjectPermissionActions.Create, ProjectPermissionSub.Secrets, { - secretPath: { $glob: secretPath }, - environment - }); - // @ts-expect-error type - can(ProjectPermissionActions.Delete, ProjectPermissionSub.Secrets, { - secretPath: { $glob: secretPath }, - environment - }); - } - if (canRead) { - // @ts-expect-error type - can(ProjectPermissionActions.Read, ProjectPermissionSub.Secrets, { - secretPath: { $glob: secretPath }, - environment - }); - } + [ProjectPermissionSub.Secrets, ProjectPermissionSub.SecretImports, ProjectPermissionSub.SecretFolders].forEach( + (subject) => { + if (canWrite) { + // TODO: @Akhi + // @ts-expect-error type + can(ProjectPermissionActions.Edit, subject, { + secretPath: { $glob: secretPath }, + environment + }); + // @ts-expect-error type + can(ProjectPermissionActions.Create, subject, { + secretPath: { $glob: secretPath }, + environment + }); + // @ts-expect-error type + can(ProjectPermissionActions.Delete, subject, { + secretPath: { $glob: secretPath }, + environment + }); + } + if (canRead) { + // @ts-expect-error type + can(ProjectPermissionActions.Read, subject, { + secretPath: { $glob: secretPath }, + environment + }); + } + } + ); }); return build({ 
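The service-token builder above now stamps the same $glob-scoped rules onto secrets, secret-imports, and secret-folders. A sketch of a resulting check (argument order assumed from the function body; subject fields are matched by conditionsMatcher):

import { subject } from "@casl/ability";

const ability = buildServiceTokenProjectPermission(
  [{ environment: "dev", secretPath: "/app/**" }],
  ["read"]
);
ability.can(ProjectPermissionActions.Read, subject(ProjectPermissionSub.Secrets, { environment: "dev", secretPath: "/app/db" }));  // true
ability.can(ProjectPermissionActions.Read, subject(ProjectPermissionSub.Secrets, { environment: "prod", secretPath: "/app/db" })); // false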
conditionsMatcher }); @@ -311,5 +761,65 @@ export const isAtLeastAsPrivilegedWorkspace = ( return set1.size >= set2.size; }; - /* eslint-enable */ + +export const backfillPermissionV1SchemaToV2Schema = ( + data: z.infer<typeof ProjectPermissionV1Schema>[], + dontRemoveReadFolderPermission?: boolean +) => { + let formattedData = UnpackedPermissionSchema.array().parse(data); + const secretSubjects = formattedData.filter((el) => el.subject === ProjectPermissionSub.Secrets); + + // this means the folder permission as readonly is set + const hasReadOnlyFolder = formattedData.filter((el) => el.subject === ProjectPermissionSub.SecretFolders); + const secretImportPolicies = secretSubjects.map(({ subject, ...el }) => ({ + ...el, + subject: ProjectPermissionSub.SecretImports as const + })); + + const secretFolderPolicies = secretSubjects + .map(({ subject, ...el }) => ({ + ...el, + // read permission is not needed anymore + action: el.action.filter((caslAction) => caslAction !== ProjectPermissionActions.Read), + subject: ProjectPermissionSub.SecretFolders + })) + .filter((el) => el.action?.length > 0); + + const dynamicSecretPolicies = secretSubjects.map(({ subject, ...el }) => { + const action = el.action.map((e) => { + switch (e) { + case ProjectPermissionActions.Edit: + return ProjectPermissionDynamicSecretActions.EditRootCredential; + case ProjectPermissionActions.Create: + return ProjectPermissionDynamicSecretActions.CreateRootCredential; + case ProjectPermissionActions.Delete: + return ProjectPermissionDynamicSecretActions.DeleteRootCredential; + case ProjectPermissionActions.Read: + return ProjectPermissionDynamicSecretActions.ReadRootCredential; + default: + return ProjectPermissionDynamicSecretActions.ReadRootCredential; + } + }); + + return { + ...el, + action: el.action.includes(ProjectPermissionActions.Edit) + ? [...action, ProjectPermissionDynamicSecretActions.Lease] + : action, + subject: ProjectPermissionSub.DynamicSecrets + }; + }); + + if (!dontRemoveReadFolderPermission) { + formattedData = formattedData.filter((i) => i.subject !== ProjectPermissionSub.SecretFolders); + } + + return formattedData.concat( + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore-error this is valid ts + secretImportPolicies, + dynamicSecretPolicies, + hasReadOnlyFolder.length ? 
[] : secretFolderPolicies ); }; diff --git a/backend/src/ee/services/project-template/project-template-constants.ts b/backend/src/ee/services/project-template/project-template-constants.ts new file mode 100644 index 0000000000..778de28616 --- /dev/null +++ b/backend/src/ee/services/project-template/project-template-constants.ts @@ -0,0 +1,5 @@ +export const ProjectTemplateDefaultEnvironments = [ + { name: "Development", slug: "dev", position: 1 }, + { name: "Staging", slug: "staging", position: 2 }, + { name: "Production", slug: "prod", position: 3 } +]; diff --git a/backend/src/ee/services/project-template/project-template-dal.ts b/backend/src/ee/services/project-template/project-template-dal.ts new file mode 100644 index 0000000000..4dbbea279d --- /dev/null +++ b/backend/src/ee/services/project-template/project-template-dal.ts @@ -0,0 +1,7 @@ +import { TDbClient } from "@app/db"; +import { TableName } from "@app/db/schemas"; +import { ormify } from "@app/lib/knex"; + +export type TProjectTemplateDALFactory = ReturnType<typeof projectTemplateDALFactory>; + +export const projectTemplateDALFactory = (db: TDbClient) => ormify(db, TableName.ProjectTemplates); diff --git a/backend/src/ee/services/project-template/project-template-fns.ts b/backend/src/ee/services/project-template/project-template-fns.ts new file mode 100644 index 0000000000..2ca78e8762 --- /dev/null +++ b/backend/src/ee/services/project-template/project-template-fns.ts @@ -0,0 +1,24 @@ +import { ProjectTemplateDefaultEnvironments } from "@app/ee/services/project-template/project-template-constants"; +import { + InfisicalProjectTemplate, + TUnpackedPermission +} from "@app/ee/services/project-template/project-template-types"; +import { getPredefinedRoles } from "@app/services/project-role/project-role-fns"; + +export const getDefaultProjectTemplate = (orgId: string) => ({ + id: "b11b49a9-09a9-4443-916a-4246f9ff2c69", // random ID to appease zod + name: InfisicalProjectTemplate.Default, + createdAt: new Date(), + updatedAt: new Date(), + description: "Infisical's default project template", + environments: ProjectTemplateDefaultEnvironments, + roles: [...getPredefinedRoles("project-template")].map(({ name, slug, permissions }) => ({ + name, + slug, + permissions: permissions as TUnpackedPermission[] + })), + orgId +}); + +export const isInfisicalProjectTemplate = (template: string) => + Object.values(InfisicalProjectTemplate).includes(template as InfisicalProjectTemplate); diff --git a/backend/src/ee/services/project-template/project-template-service.ts b/backend/src/ee/services/project-template/project-template-service.ts new file mode 100644 index 0000000000..5afa58caf4 --- /dev/null +++ b/backend/src/ee/services/project-template/project-template-service.ts @@ -0,0 +1,265 @@ +import { ForbiddenError } from "@casl/ability"; +import { packRules } from "@casl/ability/extra"; + +import { TProjectTemplates } from "@app/db/schemas"; +import { TLicenseServiceFactory } from "@app/ee/services/license/license-service"; +import { OrgPermissionActions, OrgPermissionSubjects } from "@app/ee/services/permission/org-permission"; +import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service"; +import { getDefaultProjectTemplate } from "@app/ee/services/project-template/project-template-fns"; +import { + TCreateProjectTemplateDTO, + TProjectTemplateEnvironment, + TProjectTemplateRole, + TUnpackedPermission, + TUpdateProjectTemplateDTO +} from "@app/ee/services/project-template/project-template-types"; +import { BadRequestError, NotFoundError } 
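Worked example of the V1-to-V2 backfill defined above, with a hypothetical input policy:

// Input (V1):  { subject: "secrets", action: ["read", "edit"], conditions: { environment: "dev" } }
// Output keeps the secrets policy and, per the mapping above, additionally emits:
//   { subject: "secret-imports",  action: ["read", "edit"], conditions: { environment: "dev" } }
//   { subject: "secret-folders",  action: ["edit"], conditions: { environment: "dev" } }  // read stripped
//   { subject: "dynamic-secrets", action: ["read-root-credential", "edit-root-credential", "lease"], conditions: { environment: "dev" } }
// If the input already contains a secret-folders rule, no folder policies are generated.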
from "@app/lib/errors"; +import { OrgServiceActor } from "@app/lib/types"; +import { unpackPermissions } from "@app/server/routes/santizedSchemas/permission"; +import { getPredefinedRoles } from "@app/services/project-role/project-role-fns"; + +import { TProjectTemplateDALFactory } from "./project-template-dal"; + +type TProjectTemplatesServiceFactoryDep = { + licenseService: TLicenseServiceFactory; + permissionService: TPermissionServiceFactory; + projectTemplateDAL: TProjectTemplateDALFactory; +}; + +export type TProjectTemplateServiceFactory = ReturnType; + +const $unpackProjectTemplate = ({ roles, environments, ...rest }: TProjectTemplates) => ({ + ...rest, + environments: environments as TProjectTemplateEnvironment[], + roles: [ + ...getPredefinedRoles("project-template").map(({ name, slug, permissions }) => ({ + name, + slug, + permissions: permissions as TUnpackedPermission[] + })), + ...(roles as TProjectTemplateRole[]).map((role) => ({ + ...role, + permissions: unpackPermissions(role.permissions) + })) + ] +}); + +export const projectTemplateServiceFactory = ({ + licenseService, + permissionService, + projectTemplateDAL +}: TProjectTemplatesServiceFactoryDep) => { + const listProjectTemplatesByOrg = async (actor: OrgServiceActor) => { + const plan = await licenseService.getPlan(actor.orgId); + + if (!plan.projectTemplates) + throw new BadRequestError({ + message: "Failed to access project templates due to plan restriction. Upgrade plan to access project templates." + }); + + const { permission } = await permissionService.getOrgPermission( + actor.type, + actor.id, + actor.orgId, + actor.authMethod, + actor.orgId + ); + + ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Read, OrgPermissionSubjects.ProjectTemplates); + + const projectTemplates = await projectTemplateDAL.find({ + orgId: actor.orgId + }); + + return [ + getDefaultProjectTemplate(actor.orgId), + ...projectTemplates.map((template) => $unpackProjectTemplate(template)) + ]; + }; + + const findProjectTemplateByName = async (name: string, actor: OrgServiceActor) => { + const plan = await licenseService.getPlan(actor.orgId); + + if (!plan.projectTemplates) + throw new BadRequestError({ + message: "Failed to access project template due to plan restriction. Upgrade plan to access project templates." + }); + + const projectTemplate = await projectTemplateDAL.findOne({ name, orgId: actor.orgId }); + + if (!projectTemplate) throw new NotFoundError({ message: `Could not find project template with Name "${name}"` }); + + const { permission } = await permissionService.getOrgPermission( + actor.type, + actor.id, + projectTemplate.orgId, + actor.authMethod, + actor.orgId + ); + + ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Read, OrgPermissionSubjects.ProjectTemplates); + + return { + ...$unpackProjectTemplate(projectTemplate), + packedRoles: projectTemplate.roles as TProjectTemplateRole[] // preserve packed for when applying template + }; + }; + + const findProjectTemplateById = async (id: string, actor: OrgServiceActor) => { + const plan = await licenseService.getPlan(actor.orgId); + + if (!plan.projectTemplates) + throw new BadRequestError({ + message: "Failed to access project template due to plan restriction. Upgrade plan to access project templates." 
+ }); + + const projectTemplate = await projectTemplateDAL.findById(id); + + if (!projectTemplate) throw new NotFoundError({ message: `Could not find project template with ID ${id}` }); + + const { permission } = await permissionService.getOrgPermission( + actor.type, + actor.id, + projectTemplate.orgId, + actor.authMethod, + actor.orgId + ); + + ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Read, OrgPermissionSubjects.ProjectTemplates); + + return { + ...$unpackProjectTemplate(projectTemplate), + packedRoles: projectTemplate.roles as TProjectTemplateRole[] // preserve packed for when applying template + }; + }; + + const createProjectTemplate = async ( + { roles, environments, ...params }: TCreateProjectTemplateDTO, + actor: OrgServiceActor + ) => { + const plan = await licenseService.getPlan(actor.orgId); + + if (!plan.projectTemplates) + throw new BadRequestError({ + message: "Failed to create project template due to plan restriction. Upgrade plan to access project templates." + }); + + const { permission } = await permissionService.getOrgPermission( + actor.type, + actor.id, + actor.orgId, + actor.authMethod, + actor.orgId + ); + + ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Create, OrgPermissionSubjects.ProjectTemplates); + + const isConflictingName = Boolean( + await projectTemplateDAL.findOne({ + name: params.name, + orgId: actor.orgId + }) + ); + + if (isConflictingName) + throw new BadRequestError({ + message: `A project template with the name "${params.name}" already exists.` + }); + + const projectTemplate = await projectTemplateDAL.create({ + ...params, + roles: JSON.stringify(roles.map((role) => ({ ...role, permissions: packRules(role.permissions) }))), + environments: JSON.stringify(environments), + orgId: actor.orgId + }); + + return $unpackProjectTemplate(projectTemplate); + }; + + const updateProjectTemplateById = async ( + id: string, + { roles, environments, ...params }: TUpdateProjectTemplateDTO, + actor: OrgServiceActor + ) => { + const plan = await licenseService.getPlan(actor.orgId); + + if (!plan.projectTemplates) + throw new BadRequestError({ + message: "Failed to update project template due to plan restriction. Upgrade plan to access project templates." + }); + + const projectTemplate = await projectTemplateDAL.findById(id); + + if (!projectTemplate) throw new NotFoundError({ message: `Could not find project template with ID ${id}` }); + + const { permission } = await permissionService.getOrgPermission( + actor.type, + actor.id, + projectTemplate.orgId, + actor.authMethod, + actor.orgId + ); + + ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Edit, OrgPermissionSubjects.ProjectTemplates); + + if (params.name && projectTemplate.name !== params.name) { + const isConflictingName = Boolean( + await projectTemplateDAL.findOne({ + name: params.name, + orgId: projectTemplate.orgId + }) + ); + + if (isConflictingName) + throw new BadRequestError({ + message: `A project template with the name "${params.name}" already exists.` + }); + } + + const updatedProjectTemplate = await projectTemplateDAL.updateById(id, { + ...params, + roles: roles + ? JSON.stringify(roles.map((role) => ({ ...role, permissions: packRules(role.permissions) }))) + : undefined, + environments: environments ? 
JSON.stringify(environments) : undefined + }); + + return $unpackProjectTemplate(updatedProjectTemplate); + }; + + const deleteProjectTemplateById = async (id: string, actor: OrgServiceActor) => { + const plan = await licenseService.getPlan(actor.orgId); + + if (!plan.projectTemplates) + throw new BadRequestError({ + message: "Failed to delete project template due to plan restriction. Upgrade plan to access project templates." + }); + + const projectTemplate = await projectTemplateDAL.findById(id); + + if (!projectTemplate) throw new NotFoundError({ message: `Could not find project template with ID ${id}` }); + + const { permission } = await permissionService.getOrgPermission( + actor.type, + actor.id, + projectTemplate.orgId, + actor.authMethod, + actor.orgId + ); + + ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Delete, OrgPermissionSubjects.ProjectTemplates); + + const deletedProjectTemplate = await projectTemplateDAL.deleteById(id); + + return $unpackProjectTemplate(deletedProjectTemplate); + }; + + return { + listProjectTemplatesByOrg, + createProjectTemplate, + updateProjectTemplateById, + deleteProjectTemplateById, + findProjectTemplateById, + findProjectTemplateByName + }; +}; diff --git a/backend/src/ee/services/project-template/project-template-types.ts b/backend/src/ee/services/project-template/project-template-types.ts new file mode 100644 index 0000000000..6b600f3868 --- /dev/null +++ b/backend/src/ee/services/project-template/project-template-types.ts @@ -0,0 +1,28 @@ +import { z } from "zod"; + +import { TProjectEnvironments } from "@app/db/schemas"; +import { TProjectPermissionV2Schema } from "@app/ee/services/permission/project-permission"; +import { UnpackedPermissionSchema } from "@app/server/routes/santizedSchemas/permission"; + +export type TProjectTemplateEnvironment = Pick<TProjectEnvironments, "name" | "slug" | "position">; + +export type TProjectTemplateRole = { + slug: string; + name: string; + permissions: TProjectPermissionV2Schema[]; +}; + +export type TCreateProjectTemplateDTO = { + name: string; + description?: string; + roles: TProjectTemplateRole[]; + environments: TProjectTemplateEnvironment[]; +}; + +export type TUpdateProjectTemplateDTO = Partial<TCreateProjectTemplateDTO>; + +export type TUnpackedPermission = z.infer<typeof UnpackedPermissionSchema>; + +export enum InfisicalProjectTemplate { + Default = "default" +} diff --git a/backend/src/ee/services/project-user-additional-privilege/project-user-additional-privilege-service.ts b/backend/src/ee/services/project-user-additional-privilege/project-user-additional-privilege-service.ts index c9ff2c7e07..86d8e652a5 100644 --- a/backend/src/ee/services/project-user-additional-privilege/project-user-additional-privilege-service.ts +++ b/backend/src/ee/services/project-user-additional-privilege/project-user-additional-privilege-service.ts @@ -1,11 +1,16 @@ -import { ForbiddenError } from "@casl/ability"; +import { ForbiddenError, MongoAbility, RawRuleOf } from "@casl/ability"; +import { PackRule, packRules, unpackRules } from "@casl/ability/extra"; import ms from "ms"; -import { BadRequestError } from "@app/lib/errors"; +import { TableName } from "@app/db/schemas"; +import { isAtLeastAsPrivileged } from "@app/lib/casl"; +import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors"; +import { UnpackedPermissionSchema } from "@app/server/routes/santizedSchemas/permission"; +import { ActorType } from "@app/services/auth/auth-type"; import { TProjectMembershipDALFactory } from "@app/services/project-membership/project-membership-dal"; import { TPermissionServiceFactory } from 
"../permission/permission-service"; -import { ProjectPermissionActions, ProjectPermissionSub } from "../permission/project-permission"; +import { ProjectPermissionActions, ProjectPermissionSet, ProjectPermissionSub } from "../permission/project-permission"; import { TProjectUserAdditionalPrivilegeDALFactory } from "./project-user-additional-privilege-dal"; import { ProjectUserAdditionalPrivilegeTemporaryMode, @@ -18,7 +23,7 @@ import { type TProjectUserAdditionalPrivilegeServiceFactoryDep = { projectUserAdditionalPrivilegeDAL: TProjectUserAdditionalPrivilegeDALFactory; - projectMembershipDAL: Pick; + projectMembershipDAL: Pick; permissionService: Pick; }; @@ -26,6 +31,11 @@ export type TProjectUserAdditionalPrivilegeServiceFactory = ReturnType< typeof projectUserAdditionalPrivilegeServiceFactory >; +const unpackPermissions = (permissions: unknown) => + UnpackedPermissionSchema.array().parse( + unpackRules((permissions || []) as PackRule>>[]) + ); + export const projectUserAdditionalPrivilegeServiceFactory = ({ projectUserAdditionalPrivilegeDAL, projectMembershipDAL, @@ -42,7 +52,8 @@ export const projectUserAdditionalPrivilegeServiceFactory = ({ ...dto }: TCreateUserPrivilegeDTO) => { const projectMembership = await projectMembershipDAL.findById(projectMembershipId); - if (!projectMembership) throw new BadRequestError({ message: "Project membership not found" }); + if (!projectMembership) + throw new NotFoundError({ message: `Project membership with ID ${projectMembershipId} found` }); const { permission } = await permissionService.getProjectPermission( actor, @@ -52,31 +63,59 @@ export const projectUserAdditionalPrivilegeServiceFactory = ({ actorOrgId ); ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Edit, ProjectPermissionSub.Member); + const { permission: targetUserPermission } = await permissionService.getProjectPermission( + ActorType.USER, + projectMembership.userId, + projectMembership.projectId, + actorAuthMethod, + actorOrgId + ); - const existingSlug = await projectUserAdditionalPrivilegeDAL.findOne({ slug, projectMembershipId }); - if (existingSlug) throw new BadRequestError({ message: "Additional privilege of provided slug exist" }); + // we need to validate that the privilege given is not higher than the assigning users permission + // @ts-expect-error this is expected error because of one being really accurate rule definition other being a bit more broader. 
Both are valid casl rules + targetUserPermission.update(targetUserPermission.rules.concat(customPermission)); + const hasRequiredPriviledges = isAtLeastAsPrivileged(permission, targetUserPermission); + if (!hasRequiredPriviledges) + throw new ForbiddenRequestError({ message: "Failed to update more privileged identity" }); + const existingSlug = await projectUserAdditionalPrivilegeDAL.findOne({ + slug, + projectId: projectMembership.projectId, + userId: projectMembership.userId + }); + if (existingSlug) + throw new BadRequestError({ message: `Additional privilege with provided slug ${slug} already exists` }); + + const packedPermission = JSON.stringify(packRules(customPermission)); if (!dto.isTemporary) { const additionalPrivilege = await projectUserAdditionalPrivilegeDAL.create({ - projectMembershipId, + userId: projectMembership.userId, + projectId: projectMembership.projectId, slug, - permissions: customPermission + permissions: packedPermission }); - return additionalPrivilege; + return { + ...additionalPrivilege, + permissions: unpackPermissions(additionalPrivilege.permissions) + }; } const relativeTempAllocatedTimeInMs = ms(dto.temporaryRange); const additionalPrivilege = await projectUserAdditionalPrivilegeDAL.create({ - projectMembershipId, + projectId: projectMembership.projectId, + userId: projectMembership.userId, slug, - permissions: customPermission, + permissions: packedPermission, isTemporary: true, temporaryMode: ProjectUserAdditionalPrivilegeTemporaryMode.Relative, temporaryRange: dto.temporaryRange, temporaryAccessStartTime: new Date(dto.temporaryAccessStartTime), temporaryAccessEndTime: new Date(new Date(dto.temporaryAccessStartTime).getTime() + relativeTempAllocatedTimeInMs) }); - return additionalPrivilege; + return { + ...additionalPrivilege, + permissions: unpackPermissions(additionalPrivilege.permissions) + }; }; const updateById = async ({ @@ -88,10 +127,18 @@ export const projectUserAdditionalPrivilegeServiceFactory = ({ ...dto }: TUpdateUserPrivilegeDTO) => { const userPrivilege = await projectUserAdditionalPrivilegeDAL.findById(privilegeId); - if (!userPrivilege) throw new BadRequestError({ message: "User additional privilege not found" }); + if (!userPrivilege) + throw new NotFoundError({ message: `User additional privilege with ID ${privilegeId} not found` }); - const projectMembership = await projectMembershipDAL.findById(userPrivilege.projectMembershipId); - if (!projectMembership) throw new BadRequestError({ message: "Project membership not found" }); + const projectMembership = await projectMembershipDAL.findOne({ + userId: userPrivilege.userId, + projectId: userPrivilege.projectId + }); + + if (!projectMembership) + throw new NotFoundError({ + message: `Project membership for user with ID '${userPrivilege.userId}' not found in project with ID '${userPrivilege.projectId}'` + }); const { permission } = await permissionService.getProjectPermission( actor, @@ -101,45 +148,81 @@ export const projectUserAdditionalPrivilegeServiceFactory = ({ actorOrgId ); ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Edit, ProjectPermissionSub.Member); + const { permission: targetUserPermission } = await permissionService.getProjectPermission( + ActorType.USER, + projectMembership.userId, + projectMembership.projectId, + actorAuthMethod, + actorOrgId + ); + + // we need to validate that the privilege given is not higher than the assigning users permission + // @ts-expect-error this is expected error because of one being really accurate rule definition 
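For the relative temporary mode used here, the access window is simply start + ms(range). With hypothetical values:

import ms from "ms";

const temporaryAccessStartTime = new Date("2024-01-01T00:00:00Z");
const temporaryRange = "1h"; // any duration string ms() understands
const temporaryAccessEndTime = new Date(temporaryAccessStartTime.getTime() + ms(temporaryRange));
// => 2024-01-01T01:00:00.000Z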
other being a bit more broader. Both are valid casl rules + targetUserPermission.update(targetUserPermission.rules.concat(dto.permissions || [])); + const hasRequiredPriviledges = isAtLeastAsPrivileged(permission, targetUserPermission); + if (!hasRequiredPriviledges) + throw new ForbiddenRequestError({ message: "Failed to update more privileged identity" }); if (dto?.slug) { const existingSlug = await projectUserAdditionalPrivilegeDAL.findOne({ slug: dto.slug, - projectMembershipId: projectMembership.id + userId: projectMembership.userId, + projectId: projectMembership.projectId }); if (existingSlug && existingSlug.id !== userPrivilege.id) - throw new BadRequestError({ message: "Additional privilege of provided slug exist" }); + throw new BadRequestError({ message: `Additional privilege with provided slug ${dto.slug} already exists` }); } const isTemporary = typeof dto?.isTemporary !== "undefined" ? dto.isTemporary : userPrivilege.isTemporary; + + const packedPermission = dto.permissions && JSON.stringify(packRules(dto.permissions)); if (isTemporary) { const temporaryAccessStartTime = dto?.temporaryAccessStartTime || userPrivilege?.temporaryAccessStartTime; const temporaryRange = dto?.temporaryRange || userPrivilege?.temporaryRange; const additionalPrivilege = await projectUserAdditionalPrivilegeDAL.updateById(userPrivilege.id, { - ...dto, + slug: dto.slug, + permissions: packedPermission, + isTemporary: dto.isTemporary, + temporaryRange: dto.temporaryRange, + temporaryMode: dto.temporaryMode, temporaryAccessStartTime: new Date(temporaryAccessStartTime || ""), + temporaryAccessEndTime: new Date(new Date(temporaryAccessStartTime || "").getTime() + ms(temporaryRange || "")) }); - return additionalPrivilege; + + return { + ...additionalPrivilege, + permissions: unpackPermissions(additionalPrivilege.permissions) + }; } const additionalPrivilege = await projectUserAdditionalPrivilegeDAL.updateById(userPrivilege.id, { - ...dto, + slug: dto.slug, + permissions: packedPermission, isTemporary: false, temporaryAccessStartTime: null, temporaryAccessEndTime: null, temporaryRange: null, temporaryMode: null }); - return additionalPrivilege; + return { + ...additionalPrivilege, + permissions: unpackPermissions(additionalPrivilege.permissions) + }; }; const deleteById = async ({ actorId, actor, actorOrgId, actorAuthMethod, privilegeId }: TDeleteUserPrivilegeDTO) => { const userPrivilege = await projectUserAdditionalPrivilegeDAL.findById(privilegeId); - if (!userPrivilege) throw new BadRequestError({ message: "User additional privilege not found" }); + if (!userPrivilege) + throw new NotFoundError({ message: `User additional privilege with ID ${privilegeId} not found` }); - const projectMembership = await projectMembershipDAL.findById(userPrivilege.projectMembershipId); - if (!projectMembership) throw new BadRequestError({ message: "Project membership not found" }); + const projectMembership = await projectMembershipDAL.findOne({ + userId: userPrivilege.userId, + projectId: userPrivilege.projectId + }); + if (!projectMembership) + throw new NotFoundError({ + message: `Project membership for user with ID '${userPrivilege.userId}' not found in project with ID '${userPrivilege.projectId}'` + }); const { permission } = await permissionService.getProjectPermission( actor, @@ -151,7 +234,10 @@ export const projectUserAdditionalPrivilegeServiceFactory = ({ ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Edit, ProjectPermissionSub.Member); const deletedPrivilege = await 
projectUserAdditionalPrivilegeDAL.deleteById(userPrivilege.id); - return deletedPrivilege; + return { + ...deletedPrivilege, + permissions: unpackPermissions(deletedPrivilege.permissions) + }; }; const getPrivilegeDetailsById = async ({ @@ -162,10 +248,17 @@ export const projectUserAdditionalPrivilegeServiceFactory = ({ actorAuthMethod }: TGetUserPrivilegeDetailsDTO) => { const userPrivilege = await projectUserAdditionalPrivilegeDAL.findById(privilegeId); - if (!userPrivilege) throw new BadRequestError({ message: "User additional privilege not found" }); + if (!userPrivilege) + throw new NotFoundError({ message: `User additional privilege with ID ${privilegeId} not found` }); - const projectMembership = await projectMembershipDAL.findById(userPrivilege.projectMembershipId); - if (!projectMembership) throw new BadRequestError({ message: "Project membership not found" }); + const projectMembership = await projectMembershipDAL.findOne({ + userId: userPrivilege.userId, + projectId: userPrivilege.projectId + }); + if (!projectMembership) + throw new NotFoundError({ + message: `Project membership for user with ID '${userPrivilege.userId}' not found in project with ID '${userPrivilege.projectId}'` + }); const { permission } = await permissionService.getProjectPermission( actor, @@ -176,7 +269,10 @@ export const projectUserAdditionalPrivilegeServiceFactory = ({ ); ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Member); - return userPrivilege; + return { + ...userPrivilege, + permissions: unpackPermissions(userPrivilege.permissions) + }; }; const listPrivileges = async ({ @@ -187,7 +283,8 @@ export const projectUserAdditionalPrivilegeServiceFactory = ({ actorAuthMethod }: TListUserPrivilegesDTO) => { const projectMembership = await projectMembershipDAL.findById(projectMembershipId); - if (!projectMembership) throw new BadRequestError({ message: "Project membership not found" }); + if (!projectMembership) + throw new NotFoundError({ message: `Project membership with ID ${projectMembershipId} not found` }); const { permission } = await permissionService.getProjectPermission( actor, @@ -198,7 +295,13 @@ export const projectUserAdditionalPrivilegeServiceFactory = ({ ); ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Member); - const userPrivileges = await projectUserAdditionalPrivilegeDAL.find({ projectMembershipId }); + const userPrivileges = await projectUserAdditionalPrivilegeDAL.find( + { + userId: projectMembership.userId, + projectId: projectMembership.projectId + }, + { sort: [[`${TableName.ProjectUserAdditionalPrivilege}.slug` as "slug", "asc"]] } + ); return userPrivileges; }; diff --git a/backend/src/ee/services/project-user-additional-privilege/project-user-additional-privilege-types.ts b/backend/src/ee/services/project-user-additional-privilege/project-user-additional-privilege-types.ts index 572474270c..cfcf75872a 100644 --- a/backend/src/ee/services/project-user-additional-privilege/project-user-additional-privilege-types.ts +++ b/backend/src/ee/services/project-user-additional-privilege/project-user-additional-privilege-types.ts @@ -1,18 +1,20 @@ import { TProjectPermission } from "@app/lib/types"; +import { TProjectPermissionV2Schema } from "../permission/project-permission"; + export enum ProjectUserAdditionalPrivilegeTemporaryMode { Relative = "relative" } export type TCreateUserPrivilegeDTO = ( | { - permissions: unknown; + permissions: TProjectPermissionV2Schema[]; 
projectMembershipId: string; slug: string; isTemporary: false; } | { - permissions: unknown; + permissions: TProjectPermissionV2Schema[]; projectMembershipId: string; slug: string; isTemporary: true; @@ -25,7 +27,7 @@ export type TCreateUserPrivilegeDTO = ( export type TUpdateUserPrivilegeDTO = { privilegeId: string } & Omit & Partial<{ - permissions: unknown; + permissions: TProjectPermissionV2Schema[]; slug: string; isTemporary: boolean; temporaryMode: ProjectUserAdditionalPrivilegeTemporaryMode.Relative; diff --git a/backend/src/ee/services/rate-limit/rate-limit-dal.ts b/backend/src/ee/services/rate-limit/rate-limit-dal.ts new file mode 100644 index 0000000000..7279ff8ea1 --- /dev/null +++ b/backend/src/ee/services/rate-limit/rate-limit-dal.ts @@ -0,0 +1,7 @@ +import { TDbClient } from "@app/db"; +import { TableName } from "@app/db/schemas"; +import { ormify } from "@app/lib/knex"; + +export type TRateLimitDALFactory = ReturnType<typeof rateLimitDALFactory>; + +export const rateLimitDALFactory = (db: TDbClient) => ormify(db, TableName.RateLimit, {}); diff --git a/backend/src/ee/services/rate-limit/rate-limit-service.ts b/backend/src/ee/services/rate-limit/rate-limit-service.ts new file mode 100644 index 0000000000..208fa84283 --- /dev/null +++ b/backend/src/ee/services/rate-limit/rate-limit-service.ts @@ -0,0 +1,104 @@ +import { CronJob } from "cron"; + +import { logger } from "@app/lib/logger"; + +import { TLicenseServiceFactory } from "../license/license-service"; +import { TRateLimitDALFactory } from "./rate-limit-dal"; +import { RateLimitConfiguration, TRateLimit, TRateLimitUpdateDTO } from "./rate-limit-types"; + +let rateLimitMaxConfiguration: RateLimitConfiguration = { + readLimit: 60, + publicEndpointLimit: 30, + writeLimit: 200, + secretsLimit: 60, + authRateLimit: 60, + inviteUserRateLimit: 30, + mfaRateLimit: 20 +}; + +Object.freeze(rateLimitMaxConfiguration); + +export const getRateLimiterConfig = () => { + return rateLimitMaxConfiguration; +}; + +type TRateLimitServiceFactoryDep = { + rateLimitDAL: TRateLimitDALFactory; + licenseService: Pick<TLicenseServiceFactory, "onPremFeatures">; +}; + +export type TRateLimitServiceFactory = ReturnType<typeof rateLimitServiceFactory>; + +export const rateLimitServiceFactory = ({ rateLimitDAL, licenseService }: TRateLimitServiceFactoryDep) => { + const DEFAULT_RATE_LIMIT_CONFIG_ID = "00000000-0000-0000-0000-000000000000"; + + const getRateLimits = async (): Promise<TRateLimit | undefined> => { + let rateLimit: TRateLimit; + + try { + rateLimit = await rateLimitDAL.findOne({ id: DEFAULT_RATE_LIMIT_CONFIG_ID }); + if (!rateLimit) { + // rate limit might not exist + rateLimit = await rateLimitDAL.create({ + // @ts-expect-error id is kept as fixed because there should only be one rate limit config per instance + id: DEFAULT_RATE_LIMIT_CONFIG_ID + }); + } + return rateLimit; + } catch (err) { + logger.error("Error fetching rate limits %o", err); + return undefined; + } + }; + + const updateRateLimit = async (updates: TRateLimitUpdateDTO): Promise<TRateLimit> => { + return rateLimitDAL.updateById(DEFAULT_RATE_LIMIT_CONFIG_ID, updates); + }; + + const syncRateLimitConfiguration = async () => { + try { + const rateLimit = await getRateLimits(); + if (rateLimit) { + const newRateLimitMaxConfiguration: typeof rateLimitMaxConfiguration = { + readLimit: rateLimit.readRateLimit, + publicEndpointLimit: rateLimit.publicEndpointLimit, + writeLimit: rateLimit.writeRateLimit, + secretsLimit: rateLimit.secretsRateLimit, + authRateLimit: rateLimit.authRateLimit, + inviteUserRateLimit: rateLimit.inviteUserRateLimit, + mfaRateLimit: rateLimit.mfaRateLimit + }; + +
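Two details in the new rate-limit service are easy to miss: the all-zeros `DEFAULT_RATE_LIMIT_CONFIG_ID` pins the table to a single configuration row per instance, and the sync job replaces `rateLimitMaxConfiguration` wholesale, so consumers must go through `getRateLimiterConfig()` on every request instead of capturing the object once at startup. A hedged sketch of a consumer (illustrative wiring, not the plugin registration from this PR):

```ts
import { getRateLimiterConfig } from "@app/ee/services/rate-limit/rate-limit-service";

// Re-read the live configuration per request; a value captured at boot
// would keep serving the pre-sync defaults forever.
const readRateLimitOptions = {
  timeWindow: 60 * 1000,
  max: () => getRateLimiterConfig().readLimit
};
```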
logger.info(`syncRateLimitConfiguration: rate limit configuration: %o`, newRateLimitMaxConfiguration); + Object.freeze(newRateLimitMaxConfiguration); + rateLimitMaxConfiguration = newRateLimitMaxConfiguration; + } + } catch (error) { + logger.error(`Error syncing rate limit configurations: %o`, error); + } + }; + + const initializeBackgroundSync = async () => { + if (!licenseService.onPremFeatures.customRateLimits) { + logger.info("Current license does not support custom rate limit configuration"); + return; + } + + logger.info("Setting up background sync process for rate limits"); + // initial sync upon startup + await syncRateLimitConfiguration(); + + // sync rate limits configuration every 10 minutes + const job = new CronJob("*/10 * * * *", syncRateLimitConfiguration); + job.start(); + + return job; + }; + + return { + getRateLimits, + updateRateLimit, + initializeBackgroundSync, + syncRateLimitConfiguration + }; +}; diff --git a/backend/src/ee/services/rate-limit/rate-limit-types.ts b/backend/src/ee/services/rate-limit/rate-limit-types.ts new file mode 100644 index 0000000000..d924dce511 --- /dev/null +++ b/backend/src/ee/services/rate-limit/rate-limit-types.ts @@ -0,0 +1,25 @@ +export type TRateLimitUpdateDTO = { + readRateLimit: number; + writeRateLimit: number; + secretsRateLimit: number; + authRateLimit: number; + inviteUserRateLimit: number; + mfaRateLimit: number; + publicEndpointLimit: number; +}; + +export type TRateLimit = { + id: string; + createdAt: Date; + updatedAt: Date; +} & TRateLimitUpdateDTO; + +export type RateLimitConfiguration = { + readLimit: number; + publicEndpointLimit: number; + writeLimit: number; + secretsLimit: number; + authRateLimit: number; + inviteUserRateLimit: number; + mfaRateLimit: number; +}; diff --git a/backend/src/ee/services/saml-config/saml-config-dal.ts b/backend/src/ee/services/saml-config/saml-config-dal.ts index 1e7b9e47e8..aff42230ff 100644 --- a/backend/src/ee/services/saml-config/saml-config-dal.ts +++ b/backend/src/ee/services/saml-config/saml-config-dal.ts @@ -10,7 +10,8 @@ export const samlConfigDALFactory = (db: TDbClient) => { const findEnforceableSamlCfg = async (orgId: string) => { try { - const samlCfg = await db(TableName.SamlConfig) + const samlCfg = await db + .replicaNode()(TableName.SamlConfig) .where({ orgId, isActive: true diff --git a/backend/src/ee/services/saml-config/saml-config-service.ts b/backend/src/ee/services/saml-config/saml-config-service.ts index 5d7b7ec3b9..2930d06f90 100644 --- a/backend/src/ee/services/saml-config/saml-config-service.ts +++ b/backend/src/ee/services/saml-config/saml-config-service.ts @@ -2,7 +2,6 @@ import { ForbiddenError } from "@casl/ability"; import jwt from "jsonwebtoken"; import { - OrgMembershipRole, OrgMembershipStatus, SecretKeyEncoding, TableName, @@ -19,15 +18,18 @@ import { infisicalSymmetricDecrypt, infisicalSymmetricEncypt } from "@app/lib/crypto/encryption"; -import { BadRequestError } from "@app/lib/errors"; +import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors"; import { AuthTokenType } from "@app/services/auth/auth-type"; import { TAuthTokenServiceFactory } from "@app/services/auth-token/auth-token-service"; import { TokenType } from "@app/services/auth-token/auth-token-types"; +import { TIdentityMetadataDALFactory } from "@app/services/identity/identity-metadata-dal"; import { TOrgBotDALFactory } from "@app/services/org/org-bot-dal"; import { TOrgDALFactory } from "@app/services/org/org-dal"; +import { getDefaultOrgMembershipRole } from 
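For reference, the `cron` schedule used by `initializeBackgroundSync` above, in isolation; the callback here is a stand-in for `syncRateLimitConfiguration`:

```ts
import { CronJob } from "cron";

// "*/10 * * * *" fires at minutes 0, 10, 20, 30, 40 and 50 of every hour.
const job = new CronJob("*/10 * * * *", () => {
  // stand-in for syncRateLimitConfiguration()
  console.log("syncing rate limit configuration");
});

job.start(); // begin scheduling
// job.stop() cancels the schedule, e.g. during graceful shutdown.
```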
"@app/services/org/org-role-fns"; import { TOrgMembershipDALFactory } from "@app/services/org-membership/org-membership-dal"; import { SmtpTemplates, TSmtpService } from "@app/services/smtp/smtp-service"; import { getServerCfg } from "@app/services/super-admin/super-admin-service"; +import { LoginMethod } from "@app/services/super-admin/super-admin-types"; import { TUserDALFactory } from "@app/services/user/user-dal"; import { normalizeUsername } from "@app/services/user/user-fns"; import { TUserAliasDALFactory } from "@app/services/user-alias/user-alias-dal"; @@ -41,12 +43,17 @@ import { TCreateSamlCfgDTO, TGetSamlCfgDTO, TSamlLoginDTO, TUpdateSamlCfgDTO } f type TSamlConfigServiceFactoryDep = { samlConfigDAL: Pick; - userDAL: Pick; + userDAL: Pick< + TUserDALFactory, + "create" | "findOne" | "transaction" | "updateById" | "findById" | "findUserEncKeyByUserId" + >; userAliasDAL: Pick; orgDAL: Pick< TOrgDALFactory, "createMembership" | "updateMembershipById" | "findMembership" | "findOrgById" | "findOne" | "updateById" >; + + identityMetadataDAL: Pick; orgMembershipDAL: Pick; orgBotDAL: Pick; permissionService: Pick; @@ -67,7 +74,8 @@ export const samlConfigServiceFactory = ({ permissionService, licenseService, tokenService, - smtpService + smtpService, + identityMetadataDAL }: TSamlConfigServiceFactoryDep) => { const createSamlCfg = async ({ cert, @@ -183,7 +191,11 @@ export const samlConfigServiceFactory = ({ const updateQuery: TSamlConfigsUpdate = { authProvider, isActive, lastUsed: null }; const orgBot = await orgBotDAL.findOne({ orgId }); - if (!orgBot) throw new BadRequestError({ message: "Org bot not found", name: "OrgBotNotFound" }); + if (!orgBot) + throw new NotFoundError({ + message: `Organization bot not found for organization with ID '${orgId}'`, + name: "OrgBotNotFound" + }); const key = infisicalSymmetricDecrypt({ ciphertext: orgBot.encryptedSymmetricKey, iv: orgBot.symmetricKeyIV, @@ -249,7 +261,7 @@ export const samlConfigServiceFactory = ({ ssoConfig = await samlConfigDAL.findById(id); } - if (!ssoConfig) throw new BadRequestError({ message: "Failed to find organization SSO data" }); + if (!ssoConfig) throw new NotFoundError({ message: `Failed to find SSO data` }); // when dto is type id means it's internally used if (dto.type === "org") { @@ -275,7 +287,11 @@ export const samlConfigServiceFactory = ({ } = ssoConfig; const orgBot = await orgBotDAL.findOne({ orgId: ssoConfig.orgId }); - if (!orgBot) throw new BadRequestError({ message: "Org bot not found", name: "OrgBotNotFound" }); + if (!orgBot) + throw new NotFoundError({ + message: `Organization bot not found in organization with ID '${ssoConfig.orgId}'`, + name: "OrgBotNotFound" + }); const key = infisicalSymmetricDecrypt({ ciphertext: orgBot.encryptedSymmetricKey, iv: orgBot.symmetricKeyIV, @@ -328,10 +344,18 @@ export const samlConfigServiceFactory = ({ lastName, authProvider, orgId, - relayState + relayState, + metadata }: TSamlLoginDTO) => { const appCfg = getConfig(); const serverCfg = await getServerCfg(); + + if (serverCfg.enabledLoginMethods && !serverCfg.enabledLoginMethods.includes(LoginMethod.SAML)) { + throw new ForbiddenRequestError({ + message: "Login with SAML is disabled by administrator." 
+ }); + const userAlias = await userAliasDAL.findOne({ externalId, orgId, @@ -339,7 +363,7 @@ export const samlConfigServiceFactory = ({ }); const organization = await orgDAL.findOrgById(orgId); - if (!organization) throw new BadRequestError({ message: "Org not found" }); + if (!organization) throw new NotFoundError({ message: `Organization with ID '${orgId}' not found` }); let user: TUsers; if (userAlias) { @@ -353,13 +377,17 @@ export const samlConfigServiceFactory = ({ { tx } ); if (!orgMembership) { + const { role, roleId } = await getDefaultOrgMembershipRole(organization.defaultMembershipRole); + await orgMembershipDAL.create( { userId: userAlias.userId, inviteEmail: email, orgId, - role: OrgMembershipRole.Member, - status: foundUser.isAccepted ? OrgMembershipStatus.Accepted : OrgMembershipStatus.Invited // if user is fully completed, then set status to accepted, otherwise set it to invited so we can update it later + role, + roleId, + status: foundUser.isAccepted ? OrgMembershipStatus.Accepted : OrgMembershipStatus.Invited, // if user is fully completed, then set status to accepted, otherwise set it to invited so we can update it later + isActive: true }, tx ); @@ -374,9 +402,39 @@ export const samlConfigServiceFactory = ({ ); } + if (metadata && foundUser.id) { + await identityMetadataDAL.delete({ userId: foundUser.id, orgId }, tx); + if (metadata.length) { + await identityMetadataDAL.insertMany( + metadata.map(({ key, value }) => ({ + userId: foundUser.id, + orgId, + key, + value + })), + tx + ); + } + } + return foundUser; }); } else { + const plan = await licenseService.getPlan(orgId); + if (plan?.memberLimit && plan.membersUsed >= plan.memberLimit) { + // member limit reached: the number of members used meets or exceeds the plan's allowed members + throw new BadRequestError({ + message: "Failed to create new member via SAML due to member limit reached. Upgrade plan to add more members." + }); + } + + if (plan?.identityLimit && plan.identitiesUsed >= plan.identityLimit) { + // identity limit reached: the number of identities used meets or exceeds the plan's allowed identities + throw new BadRequestError({ + message: "Failed to create new member via SAML due to identity limit reached. Upgrade plan to add more identities." + }); + } + + user = await userDAL.transaction(async (tx) => { let newUser: TUsers | undefined; if (serverCfg.trustSamlEmails) { @@ -425,13 +483,17 @@ export const samlConfigServiceFactory = ({ ); if (!orgMembership) { + const { role, roleId } = await getDefaultOrgMembershipRole(organization.defaultMembershipRole); + await orgMembershipDAL.create( { userId: newUser.id, inviteEmail: email, orgId, - role: OrgMembershipRole.Member, - status: newUser.isAccepted ?
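The two plan checks above share one shape; factored out for illustration only (the service keeps them inline, and the helper below is hypothetical):

```ts
import { BadRequestError } from "@app/lib/errors";

// Hypothetical guard: refuse SAML auto-provisioning once a plan counter
// has reached its cap.
const assertSeatAvailable = (used: number, limit: number | null | undefined, kind: string, plural: string) => {
  if (limit && used >= limit) {
    throw new BadRequestError({
      message: `Failed to create new member via SAML due to ${kind} limit reached. Upgrade plan to add more ${plural}.`
    });
  }
};

// assertSeatAvailable(plan.membersUsed, plan.memberLimit, "member", "members");
// assertSeatAvailable(plan.identitiesUsed, plan.identityLimit, "identity", "identities");
```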
OrgMembershipStatus.Accepted : OrgMembershipStatus.Invited, // if user is fully completed, then set status to accepted, otherwise set it to invited so we can update it later + isActive: true }, tx ); @@ -446,12 +508,27 @@ export const samlConfigServiceFactory = ({ ); } + if (metadata && newUser.id) { + await identityMetadataDAL.delete({ userId: newUser.id, orgId }, tx); + if (metadata.length) { + await identityMetadataDAL.insertMany( + metadata.map(({ key, value }) => ({ + userId: newUser?.id, + orgId, + key, + value + })), + tx + ); + } + } return newUser; }); } await licenseService.updateSubscriptionOrgMemberCount(organization.id); const isUserCompleted = Boolean(user.isAccepted); + const userEnc = await userDAL.findUserEncKeyByUserId(user.id); const providerAuthToken = jwt.sign( { authTokenType: AuthTokenType.PROVIDER_TOKEN, @@ -464,6 +541,7 @@ export const samlConfigServiceFactory = ({ organizationId: organization.id, organizationSlug: organization.slug, authMethod: authProvider, + hasExchangedPrivateKey: Boolean(userEnc?.serverEncryptedPrivateKey), authType: UserAliasType.SAML, isUserCompleted, ...(relayState diff --git a/backend/src/ee/services/saml-config/saml-config-types.ts b/backend/src/ee/services/saml-config/saml-config-types.ts index 92ee32b5c6..96cb910357 100644 --- a/backend/src/ee/services/saml-config/saml-config-types.ts +++ b/backend/src/ee/services/saml-config/saml-config-types.ts @@ -53,4 +53,5 @@ export type TSamlLoginDTO = { orgId: string; // saml thingy relayState?: string; + metadata?: { key: string; value: string }[]; }; diff --git a/backend/src/ee/services/scim/scim-fns.ts b/backend/src/ee/services/scim/scim-fns.ts index ec54a4d1fc..3ade1a1179 100644 --- a/backend/src/ee/services/scim/scim-fns.ts +++ b/backend/src/ee/services/scim/scim-fns.ts @@ -18,20 +18,44 @@ export const buildScimUserList = ({ }; }; +export const parseScimFilter = (filterToParse: string | undefined) => { + if (!filterToParse) return {}; + const [parsedName, parsedValue] = filterToParse.split("eq").map((s) => s.trim()); + + let attributeName = parsedName; + if (parsedName === "userName") { + attributeName = "email"; + } else if (parsedName === "displayName") { + attributeName = "name"; + } + + return { [attributeName]: parsedValue.replace(/"/g, "") }; +}; + +export function extractScimValueFromPath(path: string): string | null { + const regex = /members\[value eq "([^"]+)"\]/; + const match = path.match(regex); + return match ? match[1] : null; +} + export const buildScimUser = ({ orgMembershipId, username, email, firstName, lastName, - active + active, + createdAt, + updatedAt }: { orgMembershipId: string; username: string; email?: string | null; - firstName: string; - lastName: string; + firstName: string | null | undefined; + lastName: string | null | undefined; active: boolean; + createdAt: Date; + updatedAt: Date; }): TScimUser => { const scimUser = { schemas: ["urn:ietf:params:scim:schemas:core:2.0:User"], @@ -39,9 +63,9 @@ export const buildScimUser = ({ userName: username, displayName: `${firstName} ${lastName}`, name: { - givenName: firstName, + givenName: firstName || "", middleName: null, - familyName: lastName + familyName: lastName || "" }, emails: email ? 
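`parseScimFilter` above handles only the simple `attribute eq "value"` form of the SCIM filter grammar, mapping SCIM attribute names to local columns. Expected behaviour, by example (note the naive `split("eq")` would also match an `eq` occurring inside the value, a limitation of this parser):

```ts
parseScimFilter('userName eq "jane@acme.com"');  // => { email: "jane@acme.com" }
parseScimFilter('displayName eq "Engineering"'); // => { name: "Engineering" }
parseScimFilter(undefined);                      // => {}
```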
[ @@ -53,10 +77,10 @@ export const buildScimUser = ({ ] : [], active, - groups: [], meta: { resourceType: "User", - location: null + created: createdAt, + lastModified: updatedAt } }; @@ -84,14 +108,18 @@ export const buildScimGroupList = ({ export const buildScimGroup = ({ groupId, name, - members + members, + updatedAt, + createdAt }: { groupId: string; name: string; members: { value: string; - display: string; + display?: string; }[]; + createdAt: Date; + updatedAt: Date; }): TScimGroup => { const scimGroup = { schemas: ["urn:ietf:params:scim:schemas:core:2.0:Group"], @@ -100,7 +128,8 @@ export const buildScimGroup = ({ members, meta: { resourceType: "Group", - location: null + created: createdAt, + lastModified: updatedAt } }; diff --git a/backend/src/ee/services/scim/scim-service.ts b/backend/src/ee/services/scim/scim-service.ts index 8f8d5dbc95..0f814d2ac7 100644 --- a/backend/src/ee/services/scim/scim-service.ts +++ b/backend/src/ee/services/scim/scim-service.ts @@ -1,6 +1,7 @@ import { ForbiddenError } from "@casl/ability"; import slugify from "@sindresorhus/slugify"; import jwt from "jsonwebtoken"; +import { scimPatch } from "scim-patch"; import { OrgMembershipRole, OrgMembershipStatus, TableName, TGroups, TOrgMemberships, TUsers } from "@app/db/schemas"; import { TGroupDALFactory } from "@app/ee/services/group/group-dal"; @@ -8,13 +9,15 @@ import { addUsersToGroupByUserIds, removeUsersFromGroupByUserIds } from "@app/ee import { TUserGroupMembershipDALFactory } from "@app/ee/services/group/user-group-membership-dal"; import { TScimDALFactory } from "@app/ee/services/scim/scim-dal"; import { getConfig } from "@app/lib/config/env"; -import { BadRequestError, ScimRequestError, UnauthorizedError } from "@app/lib/errors"; +import { BadRequestError, NotFoundError, ScimRequestError, UnauthorizedError } from "@app/lib/errors"; import { alphaNumericNanoId } from "@app/lib/nanoid"; import { TOrgPermission } from "@app/lib/types"; import { AuthTokenType } from "@app/services/auth/auth-type"; +import { TExternalGroupOrgRoleMappingDALFactory } from "@app/services/external-group-org-role-mapping/external-group-org-role-mapping-dal"; import { TGroupProjectDALFactory } from "@app/services/group-project/group-project-dal"; import { TOrgDALFactory } from "@app/services/org/org-dal"; import { deleteOrgMembershipFn } from "@app/services/org/org-fns"; +import { getDefaultOrgMembershipRole } from "@app/services/org/org-role-fns"; import { TOrgMembershipDALFactory } from "@app/services/org-membership/org-membership-dal"; import { TProjectDALFactory } from "@app/services/project/project-dal"; import { TProjectBotDALFactory } from "@app/services/project-bot/project-bot-dal"; @@ -30,7 +33,8 @@ import { UserAliasType } from "@app/services/user-alias/user-alias-types"; import { TLicenseServiceFactory } from "../license/license-service"; import { OrgPermissionActions, OrgPermissionSubjects } from "../permission/org-permission"; import { TPermissionServiceFactory } from "../permission/permission-service"; -import { buildScimGroup, buildScimGroupList, buildScimUser, buildScimUserList } from "./scim-fns"; +import { TProjectUserAdditionalPrivilegeDALFactory } from "../project-user-additional-privilege/project-user-additional-privilege-dal"; +import { buildScimGroup, buildScimGroupList, buildScimUser, buildScimUserList, parseScimFilter } from "./scim-fns"; import { TCreateScimGroupDTO, TCreateScimTokenDTO, @@ -44,6 +48,7 @@ import { TListScimUsers, TListScimUsersDTO, TReplaceScimUserDTO, + TScimGroup, 
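With the changes above, `buildScimGroup` emits RFC 7643 `meta.created`/`meta.lastModified` timestamps instead of a null `location`. The resulting resource, with illustrative values:

```ts
const exampleScimGroup = {
  schemas: ["urn:ietf:params:scim:schemas:core:2.0:Group"],
  id: "3f8e...", // groupId (truncated here)
  displayName: "Engineering",
  members: [
    { value: "b7c1...", display: "Jane Doe" } // value is the orgMembershipId
  ],
  meta: {
    resourceType: "Group",
    created: new Date("2024-01-01T00:00:00.000Z"),
    lastModified: new Date("2024-06-01T00:00:00.000Z")
  }
};
```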
TScimTokenJwtPayload, TUpdateScimGroupNamePatchDTO, TUpdateScimGroupNamePutDTO, @@ -54,30 +59,54 @@ type TScimServiceFactoryDep = { scimDAL: Pick; userDAL: Pick< TUserDALFactory, - "find" | "findOne" | "create" | "transaction" | "findUserEncKeyByUserIdsBatch" | "findById" + "find" | "findOne" | "create" | "transaction" | "findUserEncKeyByUserIdsBatch" | "findById" | "updateById" >; - userAliasDAL: Pick; + userAliasDAL: Pick; orgDAL: Pick< TOrgDALFactory, - "createMembership" | "findById" | "findMembership" | "deleteMembershipById" | "transaction" | "updateMembershipById" + | "createMembership" + | "findById" + | "findMembership" + | "findMembershipWithScimFilter" + | "deleteMembershipById" + | "transaction" + | "updateMembershipById" + >; + orgMembershipDAL: Pick< + TOrgMembershipDALFactory, + "find" | "findOne" | "create" | "updateById" | "findById" | "update" >; - orgMembershipDAL: Pick; projectDAL: Pick; projectMembershipDAL: Pick; groupDAL: Pick< TGroupDALFactory, - "create" | "findOne" | "findAllGroupMembers" | "update" | "delete" | "findGroups" | "transaction" + | "create" + | "findOne" + | "findAllGroupPossibleMembers" + | "delete" + | "findGroups" + | "transaction" + | "updateById" + | "update" >; groupProjectDAL: Pick; userGroupMembershipDAL: Pick< TUserGroupMembershipDALFactory, - "find" | "transaction" | "insertMany" | "filterProjectsByUserMembership" | "delete" + | "find" + | "transaction" + | "insertMany" + | "filterProjectsByUserMembership" + | "delete" + | "findGroupMembershipsByUserIdInOrg" + | "findGroupMembershipsByGroupIdInOrg" >; projectKeyDAL: Pick; projectBotDAL: Pick; licenseService: Pick; permissionService: Pick; smtpService: Pick; + projectUserAdditionalPrivilegeDAL: Pick; + externalGroupOrgRoleMappingDAL: TExternalGroupOrgRoleMappingDALFactory; }; export type TScimServiceFactory = ReturnType; @@ -97,7 +126,9 @@ export const scimServiceFactory = ({ projectKeyDAL, projectBotDAL, permissionService, - smtpService + projectUserAdditionalPrivilegeDAL, + smtpService, + externalGroupOrgRoleMappingDAL }: TScimServiceFactoryDep) => { const createScimToken = async ({ actor, @@ -152,7 +183,7 @@ export const scimServiceFactory = ({ const deleteScimToken = async ({ scimTokenId, actor, actorId, actorAuthMethod, actorOrgId }: TDeleteScimTokenDTO) => { let scimToken = await scimDAL.findById(scimTokenId); - if (!scimToken) throw new BadRequestError({ message: "Failed to find SCIM token to delete" }); + if (!scimToken) throw new NotFoundError({ message: `SCIM token with ID '${scimTokenId}' not found` }); const { permission } = await permissionService.getOrgPermission( actor, @@ -175,7 +206,12 @@ export const scimServiceFactory = ({ }; // SCIM server endpoints - const listScimUsers = async ({ startIndex, limit, filter, orgId }: TListScimUsersDTO): Promise => { + const listScimUsers = async ({ + startIndex = 0, + limit = 100, + filter, + orgId + }: TListScimUsersDTO): Promise => { const org = await orgDAL.findById(orgId); if (!org.scimEnabled) @@ -184,40 +220,25 @@ export const scimServiceFactory = ({ status: 403 }); - const parseFilter = (filterToParse: string | undefined) => { - if (!filterToParse) return {}; - const [parsedName, parsedValue] = filterToParse.split("eq").map((s) => s.trim()); - - let attributeName = parsedName; - if (parsedName === "userName") { - attributeName = "email"; - } - - return { [attributeName]: parsedValue.replace(/"/g, "") }; - }; - const findOpts = { ...(startIndex && { offset: startIndex - 1 }), ...(limit && { limit }) }; - const users = await 
orgDAL.findMembership( - { - [`${TableName.OrgMembership}.orgId` as "id"]: orgId, - ...parseFilter(filter) - }, - findOpts - ); + const users = await orgDAL.findMembershipWithScimFilter(orgId, filter, findOpts); - const scimUsers = users.map(({ id, externalId, username, firstName, lastName, email }) => - buildScimUser({ - orgMembershipId: id ?? "", - username: externalId ?? username, - firstName: firstName ?? "", - lastName: lastName ?? "", - email, - active: true - }) + const scimUsers = users.map( + ({ id, externalId, username, firstName, lastName, email, isActive, createdAt, updatedAt }) => + buildScimUser({ + orgMembershipId: id ?? "", + username: externalId ?? username, + firstName: firstName ?? "", + lastName: lastName ?? "", + email, + active: isActive, + createdAt, + updatedAt + }) ); return buildScimUserList({ @@ -256,9 +277,11 @@ export const scimServiceFactory = ({ orgMembershipId: membership.id, username: membership.externalId ?? membership.username, email: membership.email ?? "", - firstName: membership.firstName as string, - lastName: membership.lastName as string, - active: true + firstName: membership.firstName, + lastName: membership.lastName, + active: membership.isActive, + createdAt: membership.createdAt, + updatedAt: membership.updatedAt }); }; @@ -302,13 +325,17 @@ export const scimServiceFactory = ({ ); if (!orgMembership) { + const { role, roleId } = await getDefaultOrgMembershipRole(org.defaultMembershipRole); + orgMembership = await orgMembershipDAL.create( { userId: userAlias.userId, inviteEmail: email, orgId, - role: OrgMembershipRole.Member, - status: user.isAccepted ? OrgMembershipStatus.Accepted : OrgMembershipStatus.Invited // if user is fully completed, then set status to accepted, otherwise set it to invited so we can update it later + role, + roleId, + status: user.isAccepted ? OrgMembershipStatus.Accepted : OrgMembershipStatus.Invited, // if user is fully completed, then set status to accepted, otherwise set it to invited so we can update it later + isActive: true }, tx ); @@ -333,7 +360,11 @@ export const scimServiceFactory = ({ } if (!user) { - const uniqueUsername = await normalizeUsername(`${firstName}-${lastName}`, userDAL); + const uniqueUsername = await normalizeUsername( + // external id is username + `${firstName}-${lastName}`, + userDAL + ); user = await userDAL.create( { username: serverCfg.trustSamlEmails ? email : uniqueUsername, @@ -370,13 +401,17 @@ export const scimServiceFactory = ({ orgMembership = foundOrgMembership; if (!orgMembership) { + const { role, roleId } = await getDefaultOrgMembershipRole(org.defaultMembershipRole); + orgMembership = await orgMembershipDAL.create( { userId: user.id, inviteEmail: email, orgId, - role: OrgMembershipRole.Member, - status: user.isAccepted ? OrgMembershipStatus.Accepted : OrgMembershipStatus.Invited // if user is fully completed, then set status to accepted, otherwise set it to invited so we can update it later + role, + roleId, + status: user.isAccepted ? OrgMembershipStatus.Accepted : OrgMembershipStatus.Invited, // if user is fully completed, then set status to accepted, otherwise set it to invited so we can update it later + isActive: true }, tx ); @@ -410,13 +445,16 @@ export const scimServiceFactory = ({ return buildScimUser({ orgMembershipId: createdOrgMembership.id, username: externalId, - firstName: createdUser.firstName as string, - lastName: createdUser.lastName as string, + firstName: createdUser.firstName, + lastName: createdUser.lastName, email: createdUser.email ?? 
"", - active: true + active: createdOrgMembership.isActive, + createdAt: createdOrgMembership.createdAt, + updatedAt: createdOrgMembership.updatedAt }); }; + // partial const updateScimUser = async ({ orgMembershipId, orgId, operations }: TUpdateScimUserDTO) => { const [membership] = await orgDAL .findMembership({ @@ -442,43 +480,52 @@ export const scimServiceFactory = ({ status: 403 }); - let active = true; - - operations.forEach((operation) => { - if (operation.op.toLowerCase() === "replace") { - if (operation.path === "active" && operation.value === "False") { - // azure scim op format - active = false; - } else if (typeof operation.value === "object" && operation.value.active === false) { - // okta scim op format - active = false; - } - } - }); - - if (!active) { - await deleteOrgMembershipFn({ - orgMembershipId: membership.id, - orgId: membership.orgId, - orgDAL, - projectMembershipDAL, - projectKeyDAL, - userAliasDAL, - licenseService - }); - } - - return buildScimUser({ + const scimUser = buildScimUser({ orgMembershipId: membership.id, - username: membership.externalId ?? membership.username, email: membership.email, - firstName: membership.firstName as string, - lastName: membership.lastName as string, - active + lastName: membership.lastName, + firstName: membership.firstName, + active: membership.isActive, + username: membership.externalId ?? membership.username, + createdAt: membership.createdAt, + updatedAt: membership.updatedAt }); + scimPatch(scimUser, operations); + + const serverCfg = await getServerCfg(); + await userDAL.transaction(async (tx) => { + await orgMembershipDAL.updateById( + membership.id, + { + isActive: scimUser.active + }, + tx + ); + const hasEmailChanged = scimUser.emails[0].value !== membership.email; + await userDAL.updateById( + membership.userId, + { + firstName: scimUser.name.givenName, + email: scimUser.emails[0].value, + lastName: scimUser.name.familyName, + isEmailVerified: hasEmailChanged ? serverCfg.trustSamlEmails : true + }, + tx + ); + }); + + return scimUser; }; - const replaceScimUser = async ({ orgMembershipId, active, orgId }: TReplaceScimUserDTO) => { + const replaceScimUser = async ({ + orgMembershipId, + active, + orgId, + lastName, + firstName, + email, + externalId + }: TReplaceScimUserDTO) => { const [membership] = await orgDAL .findMembership({ [`${TableName.OrgMembership}.id` as "id"]: orgMembershipId, @@ -503,25 +550,47 @@ export const scimServiceFactory = ({ status: 403 }); - if (!active) { - await deleteOrgMembershipFn({ - orgMembershipId: membership.id, - orgId: membership.orgId, - orgDAL, - projectMembershipDAL, - projectKeyDAL, - userAliasDAL, - licenseService - }); - } + const serverCfg = await getServerCfg(); + await userDAL.transaction(async (tx) => { + await userAliasDAL.update( + { + orgId, + aliasType: UserAliasType.SAML, + userId: membership.userId + }, + { + externalId + }, + tx + ); + await orgMembershipDAL.updateById( + membership.id, + { + isActive: active + }, + tx + ); + await userDAL.updateById( + membership.userId, + { + firstName, + email, + lastName, + isEmailVerified: serverCfg.trustSamlEmails + }, + tx + ); + }); return buildScimUser({ orgMembershipId: membership.id, - username: membership.externalId ?? 
membership.username, + username: externalId, email: membership.email, - firstName: membership.firstName as string, - lastName: membership.lastName as string, - active + firstName: membership.firstName, + lastName: membership.lastName, + active, + createdAt: membership.createdAt, + updatedAt: membership.updatedAt }); }; @@ -549,6 +618,7 @@ export const scimServiceFactory = ({ orgId: membership.orgId, orgDAL, projectMembershipDAL, + projectUserAdditionalPrivilegeDAL, projectKeyDAL, userAliasDAL, licenseService @@ -557,7 +627,7 @@ export const scimServiceFactory = ({ return {}; // intentionally return empty object upon success }; - const listScimGroups = async ({ orgId, startIndex, limit }: TListScimGroupsDTO) => { + const listScimGroups = async ({ orgId, startIndex, limit, filter, isMembersExcluded }: TListScimGroupsDTO) => { const plan = await licenseService.getPlan(orgId); if (!plan.groups) throw new BadRequestError({ @@ -580,7 +650,8 @@ export const scimServiceFactory = ({ const groups = await groupDAL.findGroups( { - orgId + orgId, + ...(filter && parseScimFilter(filter)) }, { offset: startIndex - 1, @@ -588,13 +659,37 @@ export const scimServiceFactory = ({ } ); - const scimGroups = groups.map((group) => - buildScimGroup({ + const scimGroups: TScimGroup[] = []; + if (isMembersExcluded) { + return buildScimGroupList({ + scimGroups: groups.map((group) => + buildScimGroup({ + groupId: group.id, + name: group.name, + members: [], + createdAt: group.createdAt, + updatedAt: group.updatedAt + }) + ), + startIndex, + limit + }); + } + + for await (const group of groups) { + const members = await userGroupMembershipDAL.findGroupMembershipsByGroupIdInOrg(group.id, orgId); + const scimGroup = buildScimGroup({ groupId: group.id, name: group.name, - members: [] // does this need to be populated? - }) - ); + members: members.map((member) => ({ + value: member.orgMembershipId, + display: `${member.firstName ?? ""} ${member.lastName ?? 
""}` + })), + createdAt: group.createdAt, + updatedAt: group.updatedAt + }); + scimGroups.push(scimGroup); + } return buildScimGroupList({ scimGroups, @@ -603,6 +698,43 @@ export const scimServiceFactory = ({ }); }; + const $syncNewMembersRoles = async (group: TGroups, members: TScimGroup["members"]) => { + // this function handles configuring newly provisioned users org membership if an external group mapping exists + + if (!members.length) return; + + const externalGroupMapping = await externalGroupOrgRoleMappingDAL.findOne({ + orgId: group.orgId, + groupName: group.name + }); + + // no mapping, user will have default org membership + if (!externalGroupMapping) return; + + // only get org memberships that are new (invites) + const newOrgMemberships = await orgMembershipDAL.find({ + status: "invited", + $in: { + id: members.map((member) => member.value) + } + }); + + if (!newOrgMemberships.length) return; + + // set new membership roles to group mapping value + await orgMembershipDAL.update( + { + $in: { + id: newOrgMemberships.map((membership) => membership.id) + } + }, + { + role: externalGroupMapping.role, + roleId: externalGroupMapping.roleId + } + ); + }; + const createScimGroup = async ({ displayName, orgId, members }: TCreateScimGroupDTO) => { const plan = await licenseService.getPlan(orgId); if (!plan.groups) @@ -656,6 +788,8 @@ export const scimServiceFactory = ({ tx }); + await $syncNewMembersRoles(group, members); + return { group, newMembers }; } @@ -675,7 +809,9 @@ export const scimServiceFactory = ({ members: orgMemberships.map(({ id, firstName, lastName }) => ({ value: id, display: `${firstName} ${lastName}` - })) + })), + createdAt: newGroup.group.createdAt, + updatedAt: newGroup.group.updatedAt }); }; @@ -698,10 +834,12 @@ export const scimServiceFactory = ({ }); } - const users = await groupDAL.findAllGroupMembers({ - orgId: group.orgId, - groupId: group.id - }); + const users = await groupDAL + .findAllGroupPossibleMembers({ + orgId: group.orgId, + groupId: group.id + }) + .then((g) => g.members); const orgMemberships = await orgDAL.findMembership({ [`${TableName.OrgMembership}.orgId` as "orgId"]: orgId, @@ -718,171 +856,22 @@ export const scimServiceFactory = ({ members: orgMemberships.map(({ id, firstName, lastName }) => ({ value: id, display: `${firstName} ${lastName}` - })) + })), + createdAt: group.createdAt, + updatedAt: group.updatedAt }); }; - const updateScimGroupNamePut = async ({ groupId, orgId, displayName, members }: TUpdateScimGroupNamePutDTO) => { - const plan = await licenseService.getPlan(orgId); - if (!plan.groups) - throw new BadRequestError({ - message: "Failed to update SCIM group due to plan restriction. Upgrade plan to update SCIM group." 
- }); - - const org = await orgDAL.findById(orgId); - if (!org) { - throw new ScimRequestError({ - detail: "Organization Not Found", - status: 404 - }); - } - - if (!org.scimEnabled) - throw new ScimRequestError({ - detail: "SCIM is disabled for the organization", - status: 403 - }); - - const updatedGroup = await groupDAL.transaction(async (tx) => { - const [group] = await groupDAL.update( - { - id: groupId, - orgId - }, - { - name: displayName - } - ); - - if (!group) { - throw new ScimRequestError({ - detail: "Group Not Found", - status: 404 - }); - } - - if (members) { - const orgMemberships = await orgMembershipDAL.find({ - $in: { - id: members.map((member) => member.value) - } - }); - - const membersIdsSet = new Set(orgMemberships.map((orgMembership) => orgMembership.userId)); - - const directMemberUserIds = ( - await userGroupMembershipDAL.find({ - groupId: group.id, - isPending: false - }) - ).map((membership) => membership.userId); - - const pendingGroupAdditionsUserIds = ( - await userGroupMembershipDAL.find({ - groupId: group.id, - isPending: true - }) - ).map((pendingGroupAddition) => pendingGroupAddition.userId); - - const allMembersUserIds = directMemberUserIds.concat(pendingGroupAdditionsUserIds); - const allMembersUserIdsSet = new Set(allMembersUserIds); - - const toAddUserIds = orgMemberships.filter((member) => !allMembersUserIdsSet.has(member.userId as string)); - const toRemoveUserIds = allMembersUserIds.filter((userId) => !membersIdsSet.has(userId)); - - if (toAddUserIds.length) { - await addUsersToGroupByUserIds({ - group, - userIds: toAddUserIds.map((member) => member.userId as string), - userDAL, - userGroupMembershipDAL, - orgDAL, - groupProjectDAL, - projectKeyDAL, - projectDAL, - projectBotDAL, - tx - }); - } - - if (toRemoveUserIds.length) { - await removeUsersFromGroupByUserIds({ - group, - userIds: toRemoveUserIds, - userDAL, - userGroupMembershipDAL, - groupProjectDAL, - projectKeyDAL, - tx - }); - } - } - - return group; + const $replaceGroupDAL = async ( + groupId: string, + orgId: string, + { displayName, members = [] }: { displayName: string; members: { value: string }[] } + ) => { + let group = await groupDAL.findOne({ + id: groupId, + orgId }); - return buildScimGroup({ - groupId: updatedGroup.id, - name: updatedGroup.name, - members - }); - }; - - // TODO: add support for add/remove op - const updateScimGroupNamePatch = async ({ groupId, orgId, operations }: TUpdateScimGroupNamePatchDTO) => { - const plan = await licenseService.getPlan(orgId); - if (!plan.groups) - throw new BadRequestError({ - message: "Failed to update SCIM group due to plan restriction. Upgrade plan to update SCIM group." 
- }); - - const org = await orgDAL.findById(orgId); - - if (!org) { - throw new ScimRequestError({ - detail: "Organization Not Found", - status: 404 - }); - } - - if (!org.scimEnabled) - throw new ScimRequestError({ - detail: "SCIM is disabled for the organization", - status: 403 - }); - - let group: TGroups | undefined; - for await (const operation of operations) { - switch (operation.op) { - case "replace": { - await groupDAL.update( - { - id: groupId, - orgId - }, - { - name: operation.value.displayName - } - ); - break; - } - case "add": { - // TODO - break; - } - case "remove": { - // TODO - break; - } - default: { - throw new ScimRequestError({ - detail: "Invalid Operation", - status: 400 - }); - } - } - } - if (!group) { throw new ScimRequestError({ detail: "Group Not Found", @@ -890,11 +879,178 @@ export const scimServiceFactory = ({ }); } + const updatedGroup = await groupDAL.transaction(async (tx) => { + if (group.name !== displayName) { + await externalGroupOrgRoleMappingDAL.update( + { + groupName: group.name, + orgId + }, + { + groupName: displayName + } + ); + + const [modifiedGroup] = await groupDAL.update( + { + id: groupId, + orgId + }, + { + name: displayName + } + ); + + group = modifiedGroup; + } + + const orgMemberships = members.length + ? await orgMembershipDAL.find({ + $in: { + id: members.map((member) => member.value) + } + }) + : []; + + const membersIdsSet = new Set(orgMemberships.map((orgMembership) => orgMembership.userId)); + const userGroupMembers = await userGroupMembershipDAL.find({ + groupId: group.id + }); + const directMemberUserIds = userGroupMembers.filter((el) => !el.isPending).map((membership) => membership.userId); + + const pendingGroupAdditionsUserIds = userGroupMembers + .filter((el) => el.isPending) + .map((pendingGroupAddition) => pendingGroupAddition.userId); + + const allMembersUserIds = directMemberUserIds.concat(pendingGroupAdditionsUserIds); + const allMembersUserIdsSet = new Set(allMembersUserIds); + + const toAddUserIds = orgMemberships.filter((member) => !allMembersUserIdsSet.has(member.userId as string)); + const toRemoveUserIds = allMembersUserIds.filter((userId) => !membersIdsSet.has(userId)); + + if (toAddUserIds.length) { + await addUsersToGroupByUserIds({ + group, + userIds: toAddUserIds.map((member) => member.userId as string), + userDAL, + userGroupMembershipDAL, + orgDAL, + groupProjectDAL, + projectKeyDAL, + projectDAL, + projectBotDAL, + tx + }); + } + + if (toRemoveUserIds.length) { + await removeUsersFromGroupByUserIds({ + group, + userIds: toRemoveUserIds, + userDAL, + userGroupMembershipDAL, + groupProjectDAL, + projectKeyDAL, + tx + }); + } + + return group; + }); + + await $syncNewMembersRoles(group, members); + + return updatedGroup; + }; + + const replaceScimGroup = async ({ groupId, orgId, displayName, members }: TUpdateScimGroupNamePutDTO) => { + const plan = await licenseService.getPlan(orgId); + if (!plan.groups) + throw new BadRequestError({ + message: "Failed to update SCIM group due to plan restriction. Upgrade plan to update SCIM group." 
+ }); + + const org = await orgDAL.findById(orgId); + if (!org) { + throw new ScimRequestError({ + detail: "Organization Not Found", + status: 404 + }); + } + + if (!org.scimEnabled) + throw new ScimRequestError({ + detail: "SCIM is disabled for the organization", + status: 403 + }); + + const updatedGroup = await $replaceGroupDAL(groupId, orgId, { displayName, members }); + return buildScimGroup({ + groupId: updatedGroup.id, + name: updatedGroup.name, + members, + updatedAt: updatedGroup.updatedAt, + createdAt: updatedGroup.createdAt + }); + }; + + const updateScimGroup = async ({ groupId, orgId, operations }: TUpdateScimGroupNamePatchDTO) => { + const plan = await licenseService.getPlan(orgId); + if (!plan.groups) + throw new BadRequestError({ + message: "Failed to update SCIM group due to plan restriction. Upgrade plan to update SCIM group." + }); + + const org = await orgDAL.findById(orgId); + + if (!org) { + throw new ScimRequestError({ + detail: "Organization Not Found", + status: 404 + }); + } + + if (!org.scimEnabled) + throw new ScimRequestError({ + detail: "SCIM is disabled for the organization", + status: 403 + }); + + const group = await groupDAL.findOne({ + id: groupId, + orgId + }); + + if (!group) { + throw new ScimRequestError({ + detail: "Group Not Found", + status: 404 + }); + } + + const members = await userGroupMembershipDAL.findGroupMembershipsByGroupIdInOrg(group.id, orgId); + const scimGroup = buildScimGroup({ groupId: group.id, name: group.name, - members: [] + members: members.map((member) => ({ + value: member.orgMembershipId + })), + createdAt: group.createdAt, + updatedAt: group.updatedAt }); + scimPatch(scimGroup, operations); + // remove members is a weird case not following scim convention + await $replaceGroupDAL(groupId, orgId, { displayName: scimGroup.displayName, members: scimGroup.members }); + + const updatedScimMembers = await userGroupMembershipDAL.findGroupMembershipsByGroupIdInOrg(group.id, orgId); + return { + ...scimGroup, + members: updatedScimMembers.map((member) => ({ + value: member.orgMembershipId, + display: `${member.firstName ?? ""} ${member.lastName ?? 
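Both `replaceScimGroup` and the PATCH handler above funnel into `$replaceGroupDAL`, which reconciles the desired member list against current membership with two set differences (PATCH-based member removal, as the comment notes, doesn't map cleanly onto generic patching, so a full replace is simpler). The core computation, as a toy example:

```ts
const desired = new Set(["userA", "userB"]); // from the SCIM payload
const current = new Set(["userB", "userC"]); // direct + pending group memberships

const toAdd = [...desired].filter((id) => !current.has(id));    // ["userA"]
const toRemove = [...current].filter((id) => !desired.has(id)); // ["userC"]
```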
""}` + })) + }; }; const deleteScimGroup = async ({ groupId, orgId }: TDeleteScimGroupDTO) => { @@ -970,8 +1126,8 @@ export const scimServiceFactory = ({ createScimGroup, getScimGroup, deleteScimGroup, - updateScimGroupNamePut, - updateScimGroupNamePatch, + replaceScimGroup, + updateScimGroup, fnValidateScimToken }; }; diff --git a/backend/src/ee/services/scim/scim-types.ts b/backend/src/ee/services/scim/scim-types.ts index 46ab90b8f0..5099e4ca07 100644 --- a/backend/src/ee/services/scim/scim-types.ts +++ b/backend/src/ee/services/scim/scim-types.ts @@ -1,3 +1,5 @@ +import { ScimPatchOperation } from "scim-patch"; + import { TOrgPermission } from "@app/lib/types"; export type TCreateScimTokenDTO = { @@ -34,29 +36,25 @@ export type TGetScimUserDTO = { export type TCreateScimUserDTO = { externalId: string; email?: string; - firstName: string; - lastName: string; + firstName?: string; + lastName?: string; orgId: string; }; export type TUpdateScimUserDTO = { orgMembershipId: string; orgId: string; - operations: { - op: string; - path?: string; - value?: - | string - | { - active: boolean; - }; - }[]; + operations: ScimPatchOperation[]; }; export type TReplaceScimUserDTO = { orgMembershipId: string; active: boolean; orgId: string; + email?: string; + firstName?: string; + lastName?: string; + externalId: string; }; export type TDeleteScimUserDTO = { @@ -66,8 +64,10 @@ export type TDeleteScimUserDTO = { export type TListScimGroupsDTO = { startIndex: number; + filter?: string; limit: number; orgId: string; + isMembersExcluded?: boolean; }; export type TListScimGroups = { @@ -106,28 +106,7 @@ export type TUpdateScimGroupNamePutDTO = { export type TUpdateScimGroupNamePatchDTO = { groupId: string; orgId: string; - operations: (TRemoveOp | TReplaceOp | TAddOp)[]; -}; - -type TReplaceOp = { - op: "replace"; - value: { - id: string; - displayName: string; - }; -}; - -type TRemoveOp = { - op: "remove"; - path: string; -}; - -type TAddOp = { - op: "add"; - value: { - value: string; - display?: string; - }; + operations: ScimPatchOperation[]; }; export type TDeleteScimGroupDTO = { @@ -156,10 +135,10 @@ export type TScimUser = { type: string; }[]; active: boolean; - groups: string[]; meta: { resourceType: string; - location: null; + created: Date; + lastModified: Date; }; }; @@ -169,10 +148,11 @@ export type TScimGroup = { displayName: string; members: { value: string; - display: string; + display?: string; }[]; meta: { resourceType: string; - location: null; + created: Date; + lastModified: Date; }; }; diff --git a/backend/src/ee/services/secret-approval-policy/secret-approval-policy-dal.ts b/backend/src/ee/services/secret-approval-policy/secret-approval-policy-dal.ts index eec3d9a1d1..bb77660aa6 100644 --- a/backend/src/ee/services/secret-approval-policy/secret-approval-policy-dal.ts +++ b/backend/src/ee/services/secret-approval-policy/secret-approval-policy-dal.ts @@ -1,69 +1,176 @@ import { Knex } from "knex"; import { TDbClient } from "@app/db"; -import { TableName, TSecretApprovalPolicies } from "@app/db/schemas"; +import { SecretApprovalPoliciesSchema, TableName, TSecretApprovalPolicies, TUsers } from "@app/db/schemas"; import { DatabaseError } from "@app/lib/errors"; -import { buildFindFilter, mergeOneToManyRelation, ormify, selectAllTableCols, TFindFilter } from "@app/lib/knex"; +import { buildFindFilter, ormify, selectAllTableCols, sqlNestRelationships, TFindFilter } from "@app/lib/knex"; + +import { ApproverType } from "../access-approval-policy/access-approval-policy-types"; export type 
TSecretApprovalPolicyDALFactory = ReturnType; export const secretApprovalPolicyDALFactory = (db: TDbClient) => { const secretApprovalPolicyOrm = ormify(db, TableName.SecretApprovalPolicy); - const sapFindQuery = (tx: Knex, filter: TFindFilter) => + const secretApprovalPolicyFindQuery = ( + tx: Knex, + filter: TFindFilter, + customFilter?: { + sapId?: string; + } + ) => tx(TableName.SecretApprovalPolicy) // eslint-disable-next-line .where(buildFindFilter(filter)) + .where((qb) => { + if (customFilter?.sapId) { + void qb.where(`${TableName.SecretApprovalPolicy}.id`, "=", customFilter.sapId); + } + }) .join(TableName.Environment, `${TableName.SecretApprovalPolicy}.envId`, `${TableName.Environment}.id`) - .join( + .leftJoin( TableName.SecretApprovalPolicyApprover, `${TableName.SecretApprovalPolicy}.id`, `${TableName.SecretApprovalPolicyApprover}.policyId` ) - .select(tx.ref("approverId").withSchema(TableName.SecretApprovalPolicyApprover)) - .select(tx.ref("name").withSchema(TableName.Environment).as("envName")) - .select(tx.ref("slug").withSchema(TableName.Environment).as("envSlug")) - .select(tx.ref("id").withSchema(TableName.Environment).as("envId")) - .select(tx.ref("projectId").withSchema(TableName.Environment)) + .leftJoin( + TableName.UserGroupMembership, + `${TableName.SecretApprovalPolicyApprover}.approverGroupId`, + `${TableName.UserGroupMembership}.groupId` + ) + .leftJoin( + db(TableName.Users).as("secretApprovalPolicyApproverUser"), + `${TableName.SecretApprovalPolicyApprover}.approverUserId`, + "secretApprovalPolicyApproverUser.id" + ) + .leftJoin(TableName.Users, `${TableName.UserGroupMembership}.userId`, `${TableName.Users}.id`) + .select( + tx.ref("id").withSchema("secretApprovalPolicyApproverUser").as("approverUserId"), + tx.ref("email").withSchema("secretApprovalPolicyApproverUser").as("approverEmail"), + tx.ref("firstName").withSchema("secretApprovalPolicyApproverUser").as("approverFirstName"), + tx.ref("username").withSchema("secretApprovalPolicyApproverUser").as("approverUsername"), + tx.ref("lastName").withSchema("secretApprovalPolicyApproverUser").as("approverLastName") + ) + .select( + tx.ref("approverGroupId").withSchema(TableName.SecretApprovalPolicyApprover), + tx.ref("userId").withSchema(TableName.UserGroupMembership).as("approverGroupUserId"), + tx.ref("email").withSchema(TableName.Users).as("approverGroupEmail"), + tx.ref("firstName").withSchema(TableName.Users).as("approverGroupFirstName"), + tx.ref("lastName").withSchema(TableName.Users).as("approverGroupLastName") + ) + .select( + tx.ref("name").withSchema(TableName.Environment).as("envName"), + tx.ref("slug").withSchema(TableName.Environment).as("envSlug"), + tx.ref("id").withSchema(TableName.Environment).as("envId"), + tx.ref("projectId").withSchema(TableName.Environment) + ) .select(selectAllTableCols(TableName.SecretApprovalPolicy)) .orderBy("createdAt", "asc"); const findById = async (id: string, tx?: Knex) => { try { - const doc = await sapFindQuery(tx || db, { + const doc = await secretApprovalPolicyFindQuery(tx || db.replicaNode(), { [`${TableName.SecretApprovalPolicy}.id` as "id"]: id }); - const formatedDoc = mergeOneToManyRelation( - doc, - "id", - ({ approverId, envId, envName: name, envSlug: slug, ...el }) => ({ - ...el, - envId, - environment: { id: envId, name, slug } + const formatedDoc = sqlNestRelationships({ + data: doc, + key: "id", + parentMapper: (data) => ({ + environment: { id: data.envId, name: data.envName, slug: data.envSlug }, + projectId: data.projectId, + 
...SecretApprovalPoliciesSchema.parse(data) }), - ({ approverId }) => approverId, - "approvers" - ); + childrenMapper: [ + { + key: "approverUserId", + label: "userApprovers" as const, + mapper: ({ + approverUserId: userId, + approverEmail: email, + approverFirstName: firstName, + approverLastName: lastName + }) => ({ + userId, + email, + firstName, + lastName + }) + }, + { + key: "approverGroupUserId", + label: "userApprovers" as const, + mapper: ({ + approverGroupUserId: userId, + approverGroupEmail: email, + approverGroupFirstName: firstName, + approverGroupLastName: lastName + }) => ({ + userId, + email, + firstName, + lastName + }) + } + ] + }); + return formatedDoc?.[0]; } catch (error) { throw new DatabaseError({ error, name: "FindById" }); } }; - const find = async (filter: TFindFilter, tx?: Knex) => { + const find = async ( + filter: TFindFilter, + customFilter?: { + sapId?: string; + }, + tx?: Knex + ) => { try { - const docs = await sapFindQuery(tx || db, filter); - const formatedDoc = mergeOneToManyRelation( - docs, - "id", - ({ approverId, envId, envName: name, envSlug: slug, ...el }) => ({ - ...el, - envId, - environment: { id: envId, name, slug } + const docs = await secretApprovalPolicyFindQuery(tx || db.replicaNode(), filter, customFilter); + const formatedDoc = sqlNestRelationships({ + data: docs, + key: "id", + parentMapper: (data) => ({ + environment: { id: data.envId, name: data.envName, slug: data.envSlug }, + projectId: data.projectId, + ...SecretApprovalPoliciesSchema.parse(data) }), - ({ approverId }) => approverId, - "approvers" - ); + childrenMapper: [ + { + key: "approverUserId", + label: "approvers" as const, + mapper: ({ approverUserId: id, approverUsername }) => ({ + type: ApproverType.User, + name: approverUsername, + id + }) + }, + { + key: "approverGroupId", + label: "approvers" as const, + mapper: ({ approverGroupId: id }) => ({ + type: ApproverType.Group, + id + }) + }, + { + key: "approverUserId", + label: "userApprovers" as const, + mapper: ({ approverUserId: userId }) => ({ + userId + }) + }, + { + key: "approverGroupUserId", + label: "userApprovers" as const, + mapper: ({ approverGroupUserId: userId }) => ({ + userId + }) + } + ] + }); return formatedDoc; } catch (error) { throw new DatabaseError({ error, name: "Find" }); diff --git a/backend/src/ee/services/secret-approval-policy/secret-approval-policy-service.ts b/backend/src/ee/services/secret-approval-policy/secret-approval-policy-service.ts index 8ddadb9bf6..cb34526854 100644 --- a/backend/src/ee/services/secret-approval-policy/secret-approval-policy-service.ts +++ b/backend/src/ee/services/secret-approval-policy/secret-approval-policy-service.ts @@ -1,19 +1,23 @@ -import { ForbiddenError, subject } from "@casl/ability"; +import { ForbiddenError } from "@casl/ability"; import picomatch from "picomatch"; import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service"; import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission"; -import { BadRequestError } from "@app/lib/errors"; +import { BadRequestError, NotFoundError } from "@app/lib/errors"; +import { removeTrailingSlash } from "@app/lib/fn"; import { containsGlobPatterns } from "@app/lib/picomatch"; import { TProjectEnvDALFactory } from "@app/services/project-env/project-env-dal"; -import { TProjectMembershipDALFactory } from "@app/services/project-membership/project-membership-dal"; +import { TUserDALFactory } from "@app/services/user/user-dal"; +import { 
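The DAL above swaps `mergeOneToManyRelation` for `sqlNestRelationships`: the left joins yield one flattened row per policy/approver pair, and the helper is assumed to group rows by `key`, build each parent once with `parentMapper`, and dedupe every child list by its `childrenMapper` key. Roughly, as a comment-only illustration:

```ts
// Two flattened rows for the same policy...
const rows = [
  { id: "policy-1", envName: "dev", approverUserId: "user-1" },
  { id: "policy-1", envName: "dev", approverUserId: "user-2" }
];
// ...nest into a single parent with a deduped child list, e.g.:
// [{ id: "policy-1", environment: { name: "dev" },
//    userApprovers: [{ userId: "user-1" }, { userId: "user-2" }] }]
```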
ApproverType } from "../access-approval-policy/access-approval-policy-types"; +import { TLicenseServiceFactory } from "../license/license-service"; import { TSecretApprovalPolicyApproverDALFactory } from "./secret-approval-policy-approver-dal"; import { TSecretApprovalPolicyDALFactory } from "./secret-approval-policy-dal"; import { TCreateSapDTO, TDeleteSapDTO, TGetBoardSapDTO, + TGetSapByIdDTO, TListSapDTO, TUpdateSapDTO } from "./secret-approval-policy-types"; @@ -27,8 +31,9 @@ type TSecretApprovalPolicyServiceFactoryDep = { permissionService: Pick; secretApprovalPolicyDAL: TSecretApprovalPolicyDALFactory; projectEnvDAL: Pick; + userDAL: Pick; secretApprovalPolicyApproverDAL: TSecretApprovalPolicyApproverDALFactory; - projectMembershipDAL: Pick; + licenseService: Pick; }; export type TSecretApprovalPolicyServiceFactory = ReturnType; @@ -38,7 +43,8 @@ export const secretApprovalPolicyServiceFactory = ({ permissionService, secretApprovalPolicyApproverDAL, projectEnvDAL, - projectMembershipDAL + userDAL, + licenseService }: TSecretApprovalPolicyServiceFactoryDep) => { const createSecretApprovalPolicy = async ({ name, @@ -50,9 +56,22 @@ export const secretApprovalPolicyServiceFactory = ({ approvers, projectId, secretPath, - environment + environment, + enforcementLevel }: TCreateSapDTO) => { - if (approvals > approvers.length) + const groupApprovers = approvers + ?.filter((approver) => approver.type === ApproverType.Group) + .map((approver) => approver.id); + const userApprovers = approvers + ?.filter((approver) => approver.type === ApproverType.User) + .map((approver) => approver.id) + .filter(Boolean) as string[]; + + const userApproverNames = approvers + .map((approver) => (approver.type === ApproverType.User ? approver.name : undefined)) + .filter(Boolean) as string[]; + + if (!groupApprovers.length && approvals > approvers.length) throw new BadRequestError({ message: "Approvals cannot be greater than approvers" }); const { permission } = await permissionService.getProjectPermission( @@ -66,15 +85,20 @@ export const secretApprovalPolicyServiceFactory = ({ ProjectPermissionActions.Create, ProjectPermissionSub.SecretApproval ); - const env = await projectEnvDAL.findOne({ slug: environment, projectId }); - if (!env) throw new BadRequestError({ message: "Environment not found" }); - const secretApprovers = await projectMembershipDAL.find({ - projectId, - $in: { id: approvers } - }); - if (secretApprovers.length !== approvers.length) - throw new BadRequestError({ message: "Approver not found in project" }); + const plan = await licenseService.getPlan(actorOrgId); + if (!plan.secretApproval) { + throw new BadRequestError({ + message: + "Failed to create secret approval policy due to plan restriction. Upgrade plan to create secret approval policy." 
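The policy service now takes a mixed approver list and partitions it by `ApproverType`, with user approvers referenced either by id or by a username that is resolved against `userDAL` further down. The split pattern in isolation (string literals stand in for the enum values):

```ts
type TApprover = { type: "user" | "group"; id?: string; name?: string };

const approvers: TApprover[] = [
  { type: "group", id: "group-1" },
  { type: "user", id: "user-1" },
  { type: "user", name: "jane@acme.com" } // username, resolved to an id later
];

const groupApproverIds = approvers.filter((a) => a.type === "group").map((a) => a.id) as string[];
const userApproverIds = approvers.filter((a) => a.type === "user" && a.id).map((a) => a.id) as string[];
const userApproverNames = approvers.filter((a) => a.type === "user" && a.name).map((a) => a.name) as string[];
```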
+ }); + } + + const env = await projectEnvDAL.findOne({ slug: environment, projectId }); + if (!env) + throw new NotFoundError({ + message: `Environment with slug '${environment}' not found in project with ID ${projectId}` + }); const secretApproval = await secretApprovalPolicyDAL.transaction(async (tx) => { const doc = await secretApprovalPolicyDAL.create( @@ -82,19 +106,53 @@ export const secretApprovalPolicyServiceFactory = ({ envId: env.id, approvals, secretPath, - name + name, + enforcementLevel }, tx ); + + let userApproverIds = userApprovers; + if (userApproverNames.length) { + const approverUsers = await userDAL.find( + { + $in: { + username: userApproverNames + } + }, + { tx } + ); + + const approverNamesFromDb = approverUsers.map((user) => user.username); + const invalidUsernames = userApproverNames?.filter((username) => !approverNamesFromDb.includes(username)); + + if (invalidUsernames?.length) { + throw new BadRequestError({ + message: `Invalid approver user: ${invalidUsernames.join(", ")}` + }); + } + + userApproverIds = userApproverIds.concat(approverUsers.map((user) => user.id)); + } + await secretApprovalPolicyApproverDAL.insertMany( - secretApprovers.map(({ id }) => ({ - approverId: id, + userApproverIds.map((approverUserId) => ({ + approverUserId, + policyId: doc.id + })), + tx + ); + + await secretApprovalPolicyApproverDAL.insertMany( + groupApprovers.map((approverGroupId) => ({ + approverGroupId, policyId: doc.id })), tx ); return doc; }); + return { ...secretApproval, environment: env, projectId }; }; @@ -107,10 +165,27 @@ export const secretApprovalPolicyServiceFactory = ({ actorOrgId, actorAuthMethod, approvals, - secretPolicyId + secretPolicyId, + enforcementLevel }: TUpdateSapDTO) => { + const groupApprovers = approvers + ?.filter((approver) => approver.type === ApproverType.Group) + .map((approver) => approver.id); + const userApprovers = approvers + ?.filter((approver) => approver.type === ApproverType.User) + .map((approver) => approver.id) + .filter(Boolean) as string[]; + + const userApproverNames = approvers + .map((approver) => (approver.type === ApproverType.User ? approver.name : undefined)) + .filter(Boolean) as string[]; + const secretApprovalPolicy = await secretApprovalPolicyDAL.findById(secretPolicyId); - if (!secretApprovalPolicy) throw new BadRequestError({ message: "Secret approval policy not found" }); + if (!secretApprovalPolicy) { + throw new NotFoundError({ + message: `Secret approval policy with ID '${secretPolicyId}' not found` + }); + } const { permission } = await permissionService.getProjectPermission( actor, @@ -121,37 +196,71 @@ export const secretApprovalPolicyServiceFactory = ({ ); ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Edit, ProjectPermissionSub.SecretApproval); + const plan = await licenseService.getPlan(actorOrgId); + if (!plan.secretApproval) { + throw new BadRequestError({ + message: + "Failed to update secret approval policy due to plan restriction. Upgrade plan to update secret approval policy." 
+ }); + } + const updatedSap = await secretApprovalPolicyDAL.transaction(async (tx) => { const doc = await secretApprovalPolicyDAL.updateById( secretApprovalPolicy.id, { approvals, secretPath, - name + name, + enforcementLevel }, tx ); + + await secretApprovalPolicyApproverDAL.delete({ policyId: doc.id }, tx); + if (approvers) { - const secretApprovers = await projectMembershipDAL.find( - { - projectId: secretApprovalPolicy.projectId, - $in: { id: approvers } - }, - { tx } - ); - if (secretApprovers.length !== approvers.length) - throw new BadRequestError({ message: "Approver not found in project" }); - if (doc.approvals > secretApprovers.length) - throw new BadRequestError({ message: "Approvals cannot be greater than approvers" }); - await secretApprovalPolicyApproverDAL.delete({ policyId: doc.id }, tx); + let userApproverIds = userApprovers; + if (userApproverNames) { + const approverUsers = await userDAL.find( + { + $in: { + username: userApproverNames + } + }, + { tx } + ); + + const approverNamesFromDb = approverUsers.map((user) => user.username); + const invalidUsernames = userApproverNames?.filter((username) => !approverNamesFromDb.includes(username)); + + if (invalidUsernames?.length) { + throw new BadRequestError({ + message: `Invalid approver user: ${invalidUsernames.join(", ")}` + }); + } + + userApproverIds = userApproverIds.concat(approverUsers.map((user) => user.id)); + } + await secretApprovalPolicyApproverDAL.insertMany( - secretApprovers.map(({ id }) => ({ - approverId: id, + userApproverIds.map((approverUserId) => ({ + approverUserId, policyId: doc.id })), tx ); } + + if (groupApprovers) { + await secretApprovalPolicyApproverDAL.insertMany( + groupApprovers.map((approverGroupId) => ({ + approverGroupId, + policyId: doc.id + })), + tx + ); + } + return doc; }); return { @@ -169,7 +278,8 @@ actorOrgId }: TDeleteSapDTO) => { const sapPolicy = await secretApprovalPolicyDAL.findById(secretPolicyId); - if (!sapPolicy) throw new BadRequestError({ message: "Secret approval policy not found" }); + if (!sapPolicy) + throw new NotFoundError({ message: `Secret approval policy with ID '${secretPolicyId}' not found` }); const { permission } = await permissionService.getProjectPermission( actor, @@ -183,6 +293,14 @@ ProjectPermissionSub.SecretApproval ); + const plan = await licenseService.getPlan(actorOrgId); + if (!plan.secretApproval) { + throw new BadRequestError({ + message: + "Failed to delete secret approval policy due to plan restriction. Upgrade plan to delete secret approval policy."
+ }); + } + await secretApprovalPolicyDAL.deleteById(secretPolicyId); return sapPolicy; }; @@ -207,9 +325,14 @@ export const secretApprovalPolicyServiceFactory = ({ return sapPolicies; }; - const getSecretApprovalPolicy = async (projectId: string, environment: string, secretPath: string) => { + const getSecretApprovalPolicy = async (projectId: string, environment: string, path: string) => { + const secretPath = removeTrailingSlash(path); const env = await projectEnvDAL.findOne({ slug: environment, projectId }); - if (!env) throw new BadRequestError({ message: "Environment not found" }); + if (!env) { + throw new NotFoundError({ + message: `Environment with slug '${environment}' not found in project with ID ${projectId}` + }); + } const policies = await secretApprovalPolicyDAL.find({ envId: env.id }); if (!policies.length) return; @@ -233,18 +356,37 @@ export const secretApprovalPolicyServiceFactory = ({ environment, secretPath }: TGetBoardSapDTO) => { + await permissionService.getProjectPermission(actor, actorId, projectId, actorAuthMethod, actorOrgId); + + return getSecretApprovalPolicy(projectId, environment, secretPath); + }; + + const getSecretApprovalPolicyById = async ({ + actorId, + actor, + actorOrgId, + actorAuthMethod, + sapId + }: TGetSapByIdDTO) => { + const [sapPolicy] = await secretApprovalPolicyDAL.find({}, { sapId }); + + if (!sapPolicy) { + throw new NotFoundError({ + message: `Secret approval policy with ID '${sapId}' not found` + }); + } + const { permission } = await permissionService.getProjectPermission( actor, actorId, - projectId, + sapPolicy.projectId, actorAuthMethod, actorOrgId ); - ForbiddenError.from(permission).throwUnlessCan( - ProjectPermissionActions.Read, - subject(ProjectPermissionSub.Secrets, { secretPath, environment }) - ); - return getSecretApprovalPolicy(projectId, environment, secretPath); + + ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.SecretApproval); + + return sapPolicy; }; return { @@ -253,6 +395,7 @@ export const secretApprovalPolicyServiceFactory = ({ deleteSecretApprovalPolicy, getSecretApprovalPolicy, getSecretApprovalPolicyByProjectId, - getSecretApprovalPolicyOfFolder + getSecretApprovalPolicyOfFolder, + getSecretApprovalPolicyById }; }; diff --git a/backend/src/ee/services/secret-approval-policy/secret-approval-policy-types.ts b/backend/src/ee/services/secret-approval-policy/secret-approval-policy-types.ts index 2ddd9b51be..863f1c926a 100644 --- a/backend/src/ee/services/secret-approval-policy/secret-approval-policy-types.ts +++ b/backend/src/ee/services/secret-approval-policy/secret-approval-policy-types.ts @@ -1,20 +1,24 @@ -import { TProjectPermission } from "@app/lib/types"; +import { EnforcementLevel, TProjectPermission } from "@app/lib/types"; + +import { ApproverType } from "../access-approval-policy/access-approval-policy-types"; export type TCreateSapDTO = { approvals: number; secretPath?: string | null; environment: string; - approvers: string[]; + approvers: ({ type: ApproverType.Group; id: string } | { type: ApproverType.User; id?: string; name?: string })[]; projectId: string; name: string; + enforcementLevel: EnforcementLevel; } & Omit; export type TUpdateSapDTO = { secretPolicyId: string; approvals?: number; secretPath?: string | null; - approvers: string[]; + approvers: ({ type: ApproverType.Group; id: string } | { type: ApproverType.User; id?: string; name?: string })[]; name?: string; + enforcementLevel?: EnforcementLevel; } & Omit; export type TDeleteSapDTO = { @@ 
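For reference, a payload exercising the reshaped create DTO might look like the following; all identifiers are placeholders, and the object is typed loosely since the real TCreateSapDTO also carries the actor fields from TProjectPermission:

```ts
// Hypothetical create-policy payload: approvers are a discriminated union of
// groups and users (by id or by username), and the policy carries an
// enforcement level. Every identifier below is a placeholder.
const createPolicyPayload = {
  projectId: "project-id-placeholder",
  environment: "staging",
  secretPath: "/api",
  name: "staging-change-policy",
  approvals: 2,
  enforcementLevel: "soft", // EnforcementLevel.Soft in the real enum
  approvers: [
    { type: "group", id: "group-id-placeholder" },
    { type: "user", id: "user-id-placeholder" },
    { type: "user", name: "jane.doe" } // resolved to a user id server-side
  ]
};
```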
-23,6 +27,8 @@ export type TDeleteSapDTO = { export type TListSapDTO = TProjectPermission; +export type TGetSapByIdDTO = Omit & { sapId: string }; + export type TGetBoardSapDTO = { projectId: string; environment: string; diff --git a/backend/src/ee/services/secret-approval-request/secret-approval-request-dal.ts b/backend/src/ee/services/secret-approval-request/secret-approval-request-dal.ts index 05fe1b8f8b..803b9464c2 100644 --- a/backend/src/ee/services/secret-approval-request/secret-approval-request-dal.ts +++ b/backend/src/ee/services/secret-approval-request/secret-approval-request-dal.ts @@ -5,7 +5,8 @@ import { SecretApprovalRequestsSchema, TableName, TSecretApprovalRequests, - TSecretApprovalRequestsSecrets + TSecretApprovalRequestsSecrets, + TUsers } from "@app/db/schemas"; import { DatabaseError } from "@app/lib/errors"; import { ormify, selectAllTableCols, sqlNestRelationships, stripUndefinedInWhere, TFindFilter } from "@app/lib/knex"; @@ -16,7 +17,7 @@ export type TSecretApprovalRequestDALFactory = ReturnType { `${TableName.SecretApprovalRequest}.policyId`, `${TableName.SecretApprovalPolicy}.id` ) - .join( + .leftJoin( + db(TableName.Users).as("statusChangedByUser"), + `${TableName.SecretApprovalRequest}.statusChangedByUserId`, + `statusChangedByUser.id` + ) + .join( + db(TableName.Users).as("committerUser"), + `${TableName.SecretApprovalRequest}.committerUserId`, + `committerUser.id` + ) + .leftJoin( TableName.SecretApprovalPolicyApprover, `${TableName.SecretApprovalPolicy}.id`, `${TableName.SecretApprovalPolicyApprover}.policyId` ) + .leftJoin( + db(TableName.Users).as("secretApprovalPolicyApproverUser"), + `${TableName.SecretApprovalPolicyApprover}.approverUserId`, + "secretApprovalPolicyApproverUser.id" + ) + .leftJoin( + TableName.UserGroupMembership, + `${TableName.SecretApprovalPolicyApprover}.approverGroupId`, + `${TableName.UserGroupMembership}.groupId` + ) + .leftJoin( + db(TableName.Users).as("secretApprovalPolicyGroupApproverUser"), + `${TableName.UserGroupMembership}.userId`, + `secretApprovalPolicyGroupApproverUser.id` + ) .leftJoin( TableName.SecretApprovalRequestReviewer, `${TableName.SecretApprovalRequest}.id`, `${TableName.SecretApprovalRequestReviewer}.requestId` ) + .leftJoin( + db(TableName.Users).as("secretApprovalReviewerUser"), + `${TableName.SecretApprovalRequestReviewer}.reviewerUserId`, + `secretApprovalReviewerUser.id` + ) .select(selectAllTableCols(TableName.SecretApprovalRequest)) .select( - tx.ref("member").withSchema(TableName.SecretApprovalRequestReviewer).as("reviewerMemberId"), + tx.ref("approverUserId").withSchema(TableName.SecretApprovalPolicyApprover), + tx.ref("userId").withSchema(TableName.UserGroupMembership).as("approverGroupUserId"), + tx.ref("email").withSchema("secretApprovalPolicyApproverUser").as("approverEmail"), + tx.ref("email").withSchema("secretApprovalPolicyGroupApproverUser").as("approverGroupEmail"), + tx.ref("username").withSchema("secretApprovalPolicyApproverUser").as("approverUsername"), + tx.ref("username").withSchema("secretApprovalPolicyGroupApproverUser").as("approverGroupUsername"), + tx.ref("firstName").withSchema("secretApprovalPolicyApproverUser").as("approverFirstName"), + tx.ref("firstName").withSchema("secretApprovalPolicyGroupApproverUser").as("approverGroupFirstName"), + tx.ref("lastName").withSchema("secretApprovalPolicyApproverUser").as("approverLastName"), + tx.ref("lastName").withSchema("secretApprovalPolicyGroupApproverUser").as("approverGroupLastName"), + 
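The find query above joins the Users table several times under different aliases (committer, status changer, direct approver, group approver, reviewer) and then disambiguates columns per alias. A reduced knex sketch of that pattern; the table and column names here are illustrative, not the application's schema:

```ts
import knex from "knex";

const db = knex({ client: "pg", connection: process.env.DB_CONNECTION_URI });

// Join the same table twice under two aliases, then address columns through
// the alias. withSchema() is the same trick the DAL uses: knex renders the
// "schema" qualifier as the alias, producing "committerUser"."email".
const query = db("secret_approval_requests")
  .join("users as committerUser", "secret_approval_requests.committerUserId", "committerUser.id")
  .leftJoin(
    "users as statusChangedByUser",
    "secret_approval_requests.statusChangedByUserId",
    "statusChangedByUser.id"
  )
  .select(
    "secret_approval_requests.*",
    db.ref("email").withSchema("committerUser").as("committerUserEmail"),
    db.ref("email").withSchema("statusChangedByUser").as("statusChangedByUserEmail")
  );
```

A left join is used for the status changer because that column stays null until someone closes or reopens the request, while the committer join can be strict.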
tx.ref("email").withSchema("statusChangedByUser").as("statusChangedByUserEmail"), + tx.ref("username").withSchema("statusChangedByUser").as("statusChangedByUserUsername"), + tx.ref("firstName").withSchema("statusChangedByUser").as("statusChangedByUserFirstName"), + tx.ref("lastName").withSchema("statusChangedByUser").as("statusChangedByUserLastName"), + tx.ref("email").withSchema("committerUser").as("committerUserEmail"), + tx.ref("username").withSchema("committerUser").as("committerUserUsername"), + tx.ref("firstName").withSchema("committerUser").as("committerUserFirstName"), + tx.ref("lastName").withSchema("committerUser").as("committerUserLastName"), + tx.ref("reviewerUserId").withSchema(TableName.SecretApprovalRequestReviewer), tx.ref("status").withSchema(TableName.SecretApprovalRequestReviewer).as("reviewerStatus"), + tx.ref("email").withSchema("secretApprovalReviewerUser").as("reviewerEmail"), + tx.ref("username").withSchema("secretApprovalReviewerUser").as("reviewerUsername"), + tx.ref("firstName").withSchema("secretApprovalReviewerUser").as("reviewerFirstName"), + tx.ref("lastName").withSchema("secretApprovalReviewerUser").as("reviewerLastName"), tx.ref("id").withSchema(TableName.SecretApprovalPolicy).as("policyId"), tx.ref("name").withSchema(TableName.SecretApprovalPolicy).as("policyName"), tx.ref("projectId").withSchema(TableName.Environment), tx.ref("slug").withSchema(TableName.Environment).as("environment"), tx.ref("secretPath").withSchema(TableName.SecretApprovalPolicy).as("policySecretPath"), - tx.ref("approvals").withSchema(TableName.SecretApprovalPolicy).as("policyApprovals"), - tx.ref("approverId").withSchema(TableName.SecretApprovalPolicyApprover) + tx.ref("envId").withSchema(TableName.SecretApprovalPolicy).as("policyEnvId"), + tx.ref("enforcementLevel").withSchema(TableName.SecretApprovalPolicy).as("policyEnforcementLevel"), + tx.ref("approvals").withSchema(TableName.SecretApprovalPolicy).as("policyApprovals") ); const findById = async (id: string, tx?: Knex) => { try { - const sql = findQuery({ [`${TableName.SecretApprovalRequest}.id` as "id"]: id }, tx || db); + const sql = findQuery({ [`${TableName.SecretApprovalRequest}.id` as "id"]: id }, tx || db.replicaNode()); const docs = await sql; const formatedDoc = sqlNestRelationships({ data: docs, @@ -71,20 +125,78 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => { ...SecretApprovalRequestsSchema.parse(el), projectId: el.projectId, environment: el.environment, + statusChangedByUser: el.statusChangedByUserId + ? { + userId: el.statusChangedByUserId, + email: el.statusChangedByUserEmail, + firstName: el.statusChangedByUserFirstName, + lastName: el.statusChangedByUserLastName, + username: el.statusChangedByUserUsername + } + : undefined, + committerUser: { + userId: el.committerUserId, + email: el.committerUserEmail, + firstName: el.committerUserFirstName, + lastName: el.committerUserLastName, + username: el.committerUserUsername + }, policy: { id: el.policyId, name: el.policyName, approvals: el.policyApprovals, - secretPath: el.policySecretPath + secretPath: el.policySecretPath, + enforcementLevel: el.policyEnforcementLevel, + envId: el.policyEnvId } }), childrenMapper: [ { - key: "reviewerMemberId", + key: "reviewerUserId", label: "reviewers" as const, - mapper: ({ reviewerMemberId: member, reviewerStatus: status }) => (member ? 
{ member, status } : undefined) + mapper: ({ + reviewerUserId: userId, + reviewerStatus: status, + reviewerEmail: email, + reviewerLastName: lastName, + reviewerUsername: username, + reviewerFirstName: firstName + }) => (userId ? { userId, status, email, firstName, lastName, username } : undefined) }, - { key: "approverId", label: "approvers" as const, mapper: ({ approverId }) => approverId } + { + key: "approverUserId", + label: "approvers" as const, + mapper: ({ + approverUserId: userId, + approverEmail: email, + approverUsername: username, + approverLastName: lastName, + approverFirstName: firstName + }) => ({ + userId, + email, + firstName, + lastName, + username + }) + }, + { + key: "approverGroupUserId", + label: "approvers" as const, + mapper: ({ + approverGroupUserId: userId, + approverGroupEmail: email, + approverGroupUsername: username, + approverGroupLastName: lastName, + approverGroupFirstName: firstName + }) => ({ + userId, + email, + firstName, + lastName, + username + }) + } ] }); if (!formatedDoc?.[0]) return; @@ -97,12 +209,12 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => { } }; - const findProjectRequestCount = async (projectId: string, membershipId: string, tx?: Knex) => { + const findProjectRequestCount = async (projectId: string, userId: string, tx?: Knex) => { try { const docs = await (tx || db) .with( "temp", - (tx || db)(TableName.SecretApprovalRequest) + (tx || db.replicaNode())(TableName.SecretApprovalRequest) .join(TableName.SecretFolder, `${TableName.SecretApprovalRequest}.folderId`, `${TableName.SecretFolder}.id`) .join(TableName.Environment, `${TableName.SecretFolder}.envId`, `${TableName.Environment}.id`) .join( @@ -114,8 +226,8 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => { .andWhere( (bd) => void bd - .where(`${TableName.SecretApprovalPolicyApprover}.approverId`, membershipId) - .orWhere(`${TableName.SecretApprovalRequest}.committerId`, membershipId) + .where(`${TableName.SecretApprovalPolicyApprover}.approverUserId`, userId) + .orWhere(`${TableName.SecretApprovalRequest}.committerUserId`, userId) ) .select("status", `${TableName.SecretApprovalRequest}.id`) .groupBy(`${TableName.SecretApprovalRequest}.id`, "status") @@ -142,13 +254,13 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => { }; const findByProjectId = async ( - { status, limit = 20, offset = 0, projectId, committer, environment, membershipId }: TFindQueryFilter, + { status, limit = 20, offset = 0, projectId, committer, environment, userId }: TFindQueryFilter, tx?: Knex ) => { try { // akhilmhdh: If ever u wanted a 1 to so many relationship connected with pagination // this is the place u wanna look at. 
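The comment ending the block above points at the window-function pagination used next: each request row is ranked once with DENSE_RANK(), and the page is cut on the rank inside a CTE, so the row fan-out from one-to-many joins cannot distort LIMIT/OFFSET. A reduced, self-contained sketch of the idea (names illustrative):

```ts
import knex from "knex";

const db = knex({ client: "pg", connection: process.env.DB_CONNECTION_URI });
const offset = 0;
const limit = 20;

// Rank each parent row once; joined child rows inherit the same rank.
const ranked = db("secret_approval_requests")
  .leftJoin("reviewers", "reviewers.requestId", "secret_approval_requests.id")
  .select("secret_approval_requests.*", "reviewers.status as reviewerStatus")
  .select(db.raw('DENSE_RANK() OVER (ORDER BY "secret_approval_requests"."id" DESC) as rank'));

// Page on the rank in a CTE instead of LIMIT/OFFSET over the joined rows.
const page = db
  .with("w", ranked)
  .select("*")
  .from("w")
  .where("w.rank", ">=", offset)
  .andWhere("w.rank", "<", offset + limit);
```

Because every child row of a request carries the request's rank, a page boundary never splits a request; the flat rows are then folded back into nested objects, here via sqlNestRelationships.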
- const query = (tx || db)(TableName.SecretApprovalRequest) + const query = (tx || db.replicaNode())(TableName.SecretApprovalRequest) .join(TableName.SecretFolder, `${TableName.SecretApprovalRequest}.folderId`, `${TableName.SecretFolder}.id`) .join(TableName.Environment, `${TableName.SecretFolder}.envId`, `${TableName.Environment}.id`) .join( @@ -156,11 +268,21 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => { `${TableName.SecretApprovalRequest}.policyId`, `${TableName.SecretApprovalPolicy}.id` ) - .join( + .leftJoin( TableName.SecretApprovalPolicyApprover, `${TableName.SecretApprovalPolicy}.id`, `${TableName.SecretApprovalPolicyApprover}.policyId` ) + .leftJoin( + TableName.UserGroupMembership, + `${TableName.SecretApprovalPolicyApprover}.approverGroupId`, + `${TableName.UserGroupMembership}.groupId` + ) + .join( + db(TableName.Users).as("committerUser"), + `${TableName.SecretApprovalRequest}.committerUserId`, + `committerUser.id` + ) .leftJoin( TableName.SecretApprovalRequestReviewer, `${TableName.SecretApprovalRequest}.id`, @@ -176,20 +298,22 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => { projectId, [`${TableName.Environment}.slug` as "slug"]: environment, [`${TableName.SecretApprovalRequest}.status`]: status, - committerId: committer + committerUserId: committer }) ) .andWhere( (bd) => void bd - .where(`${TableName.SecretApprovalPolicyApprover}.approverId`, membershipId) - .orWhere(`${TableName.SecretApprovalRequest}.committerId`, membershipId) + .where(`${TableName.SecretApprovalPolicyApprover}.approverUserId`, userId) + .orWhere(`${TableName.SecretApprovalRequest}.committerUserId`, userId) + .orWhere(`${TableName.UserGroupMembership}.userId`, userId) ) .select(selectAllTableCols(TableName.SecretApprovalRequest)) .select( db.ref("projectId").withSchema(TableName.Environment), db.ref("slug").withSchema(TableName.Environment).as("environment"), - db.ref("id").withSchema(TableName.SecretApprovalRequestReviewer).as("reviewerMemberId"), + db.ref("id").withSchema(TableName.SecretApprovalRequestReviewer).as("reviewerId"), + db.ref("reviewerUserId").withSchema(TableName.SecretApprovalRequestReviewer), db.ref("status").withSchema(TableName.SecretApprovalRequestReviewer).as("reviewerStatus"), db.ref("id").withSchema(TableName.SecretApprovalPolicy).as("policyId"), db.ref("name").withSchema(TableName.SecretApprovalPolicy).as("policyName"), @@ -200,8 +324,14 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => { `DENSE_RANK() OVER (partition by ${TableName.Environment}."projectId" ORDER BY ${TableName.SecretApprovalRequest}."id" DESC) as rank` ), db.ref("secretPath").withSchema(TableName.SecretApprovalPolicy).as("policySecretPath"), + db.ref("enforcementLevel").withSchema(TableName.SecretApprovalPolicy).as("policyEnforcementLevel"), db.ref("approvals").withSchema(TableName.SecretApprovalPolicy).as("policyApprovals"), - db.ref("approverId").withSchema(TableName.SecretApprovalPolicyApprover) + db.ref("approverUserId").withSchema(TableName.SecretApprovalPolicyApprover), + db.ref("userId").withSchema(TableName.UserGroupMembership).as("approverGroupUserId"), + db.ref("email").withSchema("committerUser").as("committerUserEmail"), + db.ref("username").withSchema("committerUser").as("committerUserUsername"), + db.ref("firstName").withSchema("committerUser").as("committerUserFirstName"), + db.ref("lastName").withSchema("committerUser").as("committerUserLastName") ) .orderBy("createdAt", "desc"); @@ -222,19 +352,28 @@ export const 
secretApprovalRequestDALFactory = (db: TDbClient) => { id: el.policyId, name: el.policyName, approvals: el.policyApprovals, - secretPath: el.policySecretPath + secretPath: el.policySecretPath, + enforcementLevel: el.policyEnforcementLevel + }, + committerUser: { + userId: el.committerUserId, + email: el.committerUserEmail, + firstName: el.committerUserFirstName, + lastName: el.committerUserLastName, + username: el.committerUserUsername } }), childrenMapper: [ { - key: "reviewerMemberId", + key: "reviewerId", label: "reviewers" as const, - mapper: ({ reviewerMemberId: member, reviewerStatus: s }) => (member ? { member, status: s } : undefined) + mapper: ({ reviewerUserId, reviewerStatus: s }) => + reviewerUserId ? { userId: reviewerUserId, status: s } : undefined }, { - key: "approverId", + key: "approverUserId", label: "approvers" as const, - mapper: ({ approverId }) => approverId + mapper: ({ approverUserId }) => ({ userId: approverUserId }) }, { key: "commitId", @@ -244,6 +383,160 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => { id, secretId }) + }, + { + key: "approverGroupUserId", + label: "approvers" as const, + mapper: ({ approverGroupUserId }) => ({ userId: approverGroupUserId }) + } + ] + }); + return formatedDoc.map((el) => ({ + ...el, + policy: { ...el.policy, approvers: el.approvers } + })); + } catch (error) { + throw new DatabaseError({ error, name: "FindSAR" }); + } + }; + + const findByProjectIdBridgeSecretV2 = async ( + { status, limit = 20, offset = 0, projectId, committer, environment, userId }: TFindQueryFilter, + tx?: Knex + ) => { + try { + // akhilmhdh: If ever u wanted a 1 to so many relationship connected with pagination + // this is the place u wanna look at. + const query = (tx || db.replicaNode())(TableName.SecretApprovalRequest) + .join(TableName.SecretFolder, `${TableName.SecretApprovalRequest}.folderId`, `${TableName.SecretFolder}.id`) + .join(TableName.Environment, `${TableName.SecretFolder}.envId`, `${TableName.Environment}.id`) + .join( + TableName.SecretApprovalPolicy, + `${TableName.SecretApprovalRequest}.policyId`, + `${TableName.SecretApprovalPolicy}.id` + ) + .leftJoin( + TableName.SecretApprovalPolicyApprover, + `${TableName.SecretApprovalPolicy}.id`, + `${TableName.SecretApprovalPolicyApprover}.policyId` + ) + .leftJoin( + TableName.UserGroupMembership, + `${TableName.SecretApprovalPolicyApprover}.approverGroupId`, + `${TableName.UserGroupMembership}.groupId` + ) + .join( + db(TableName.Users).as("committerUser"), + `${TableName.SecretApprovalRequest}.committerUserId`, + `committerUser.id` + ) + .leftJoin( + TableName.SecretApprovalRequestReviewer, + `${TableName.SecretApprovalRequest}.id`, + `${TableName.SecretApprovalRequestReviewer}.requestId` + ) + .leftJoin( + TableName.SecretApprovalRequestSecretV2, + `${TableName.SecretApprovalRequestSecretV2}.requestId`, + `${TableName.SecretApprovalRequest}.id` + ) + .where( + stripUndefinedInWhere({ + projectId, + [`${TableName.Environment}.slug` as "slug"]: environment, + [`${TableName.SecretApprovalRequest}.status`]: status, + committerUserId: committer + }) + ) + .andWhere( + (bd) => + void bd + .where(`${TableName.SecretApprovalPolicyApprover}.approverUserId`, userId) + .orWhere(`${TableName.SecretApprovalRequest}.committerUserId`, userId) + .orWhere(`${TableName.UserGroupMembership}.userId`, userId) + ) + .select(selectAllTableCols(TableName.SecretApprovalRequest)) + .select( + db.ref("projectId").withSchema(TableName.Environment), + 
db.ref("slug").withSchema(TableName.Environment).as("environment"), + db.ref("id").withSchema(TableName.SecretApprovalRequestReviewer).as("reviewerId"), + db.ref("reviewerUserId").withSchema(TableName.SecretApprovalRequestReviewer), + db.ref("status").withSchema(TableName.SecretApprovalRequestReviewer).as("reviewerStatus"), + db.ref("id").withSchema(TableName.SecretApprovalPolicy).as("policyId"), + db.ref("name").withSchema(TableName.SecretApprovalPolicy).as("policyName"), + db.ref("op").withSchema(TableName.SecretApprovalRequestSecretV2).as("commitOp"), + db.ref("secretId").withSchema(TableName.SecretApprovalRequestSecretV2).as("commitSecretId"), + db.ref("id").withSchema(TableName.SecretApprovalRequestSecretV2).as("commitId"), + db.raw( + `DENSE_RANK() OVER (partition by ${TableName.Environment}."projectId" ORDER BY ${TableName.SecretApprovalRequest}."id" DESC) as rank` + ), + db.ref("secretPath").withSchema(TableName.SecretApprovalPolicy).as("policySecretPath"), + db.ref("approvals").withSchema(TableName.SecretApprovalPolicy).as("policyApprovals"), + db.ref("enforcementLevel").withSchema(TableName.SecretApprovalPolicy).as("policyEnforcementLevel"), + db.ref("approverUserId").withSchema(TableName.SecretApprovalPolicyApprover), + db.ref("userId").withSchema(TableName.UserGroupMembership).as("approverGroupUserId"), + db.ref("email").withSchema("committerUser").as("committerUserEmail"), + db.ref("username").withSchema("committerUser").as("committerUserUsername"), + db.ref("firstName").withSchema("committerUser").as("committerUserFirstName"), + db.ref("lastName").withSchema("committerUser").as("committerUserLastName") + ) + .orderBy("createdAt", "desc"); + + const docs = await (tx || db) + .with("w", query) + .select("*") + .from[number]>("w") + .where("w.rank", ">=", offset) + .andWhere("w.rank", "<", offset + limit); + const formatedDoc = sqlNestRelationships({ + data: docs, + key: "id", + parentMapper: (el) => ({ + ...SecretApprovalRequestsSchema.parse(el), + environment: el.environment, + projectId: el.projectId, + policy: { + id: el.policyId, + name: el.policyName, + approvals: el.policyApprovals, + secretPath: el.policySecretPath, + enforcementLevel: el.policyEnforcementLevel + }, + committerUser: { + userId: el.committerUserId, + email: el.committerUserEmail, + firstName: el.committerUserFirstName, + lastName: el.committerUserLastName, + username: el.committerUserUsername + } + }), + childrenMapper: [ + { + key: "reviewerId", + label: "reviewers" as const, + mapper: ({ reviewerUserId, reviewerStatus: s }) => + reviewerUserId ? 
{ userId: reviewerUserId, status: s } : undefined + }, + { + key: "approverUserId", + label: "approvers" as const, + mapper: ({ approverUserId }) => ({ userId: approverUserId }) + }, + { + key: "commitId", + label: "commits" as const, + mapper: ({ commitSecretId: secretId, commitId: id, commitOp: op }) => ({ + op, + id, + secretId + }) + }, + { + key: "approverGroupUserId", + label: "approvers" as const, + mapper: ({ approverGroupUserId }) => ({ + userId: approverGroupUserId + }) } ] }); @@ -256,5 +549,26 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => { } }; - return { ...secretApprovalRequestOrm, findById, findProjectRequestCount, findByProjectId }; + const deleteByProjectId = async (projectId: string, tx?: Knex) => { + try { + const query = await (tx || db)(TableName.SecretApprovalRequest) + .join(TableName.SecretFolder, `${TableName.SecretApprovalRequest}.folderId`, `${TableName.SecretFolder}.id`) + .join(TableName.Environment, `${TableName.SecretFolder}.envId`, `${TableName.Environment}.id`) + .where({ projectId }) + .delete(); + + return query; + } catch (error) { + throw new DatabaseError({ error, name: "DeleteByProjectId" }); + } + }; + + return { + ...secretApprovalRequestOrm, + findById, + findProjectRequestCount, + findByProjectId, + findByProjectIdBridgeSecretV2, + deleteByProjectId + }; }; diff --git a/backend/src/ee/services/secret-approval-request/secret-approval-request-fns.ts b/backend/src/ee/services/secret-approval-request/secret-approval-request-fns.ts new file mode 100644 index 0000000000..05b7280b28 --- /dev/null +++ b/backend/src/ee/services/secret-approval-request/secret-approval-request-fns.ts @@ -0,0 +1,44 @@ +import { TSecretApprovalRequests } from "@app/db/schemas"; +import { getConfig } from "@app/lib/config/env"; +import { TProjectDALFactory } from "@app/services/project/project-dal"; +import { SmtpTemplates, TSmtpService } from "@app/services/smtp/smtp-service"; + +import { TSecretApprovalPolicyDALFactory } from "../secret-approval-policy/secret-approval-policy-dal"; + +type TSendApprovalEmails = { + secretApprovalPolicyDAL: Pick; + projectDAL: Pick; + smtpService: Pick; + projectId: string; + secretApprovalRequest: TSecretApprovalRequests; +}; + +export const sendApprovalEmailsFn = async ({ + secretApprovalPolicyDAL, + projectDAL, + smtpService, + projectId, + secretApprovalRequest +}: TSendApprovalEmails) => { + const cfg = getConfig(); + + const policy = await secretApprovalPolicyDAL.findById(secretApprovalRequest.policyId); + + const project = await projectDAL.findProjectWithOrg(projectId); + + // now we need to go through each of the reviewers and print out all the commits that they need to approve + for await (const reviewerUser of policy.userApprovers) { + await smtpService.sendMail({ + recipients: [reviewerUser?.email as string], + subjectLine: "Infisical Secret Change Request", + + substitutions: { + firstName: reviewerUser.firstName, + projectName: project.name, + organizationName: project.organization.name, + approvalUrl: `${cfg.SITE_URL}/project/${project.id}/approval?requestId=${secretApprovalRequest.id}` + }, + template: SmtpTemplates.SecretApprovalRequestNeedsReview + }); + } +}; diff --git a/backend/src/ee/services/secret-approval-request/secret-approval-request-secret-dal.ts b/backend/src/ee/services/secret-approval-request/secret-approval-request-secret-dal.ts index 736cd253ea..77f38dd536 100644 --- a/backend/src/ee/services/secret-approval-request/secret-approval-request-secret-dal.ts +++ 
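The sendApprovalEmailsFn helper above loads the policy with its user approvers and mails each one a direct link to the request. A hypothetical call site, assuming the same injected dependencies used throughout this service layer:

```ts
// Hypothetical usage, e.g. right after the approval request row is created.
// The DAL/service instances are assumed to come from the surrounding
// factory's dependency injection, as elsewhere in this layer.
await sendApprovalEmailsFn({
  secretApprovalPolicyDAL,
  projectDAL,
  smtpService,
  projectId,
  secretApprovalRequest // the freshly inserted TSecretApprovalRequests row
});
```

Note the helper awaits one sendMail call per approver in sequence; recipients could instead be batched into a single email, at the cost of per-recipient failure attribution.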
b/backend/src/ee/services/secret-approval-request/secret-approval-request-secret-dal.ts @@ -3,11 +3,12 @@ import { Knex } from "knex"; import { TDbClient } from "@app/db"; import { SecretApprovalRequestsSecretsSchema, + SecretApprovalRequestsSecretsV2Schema, TableName, TSecretApprovalRequestsSecrets, TSecretTags } from "@app/db/schemas"; -import { BadRequestError, DatabaseError } from "@app/lib/errors"; +import { DatabaseError, NotFoundError } from "@app/lib/errors"; import { ormify, selectAllTableCols, sqlNestRelationships } from "@app/lib/knex"; export type TSecretApprovalRequestSecretDALFactory = ReturnType; @@ -15,6 +16,8 @@ export type TSecretApprovalRequestSecretDALFactory = ReturnType { const secretApprovalRequestSecretOrm = ormify(db, TableName.SecretApprovalRequestSecret); const secretApprovalRequestSecretTagOrm = ormify(db, TableName.SecretApprovalRequestSecretTag); + const secretApprovalRequestSecretV2TagOrm = ormify(db, TableName.SecretApprovalRequestSecretTagV2); + const secretApprovalRequestSecretV2Orm = ormify(db, TableName.SecretApprovalRequestSecretV2); const bulkUpdateNoVersionIncrement = async (data: TSecretApprovalRequestsSecrets[], tx?: Knex) => { try { @@ -28,7 +31,7 @@ export const secretApprovalRequestSecretDALFactory = (db: TDbClient) => { ); if (existingApprovalSecrets.length !== data.length) { - throw new BadRequestError({ message: "Some of the secret approvals do not exist" }); + throw new NotFoundError({ message: "Some of the secret approvals do not exist" }); } if (data.length === 0) return []; @@ -47,7 +50,7 @@ export const secretApprovalRequestSecretDALFactory = (db: TDbClient) => { const findByRequestId = async (requestId: string, tx?: Knex) => { try { - const doc = await (tx || db)({ + const doc = await (tx || db.replicaNode())({ secVerTag: TableName.SecretTag }) .from(TableName.SecretApprovalRequestSecret) @@ -78,15 +81,13 @@ export const secretApprovalRequestSecretDALFactory = (db: TDbClient) => { .select({ secVerTagId: "secVerTag.id", secVerTagColor: "secVerTag.color", - secVerTagSlug: "secVerTag.slug", - secVerTagName: "secVerTag.name" + secVerTagSlug: "secVerTag.slug" }) .select( db.ref("id").withSchema(TableName.SecretTag).as("tagId"), db.ref("id").withSchema(TableName.SecretApprovalRequestSecretTag).as("tagJnId"), db.ref("color").withSchema(TableName.SecretTag).as("tagColor"), - db.ref("slug").withSchema(TableName.SecretTag).as("tagSlug"), - db.ref("name").withSchema(TableName.SecretTag).as("tagName") + db.ref("slug").withSchema(TableName.SecretTag).as("tagSlug") ) .select( db.ref("secretBlindIndex").withSchema(TableName.Secret).as("orgSecBlindIndex"), @@ -121,9 +122,9 @@ export const secretApprovalRequestSecretDALFactory = (db: TDbClient) => { { key: "tagJnId", label: "tags" as const, - mapper: ({ tagId: id, tagName: name, tagSlug: slug, tagColor: color }) => ({ + mapper: ({ tagId: id, tagSlug: slug, tagColor: color }) => ({ id, - name, + name: slug, slug, color }) @@ -197,11 +198,11 @@ export const secretApprovalRequestSecretDALFactory = (db: TDbClient) => { { key: "secVerTagId", label: "tags" as const, - mapper: ({ secVerTagId: id, secVerTagName: name, secVerTagSlug: slug, secVerTagColor: color }) => ({ + mapper: ({ secVerTagId: id, secVerTagSlug: slug, secVerTagColor: color }) => ({ // eslint-disable-next-line id, // eslint-disable-next-line - name, + name: slug, // eslint-disable-next-line slug, // eslint-disable-next-line @@ -221,10 +222,195 @@ export const secretApprovalRequestSecretDALFactory = (db: TDbClient) => { throw new DatabaseError({ 
error, name: "FindByRequestId" }); } }; + + const findByRequestIdBridgeSecretV2 = async (requestId: string, tx?: Knex) => { + try { + const doc = await (tx || db.replicaNode())({ + secVerTag: TableName.SecretTag + }) + .from(TableName.SecretApprovalRequestSecretV2) + .where({ requestId }) + .leftJoin( + TableName.SecretApprovalRequestSecretTagV2, + `${TableName.SecretApprovalRequestSecretV2}.id`, + `${TableName.SecretApprovalRequestSecretTagV2}.secretId` + ) + .leftJoin( + TableName.SecretTag, + `${TableName.SecretApprovalRequestSecretTagV2}.tagId`, + `${TableName.SecretTag}.id` + ) + .leftJoin(TableName.SecretV2, `${TableName.SecretApprovalRequestSecretV2}.secretId`, `${TableName.SecretV2}.id`) + .leftJoin( + TableName.SecretVersionV2, + `${TableName.SecretVersionV2}.id`, + `${TableName.SecretApprovalRequestSecretV2}.secretVersion` + ) + .leftJoin( + TableName.SecretVersionV2Tag, + `${TableName.SecretVersionV2Tag}.${TableName.SecretVersionV2}Id`, + `${TableName.SecretVersionV2}.id` + ) + .leftJoin( + db.ref(TableName.SecretTag).as("secVerTag"), + `${TableName.SecretVersionV2Tag}.${TableName.SecretTag}Id`, + db.ref("id").withSchema("secVerTag") + ) + .select(selectAllTableCols(TableName.SecretApprovalRequestSecretV2)) + .select({ + secVerTagId: "secVerTag.id", + secVerTagColor: "secVerTag.color", + secVerTagSlug: "secVerTag.slug" + }) + .select( + db.ref("id").withSchema(TableName.SecretTag).as("tagId"), + db.ref("id").withSchema(TableName.SecretApprovalRequestSecretTagV2).as("tagJnId"), + db.ref("color").withSchema(TableName.SecretTag).as("tagColor"), + db.ref("slug").withSchema(TableName.SecretTag).as("tagSlug") + ) + .select( + db.ref("version").withSchema(TableName.SecretV2).as("orgSecVersion"), + db.ref("key").withSchema(TableName.SecretV2).as("orgSecKey"), + db.ref("encryptedValue").withSchema(TableName.SecretV2).as("orgSecValue"), + db.ref("encryptedComment").withSchema(TableName.SecretV2).as("orgSecComment") + ) + .select( + db.ref("version").withSchema(TableName.SecretVersionV2).as("secVerVersion"), + db.ref("key").withSchema(TableName.SecretVersionV2).as("secVerKey"), + db.ref("encryptedValue").withSchema(TableName.SecretVersionV2).as("secVerValue"), + db.ref("encryptedComment").withSchema(TableName.SecretVersionV2).as("secVerComment") + ); + const formatedDoc = sqlNestRelationships({ + data: doc, + key: "id", + parentMapper: (data) => SecretApprovalRequestsSecretsV2Schema.omit({ secretVersion: true }).parse(data), + childrenMapper: [ + { + key: "tagJnId", + label: "tags" as const, + mapper: ({ tagId: id, tagSlug: slug, tagColor: color }) => ({ + id, + name: slug, + slug, + color + }) + }, + { + key: "secretId", + label: "secret" as const, + mapper: ({ orgSecVersion, orgSecKey, orgSecValue, orgSecComment, secretId }) => + secretId + ? { + id: secretId, + version: orgSecVersion, + key: orgSecKey, + encryptedValue: orgSecValue, + encryptedComment: orgSecComment + } + : undefined + }, + { + key: "secretVersion", + label: "secretVersion" as const, + mapper: ({ secretVersion, secVerVersion, secVerKey, secVerValue, secVerComment }) => + secretVersion + ? 
{ + version: secVerVersion, + id: secretVersion, + key: secVerKey, + encryptedValue: secVerValue, + encryptedComment: secVerComment + } + : undefined, + childrenMapper: [ + { + key: "secVerTagId", + label: "tags" as const, + mapper: ({ secVerTagId: id, secVerTagSlug: slug, secVerTagColor: color }) => ({ + // eslint-disable-next-line + id, + // eslint-disable-next-line + name: slug, + // eslint-disable-next-line + slug, + // eslint-disable-next-line + color + }) + } + ] + } + ] + }); + return formatedDoc?.map(({ secret, secretVersion, ...el }) => ({ + ...el, + secret: secret?.[0], + secretVersion: secretVersion?.[0] + })); + } catch (error) { + throw new DatabaseError({ error, name: "FindByRequestId" }); + } + }; + // special query for migration to v2 secret + const findByProjectId = async (projectId: string, tx?: Knex) => { + try { + const docs = await (tx || db)(TableName.SecretApprovalRequestSecret) + .join( + TableName.SecretApprovalRequest, + `${TableName.SecretApprovalRequest}.id`, + `${TableName.SecretApprovalRequestSecret}.requestId` + ) + .join(TableName.SecretFolder, `${TableName.SecretApprovalRequest}.folderId`, `${TableName.SecretFolder}.id`) + .join(TableName.Environment, `${TableName.SecretFolder}.envId`, `${TableName.Environment}.id`) + .leftJoin( + TableName.SecretApprovalRequestSecretTag, + `${TableName.SecretApprovalRequestSecret}.id`, + `${TableName.SecretApprovalRequestSecretTag}.secretId` + ) + .where({ projectId }) + .select(selectAllTableCols(TableName.SecretApprovalRequestSecret)) + .select( + db.ref("id").withSchema(TableName.SecretApprovalRequestSecretTag).as("secretApprovalTagId"), + db.ref("secretId").withSchema(TableName.SecretApprovalRequestSecretTag).as("secretApprovalTagSecretId"), + db.ref("tagId").withSchema(TableName.SecretApprovalRequestSecretTag).as("secretApprovalTagSecretTagId"), + db.ref("createdAt").withSchema(TableName.SecretApprovalRequestSecretTag).as("secretApprovalTagCreatedAt"), + db.ref("updatedAt").withSchema(TableName.SecretApprovalRequestSecretTag).as("secretApprovalTagUpdatedAt") + ); + const formatedDoc = sqlNestRelationships({ + data: docs, + key: "id", + parentMapper: (data) => SecretApprovalRequestsSecretsSchema.parse(data), + childrenMapper: [ + { + key: "secretApprovalTagId", + label: "tags" as const, + mapper: ({ + secretApprovalTagSecretId, + secretApprovalTagId, + secretApprovalTagUpdatedAt, + secretApprovalTagCreatedAt + }) => ({ + secretApprovalTagSecretId, + secretApprovalTagId, + secretApprovalTagUpdatedAt, + secretApprovalTagCreatedAt + }) + } + ] + }); + return formatedDoc; + } catch (error) { + throw new DatabaseError({ error, name: "FindByRequestId" }); + } + }; + return { ...secretApprovalRequestSecretOrm, + insertV2Bridge: secretApprovalRequestSecretV2Orm.insertMany, findByRequestId, + findByRequestIdBridgeSecretV2, bulkUpdateNoVersionIncrement, - insertApprovalSecretTags: secretApprovalRequestSecretTagOrm.insertMany + findByProjectId, + insertApprovalSecretTags: secretApprovalRequestSecretTagOrm.insertMany, + insertApprovalSecretV2Tags: secretApprovalRequestSecretV2TagOrm.insertMany }; }; diff --git a/backend/src/ee/services/secret-approval-request/secret-approval-request-service.ts b/backend/src/ee/services/secret-approval-request/secret-approval-request-service.ts index 5d09771347..a39f44fd62 100644 --- a/backend/src/ee/services/secret-approval-request/secret-approval-request-service.ts +++ b/backend/src/ee/services/secret-approval-request/secret-approval-request-service.ts @@ -5,17 +5,25 @@ import { 
SecretEncryptionAlgo, SecretKeyEncoding, SecretType, - TSecretApprovalRequestsSecretsInsert + TSecretApprovalRequestsSecretsInsert, + TSecretApprovalRequestsSecretsV2Insert } from "@app/db/schemas"; +import { getConfig } from "@app/lib/config/env"; import { decryptSymmetric128BitHexKeyUTF8 } from "@app/lib/crypto"; -import { BadRequestError, UnauthorizedError } from "@app/lib/errors"; +import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors"; import { groupBy, pick, unique } from "@app/lib/fn"; +import { setKnexStringValue } from "@app/lib/knex"; import { alphaNumericNanoId } from "@app/lib/nanoid"; +import { EnforcementLevel } from "@app/lib/types"; import { ActorType } from "@app/services/auth/auth-type"; +import { TKmsServiceFactory } from "@app/services/kms/kms-service"; +import { KmsDataKey } from "@app/services/kms/kms-types"; import { TProjectDALFactory } from "@app/services/project/project-dal"; import { TProjectBotServiceFactory } from "@app/services/project-bot/project-bot-service"; +import { TProjectEnvDALFactory } from "@app/services/project-env/project-env-dal"; import { TSecretDALFactory } from "@app/services/secret/secret-dal"; import { + decryptSecretWithBot, fnSecretBlindIndexCheck, fnSecretBlindIndexCheckV2, fnSecretBulkDelete, @@ -30,11 +38,28 @@ import { TSecretVersionTagDALFactory } from "@app/services/secret/secret-version import { TSecretBlindIndexDALFactory } from "@app/services/secret-blind-index/secret-blind-index-dal"; import { TSecretFolderDALFactory } from "@app/services/secret-folder/secret-folder-dal"; import { TSecretTagDALFactory } from "@app/services/secret-tag/secret-tag-dal"; +import { TSecretV2BridgeDALFactory } from "@app/services/secret-v2-bridge/secret-v2-bridge-dal"; +import { + fnSecretBulkDelete as fnSecretV2BridgeBulkDelete, + fnSecretBulkInsert as fnSecretV2BridgeBulkInsert, + fnSecretBulkUpdate as fnSecretV2BridgeBulkUpdate, + getAllSecretReferences as getAllSecretReferencesV2Bridge +} from "@app/services/secret-v2-bridge/secret-v2-bridge-fns"; +import { TSecretVersionV2DALFactory } from "@app/services/secret-v2-bridge/secret-version-dal"; +import { TSecretVersionV2TagDALFactory } from "@app/services/secret-v2-bridge/secret-version-tag-dal"; +import { TProjectSlackConfigDALFactory } from "@app/services/slack/project-slack-config-dal"; +import { triggerSlackNotification } from "@app/services/slack/slack-fns"; +import { SlackTriggerFeature } from "@app/services/slack/slack-types"; +import { SmtpTemplates, TSmtpService } from "@app/services/smtp/smtp-service"; +import { TUserDALFactory } from "@app/services/user/user-dal"; +import { TLicenseServiceFactory } from "../license/license-service"; import { TPermissionServiceFactory } from "../permission/permission-service"; import { ProjectPermissionActions, ProjectPermissionSub } from "../permission/project-permission"; +import { TSecretApprovalPolicyDALFactory } from "../secret-approval-policy/secret-approval-policy-dal"; import { TSecretSnapshotServiceFactory } from "../secret-snapshot/secret-snapshot-service"; import { TSecretApprovalRequestDALFactory } from "./secret-approval-request-dal"; +import { sendApprovalEmailsFn } from "./secret-approval-request-fns"; import { TSecretApprovalRequestReviewerDALFactory } from "./secret-approval-request-reviewer-dal"; import { TSecretApprovalRequestSecretDALFactory } from "./secret-approval-request-secret-dal"; import { @@ -42,6 +67,7 @@ import { RequestState, TApprovalRequestCountDTO, TGenerateSecretApprovalRequestDTO, + 
TGenerateSecretApprovalRequestV2BridgeDTO, TListApprovalsDTO, TMergeSecretApprovalRequestDTO, TReviewRequestDTO, @@ -57,13 +83,32 @@ type TSecretApprovalRequestServiceFactoryDep = { secretApprovalRequestReviewerDAL: TSecretApprovalRequestReviewerDALFactory; folderDAL: Pick; secretDAL: TSecretDALFactory; - secretTagDAL: Pick; + secretTagDAL: Pick< + TSecretTagDALFactory, + "findManyTagsById" | "saveTagsToSecret" | "deleteTagsManySecret" | "saveTagsToSecretV2" | "deleteTagsToSecretV2" + >; secretBlindIndexDAL: Pick; snapshotService: Pick; secretVersionDAL: Pick; secretVersionTagDAL: Pick; - projectDAL: Pick; + smtpService: Pick; + userDAL: Pick; + projectEnvDAL: Pick; + projectDAL: Pick< + TProjectDALFactory, + "checkProjectUpgradeStatus" | "findById" | "findProjectById" | "findProjectWithOrg" + >; secretQueueService: Pick; + kmsService: Pick; + secretV2BridgeDAL: Pick< + TSecretV2BridgeDALFactory, + "insertMany" | "upsertSecretReferences" | "findBySecretKeys" | "bulkUpdate" | "deleteMany" + >; + secretVersionV2BridgeDAL: Pick; + secretVersionTagV2BridgeDAL: Pick; + secretApprovalPolicyDAL: Pick; + projectSlackConfigDAL: Pick; + licenseService: Pick; }; export type TSecretApprovalRequestServiceFactory = ReturnType; @@ -82,12 +127,22 @@ export const secretApprovalRequestServiceFactory = ({ snapshotService, secretVersionDAL, secretQueueService, - projectBotService + projectBotService, + smtpService, + userDAL, + projectEnvDAL, + secretApprovalPolicyDAL, + kmsService, + secretV2BridgeDAL, + secretVersionV2BridgeDAL, + secretVersionTagV2BridgeDAL, + licenseService, + projectSlackConfigDAL }: TSecretApprovalRequestServiceFactoryDep) => { const requestCount = async ({ projectId, actor, actorId, actorOrgId, actorAuthMethod }: TApprovalRequestCountDTO) => { if (actor === ActorType.SERVICE) throw new BadRequestError({ message: "Cannot use service token" }); - const { membership } = await permissionService.getProjectPermission( + await permissionService.getProjectPermission( actor as ActorType.USER, actorId, projectId, @@ -95,7 +150,7 @@ export const secretApprovalRequestServiceFactory = ({ actorOrgId ); - const count = await secretApprovalRequestDAL.findProjectRequestCount(projectId, membership.id); + const count = await secretApprovalRequestDAL.findProjectRequestCount(projectId, actorId); return count; }; @@ -113,19 +168,26 @@ export const secretApprovalRequestServiceFactory = ({ }: TListApprovalsDTO) => { if (actor === ActorType.SERVICE) throw new BadRequestError({ message: "Cannot use service token" }); - const { membership } = await permissionService.getProjectPermission( - actor, - actorId, - projectId, - actorAuthMethod, - actorOrgId - ); + await permissionService.getProjectPermission(actor, actorId, projectId, actorAuthMethod, actorOrgId); + + const { shouldUseSecretV2Bridge } = await projectBotService.getBotKey(projectId); + if (shouldUseSecretV2Bridge) { + return secretApprovalRequestDAL.findByProjectIdBridgeSecretV2({ + projectId, + committer, + environment, + status, + userId: actorId, + limit, + offset + }); + } const approvals = await secretApprovalRequestDAL.findByProjectId({ projectId, committer, environment, status, - membershipId: membership.id, + userId: actorId, limit, offset }); @@ -142,25 +204,96 @@ export const secretApprovalRequestServiceFactory = ({ if (actor === ActorType.SERVICE) throw new BadRequestError({ message: "Cannot use service token" }); const secretApprovalRequest = await secretApprovalRequestDAL.findById(id); - if (!secretApprovalRequest) throw new 
BadRequestError({ message: "Secret approval request not found" }); + if (!secretApprovalRequest) + throw new NotFoundError({ message: `Secret approval request with ID '${id}' not found` }); + + const { projectId } = secretApprovalRequest; + const { botKey, shouldUseSecretV2Bridge } = await projectBotService.getBotKey(projectId); const { policy } = secretApprovalRequest; - const { membership, hasRole } = await permissionService.getProjectPermission( + const { hasRole } = await permissionService.getProjectPermission( actor, actorId, - secretApprovalRequest.projectId, + projectId, actorAuthMethod, actorOrgId ); if ( !hasRole(ProjectMembershipRole.Admin) && - secretApprovalRequest.committerId !== membership.id && - !policy.approvers.find((approverId) => approverId === membership.id) + secretApprovalRequest.committerUserId !== actorId && + !policy.approvers.find(({ userId }) => userId === actorId) ) { - throw new UnauthorizedError({ message: "User has no access" }); + throw new ForbiddenRequestError({ message: "User has insufficient privileges" }); } - const secrets = await secretApprovalRequestSecretDAL.findByRequestId(secretApprovalRequest.id); + let secrets; + if (shouldUseSecretV2Bridge) { + const { decryptor: secretManagerDecryptor } = await kmsService.createCipherPairWithDataKey({ + type: KmsDataKey.SecretManager, + projectId + }); + const encrypedSecrets = await secretApprovalRequestSecretDAL.findByRequestIdBridgeSecretV2( + secretApprovalRequest.id + ); + secrets = encrypedSecrets.map((el) => ({ + ...el, + secretKey: el.key, + id: el.id, + version: el.version, + secretValue: el.encryptedValue ? secretManagerDecryptor({ cipherTextBlob: el.encryptedValue }).toString() : "", + secretComment: el.encryptedComment + ? secretManagerDecryptor({ cipherTextBlob: el.encryptedComment }).toString() + : "", + secret: el.secret + ? { + secretKey: el.secret.key, + id: el.secret.id, + version: el.secret.version, + secretValue: el.secret.encryptedValue + ? secretManagerDecryptor({ cipherTextBlob: el.secret.encryptedValue }).toString() + : "", + secretComment: el.secret.encryptedComment + ? secretManagerDecryptor({ cipherTextBlob: el.secret.encryptedComment }).toString() + : "" + } + : undefined, + secretVersion: el.secretVersion + ? { + secretKey: el.secretVersion.key, + id: el.secretVersion.id, + version: el.secretVersion.version, + secretValue: el.secretVersion.encryptedValue + ? secretManagerDecryptor({ cipherTextBlob: el.secretVersion.encryptedValue }).toString() + : "", + secretComment: el.secretVersion.encryptedComment + ? secretManagerDecryptor({ cipherTextBlob: el.secretVersion.encryptedComment }).toString() + : "", + tags: el.secretVersion.tags + } + : undefined + })); + } else { + if (!botKey) throw new NotFoundError({ message: `Project bot key not found`, name: "BotKeyNotFound" }); // CLI depends on this error message. TODO(daniel): Make API check for name BotKeyNotFound instead of message + const encrypedSecrets = await secretApprovalRequestSecretDAL.findByRequestId(secretApprovalRequest.id); + secrets = encrypedSecrets.map((el) => ({ + ...el, + ...decryptSecretWithBot(el, botKey), + secret: el.secret + ? { + id: el.secret.id, + version: el.secret.version, + ...decryptSecretWithBot(el.secret, botKey) + } + : undefined, + secretVersion: el.secretVersion + ? 
{ + id: el.secretVersion.id, + version: el.secretVersion.version, + ...decryptSecretWithBot(el.secretVersion, botKey) + } + : undefined + })); + } const secretPath = await folderDAL.findSecretPathByFolderIds(secretApprovalRequest.projectId, [ secretApprovalRequest.folderId ]); @@ -176,11 +309,21 @@ export const secretApprovalRequestServiceFactory = ({ actorOrgId }: TReviewRequestDTO) => { const secretApprovalRequest = await secretApprovalRequestDAL.findById(approvalId); - if (!secretApprovalRequest) throw new BadRequestError({ message: "Secret approval request not found" }); + if (!secretApprovalRequest) { + throw new NotFoundError({ message: `Secret approval request with ID '${approvalId}' not found` }); + } if (actor !== ActorType.USER) throw new BadRequestError({ message: "Must be a user" }); + const plan = await licenseService.getPlan(actorOrgId); + if (!plan.secretApproval) { + throw new BadRequestError({ + message: + "Failed to review secret approval request due to plan restriction. Upgrade plan to review secret approval request." + }); + } + const { policy } = secretApprovalRequest; - const { membership, hasRole } = await permissionService.getProjectPermission( + const { hasRole } = await permissionService.getProjectPermission( ActorType.USER, actorId, secretApprovalRequest.projectId, @@ -189,16 +332,16 @@ export const secretApprovalRequestServiceFactory = ({ ); if ( !hasRole(ProjectMembershipRole.Admin) && - secretApprovalRequest.committerId !== membership.id && - !policy.approvers.find((approverId) => approverId === membership.id) + secretApprovalRequest.committerUserId !== actorId && + !policy.approvers.find(({ userId }) => userId === actorId) ) { - throw new UnauthorizedError({ message: "User has no access" }); + throw new ForbiddenRequestError({ message: "User has insufficient privileges" }); } const reviewStatus = await secretApprovalRequestReviewerDAL.transaction(async (tx) => { const review = await secretApprovalRequestReviewerDAL.findOne( { requestId: secretApprovalRequest.id, - member: membership.id + reviewerUserId: actorId }, tx ); @@ -207,7 +350,7 @@ export const secretApprovalRequestServiceFactory = ({ { status, requestId: secretApprovalRequest.id, - member: membership.id + reviewerUserId: actorId }, tx ); @@ -226,11 +369,21 @@ export const secretApprovalRequestServiceFactory = ({ actorAuthMethod }: TStatusChangeDTO) => { const secretApprovalRequest = await secretApprovalRequestDAL.findById(approvalId); - if (!secretApprovalRequest) throw new BadRequestError({ message: "Secret approval request not found" }); + if (!secretApprovalRequest) { + throw new NotFoundError({ message: `Secret approval request with ID '${approvalId}' not found` }); + } if (actor !== ActorType.USER) throw new BadRequestError({ message: "Must be a user" }); + const plan = await licenseService.getPlan(actorOrgId); + if (!plan.secretApproval) { + throw new BadRequestError({ + message: + "Failed to update secret approval request due to plan restriction. Upgrade plan to update secret approval request." 
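Reviewing and status changes above, and merging below, are all gated on the organization's plan with the same check. The recurring guard could be factored into a small helper; this is a hypothetical refactor for illustration, not part of the diff:

```ts
// Hypothetical helper capturing the recurring license gate; the real service
// inlines this check before review, status change, and merge.
const enforceSecretApprovalPlan = async (
  licenseService: { getPlan: (orgId: string) => Promise<{ secretApproval: boolean }> },
  actorOrgId: string,
  action: string
) => {
  const plan = await licenseService.getPlan(actorOrgId);
  if (!plan.secretApproval) {
    // BadRequestError in the service
    throw new Error(
      `Failed to ${action} secret approval request due to plan restriction. Upgrade plan to ${action} secret approval request.`
    );
  }
};

// e.g. await enforceSecretApprovalPlan(licenseService, actorOrgId, "review");
```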
+ }); + } + const { policy } = secretApprovalRequest; - const { membership, hasRole } = await permissionService.getProjectPermission( + const { hasRole } = await permissionService.getProjectPermission( ActorType.USER, actorId, secretApprovalRequest.projectId, @@ -239,10 +392,10 @@ ); if ( !hasRole(ProjectMembershipRole.Admin) && - secretApprovalRequest.committerId !== membership.id && - !policy.approvers.find((approverId) => approverId === membership.id) + secretApprovalRequest.committerUserId !== actorId && + !policy.approvers.find(({ userId }) => userId === actorId) ) { - throw new UnauthorizedError({ message: "User has no access" }); + throw new ForbiddenRequestError({ message: "User has insufficient privileges" }); } if (secretApprovalRequest.hasMerged) throw new BadRequestError({ message: "Approval request has been merged" }); @@ -253,7 +406,7 @@ const updatedRequest = await secretApprovalRequestDAL.updateById(secretApprovalRequest.id, { status, - statusChangeBy: membership.id + statusChangedByUserId: actorId }); return { ...secretApprovalRequest, ...updatedRequest }; }; @@ -263,14 +416,24 @@ actor, actorId, actorOrgId, - actorAuthMethod + actorAuthMethod, + bypassReason }: TMergeSecretApprovalRequestDTO) => { const secretApprovalRequest = await secretApprovalRequestDAL.findById(approvalId); - if (!secretApprovalRequest) throw new BadRequestError({ message: "Secret approval request not found" }); + if (!secretApprovalRequest) + throw new NotFoundError({ message: `Secret approval request with ID '${approvalId}' not found` }); if (actor !== ActorType.USER) throw new BadRequestError({ message: "Must be a user" }); + const plan = await licenseService.getPlan(actorOrgId); + if (!plan.secretApproval) { + throw new BadRequestError({ + message: + "Failed to merge secret approval request due to plan restriction. Upgrade plan to merge secret approval request." + }); + } + const { policy, folderId, projectId } = secretApprovalRequest; - const { membership, hasRole } = await permissionService.getProjectPermission( + const { hasRole } = await permissionService.getProjectPermission( ActorType.USER, actorId, projectId, @@ -280,140 +443,242 @@ if ( !hasRole(ProjectMembershipRole.Admin) && - secretApprovalRequest.committerId !== membership.id && - !policy.approvers.find((approverId) => approverId === membership.id) + secretApprovalRequest.committerUserId !== actorId && + !policy.approvers.find(({ userId }) => userId === actorId) ) { - throw new UnauthorizedError({ message: "User has no access" }); + throw new ForbiddenRequestError({ message: "User has insufficient privileges" }); } const reviewers = secretApprovalRequest.reviewers.reduce<Record<string, ApprovalStatus>>( - (prev, curr) => ({ ...prev, [curr.member.toString()]: curr.status as ApprovalStatus }), + (prev, curr) => ({ ...prev, [curr.userId.toString()]: curr.status as ApprovalStatus }), {} ); const hasMinApproval = secretApprovalRequest.policy.approvals <= - secretApprovalRequest.policy.approvers.filter( - (approverId) => reviewers[approverId.toString()] === ApprovalStatus.APPROVED + secretApprovalRequest.policy.approvers.filter(({ userId: approverId }) => + approverId ? 
reviewers[approverId] === ApprovalStatus.APPROVED : false ).length; + const isSoftEnforcement = secretApprovalRequest.policy.enforcementLevel === EnforcementLevel.Soft; - if (!hasMinApproval) throw new BadRequestError({ message: "Doesn't have minimum approvals needed" }); - const secretApprovalSecrets = await secretApprovalRequestSecretDAL.findByRequestId(secretApprovalRequest.id); - if (!secretApprovalSecrets) throw new BadRequestError({ message: "No secrets found" }); + if (!hasMinApproval && !isSoftEnforcement) + throw new BadRequestError({ message: "Doesn't have minimum approvals needed" }); - const conflicts: Array<{ secretId: string; op: SecretOperations }> = []; - let secretCreationCommits = secretApprovalSecrets.filter(({ op }) => op === SecretOperations.Create); - if (secretCreationCommits.length) { - const { secsGroupedByBlindIndex: conflictGroupByBlindIndex } = await fnSecretBlindIndexCheckV2({ - folderId, - secretDAL, - inputSecrets: secretCreationCommits.map(({ secretBlindIndex }) => { - if (!secretBlindIndex) { - throw new BadRequestError({ - message: "Missing secret blind index" - }); - } - return { secretBlindIndex }; - }) - }); - secretCreationCommits - .filter(({ secretBlindIndex }) => conflictGroupByBlindIndex[secretBlindIndex || ""]) - .forEach((el) => { - conflicts.push({ op: SecretOperations.Create, secretId: el.id }); - }); - secretCreationCommits = secretCreationCommits.filter( - ({ secretBlindIndex }) => !conflictGroupByBlindIndex[secretBlindIndex || ""] + const { botKey, shouldUseSecretV2Bridge } = await projectBotService.getBotKey(projectId); + let mergeStatus; + if (shouldUseSecretV2Bridge) { + // this cycle if for bridged secrets + const secretApprovalSecrets = await secretApprovalRequestSecretDAL.findByRequestIdBridgeSecretV2( + secretApprovalRequest.id ); - } + if (!secretApprovalSecrets) { + throw new NotFoundError({ message: `No secrets found in secret change request with ID '${approvalId}'` }); + } - let secretUpdationCommits = secretApprovalSecrets.filter(({ op }) => op === SecretOperations.Update); - if (secretUpdationCommits.length) { - const { secsGroupedByBlindIndex: conflictGroupByBlindIndex } = await fnSecretBlindIndexCheckV2({ - folderId, - secretDAL, - userId: "", - inputSecrets: secretUpdationCommits - .filter(({ secretBlindIndex, secret }) => secret && secret.secretBlindIndex !== secretBlindIndex) - .map(({ secretBlindIndex }) => { + const { decryptor: secretManagerDecryptor } = await kmsService.createCipherPairWithDataKey({ + type: KmsDataKey.SecretManager, + projectId + }); + + const conflicts: Array<{ secretId: string; op: SecretOperations }> = []; + let secretCreationCommits = secretApprovalSecrets.filter(({ op }) => op === SecretOperations.Create); + if (secretCreationCommits.length) { + const secrets = await secretV2BridgeDAL.findBySecretKeys( + folderId, + secretCreationCommits.map((el) => ({ + key: el.key, + type: SecretType.Shared + })) + ); + const creationConflictSecretsGroupByKey = groupBy(secrets, (i) => i.key); + secretCreationCommits + .filter(({ key }) => creationConflictSecretsGroupByKey[key]) + .forEach((el) => { + conflicts.push({ op: SecretOperations.Create, secretId: el.id }); + }); + secretCreationCommits = secretCreationCommits.filter(({ key }) => !creationConflictSecretsGroupByKey[key]); + } + + let secretUpdationCommits = secretApprovalSecrets.filter(({ op }) => op === SecretOperations.Update); + if (secretUpdationCommits.length) { + const secrets = await secretV2BridgeDAL.findBySecretKeys( + folderId, + 
secretUpdationCommits.map((el) => ({ + key: el.key, + type: SecretType.Shared + })) + ); + const updationConflictSecretsGroupByKey = groupBy(secrets, (i) => i.key); + secretUpdationCommits + .filter(({ key, secretId }) => updationConflictSecretsGroupByKey[key] || !secretId) + .forEach((el) => { + conflicts.push({ op: SecretOperations.Update, secretId: el.id }); + }); + + secretUpdationCommits = secretUpdationCommits.filter( + ({ key, secretId }) => Boolean(secretId) && !updationConflictSecretsGroupByKey[key] + ); + } + + const secretDeletionCommits = secretApprovalSecrets.filter(({ op }) => op === SecretOperations.Delete); + mergeStatus = await secretApprovalRequestDAL.transaction(async (tx) => { + const newSecrets = secretCreationCommits.length + ? await fnSecretV2BridgeBulkInsert({ + tx, + folderId, + inputSecrets: secretCreationCommits.map((el) => ({ + tagIds: el?.tags.map(({ id }) => id), + version: 1, + encryptedComment: el.encryptedComment, + encryptedValue: el.encryptedValue, + skipMultilineEncoding: el.skipMultilineEncoding, + key: el.key, + references: el.encryptedValue + ? getAllSecretReferencesV2Bridge( + secretManagerDecryptor({ + cipherTextBlob: el.encryptedValue + }).toString() + ).nestedReferences + : [], + type: SecretType.Shared + })), + secretDAL: secretV2BridgeDAL, + secretVersionDAL: secretVersionV2BridgeDAL, + secretTagDAL, + secretVersionTagDAL: secretVersionTagV2BridgeDAL + }) + : []; + const updatedSecrets = secretUpdationCommits.length + ? await fnSecretV2BridgeBulkUpdate({ + folderId, + tx, + inputSecrets: secretUpdationCommits.map((el) => { + const encryptedValue = + typeof el.encryptedValue !== "undefined" + ? { + encryptedValue: el.encryptedValue as Buffer, + references: el.encryptedValue + ? getAllSecretReferencesV2Bridge( + secretManagerDecryptor({ + cipherTextBlob: el.encryptedValue + }).toString() + ).nestedReferences + : [] + } + : {}; + return { + filter: { id: el.secretId as string, type: SecretType.Shared }, + data: { + reminderRepeatDays: el.reminderRepeatDays, + encryptedComment: el.encryptedComment, + reminderNote: el.reminderNote, + skipMultilineEncoding: el.skipMultilineEncoding, + key: el.key, + tags: el?.tags.map(({ id }) => id), + ...encryptedValue + } + }; + }), + secretDAL: secretV2BridgeDAL, + secretVersionDAL: secretVersionV2BridgeDAL, + secretTagDAL, + secretVersionTagDAL: secretVersionTagV2BridgeDAL + }) + : []; + const deletedSecret = secretDeletionCommits.length + ? 
await fnSecretV2BridgeBulkDelete({ + projectId, + folderId, + tx, + actorId: "", + secretDAL: secretV2BridgeDAL, + secretQueueService, + inputSecrets: secretDeletionCommits.map(({ key }) => ({ secretKey: key, type: SecretType.Shared })) + }) + : []; + const updatedSecretApproval = await secretApprovalRequestDAL.updateById( + secretApprovalRequest.id, + { + conflicts: JSON.stringify(conflicts), + hasMerged: true, + status: RequestState.Closed, + statusChangedByUserId: actorId + }, + tx + ); + return { + secrets: { created: newSecrets, updated: updatedSecrets, deleted: deletedSecret }, + approval: updatedSecretApproval + }; + }); + } else { + const secretApprovalSecrets = await secretApprovalRequestSecretDAL.findByRequestId(secretApprovalRequest.id); + if (!secretApprovalSecrets) { + throw new NotFoundError({ message: `No secrets found in secret change request with ID '${approvalId}'` }); + } + + const conflicts: Array<{ secretId: string; op: SecretOperations }> = []; + let secretCreationCommits = secretApprovalSecrets.filter(({ op }) => op === SecretOperations.Create); + if (secretCreationCommits.length) { + const { secsGroupedByBlindIndex: conflictGroupByBlindIndex } = await fnSecretBlindIndexCheckV2({ + folderId, + secretDAL, + inputSecrets: secretCreationCommits.map(({ secretBlindIndex, secret }) => { if (!secretBlindIndex) { - throw new BadRequestError({ - message: "Missing secret blind index" + throw new NotFoundError({ + message: `Secret blind index not found on secret with ID '${secret.id}'` }); } return { secretBlindIndex }; }) - }); - secretUpdationCommits - .filter( - ({ secretBlindIndex, secretId }) => - (secretBlindIndex && conflictGroupByBlindIndex[secretBlindIndex]) || !secretId - ) - .forEach((el) => { - conflicts.push({ op: SecretOperations.Update, secretId: el.id }); }); + secretCreationCommits + .filter(({ secretBlindIndex }) => conflictGroupByBlindIndex[secretBlindIndex || ""]) + .forEach((el) => { + conflicts.push({ op: SecretOperations.Create, secretId: el.id }); + }); + secretCreationCommits = secretCreationCommits.filter( + ({ secretBlindIndex }) => !conflictGroupByBlindIndex[secretBlindIndex || ""] + ); + } - secretUpdationCommits = secretUpdationCommits.filter( - ({ secretBlindIndex, secretId }) => - Boolean(secretId) && (secretBlindIndex ? 
!conflictGroupByBlindIndex[secretBlindIndex] : true) - ); - } + let secretUpdationCommits = secretApprovalSecrets.filter(({ op }) => op === SecretOperations.Update); + if (secretUpdationCommits.length) { + const { secsGroupedByBlindIndex: conflictGroupByBlindIndex } = await fnSecretBlindIndexCheckV2({ + folderId, + secretDAL, + userId: "", + inputSecrets: secretUpdationCommits + .filter(({ secretBlindIndex, secret }) => secret && secret.secretBlindIndex !== secretBlindIndex) + .map(({ secretBlindIndex, secret }) => { + if (!secretBlindIndex) { + throw new NotFoundError({ + message: `Secret blind index not found on secret with ID '${secret.id}` + }); + } + return { secretBlindIndex }; + }) + }); + secretUpdationCommits + .filter( + ({ secretBlindIndex, secretId }) => + (secretBlindIndex && conflictGroupByBlindIndex[secretBlindIndex]) || !secretId + ) + .forEach((el) => { + conflicts.push({ op: SecretOperations.Update, secretId: el.id }); + }); - const secretDeletionCommits = secretApprovalSecrets.filter(({ op }) => op === SecretOperations.Delete); - const botKey = await projectBotService.getBotKey(projectId).catch(() => null); - const mergeStatus = await secretApprovalRequestDAL.transaction(async (tx) => { - const newSecrets = secretCreationCommits.length - ? await fnSecretBulkInsert({ - tx, - folderId, - inputSecrets: secretCreationCommits.map((el) => ({ - ...pick(el, [ - "secretCommentCiphertext", - "secretCommentTag", - "secretCommentIV", - "secretValueIV", - "secretValueTag", - "secretValueCiphertext", - "secretKeyCiphertext", - "secretKeyTag", - "secretKeyIV", - "metadata", - "skipMultilineEncoding", - "secretReminderNote", - "secretReminderRepeatDays", - "algorithm", - "keyEncoding", - "secretBlindIndex" - ]), - tags: el?.tags.map(({ id }) => id), - version: 1, - type: SecretType.Shared, - references: botKey - ? getAllNestedSecretReferences( - decryptSymmetric128BitHexKeyUTF8({ - ciphertext: el.secretValueCiphertext, - iv: el.secretValueIV, - tag: el.secretValueTag, - key: botKey - }) - ) - : undefined - })), - secretDAL, - secretVersionDAL, - secretTagDAL, - secretVersionTagDAL - }) - : []; - const updatedSecrets = secretUpdationCommits.length - ? await fnSecretBulkUpdate({ - folderId, - projectId, - tx, - inputSecrets: secretUpdationCommits.map((el) => ({ - filter: { - id: el.secretId as string, // this null check is already checked at top on conflict strategy - type: SecretType.Shared - }, - data: { - tags: el?.tags.map(({ id }) => id), + secretUpdationCommits = secretUpdationCommits.filter( + ({ secretBlindIndex, secretId }) => + Boolean(secretId) && (secretBlindIndex ? !conflictGroupByBlindIndex[secretBlindIndex] : true) + ); + } + + const secretDeletionCommits = secretApprovalSecrets.filter(({ op }) => op === SecretOperations.Delete); + mergeStatus = await secretApprovalRequestDAL.transaction(async (tx) => { + const newSecrets = secretCreationCommits.length + ? await fnSecretBulkInsert({ + tx, + folderId, + inputSecrets: secretCreationCommits.map((el) => ({ ...pick(el, [ "secretCommentCiphertext", "secretCommentTag", @@ -428,8 +693,13 @@ export const secretApprovalRequestServiceFactory = ({ "skipMultilineEncoding", "secretReminderNote", "secretReminderRepeatDays", + "algorithm", + "keyEncoding", "secretBlindIndex" ]), + tags: el?.tags.map(({ id }) => id), + version: 1, + type: SecretType.Shared, references: botKey ? 
getAllNestedSecretReferences( decryptSymmetric128BitHexKeyUTF8({ @@ -440,50 +710,99 @@ export const secretApprovalRequestServiceFactory = ({ }) ) : undefined - } - })), - secretDAL, - secretVersionDAL, - secretTagDAL, - secretVersionTagDAL - }) - : []; - const deletedSecret = secretDeletionCommits.length - ? await fnSecretBulkDelete({ - projectId, - folderId, - tx, - actorId: "", - secretDAL, - secretQueueService, - inputSecrets: secretDeletionCommits.map(({ secretBlindIndex }) => { - if (!secretBlindIndex) { - throw new BadRequestError({ - message: "Missing secret blind index" - }); - } - return { secretBlindIndex, type: SecretType.Shared }; + })), + secretDAL, + secretVersionDAL, + secretTagDAL, + secretVersionTagDAL }) - }) - : []; - const updatedSecretApproval = await secretApprovalRequestDAL.updateById( - secretApprovalRequest.id, - { - conflicts: JSON.stringify(conflicts), - hasMerged: true, - status: RequestState.Closed, - statusChangeBy: membership.id - }, - tx - ); - return { - secrets: { created: newSecrets, updated: updatedSecrets, deleted: deletedSecret }, - approval: updatedSecretApproval - }; - }); + : []; + const updatedSecrets = secretUpdationCommits.length + ? await fnSecretBulkUpdate({ + folderId, + projectId, + tx, + inputSecrets: secretUpdationCommits.map((el) => ({ + filter: { + id: el.secretId as string, // this null check is already checked at top on conflict strategy + type: SecretType.Shared + }, + data: { + tags: el?.tags.map(({ id }) => id), + ...pick(el, [ + "secretCommentCiphertext", + "secretCommentTag", + "secretCommentIV", + "secretValueIV", + "secretValueTag", + "secretValueCiphertext", + "secretKeyCiphertext", + "secretKeyTag", + "secretKeyIV", + "metadata", + "skipMultilineEncoding", + "secretReminderNote", + "secretReminderRepeatDays", + "secretBlindIndex" + ]), + references: botKey + ? getAllNestedSecretReferences( + decryptSymmetric128BitHexKeyUTF8({ + ciphertext: el.secretValueCiphertext, + iv: el.secretValueIV, + tag: el.secretValueTag, + key: botKey + }) + ) + : undefined + } + })), + secretDAL, + secretVersionDAL, + secretTagDAL, + secretVersionTagDAL + }) + : []; + const deletedSecret = secretDeletionCommits.length + ? 
await fnSecretBulkDelete({ + projectId, + folderId, + tx, + actorId: "", + secretDAL, + secretQueueService, + inputSecrets: secretDeletionCommits.map(({ secretBlindIndex, secret }) => { + if (!secretBlindIndex) { + throw new NotFoundError({ + message: `Secret blind index not found on secret with ID '${secret.id}'` + }); + } + return { secretBlindIndex, type: SecretType.Shared }; + }) + }) + : []; + const updatedSecretApproval = await secretApprovalRequestDAL.updateById( + secretApprovalRequest.id, + { + conflicts: JSON.stringify(conflicts), + hasMerged: true, + status: RequestState.Closed, + statusChangedByUserId: actorId + }, + tx + ); + return { + secrets: { created: newSecrets, updated: updatedSecrets, deleted: deletedSecret }, + approval: updatedSecretApproval + }; + }); + } + await snapshotService.performSnapshot(folderId); const [folder] = await folderDAL.findSecretPathByFolderIds(projectId, [folderId]); - if (!folder) throw new BadRequestError({ message: "Folder not found" }); + if (!folder) { + throw new NotFoundError({ message: `Folder with ID '${folderId}' not found in project with ID '${projectId}'` }); + } await secretQueueService.syncSecrets({ projectId, secretPath: folder.path, @@ -491,6 +810,35 @@ export const secretApprovalRequestServiceFactory = ({ actorId, actor }); + + if (isSoftEnforcement) { + const cfg = getConfig(); + const project = await projectDAL.findProjectById(projectId); + const env = await projectEnvDAL.findOne({ id: policy.envId }); + const requestedByUser = await userDAL.findOne({ id: actorId }); + const approverUsers = await userDAL.find({ + $in: { + id: policy.approvers.map((approver: { userId: string | null | undefined }) => approver.userId!) + } + }); + + await smtpService.sendMail({ + recipients: approverUsers.filter((approver) => approver.email).map((approver) => approver.email!), + subjectLine: "Infisical Secret Change Policy Bypassed", + + substitutions: { + projectName: project.name, + requesterFullName: `${requestedByUser.firstName} ${requestedByUser.lastName}`, + requesterEmail: requestedByUser.email, + bypassReason, + secretPath: policy.secretPath, + environment: env.name, + approvalUrl: `${cfg.SITE_URL}/project/${project.id}/approval` + }, + template: SmtpTemplates.AccessSecretRequestBypassed + }); + } + return mergeStatus; }; @@ -509,7 +857,7 @@ export const secretApprovalRequestServiceFactory = ({ }: TGenerateSecretApprovalRequestDTO) => { if (actor === ActorType.SERVICE) throw new BadRequestError({ message: "Cannot use service token" }); - const { permission, membership } = await permissionService.getProjectPermission( + const { permission } = await permissionService.getProjectPermission( actor, actorId, projectId, @@ -525,15 +873,19 @@ export const secretApprovalRequestServiceFactory = ({ const folder = await folderDAL.findBySecretPath(projectId, environment, secretPath); if (!folder) - throw new BadRequestError({ - message: "Folder not found for the given environment slug & secret path", + throw new NotFoundError({ + message: `Folder not found for environment with slug '${environment}' & secret path '${secretPath}'`, name: "GenSecretApproval" }); const folderId = folder.id; const blindIndexCfg = await secretBlindIndexDAL.findOne({ projectId }); - if (!blindIndexCfg) throw new BadRequestError({ message: "Blind index not found", name: "Update secret" }); - + if (!blindIndexCfg) { + throw new NotFoundError({ + message: `Blind index not found for project with ID '${projectId}'`, + name: "Update secret" + }); + } const commits: Omit[] = []; 
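// [Editor's aside] The merge gate above combines a minimum-approval quorum with the
// policy's enforcement level: a soft policy may merge without quorum, after which the
// service emails approvers that the policy was bypassed. A minimal, self-contained
// TypeScript sketch of that decision; the literal types below are simplified stand-ins,
// not the repo's exports:
type ReviewerMap = Record<string, "approved" | "rejected">;
const canMerge = (
  requiredApprovals: number,
  approverUserIds: (string | null)[],
  reviewers: ReviewerMap,
  enforcementLevel: "hard" | "soft"
): boolean => {
  // count only approvers whose recorded review is an approval
  const approvedCount = approverUserIds.filter(
    (userId) => userId !== null && reviewers[userId] === "approved"
  ).length;
  const hasMinApproval = requiredApprovals <= approvedCount;
  // soft enforcement lets the request merge anyway (the bypass-notification path)
  return hasMinApproval || enforcementLevel === "soft";
};
// e.g. canMerge(2, ["u1", "u2"], { u1: "approved" }, "soft") === true via the bypass path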
const commitTagIds: Record = {}; // for created secret approval change @@ -626,7 +978,9 @@ export const secretApprovalRequestServiceFactory = ({ secretDAL }); const secretsGroupedByBlindIndex = groupBy(secrets, (i) => { - if (!i.secretBlindIndex) throw new BadRequestError({ message: "Missing secret blind index" }); + if (!i.secretBlindIndex) { + throw new NotFoundError({ message: `Secret blind index not found for secret with ID '${i.id}'` }); + } return i.secretBlindIndex; }); const deletedSecretIds = deletedSecrets.map( @@ -637,7 +991,7 @@ export const secretApprovalRequestServiceFactory = ({ ...deletedSecrets.map((el) => { const secretId = secretsGroupedByBlindIndex[keyName2BlindIndex[el.secretName]][0].id; if (!latestSecretVersions[secretId].secretBlindIndex) - throw new BadRequestError({ message: "Failed to find secret blind index" }); + throw new NotFoundError({ message: `Secret blind index not found for secret with ID '${secretId}'` }); return { op: SecretOperations.Delete as const, ...latestSecretVersions[secretId], @@ -653,7 +1007,7 @@ export const secretApprovalRequestServiceFactory = ({ const tagIds = unique(Object.values(commitTagIds).flat()); const tags = tagIds.length ? await secretTagDAL.findManyTagsById(projectId, tagIds) : []; - if (tagIds.length !== tags.length) throw new BadRequestError({ message: "Tag not found" }); + if (tagIds.length !== tags.length) throw new NotFoundError({ message: "One or more tags not found" }); const secretApprovalRequest = await secretApprovalRequestDAL.transaction(async (tx) => { const doc = await secretApprovalRequestDAL.create( @@ -663,7 +1017,7 @@ export const secretApprovalRequestServiceFactory = ({ policyId: policy.id, status: "open", hasMerged: false, - committerId: membership.id + committerUserId: actorId }, tx ); @@ -719,7 +1073,7 @@ export const secretApprovalRequestServiceFactory = ({ const commitsGroupByBlindIndex = groupBy(approvalCommits, (i) => { if (!i.secretBlindIndex) { - throw new BadRequestError({ message: "Missing secret blind index" }); + throw new NotFoundError({ message: `Secret blind index not found for secret with ID '${i.id}'` }); } return i.secretBlindIndex; }); @@ -738,10 +1092,333 @@ export const secretApprovalRequestServiceFactory = ({ } return { ...doc, commits: approvalCommits }; }); + + const env = await projectEnvDAL.findOne({ id: policy.envId }); + const user = await userDAL.findById(secretApprovalRequest.committerUserId); + await triggerSlackNotification({ + projectId, + projectDAL, + kmsService, + projectSlackConfigDAL, + notification: { + type: SlackTriggerFeature.SECRET_APPROVAL, + payload: { + userEmail: user.email as string, + environment: env.name, + secretPath, + projectId, + requestId: secretApprovalRequest.id + } + } + }); + + await sendApprovalEmailsFn({ + projectDAL, + secretApprovalPolicyDAL, + secretApprovalRequest, + smtpService, + projectId + }); + return secretApprovalRequest; }; + + const generateSecretApprovalRequestV2Bridge = async ({ + data, + actorId, + actor, + actorOrgId, + actorAuthMethod, + policy, + projectId, + secretPath, + environment + }: TGenerateSecretApprovalRequestV2BridgeDTO) => { + if (actor === ActorType.SERVICE || actor === ActorType.Machine) + throw new BadRequestError({ message: "Cannot use service token or machine token over protected branches" }); + + const { permission } = await permissionService.getProjectPermission( + actor, + actorId, + projectId, + actorAuthMethod, + actorOrgId + ); + + const folder = await folderDAL.findBySecretPath(projectId, environment, 
secretPath); + if (!folder) + throw new NotFoundError({ + message: `Folder not found for the environment slug '${environment}' & secret path '${secretPath}'`, + name: "GenSecretApproval" + }); + const folderId = folder.id; + + const commits: Omit[] = []; + const commitTagIds: Record<string, string[]> = {}; + + const { encryptor: secretManagerEncryptor } = await kmsService.createCipherPairWithDataKey({ + type: KmsDataKey.SecretManager, + projectId + }); + + // for created secret approval change + const createdSecrets = data[SecretOperations.Create]; + if (createdSecrets && createdSecrets?.length) { + const secrets = await secretV2BridgeDAL.findBySecretKeys( + folderId, + createdSecrets.map((el) => ({ + key: el.secretKey, + type: SecretType.Shared + })) + ); + if (secrets.length) + throw new BadRequestError({ message: `Secrets already exist: ${secrets.map((el) => el.key).join(",")}` }); + + commits.push( + ...createdSecrets.map((createdSecret) => ({ + op: SecretOperations.Create, + version: 1, + encryptedComment: setKnexStringValue( + createdSecret.secretComment, + (value) => secretManagerEncryptor({ plainText: Buffer.from(value) }).cipherTextBlob + ), + encryptedValue: setKnexStringValue( + createdSecret.secretValue, + (value) => secretManagerEncryptor({ plainText: Buffer.from(value) }).cipherTextBlob + ), + skipMultilineEncoding: createdSecret.skipMultilineEncoding, + key: createdSecret.secretKey, + type: SecretType.Shared + })) + ); + createdSecrets.forEach(({ tagIds, secretKey }) => { + if (tagIds?.length) commitTagIds[secretKey] = tagIds; + }); + } + // now handle secret approval for update operations + const secretsToUpdate = data[SecretOperations.Update]; + if (secretsToUpdate && secretsToUpdate?.length) { + const secretsToUpdateStoredInDB = await secretV2BridgeDAL.findBySecretKeys( + folderId, + secretsToUpdate.map((el) => ({ + key: el.secretKey, + type: SecretType.Shared + })) + ); + if (secretsToUpdateStoredInDB.length !== secretsToUpdate.length) + throw new NotFoundError({ + message: `Secret does not exist: ${secretsToUpdateStoredInDB.map((el) => el.key).join(",")}` + }); + + // now find any secret that needs to update its name + // same process as above + const secretsWithNewName = secretsToUpdate.filter(({ newSecretName }) => Boolean(newSecretName)); + if (secretsWithNewName.length) { + const secrets = await secretV2BridgeDAL.findBySecretKeys( + folderId, + secretsWithNewName.map((el) => ({ + key: el.secretKey, + type: SecretType.Shared + })) + ); + if (secrets.length) + throw new BadRequestError({ + message: `Secret with new name already exists: ${secrets.map((el) => el.key).join(",")}` + }); + } + + const updatingSecretsGroupByKey = groupBy(secretsToUpdateStoredInDB, (el) => el.key); + const latestSecretVersions = await secretVersionV2BridgeDAL.findLatestVersionMany( + folderId, + secretsToUpdateStoredInDB.map(({ id }) => id) + ); + commits.push( + ...secretsToUpdate.map( + ({ + newSecretName, + secretKey, + tagIds, + secretValue, + reminderRepeatDays, + reminderNote, + secretComment, + metadata, + skipMultilineEncoding + }) => { + const secretId = updatingSecretsGroupByKey[secretKey][0].id; + if (tagIds?.length) commitTagIds[secretKey] = tagIds; + return { + ...latestSecretVersions[secretId], + key: newSecretName || secretKey, + encryptedComment: setKnexStringValue( + secretComment, + (value) => secretManagerEncryptor({ plainText: Buffer.from(value) }).cipherTextBlob + ), + encryptedValue: setKnexStringValue( + secretValue, + (value) => secretManagerEncryptor({ plainText: Buffer.from(value) 
}).cipherTextBlob + ), + reminderRepeatDays, + reminderNote, + metadata, + skipMultilineEncoding, + op: SecretOperations.Update as const, + secret: secretId, + secretVersion: latestSecretVersions[secretId].id, + version: updatingSecretsGroupByKey[secretKey][0].version || 1 + }; + } + ) + ); + } + // deleted secrets + const deletedSecrets = data[SecretOperations.Delete]; + if (deletedSecrets && deletedSecrets.length) { + const secretsToDeleteInDB = await secretV2BridgeDAL.findBySecretKeys( + folderId, + deletedSecrets.map((el) => ({ + key: el.secretKey, + type: SecretType.Shared + })) + ); + if (secretsToDeleteInDB.length !== deletedSecrets.length) + throw new NotFoundError({ + message: `Secret does not exist: ${secretsToDeleteInDB.map((el) => el.key).join(",")}` + }); + const secretsGroupedByKey = groupBy(secretsToDeleteInDB, (i) => i.key); + const deletedSecretIds = deletedSecrets.map((el) => secretsGroupedByKey[el.secretKey][0].id); + const latestSecretVersions = await secretVersionV2BridgeDAL.findLatestVersionMany(folderId, deletedSecretIds); + commits.push( + ...deletedSecrets.map(({ secretKey }) => { + const secretId = secretsGroupedByKey[secretKey][0].id; + return { + op: SecretOperations.Delete as const, + ...latestSecretVersions[secretId], + key: secretKey, + secret: secretId, + secretVersion: latestSecretVersions[secretId].id + }; + }) + ); + } + + if (!commits.length) throw new BadRequestError({ message: "Empty commits" }); + + const tagIds = unique(Object.values(commitTagIds).flat()); + const tags = tagIds.length ? await secretTagDAL.findManyTagsById(projectId, tagIds) : []; + if (tagIds.length !== tags.length) throw new NotFoundError({ message: "Tag not found" }); + const tagsGroupById = groupBy(tags, (i) => i.id); + + commits.forEach((commit) => { + let action = ProjectPermissionActions.Create; + if (commit.op === SecretOperations.Update) action = ProjectPermissionActions.Edit; + if (commit.op === SecretOperations.Delete) action = ProjectPermissionActions.Delete; + + ForbiddenError.from(permission).throwUnlessCan( + action, + subject(ProjectPermissionSub.Secrets, { + environment, + secretPath, + secretName: commit.key, + secretTags: commitTagIds?.[commit.key]?.map((secretTagId) => tagsGroupById[secretTagId][0].slug) + }) + ); + }); + + const secretApprovalRequest = await secretApprovalRequestDAL.transaction(async (tx) => { + const doc = await secretApprovalRequestDAL.create( + { + folderId, + slug: alphaNumericNanoId(), + policyId: policy.id, + status: "open", + hasMerged: false, + committerUserId: actorId + }, + tx + ); + const approvalCommits = await secretApprovalRequestSecretDAL.insertV2Bridge( + commits.map( + ({ + version, + op, + key, + encryptedComment, + skipMultilineEncoding, + metadata, + reminderNote, + reminderRepeatDays, + encryptedValue, + secretId, + secretVersion + }) => ({ + version, + requestId: doc.id, + op, + secretId, + metadata, + secretVersion, + skipMultilineEncoding, + encryptedValue, + reminderRepeatDays, + reminderNote, + encryptedComment, + key + }) + ), + tx + ); + + const commitsGroupByKey = groupBy(approvalCommits, (i) => i.key); + if (tagIds.length) { + await secretApprovalRequestSecretDAL.insertApprovalSecretV2Tags( + Object.keys(commitTagIds).flatMap((blindIndex) => + commitTagIds[blindIndex] + ? 
commitTagIds[blindIndex].map((tagId) => ({ + secretId: commitsGroupByKey[blindIndex][0].id, + tagId + })) + : [] + ), + tx + ); + } + + return { ...doc, commits: approvalCommits }; + }); + + const user = await userDAL.findById(secretApprovalRequest.committerUserId); + const env = await projectEnvDAL.findOne({ id: policy.envId }); + await triggerSlackNotification({ + projectId, + projectDAL, + kmsService, + projectSlackConfigDAL, + notification: { + type: SlackTriggerFeature.SECRET_APPROVAL, + payload: { + userEmail: user.email as string, + environment: env.name, + secretPath, + projectId, + requestId: secretApprovalRequest.id + } + } + }); + + await sendApprovalEmailsFn({ + projectDAL, + secretApprovalPolicyDAL, + secretApprovalRequest, + smtpService, + projectId + }); + return secretApprovalRequest; + }; + return { generateSecretApprovalRequest, + generateSecretApprovalRequestV2Bridge, mergeSecretApprovalRequest, reviewApproval, updateApprovalStatus, diff --git a/backend/src/ee/services/secret-approval-request/secret-approval-request-types.ts b/backend/src/ee/services/secret-approval-request/secret-approval-request-types.ts index 1fbb754184..50a70fd605 100644 --- a/backend/src/ee/services/secret-approval-request/secret-approval-request-types.ts +++ b/backend/src/ee/services/secret-approval-request/secret-approval-request-types.ts @@ -26,6 +26,23 @@ export type TApprovalUpdateSecret = Partial & { tagIds?: string[]; }; +export type TApprovalCreateSecretV2Bridge = { + secretKey: string; + secretValue?: string; + secretComment?: string; + reminderNote?: string | null; + reminderRepeatDays?: number | null; + skipMultilineEncoding?: boolean; + metadata?: Record; + tagIds?: string[]; +}; + +export type TApprovalUpdateSecretV2Bridge = Partial & { + secretKey: string; + newSecretName?: string; + tagIds?: string[]; +}; + export type TGenerateSecretApprovalRequestDTO = { environment: string; secretPath: string; @@ -37,8 +54,20 @@ export type TGenerateSecretApprovalRequestDTO = { }; } & TProjectPermission; +export type TGenerateSecretApprovalRequestV2BridgeDTO = { + environment: string; + secretPath: string; + policy: TSecretApprovalPolicies; + data: { + [SecretOperations.Create]?: TApprovalCreateSecretV2Bridge[]; + [SecretOperations.Update]?: TApprovalUpdateSecretV2Bridge[]; + [SecretOperations.Delete]?: { secretKey: string }[]; + }; +} & TProjectPermission; + export type TMergeSecretApprovalRequestDTO = { approvalId: string; + bypassReason?: string; } & Omit; export type TStatusChangeDTO = { diff --git a/backend/src/ee/services/secret-replication/secret-replication-service.ts b/backend/src/ee/services/secret-replication/secret-replication-service.ts index fd2f7cc1a4..81d467bab7 100644 --- a/backend/src/ee/services/secret-replication/secret-replication-service.ts +++ b/backend/src/ee/services/secret-replication/secret-replication-service.ts @@ -1,29 +1,37 @@ -import { SecretType, TSecrets } from "@app/db/schemas"; +import { SecretType, TSecrets, TSecretsV2 } from "@app/db/schemas"; import { TSecretApprovalPolicyServiceFactory } from "@app/ee/services/secret-approval-policy/secret-approval-policy-service"; import { TSecretApprovalRequestDALFactory } from "@app/ee/services/secret-approval-request/secret-approval-request-dal"; import { TSecretApprovalRequestSecretDALFactory } from "@app/ee/services/secret-approval-request/secret-approval-request-secret-dal"; import { KeyStorePrefixes, TKeyStoreFactory } from "@app/keystore/keystore"; import { decryptSymmetric128BitHexKeyUTF8 } from "@app/lib/crypto"; 
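// [Editor's aside] The V2 bridge DTOs above bucket a change request by operation
// (create/update/delete, keyed by the SecretOperations enum). A hedged usage sketch of
// how a caller might assemble such a payload; the bucket names, enum string values, and
// field values here are illustrative assumptions, not the repo's literal API:
type ChangeSet = {
  create?: { secretKey: string; secretValue?: string; tagIds?: string[] }[];
  update?: { secretKey: string; newSecretName?: string; secretValue?: string }[];
  delete?: { secretKey: string }[];
};
const changeSet: ChangeSet = {
  create: [{ secretKey: "DB_HOST", secretValue: "localhost", tagIds: ["tag-1"] }],
  update: [{ secretKey: "DB_USER", newSecretName: "DB_USERNAME" }],
  delete: [{ secretKey: "LEGACY_TOKEN" }]
};
// each bucket becomes one group of commits on the generated approval request
const commitCount =
  (changeSet.create?.length ?? 0) + (changeSet.update?.length ?? 0) + (changeSet.delete?.length ?? 0);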
-import { BadRequestError } from "@app/lib/errors"; +import { NotFoundError } from "@app/lib/errors"; import { groupBy, unique } from "@app/lib/fn"; import { logger } from "@app/lib/logger"; import { alphaNumericNanoId } from "@app/lib/nanoid"; import { QueueName, TQueueServiceFactory } from "@app/queue"; import { ActorType } from "@app/services/auth/auth-type"; +import { TKmsServiceFactory } from "@app/services/kms/kms-service"; +import { KmsDataKey } from "@app/services/kms/kms-types"; import { TProjectBotServiceFactory } from "@app/services/project-bot/project-bot-service"; -import { TProjectMembershipDALFactory } from "@app/services/project-membership/project-membership-dal"; import { TSecretDALFactory } from "@app/services/secret/secret-dal"; import { fnSecretBulkInsert, fnSecretBulkUpdate } from "@app/services/secret/secret-fns"; import { TSecretQueueFactory, uniqueSecretQueueKey } from "@app/services/secret/secret-queue"; import { SecretOperations } from "@app/services/secret/secret-types"; import { TSecretVersionDALFactory } from "@app/services/secret/secret-version-dal"; import { TSecretVersionTagDALFactory } from "@app/services/secret/secret-version-tag-dal"; -import { TSecretBlindIndexDALFactory } from "@app/services/secret-blind-index/secret-blind-index-dal"; import { TSecretFolderDALFactory } from "@app/services/secret-folder/secret-folder-dal"; import { ReservedFolders } from "@app/services/secret-folder/secret-folder-types"; import { TSecretImportDALFactory } from "@app/services/secret-import/secret-import-dal"; -import { fnSecretsFromImports } from "@app/services/secret-import/secret-import-fns"; +import { fnSecretsFromImports, fnSecretsV2FromImports } from "@app/services/secret-import/secret-import-fns"; import { TSecretTagDALFactory } from "@app/services/secret-tag/secret-tag-dal"; +import { TSecretV2BridgeDALFactory } from "@app/services/secret-v2-bridge/secret-v2-bridge-dal"; +import { + fnSecretBulkInsert as fnSecretV2BridgeBulkInsert, + fnSecretBulkUpdate as fnSecretV2BridgeBulkUpdate, + getAllSecretReferences +} from "@app/services/secret-v2-bridge/secret-v2-bridge-fns"; +import { TSecretVersionV2DALFactory } from "@app/services/secret-v2-bridge/secret-version-dal"; +import { TSecretVersionV2TagDALFactory } from "@app/services/secret-v2-bridge/secret-version-tag-dal"; import { MAX_REPLICATION_DEPTH } from "./secret-replication-constants"; @@ -33,25 +41,42 @@ type TSecretReplicationServiceFactoryDep = { "find" | "findByBlindIndexes" | "insertMany" | "bulkUpdate" | "delete" | "upsertSecretReferences" | "transaction" >; secretVersionDAL: Pick; + secretV2BridgeDAL: Pick< + TSecretV2BridgeDALFactory, + "find" | "findBySecretKeys" | "insertMany" | "bulkUpdate" | "delete" | "upsertSecretReferences" | "transaction" + >; + secretVersionV2BridgeDAL: Pick< + TSecretVersionV2DALFactory, + "find" | "insertMany" | "update" | "findLatestVersionMany" + >; secretImportDAL: Pick; folderDAL: Pick< TSecretFolderDALFactory, "findSecretPathByFolderIds" | "findBySecretPath" | "create" | "findOne" | "findByManySecretPath" >; secretVersionTagDAL: Pick; + secretVersionV2TagBridgeDAL: Pick; secretQueueService: Pick; queueService: Pick; secretApprovalPolicyService: Pick; keyStore: Pick; - secretBlindIndexDAL: Pick; - secretTagDAL: Pick; + secretTagDAL: Pick< + TSecretTagDALFactory, + | "findManyTagsById" + | "saveTagsToSecret" + | "deleteTagsManySecret" + | "find" + | "saveTagsToSecretV2" + | "deleteTagsToSecretV2" + >; secretApprovalRequestDAL: Pick; - projectMembershipDAL: Pick; 
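// [Editor's aside] The dependency block being assembled here narrows each injected DAL
// with Pick<...> so a service can only reach the methods it declares. A tiny
// self-contained sketch of that factory pattern, with invented names for illustration:
type UserDAL = {
  findById: (id: string) => Promise<{ id: string; email: string }>;
  deleteById: (id: string) => Promise<void>;
};
type TGreeterServiceFactoryDep = {
  userDAL: Pick<UserDAL, "findById">; // deleteById is not reachable from this service
};
export const greeterServiceFactory = ({ userDAL }: TGreeterServiceFactoryDep) => {
  const greet = async (userId: string) => {
    const user = await userDAL.findById(userId);
    return `Hello, ${user.email}`;
  };
  return { greet };
};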
secretApprovalRequestSecretDAL: Pick< TSecretApprovalRequestSecretDALFactory, - "insertMany" | "insertApprovalSecretTags" + "insertMany" | "insertApprovalSecretTags" | "insertV2Bridge" >; + projectBotService: Pick; + kmsService: Pick; }; export type TSecretReplicationServiceFactory = ReturnType; @@ -92,10 +117,13 @@ export const secretReplicationServiceFactory = ({ secretApprovalRequestSecretDAL, secretApprovalRequestDAL, secretQueueService, - projectMembershipDAL, - projectBotService + projectBotService, + secretVersionV2TagBridgeDAL, + secretVersionV2BridgeDAL, + secretV2BridgeDAL, + kmsService }: TSecretReplicationServiceFactoryDep) => { - const getReplicatedSecrets = ( + const $getReplicatedSecrets = ( botKey: string, localSecrets: TSecrets[], importedSecrets: { secrets: TSecrets[] }[] @@ -122,6 +150,25 @@ export const secretReplicationServiceFactory = ({ return secrets; }; + const $getReplicatedSecretsV2 = ( + localSecrets: (TSecretsV2 & { secretKey: string; secretValue?: string })[], + importedSecrets: { secrets: (TSecretsV2 & { secretKey: string; secretValue?: string })[] }[] + ) => { + const deDupe = new Set(); + const secrets = [...localSecrets]; + + for (let i = importedSecrets.length - 1; i >= 0; i = -1) { + importedSecrets[i].secrets.forEach((el) => { + if (deDupe.has(el.key)) { + return; + } + deDupe.add(el.key); + secrets.push(el); + }); + } + return secrets; + }; + // IMPORTANT NOTE BEFORE READING THE FUNCTION // SOURCE - Where secrets are copied from // DESTINATION - Where the replicated imports that points to SOURCE from Destination @@ -142,6 +189,7 @@ export const secretReplicationServiceFactory = ({ const folder = await folderDAL.findBySecretPath(projectId, environmentSlug, secretPath); if (!folder) return; + const { botKey, shouldUseSecretV2Bridge } = await projectBotService.getBotKey(projectId); // the the replicated imports made to the source. These are the destinations const destinationSecretImports = await secretImportDAL.find({ @@ -194,8 +242,274 @@ export const secretReplicationServiceFactory = ({ : destinationReplicatedSecretImports; if (!destinationReplicatedSecretImports.length) return; - const botKey = await projectBotService.getBotKey(projectId); + if (shouldUseSecretV2Bridge) { + const { decryptor: secretManagerDecryptor } = await kmsService.createCipherPairWithDataKey({ + type: KmsDataKey.SecretManager, + projectId + }); + // these are the secrets to be added in replicated folders + const sourceLocalSecrets = await secretV2BridgeDAL.find({ folderId: folder.id, type: SecretType.Shared }); + const sourceSecretImports = await secretImportDAL.find({ folderId: folder.id }); + const sourceImportedSecrets = await fnSecretsV2FromImports({ + secretImports: sourceSecretImports, + secretDAL: secretV2BridgeDAL, + folderDAL, + secretImportDAL, + decryptor: (value) => (value ? secretManagerDecryptor({ cipherTextBlob: value }).toString() : ""), + hasSecretAccess: () => true + }); + // secrets that gets replicated across imports + const sourceDecryptedLocalSecrets = sourceLocalSecrets.map((el) => ({ + ...el, + secretKey: el.key, + secretValue: el.encryptedValue + ? 
secretManagerDecryptor({ cipherTextBlob: el.encryptedValue }).toString() + : undefined + })); + const sourceSecrets = $getReplicatedSecretsV2(sourceDecryptedLocalSecrets, sourceImportedSecrets); + const sourceSecretsGroupByKey = groupBy(sourceSecrets, (i) => i.key); + + const lock = await keyStore.acquireLock( + [getReplicationKeyLockPrefix(projectId, environmentSlug, secretPath)], + 5000 + ); + + try { + /* eslint-disable no-await-in-loop */ + for (const destinationSecretImport of destinationReplicatedSecretImports) { + try { + const hasJobCompleted = await keyStore.getItem( + keystoreReplicationSuccessKey(job.id as string, destinationSecretImport.id), + KeyStorePrefixes.SecretReplication + ); + if (hasJobCompleted) { + logger.info( + { jobId: job.id, importId: destinationSecretImport.id }, + "Skipping this job as this has been successfully replicated." + ); + // eslint-disable-next-line + continue; + } + + const [destinationFolder] = await folderDAL.findSecretPathByFolderIds(projectId, [ + destinationSecretImport.folderId + ]); + if (!destinationFolder) + throw new NotFoundError({ + message: `Imported folder with ID '${destinationSecretImport.folderId}' not found in project with ID ${projectId}` + }); + + let destinationReplicationFolder = await folderDAL.findOne({ + parentId: destinationFolder.id, + name: getReplicationFolderName(destinationSecretImport.id), + isReserved: true + }); + if (!destinationReplicationFolder) { + destinationReplicationFolder = await folderDAL.create({ + parentId: destinationFolder.id, + name: getReplicationFolderName(destinationSecretImport.id), + envId: destinationFolder.envId, + isReserved: true + }); + } + const destinationReplicationFolderId = destinationReplicationFolder.id; + + const destinationLocalSecretsFromDB = await secretV2BridgeDAL.find({ + folderId: destinationReplicationFolderId + }); + const destinationLocalSecrets = destinationLocalSecretsFromDB.map((el) => ({ + ...el, + secretKey: el.key, + secretValue: el.encryptedValue + ? 
secretManagerDecryptor({ cipherTextBlob: el.encryptedValue }).toString() + : undefined + })); + + const destinationLocalSecretsGroupedByKey = groupBy(destinationLocalSecrets, (i) => i.key); + + const locallyCreatedSecrets = sourceSecrets + .filter(({ key }) => !destinationLocalSecretsGroupedByKey[key]?.[0]) + .map((el) => ({ ...el, operation: SecretOperations.Create })); // rewrite update ops to create + + const locallyUpdatedSecrets = sourceSecrets + .filter( + ({ key, secretKey, secretValue }) => + destinationLocalSecretsGroupedByKey[key]?.[0] && + // if key or value changed + (destinationLocalSecretsGroupedByKey[key]?.[0]?.secretKey !== secretKey || + destinationLocalSecretsGroupedByKey[key]?.[0]?.secretValue !== secretValue) + ) + .map((el) => ({ ...el, operation: SecretOperations.Update })); // mark changed secrets as update ops + + const locallyDeletedSecrets = destinationLocalSecrets + .filter(({ key }) => !sourceSecretsGroupByKey[key]?.[0]) + .map((el) => ({ ...el, operation: SecretOperations.Delete })); + + const isEmpty = + locallyCreatedSecrets.length + locallyUpdatedSecrets.length + locallyDeletedSecrets.length === 0; + // eslint-disable-next-line + if (isEmpty) continue; + + const policy = await secretApprovalPolicyService.getSecretApprovalPolicy( + projectId, + destinationFolder.environmentSlug, + destinationFolder.path + ); + // this means it should be an approval request rather than direct replication + if (policy && actor === ActorType.USER) { + const localSecretsLatestVersions = destinationLocalSecrets.map(({ id }) => id); + const latestSecretVersions = await secretVersionV2BridgeDAL.findLatestVersionMany( + destinationReplicationFolderId, + localSecretsLatestVersions + ); + await secretApprovalRequestDAL.transaction(async (tx) => { + const approvalRequestDoc = await secretApprovalRequestDAL.create( + { + folderId: destinationReplicationFolderId, + slug: alphaNumericNanoId(), + policyId: policy.id, + status: "open", + hasMerged: false, + committerUserId: actorId, + isReplicated: true + }, + tx + ); + const commits = locallyCreatedSecrets + .concat(locallyUpdatedSecrets) + .concat(locallyDeletedSecrets) + .map((doc) => { + const { operation } = doc; + const localSecret = destinationLocalSecretsGroupedByKey[doc.key]?.[0]; + + return { + op: operation, + requestId: approvalRequestDoc.id, + metadata: doc.metadata, + key: doc.key, + encryptedValue: doc.encryptedValue, + encryptedComment: doc.encryptedComment, + skipMultilineEncoding: doc.skipMultilineEncoding, + // except for create operations, the other two need the secret id and version id + ...(operation !== SecretOperations.Create + ? { secretId: localSecret.id, secretVersion: latestSecretVersions[localSecret.id].id } + : {}) + }; + }); + const approvalCommits = await secretApprovalRequestSecretDAL.insertV2Bridge(commits, tx); + + return { ...approvalRequestDoc, commits: approvalCommits }; + }); + } else { + await secretDAL.transaction(async (tx) => { + if (locallyCreatedSecrets.length) { + await fnSecretV2BridgeBulkInsert({ + folderId: destinationReplicationFolderId, + secretVersionDAL: secretVersionV2BridgeDAL, + secretDAL: secretV2BridgeDAL, + tx, + secretTagDAL, + secretVersionTagDAL: secretVersionV2TagBridgeDAL, + inputSecrets: locallyCreatedSecrets.map((doc) => { + return { + type: doc.type, + metadata: doc.metadata, + key: doc.key, + encryptedValue: doc.encryptedValue, + encryptedComment: doc.encryptedComment, + skipMultilineEncoding: doc.skipMultilineEncoding, + references: doc.secretValue ? 
getAllSecretReferences(doc.secretValue).nestedReferences : [] + }; + }) + }); + } + if (locallyUpdatedSecrets.length) { + await fnSecretV2BridgeBulkUpdate({ + folderId: destinationReplicationFolderId, + secretVersionDAL: secretVersionV2BridgeDAL, + secretDAL: secretV2BridgeDAL, + tx, + secretTagDAL, + secretVersionTagDAL: secretVersionV2TagBridgeDAL, + inputSecrets: locallyUpdatedSecrets.map((doc) => { + return { + filter: { + folderId: destinationReplicationFolderId, + id: destinationLocalSecretsGroupedByKey[doc.key][0].id + }, + data: { + type: doc.type, + metadata: doc.metadata, + key: doc.key, + encryptedValue: doc.encryptedValue as Buffer, + encryptedComment: doc.encryptedComment, + skipMultilineEncoding: doc.skipMultilineEncoding, + references: doc.secretValue ? getAllSecretReferences(doc.secretValue).nestedReferences : [] + } + }; + }) + }); + } + if (locallyDeletedSecrets.length) { + await secretV2BridgeDAL.delete( + { + $in: { + id: locallyDeletedSecrets.map(({ id }) => id) + }, + folderId: destinationReplicationFolderId + }, + tx + ); + } + }); + + await secretQueueService.syncSecrets({ + projectId, + secretPath: destinationFolder.path, + environmentSlug: destinationFolder.environmentSlug, + actorId, + actor, + _depth: depth + 1, + _deDupeReplicationQueue: deDupeReplicationQueue, + _deDupeQueue: deDupeQueue + }); + } + + // this is used to avoid multiple times generating secret approval by failed one + await keyStore.setItemWithExpiry( + keystoreReplicationSuccessKey(job.id as string, destinationSecretImport.id), + SECRET_IMPORT_SUCCESS_LOCK, + 1, + KeyStorePrefixes.SecretReplication + ); + + await secretImportDAL.updateById(destinationSecretImport.id, { + lastReplicated: new Date(), + replicationStatus: null, + isReplicationSuccess: true + }); + } catch (err) { + logger.error( + err, + `Failed to replicate secret with import id=[${destinationSecretImport.id}] env=[${destinationSecretImport.importEnv.slug}] path=[${destinationSecretImport.importPath}]` + ); + await secretImportDAL.updateById(destinationSecretImport.id, { + lastReplicated: new Date(), + replicationStatus: (err as Error)?.message.slice(0, 500), + isReplicationSuccess: false + }); + } + } + /* eslint-enable no-await-in-loop */ + } finally { + await lock.release(); + logger.info(job.data, "Replication finished"); + } + return; + } + + if (!botKey) throw new NotFoundError({ message: `Bot key not found for project with ID ${projectId}` }); // these are the secrets to be added in replicated folders const sourceLocalSecrets = await secretDAL.find({ folderId: folder.id, type: SecretType.Shared }); const sourceSecretImports = await secretImportDAL.find({ folderId: folder.id }); @@ -206,7 +520,7 @@ export const secretReplicationServiceFactory = ({ secretImportDAL }); // secrets that gets replicated across imports - const sourceSecrets = getReplicatedSecrets(botKey, sourceLocalSecrets, sourceImportedSecrets); + const sourceSecrets = $getReplicatedSecrets(botKey, sourceLocalSecrets, sourceImportedSecrets); const sourceSecretsGroupByBlindIndex = groupBy(sourceSecrets, (i) => i.secretBlindIndex as string); const lock = await keyStore.acquireLock( @@ -234,7 +548,11 @@ export const secretReplicationServiceFactory = ({ const [destinationFolder] = await folderDAL.findSecretPathByFolderIds(projectId, [ destinationSecretImport.folderId ]); - if (!destinationFolder) throw new BadRequestError({ message: "Imported folder not found" }); + if (!destinationFolder) { + throw new NotFoundError({ + message: `Imported folder with ID 
'${destinationSecretImport.folderId}' not found in project with ID ${projectId}` + }); + } let destinationReplicationFolder = await folderDAL.findOne({ parentId: destinationFolder.id, @@ -297,12 +615,6 @@ export const secretReplicationServiceFactory = ({ ); // this means it should be a approval request rather than direct replication if (policy && actor === ActorType.USER) { - const membership = await projectMembershipDAL.findOne({ projectId, userId: actorId }); - if (!membership) { - logger.error("Project membership not found in %s for user %s", projectId, actorId); - return; - } - const localSecretsLatestVersions = destinationLocalSecrets.map(({ id }) => id); const latestSecretVersions = await secretVersionDAL.findLatestVersionMany( destinationReplicationFolderId, @@ -316,7 +628,7 @@ export const secretReplicationServiceFactory = ({ policyId: policy.id, status: "open", hasMerged: false, - committerId: membership.id, + committerUserId: actorId, isReplicated: true }, tx @@ -381,7 +693,8 @@ export const secretReplicationServiceFactory = ({ secretCommentIV: doc.secretCommentIV, secretCommentTag: doc.secretCommentTag, secretCommentCiphertext: doc.secretCommentCiphertext, - skipMultilineEncoding: doc.skipMultilineEncoding + skipMultilineEncoding: doc.skipMultilineEncoding, + references: getAllSecretReferences(doc.secretValue).nestedReferences }; }) }); @@ -416,7 +729,8 @@ export const secretReplicationServiceFactory = ({ secretCommentIV: doc.secretCommentIV, secretCommentTag: doc.secretCommentTag, secretCommentCiphertext: doc.secretCommentCiphertext, - skipMultilineEncoding: doc.skipMultilineEncoding + skipMultilineEncoding: doc.skipMultilineEncoding, + references: getAllSecretReferences(doc.secretValue).nestedReferences } }; }) diff --git a/backend/src/ee/services/secret-rotation/secret-rotation-dal.ts b/backend/src/ee/services/secret-rotation/secret-rotation-dal.ts index 7feafdc6be..7f885e4f93 100644 --- a/backend/src/ee/services/secret-rotation/secret-rotation-dal.ts +++ b/backend/src/ee/services/secret-rotation/secret-rotation-dal.ts @@ -10,6 +10,7 @@ export type TSecretRotationDALFactory = ReturnType { const secretRotationOrm = ormify(db, TableName.SecretRotation); const secretRotationOutputOrm = ormify(db, TableName.SecretRotationOutput); + const secretRotationOutputV2Orm = ormify(db, TableName.SecretRotationOutputV2); const findQuery = (filter: TFindFilter, tx: Knex) => tx(TableName.SecretRotation) @@ -31,17 +32,11 @@ export const secretRotationDALFactory = (db: TDbClient) => { .select(tx.ref("version").withSchema(TableName.Secret).as("secVersion")) .select(tx.ref("secretKeyIV").withSchema(TableName.Secret)) .select(tx.ref("secretKeyTag").withSchema(TableName.Secret)) - .select(tx.ref("secretKeyCiphertext").withSchema(TableName.Secret)) - .select(tx.ref("secretValueIV").withSchema(TableName.Secret)) - .select(tx.ref("secretValueTag").withSchema(TableName.Secret)) - .select(tx.ref("secretValueCiphertext").withSchema(TableName.Secret)) - .select(tx.ref("secretCommentIV").withSchema(TableName.Secret)) - .select(tx.ref("secretCommentTag").withSchema(TableName.Secret)) - .select(tx.ref("secretCommentCiphertext").withSchema(TableName.Secret)); + .select(tx.ref("secretKeyCiphertext").withSchema(TableName.Secret)); const find = async (filter: TFindFilter, tx?: Knex) => { try { - const data = await findQuery(filter, tx || db); + const data = await findQuery(filter, tx || db.replicaNode()); return sqlNestRelationships({ data, key: "id", @@ -54,33 +49,65 @@ export const secretRotationDALFactory = 
(db: TDbClient) => { { key: "secId", label: "outputs" as const, - mapper: ({ - secId, - outputKey, - secVersion, - secretKeyIV, - secretKeyTag, - secretKeyCiphertext, - secretValueTag, - secretValueIV, - secretValueCiphertext, - secretCommentIV, - secretCommentTag, - secretCommentCiphertext - }) => ({ + mapper: ({ secId, outputKey, secVersion, secretKeyIV, secretKeyTag, secretKeyCiphertext }) => ({ key: outputKey, secret: { id: secId, version: secVersion, secretKeyIV, secretKeyTag, - secretKeyCiphertext, - secretValueTag, - secretValueIV, - secretValueCiphertext, - secretCommentIV, - secretCommentTag, - secretCommentCiphertext + secretKeyCiphertext + } + }) + } + ] + }); + } catch (error) { + throw new DatabaseError({ error, name: "SecretRotationFind" }); + } + }; + + const findQuerySecretV2 = (filter: TFindFilter, tx: Knex) => + tx(TableName.SecretRotation) + .where(filter) + .join(TableName.Environment, `${TableName.SecretRotation}.envId`, `${TableName.Environment}.id`) + .leftJoin( + TableName.SecretRotationOutputV2, + `${TableName.SecretRotation}.id`, + `${TableName.SecretRotationOutputV2}.rotationId` + ) + .join(TableName.SecretV2, `${TableName.SecretRotationOutputV2}.secretId`, `${TableName.SecretV2}.id`) + .select(selectAllTableCols(TableName.SecretRotation)) + .select(tx.ref("name").withSchema(TableName.Environment).as("envName")) + .select(tx.ref("slug").withSchema(TableName.Environment).as("envSlug")) + .select(tx.ref("id").withSchema(TableName.Environment).as("envId")) + .select(tx.ref("projectId").withSchema(TableName.Environment)) + .select(tx.ref("key").withSchema(TableName.SecretRotationOutputV2).as("outputKey")) + .select(tx.ref("id").withSchema(TableName.SecretV2).as("secId")) + .select(tx.ref("version").withSchema(TableName.SecretV2).as("secVersion")) + .select(tx.ref("key").withSchema(TableName.SecretV2).as("secretKey")); + + const findSecretV2 = async (filter: TFindFilter, tx?: Knex) => { + try { + const data = await findQuerySecretV2(filter, tx || db.replicaNode()); + return sqlNestRelationships({ + data, + key: "id", + parentMapper: (el) => ({ + ...SecretRotationsSchema.parse(el), + projectId: el.projectId, + environment: { id: el.envId, name: el.envName, slug: el.envSlug } + }), + childrenMapper: [ + { + key: "secId", + label: "outputs" as const, + mapper: ({ secId, outputKey, secVersion, secretKey }) => ({ + key: outputKey, + secret: { + id: secId, + version: secVersion, + secretKey } }) } @@ -93,7 +120,7 @@ export const secretRotationDALFactory = (db: TDbClient) => { const findById = async (id: string, tx?: Knex) => { try { - const doc = await (tx || db)(TableName.SecretRotation) + const doc = await (tx || db.replicaNode())(TableName.SecretRotation) .join(TableName.Environment, `${TableName.SecretRotation}.envId`, `${TableName.Environment}.id`) .where({ [`${TableName.SecretRotation}.id` as "id"]: id }) .select(selectAllTableCols(TableName.SecretRotation)) @@ -114,12 +141,19 @@ export const secretRotationDALFactory = (db: TDbClient) => { }; const findRotationOutputsByRotationId = async (rotationId: string) => secretRotationOutputOrm.find({ rotationId }); + const findRotationOutputsV2ByRotationId = async (rotationId: string) => + secretRotationOutputV2Orm.find({ rotationId }); + + // special query return { ...secretRotationOrm, find, + findSecretV2, findById, secretOutputInsertMany: secretRotationOutputOrm.insertMany, - findRotationOutputsByRotationId + secretOutputV2InsertMany: secretRotationOutputV2Orm.insertMany, + findRotationOutputsByRotationId, + 
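// [Editor's aside] findSecretV2 above flattens a rotation/environment/secret join and
// then re-nests the rows into one parent document with an outputs[] child array via
// sqlNestRelationships. A simplified re-implementation of that nesting step, under the
// assumption of a plain left-joined row shape (this is not the repo's actual helper):
type FlatRotationRow = { id: string; envName: string; secId: string | null; outputKey: string | null };
const nestRotationRows = (rows: FlatRotationRow[]) => {
  const byId = new Map<string, { id: string; envName: string; outputs: { key: string; secretId: string }[] }>();
  for (const row of rows) {
    const parent = byId.get(row.id) ?? { id: row.id, envName: row.envName, outputs: [] };
    // left-joined parents without outputs yield null child columns; skip those
    if (row.secId && row.outputKey) parent.outputs.push({ key: row.outputKey, secretId: row.secId });
    byId.set(row.id, parent);
  }
  return [...byId.values()];
};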
findRotationOutputsV2ByRotationId }; }; diff --git a/backend/src/ee/services/secret-rotation/secret-rotation-queue/secret-rotation-queue-fn.ts b/backend/src/ee/services/secret-rotation/secret-rotation-queue/secret-rotation-queue-fn.ts index 93f63a6851..46c519d58e 100644 --- a/backend/src/ee/services/secret-rotation/secret-rotation-queue/secret-rotation-queue-fn.ts +++ b/backend/src/ee/services/secret-rotation/secret-rotation-queue/secret-rotation-queue-fn.ts @@ -85,7 +85,8 @@ export const secretRotationDbFn = async ({ password, username, client, - variables + variables, + options }: TSecretRotationDbFn) => { const appCfg = getConfig(); @@ -117,7 +118,8 @@ export const secretRotationDbFn = async ({ password, connectionTimeoutMillis: EXTERNAL_REQUEST_TIMEOUT, ssl, - pool: { min: 0, max: 1 } + pool: { min: 0, max: 1 }, + options } }); const data = await db.raw(query, variables); @@ -153,6 +155,14 @@ export const getDbSetQuery = (db: TDbProviderClients, variables: { username: str variables: [variables.username] }; } + + if (db === TDbProviderClients.MsSqlServer) { + return { + query: `ALTER LOGIN ?? WITH PASSWORD = '${variables.password}'`, + variables: [variables.username] + }; + } + // add more based on client return { query: `ALTER USER ?? IDENTIFIED BY '${variables.password}'`, diff --git a/backend/src/ee/services/secret-rotation/secret-rotation-queue/secret-rotation-queue-types.ts b/backend/src/ee/services/secret-rotation/secret-rotation-queue/secret-rotation-queue-types.ts index c40a8da0f2..e0d4c18761 100644 --- a/backend/src/ee/services/secret-rotation/secret-rotation-queue/secret-rotation-queue-types.ts +++ b/backend/src/ee/services/secret-rotation/secret-rotation-queue/secret-rotation-queue-types.ts @@ -24,4 +24,5 @@ export type TSecretRotationDbFn = { query: string; variables: unknown[]; ca?: string; + options?: Record; }; diff --git a/backend/src/ee/services/secret-rotation/secret-rotation-queue/secret-rotation-queue.ts b/backend/src/ee/services/secret-rotation/secret-rotation-queue/secret-rotation-queue.ts index 140a9b6710..015cce0e5e 100644 --- a/backend/src/ee/services/secret-rotation/secret-rotation-queue/secret-rotation-queue.ts +++ b/backend/src/ee/services/secret-rotation/secret-rotation-queue/secret-rotation-queue.ts @@ -13,13 +13,17 @@ import { infisicalSymmetricEncypt } from "@app/lib/crypto/encryption"; import { daysToMillisecond, secondsToMillis } from "@app/lib/dates"; -import { BadRequestError } from "@app/lib/errors"; +import { NotFoundError } from "@app/lib/errors"; import { logger } from "@app/lib/logger"; import { alphaNumericNanoId } from "@app/lib/nanoid"; import { QueueJobs, QueueName, TQueueServiceFactory } from "@app/queue"; +import { TKmsServiceFactory } from "@app/services/kms/kms-service"; +import { KmsDataKey } from "@app/services/kms/kms-types"; import { TProjectBotServiceFactory } from "@app/services/project-bot/project-bot-service"; import { TSecretDALFactory } from "@app/services/secret/secret-dal"; import { TSecretVersionDALFactory } from "@app/services/secret/secret-version-dal"; +import { TSecretV2BridgeDALFactory } from "@app/services/secret-v2-bridge/secret-v2-bridge-dal"; +import { TSecretVersionV2DALFactory } from "@app/services/secret-v2-bridge/secret-version-dal"; import { TTelemetryServiceFactory } from "@app/services/telemetry/telemetry-service"; import { PostHogEventTypes } from "@app/services/telemetry/telemetry-types"; @@ -47,8 +51,11 @@ type TSecretRotationQueueFactoryDep = { secretRotationDAL: TSecretRotationDALFactory; 
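// [Editor's aside] getDbSetQuery above dispatches on the provider client: the new MSSQL
// branch rotates logins with ALTER LOGIN and is later health-checked with SELECT
// GETDATE(), while other clients keep SELECT NOW(). A trimmed sketch of that dispatch;
// the client union and the non-MSSQL statements are illustrative assumptions, not the
// repo's full enum:
type RotationDbClient = "mssql" | "mysql2" | "pg";
const getRotationQueries = (client: RotationDbClient, username: string) => {
  if (client === "mssql") {
    return {
      setQuery: "ALTER LOGIN ?? WITH PASSWORD = '<newPassword>'",
      testQuery: "SELECT GETDATE()",
      variables: [username]
    };
  }
  // default branch mirrors the generic ALTER USER form used for the other clients
  return {
    setQuery: "ALTER USER ?? IDENTIFIED BY '<newPassword>'",
    testQuery: "SELECT NOW()",
    variables: [username]
  };
};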
projectBotService: Pick; secretDAL: Pick; + secretV2BridgeDAL: Pick; secretVersionDAL: Pick; + secretVersionV2BridgeDAL: Pick; telemetryService: Pick; + kmsService: Pick; }; // These error should stop the repeatable job and ask user to reconfigure rotation @@ -70,7 +77,10 @@ export const secretRotationQueueFactory = ({ projectBotService, secretDAL, secretVersionDAL, - telemetryService + telemetryService, + secretV2BridgeDAL, + secretVersionV2BridgeDAL, + kmsService }: TSecretRotationQueueFactoryDep) => { const addToQueue = async (rotationId: string, interval: number) => { const appCfg = getConfig(); @@ -84,7 +94,9 @@ export const secretRotationQueueFactory = ({ // on prod it this will be in days, in development this will be second every: appCfg.NODE_ENV === "development" ? secondsToMillis(interval) : daysToMillisecond(interval), immediately: true - } + }, + removeOnComplete: true, + removeOnFail: true } ); }; @@ -104,6 +116,7 @@ export const secretRotationQueueFactory = ({ queue.start(QueueName.SecretRotation, async (job) => { const { rotationId } = job.data; + const appCfg = getConfig(); logger.info(`secretRotationQueue.process: [rotationDocument=${rotationId}]`); const secretRotation = await secretRotationDAL.findById(rotationId); const rotationProvider = rotationTemplates.find(({ name }) => name === secretRotation?.provider); @@ -111,7 +124,13 @@ export const secretRotationQueueFactory = ({ try { if (!rotationProvider || !secretRotation) throw new DisableRotationErrors({ message: "Provider not found" }); - const rotationOutputs = await secretRotationDAL.findRotationOutputsByRotationId(rotationId); + const { botKey, shouldUseSecretV2Bridge } = await projectBotService.getBotKey(secretRotation.projectId); + let rotationOutputs; + if (shouldUseSecretV2Bridge) { + rotationOutputs = await secretRotationDAL.findRotationOutputsV2ByRotationId(rotationId); + } else { + rotationOutputs = await secretRotationDAL.findRotationOutputsByRotationId(rotationId); + } if (!rotationOutputs.length) throw new DisableRotationErrors({ message: "Secrets not found" }); // deep copy @@ -156,6 +175,15 @@ export const secretRotationQueueFactory = ({ // set a random value for new password newCredential.internal.rotated_password = alphaNumericNanoId(32); const { admin_username: username, admin_password: password, host, database, port, ca } = newCredential.inputs; + + const options = + provider.template.client === TDbProviderClients.MsSqlServer + ? ({ + encrypt: appCfg.ENABLE_MSSQL_SECRET_ROTATION_ENCRYPT, + cryptoCredentialsDetails: ca ? { ca } : {} + } as Record) + : undefined; + const dbFunctionArg = { username, password, @@ -163,8 +191,10 @@ export const secretRotationQueueFactory = ({ database, port, ca: ca as string, - client: provider.template.client === TDbProviderClients.MySql ? "mysql2" : provider.template.client + client: provider.template.client === TDbProviderClients.MySql ? "mysql2" : provider.template.client, + options } as TSecretRotationDbFn; + // set function await secretRotationDbFn({ ...dbFunctionArg, @@ -173,12 +203,17 @@ export const secretRotationQueueFactory = ({ username: newCredential.internal.username as string }) }); + // test function + const testQuery = + provider.template.client === TDbProviderClients.MsSqlServer ? 
"SELECT GETDATE()" : "SELECT NOW()"; + await secretRotationDbFn({ ...dbFunctionArg, - query: "SELECT NOW()", + query: testQuery, variables: [] }); + newCredential.outputs.db_username = newCredential.internal.username; newCredential.outputs.db_password = newCredential.internal.rotated_password; // clean up @@ -267,62 +302,117 @@ export const secretRotationQueueFactory = ({ internal: newCredential.internal }); const encVarData = infisicalSymmetricEncypt(JSON.stringify(variables)); - const key = await projectBotService.getBotKey(secretRotation.projectId); - const encryptedSecrets = rotationOutputs.map(({ key: outputKey, secretId }) => ({ - secretId, - value: encryptSymmetric128BitHexKeyUTF8( - typeof newCredential.outputs[outputKey] === "object" - ? JSON.stringify(newCredential.outputs[outputKey]) - : String(newCredential.outputs[outputKey]), - key - ) - })); - // map the final values to output keys in the board - await secretRotationDAL.transaction(async (tx) => { - await secretRotationDAL.updateById( - rotationId, - { - encryptedData: encVarData.ciphertext, - encryptedDataIV: encVarData.iv, - encryptedDataTag: encVarData.tag, - keyEncoding: encVarData.encoding, - algorithm: encVarData.algorithm, - lastRotatedAt: new Date(), - statusMessage: "Rotated successfull", - status: "success" - }, - tx - ); - const updatedSecrets = await secretDAL.bulkUpdate( - encryptedSecrets.map(({ secretId, value }) => ({ - // this secret id is validated when user is inserted - filter: { id: secretId, type: SecretType.Shared }, - data: { - secretValueCiphertext: value.ciphertext, - secretValueIV: value.iv, - secretValueTag: value.tag - } - })), - tx - ); - await secretVersionDAL.insertMany( - updatedSecrets.map(({ id, updatedAt, createdAt, ...el }) => { - if (!el.secretBlindIndex) throw new BadRequestError({ message: "Missing blind index" }); - return { - ...el, - secretId: id, - secretBlindIndex: el.secretBlindIndex - }; - }), - tx - ); + const { encryptor: secretManagerEncryptor } = await kmsService.createCipherPairWithDataKey({ + type: KmsDataKey.SecretManager, + projectId: secretRotation.projectId }); + const numberOfSecretsRotated = rotationOutputs.length; + if (shouldUseSecretV2Bridge) { + const encryptedSecrets = rotationOutputs.map(({ key: outputKey, secretId }) => ({ + secretId, + value: + typeof newCredential.outputs[outputKey] === "object" + ? 
JSON.stringify(newCredential.outputs[outputKey]) + : String(newCredential.outputs[outputKey]) + })); + // map the final values to output keys in the board + await secretRotationDAL.transaction(async (tx) => { + await secretRotationDAL.updateById( + rotationId, + { + encryptedData: encVarData.ciphertext, + encryptedDataIV: encVarData.iv, + encryptedDataTag: encVarData.tag, + keyEncoding: encVarData.encoding, + algorithm: encVarData.algorithm, + lastRotatedAt: new Date(), + statusMessage: "Rotated successfully", + status: "success" + }, + tx + ); + const updatedSecrets = await secretV2BridgeDAL.bulkUpdate( + encryptedSecrets.map(({ secretId, value }) => ({ + // this secret id is validated when user is inserted + filter: { id: secretId, type: SecretType.Shared }, + data: { + encryptedValue: secretManagerEncryptor({ plainText: Buffer.from(value) }).cipherTextBlob + } + })), + tx + ); + await secretVersionV2BridgeDAL.insertMany( + updatedSecrets.map(({ id, updatedAt, createdAt, ...el }) => ({ + ...el, + secretId: id + })), + tx + ); + }); + } else { + if (!botKey) + throw new NotFoundError({ + message: `Project bot not found for project with ID '${secretRotation.projectId}'` + }); + const encryptedSecrets = rotationOutputs.map(({ key: outputKey, secretId }) => ({ + secretId, + value: encryptSymmetric128BitHexKeyUTF8( + typeof newCredential.outputs[outputKey] === "object" + ? JSON.stringify(newCredential.outputs[outputKey]) + : String(newCredential.outputs[outputKey]), + botKey + ) + })); + // map the final values to output keys in the board + await secretRotationDAL.transaction(async (tx) => { + await secretRotationDAL.updateById( + rotationId, + { + encryptedData: encVarData.ciphertext, + encryptedDataIV: encVarData.iv, + encryptedDataTag: encVarData.tag, + keyEncoding: encVarData.encoding, + algorithm: encVarData.algorithm, + lastRotatedAt: new Date(), + statusMessage: "Rotated successfully", + status: "success" + }, + tx + ); + const updatedSecrets = await secretDAL.bulkUpdate( + encryptedSecrets.map(({ secretId, value }) => ({ + // this secret id is validated when user is inserted + filter: { id: secretId, type: SecretType.Shared }, + data: { + secretValueCiphertext: value.ciphertext, + secretValueIV: value.iv, + secretValueTag: value.tag + } + })), + tx + ); + await secretVersionDAL.insertMany( + updatedSecrets.map(({ id, updatedAt, createdAt, ...el }) => { + if (!el.secretBlindIndex) { + throw new NotFoundError({ message: `Secret blind index not found on secret with ID '${id}'` }); + } + return { + ...el, + secretId: id, + secretBlindIndex: el.secretBlindIndex + }; + }), + tx + ); + }); + } + await telemetryService.sendPostHogEvents({ event: PostHogEventTypes.SecretRotated, distinctId: "", properties: { - numberOfSecrets: encryptedSecrets.length, + numberOfSecrets: numberOfSecretsRotated, environment: secretRotation.environment.slug, secretPath: secretRotation.secretPath, workspaceId: secretRotation.projectId @@ -331,7 +421,7 @@ logger.info("Finished rotating: rotation id: ", rotationId); } catch (error) { - logger.error(error); + logger.error(error, "Failed to execute secret rotation"); if (error instanceof DisableRotationErrors) { if (job.id) { await queue.stopRepeatableJobByJobId(QueueName.SecretRotation, job.id); diff --git a/backend/src/ee/services/secret-rotation/secret-rotation-service.ts b/backend/src/ee/services/secret-rotation/secret-rotation-service.ts index 1e1648a662..6dde2657f3 100644
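The queue hunk above now persists rotated values down two paths: v3 ("secret v2 bridge") projects encrypt with a per-project KMS data key into a single blob column, while older projects keep the bot-key symmetric scheme with separate ciphertext/IV/tag columns. A compressed illustration of the fork, using stand-in types rather than the real Infisical interfaces:

```ts
// Stand-in types; the real DAL and KMS interfaces are richer than this.
type TRotatedValue = { secretId: string; value: string };

type TWriteDeps = {
  shouldUseSecretV2Bridge: boolean;
  // v3 projects: per-project KMS data key producing a single encrypted blob
  secretManagerEncryptor: (args: { plainText: Buffer }) => { cipherTextBlob: Buffer };
  // legacy projects: symmetric encryption under the project bot key
  encryptWithBotKey: (plainText: string, botKey: string) => { ciphertext: string; iv: string; tag: string };
  botKey?: string;
};

const toStoredSecretUpdate = ({ secretId, value }: TRotatedValue, deps: TWriteDeps) => {
  if (deps.shouldUseSecretV2Bridge) {
    // v2 bridge schema keeps one encryptedValue blob column
    return {
      secretId,
      encryptedValue: deps.secretManagerEncryptor({ plainText: Buffer.from(value) }).cipherTextBlob
    };
  }
  if (!deps.botKey) throw new Error("Project bot not found"); // mirrors the NotFoundError above
  const { ciphertext, iv, tag } = deps.encryptWithBotKey(value, deps.botKey);
  // legacy schema stores ciphertext, IV and tag separately
  return { secretId, secretValueCiphertext: ciphertext, secretValueIV: iv, secretValueTag: tag };
};
```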
--- a/backend/src/ee/services/secret-rotation/secret-rotation-service.ts +++ b/backend/src/ee/services/secret-rotation/secret-rotation-service.ts @@ -1,12 +1,15 @@ import { ForbiddenError, subject } from "@casl/ability"; import Ajv from "ajv"; -import { infisicalSymmetricEncypt } from "@app/lib/crypto/encryption"; -import { BadRequestError } from "@app/lib/errors"; +import { ProjectVersion, TableName } from "@app/db/schemas"; +import { decryptSymmetric128BitHexKeyUTF8, infisicalSymmetricEncypt } from "@app/lib/crypto/encryption"; +import { BadRequestError, NotFoundError } from "@app/lib/errors"; import { TProjectPermission } from "@app/lib/types"; import { TProjectDALFactory } from "@app/services/project/project-dal"; +import { TProjectBotServiceFactory } from "@app/services/project-bot/project-bot-service"; import { TSecretDALFactory } from "@app/services/secret/secret-dal"; import { TSecretFolderDALFactory } from "@app/services/secret-folder/secret-folder-dal"; +import { TSecretV2BridgeDALFactory } from "@app/services/secret-v2-bridge/secret-v2-bridge-dal"; import { TLicenseServiceFactory } from "../license/license-service"; import { TPermissionServiceFactory } from "../permission/permission-service"; @@ -22,9 +25,11 @@ type TSecretRotationServiceFactoryDep = { projectDAL: Pick; folderDAL: Pick; secretDAL: Pick; + secretV2BridgeDAL: Pick; licenseService: Pick; permissionService: Pick; secretRotationQueue: TSecretRotationQueueFactory; + projectBotService: Pick; }; export type TSecretRotationServiceFactory = ReturnType; @@ -37,7 +42,9 @@ export const secretRotationServiceFactory = ({ licenseService, projectDAL, folderDAL, - secretDAL + secretDAL, + projectBotService, + secretV2BridgeDAL }: TSecretRotationServiceFactoryDep) => { const getProviderTemplates = async ({ actor, @@ -87,20 +94,35 @@ ); const folder = await folderDAL.findBySecretPath(projectId, environment, secretPath); - if (!folder) throw new BadRequestError({ message: "Secret path not found" }); + if (!folder) { + throw new NotFoundError({ + message: `Secret path '${secretPath}' not found in environment with slug '${environment}'` + }); + } ForbiddenError.from(permission).throwUnlessCan( ProjectPermissionActions.Edit, subject(ProjectPermissionSub.Secrets, { environment, secretPath }) ); - const selectedSecrets = await secretDAL.find({ - folderId: folder.id, - $in: { id: Object.values(outputs) } - }); - if (selectedSecrets.length !== Object.values(outputs).length) - throw new BadRequestError({ message: "Secrets not found" }); - const project = await projectDAL.findById(projectId); + const shouldUseBridge = project.version === ProjectVersion.V3; + + if (shouldUseBridge) { + const selectedSecrets = await secretV2BridgeDAL.find({ + folderId: folder.id, + $in: { [`${TableName.SecretV2}.id` as "id"]: Object.values(outputs) } + }); + if (selectedSecrets.length !== Object.values(outputs).length) + throw new NotFoundError({ message: `Secrets not found in folder with ID '${folder.id}'` }); + } else { + const selectedSecrets = await secretDAL.find({ + folderId: folder.id, + $in: { id: Object.values(outputs) } + }); + if (selectedSecrets.length !== Object.values(outputs).length) + throw new NotFoundError({ message: `Secrets not found in folder with ID '${folder.id}'` }); + } + const plan = await licenseService.getPlan(project.orgId); if (!plan.secretRotation) throw new BadRequestError({ @@ -108,7 +130,7 @@ }); const selectedTemplate =
rotationTemplates.find(({ name }) => name === provider); - if (!selectedTemplate) throw new BadRequestError({ message: "Provider not found" }); + if (!selectedTemplate) throw new NotFoundError({ message: `Provider with name '${provider}' not found` }); const formattedInputs: Record = {}; Object.entries(inputs).forEach(([key, value]) => { const { type } = selectedTemplate.template.inputs.properties[key]; @@ -133,7 +155,7 @@ export const secretRotationServiceFactory = ({ creds: [] }; const encData = infisicalSymmetricEncypt(JSON.stringify(unencryptedData)); - const secretRotation = secretRotationDAL.transaction(async (tx) => { + const secretRotation = await secretRotationDAL.transaction(async (tx) => { const doc = await secretRotationDAL.create( { provider, @@ -148,13 +170,21 @@ export const secretRotationServiceFactory = ({ }, tx ); - await secretRotationQueue.addToQueue(doc.id, doc.interval); - const outputSecretMapping = await secretRotationDAL.secretOutputInsertMany( - Object.entries(outputs).map(([key, secretId]) => ({ key, secretId, rotationId: doc.id })), - tx - ); + let outputSecretMapping; + if (shouldUseBridge) { + outputSecretMapping = await secretRotationDAL.secretOutputV2InsertMany( + Object.entries(outputs).map(([key, secretId]) => ({ key, secretId, rotationId: doc.id })), + tx + ); + } else { + outputSecretMapping = await secretRotationDAL.secretOutputInsertMany( + Object.entries(outputs).map(([key, secretId]) => ({ key, secretId, rotationId: doc.id })), + tx + ); + } return { ...doc, outputs: outputSecretMapping, environment: folder.environment }; }); + await secretRotationQueue.addToQueue(secretRotation.id, secretRotation.interval); return secretRotation; }; @@ -167,13 +197,35 @@ export const secretRotationServiceFactory = ({ actorOrgId ); ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.SecretRotation); - const doc = await secretRotationDAL.find({ projectId }); - return doc; + const { botKey, shouldUseSecretV2Bridge } = await projectBotService.getBotKey(projectId); + if (shouldUseSecretV2Bridge) { + const docs = await secretRotationDAL.findSecretV2({ projectId }); + return docs; + } + + if (!botKey) throw new NotFoundError({ message: `Project bot not found for project with ID '${projectId}'` }); + const docs = await secretRotationDAL.find({ projectId }); + return docs.map((el) => ({ + ...el, + outputs: el.outputs.map((output) => ({ + ...output, + secret: { + id: output.secret.id, + version: output.secret.version, + secretKey: decryptSymmetric128BitHexKeyUTF8({ + ciphertext: output.secret.secretKeyCiphertext, + iv: output.secret.secretKeyIV, + tag: output.secret.secretKeyTag, + key: botKey + }) + } + })) + })); }; const restartById = async ({ actor, actorId, actorOrgId, actorAuthMethod, rotationId }: TRestartDTO) => { const doc = await secretRotationDAL.findById(rotationId); - if (!doc) throw new BadRequestError({ message: "Rotation not found" }); + if (!doc) throw new NotFoundError({ message: `Rotation with ID '${rotationId}' not found` }); const project = await projectDAL.findById(doc.projectId); const plan = await licenseService.getPlan(project.orgId); @@ -197,7 +249,7 @@ export const secretRotationServiceFactory = ({ const deleteById = async ({ actor, actorId, actorOrgId, actorAuthMethod, rotationId }: TDeleteDTO) => { const doc = await secretRotationDAL.findById(rotationId); - if (!doc) throw new BadRequestError({ message: "Rotation not found" }); + if (!doc) throw new NotFoundError({ message: `Rotation with ID 
'${rotationId}' not found` }); const { permission } = await permissionService.getProjectPermission( actor, @@ -212,9 +264,9 @@ ); const deletedDoc = await secretRotationDAL.transaction(async (tx) => { const strat = await secretRotationDAL.deleteById(rotationId, tx); - await secretRotationQueue.removeFromQueue(strat.id, strat.interval); return strat; }); + await secretRotationQueue.removeFromQueue(deletedDoc.id, deletedDoc.interval); return { ...doc, ...deletedDoc }; }; diff --git a/backend/src/ee/services/secret-rotation/templates/index.ts b/backend/src/ee/services/secret-rotation/templates/index.ts index 05811d5bdb..39774ae28a 100644 --- a/backend/src/ee/services/secret-rotation/templates/index.ts +++ b/backend/src/ee/services/secret-rotation/templates/index.ts @@ -1,4 +1,5 @@ import { AWS_IAM_TEMPLATE } from "./aws-iam"; +import { MSSQL_TEMPLATE } from "./mssql"; import { MYSQL_TEMPLATE } from "./mysql"; import { POSTGRES_TEMPLATE } from "./postgres"; import { SENDGRID_TEMPLATE } from "./sendgrid"; @@ -26,6 +27,13 @@ export const rotationTemplates: TSecretRotationProviderTemplate[] = [ description: "Rotate MySQL@7/MariaDB user credentials", template: MYSQL_TEMPLATE }, + { + name: "mssql", + title: "Microsoft SQL Server", + image: "mssqlserver.png", + description: "Rotate Microsoft SQL Server user credentials", + template: MSSQL_TEMPLATE + }, { name: "aws-iam", title: "AWS IAM", diff --git a/backend/src/ee/services/secret-rotation/templates/mssql.ts b/backend/src/ee/services/secret-rotation/templates/mssql.ts new file mode 100644 index 0000000000..30096590d9 --- /dev/null +++ b/backend/src/ee/services/secret-rotation/templates/mssql.ts @@ -0,0 +1,33 @@ +import { TDbProviderClients, TProviderFunctionTypes } from "./types"; + +export const MSSQL_TEMPLATE = { + type: TProviderFunctionTypes.DB as const, + client: TDbProviderClients.MsSqlServer, + inputs: { + type: "object" as const, + properties: { + admin_username: { type: "string" as const }, + admin_password: { type: "string" as const }, + host: { type: "string" as const }, + database: { type: "string" as const, default: "master" }, + port: { type: "integer" as const, default: 1433 }, + username1: { + type: "string", + default: "infisical-sql-user1", + desc: "SQL Server login name that must be created at server level with a matching database user" + }, + username2: { + type: "string", + default: "infisical-sql-user2", + desc: "SQL Server login name that must be created at server level with a matching database user" + }, + ca: { type: "string", desc: "SSL certificate for db auth (string)" } + }, + required: ["admin_username", "admin_password", "host", "database", "username1", "username2", "port"], + additionalProperties: false + }, + outputs: { + db_username: { type: "string" }, + db_password: { type: "string" } + } +}; diff --git a/backend/src/ee/services/secret-rotation/templates/types.ts b/backend/src/ee/services/secret-rotation/templates/types.ts index 690b6ccf02..2adc40ba37 100644 --- a/backend/src/ee/services/secret-rotation/templates/types.ts +++ b/backend/src/ee/services/secret-rotation/templates/types.ts @@ -8,7 +8,9 @@ export enum TDbProviderClients { // postgres, cockroack db, amazon red shift Pg = "pg", // mysql and maria db - MySql = "mysql" + MySql = "mysql", + + MsSqlServer = "mssql" } export enum TAwsProviderSystems {
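The MSSQL template above is a JSON-schema-style description that the rotation service validates caller inputs against (note the Ajv import in the service diff). A hedged sketch of that validation step against a trimmed copy of the template's `inputs`; `strict: false` is an assumption so Ajv tolerates the custom `desc` annotation keyword:

```ts
import Ajv from "ajv";

// Trimmed copy of MSSQL_TEMPLATE.inputs; "desc" is a custom annotation
// keyword, hence strict mode is disabled here.
const mssqlInputsSchema = {
  type: "object",
  properties: {
    admin_username: { type: "string" },
    admin_password: { type: "string" },
    host: { type: "string" },
    database: { type: "string", default: "master" },
    port: { type: "integer", default: 1433 },
    ca: { type: "string", desc: "SSL certificate for db auth (string)" }
  },
  required: ["admin_username", "admin_password", "host", "database", "port"],
  additionalProperties: false
};

const ajv = new Ajv({ strict: false, useDefaults: true });
const validate = ajv.compile(mssqlInputsSchema);

// Hypothetical payload, shaped the way formattedInputs looks after the
// service coerces each field according to its declared type.
const inputs: Record<string, unknown> = {
  admin_username: "sa",
  admin_password: "example-password",
  host: "mssql.internal",
  database: "master",
  port: 1433
};

if (!validate(inputs)) {
  throw new Error(`Invalid rotation inputs: ${ajv.errorsText(validate.errors)}`);
}
```

diff --git a/backend/src/ee/services/secret-scanning/secret-scanning-queue/secret-scanning-queue.ts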
b/backend/src/ee/services/secret-scanning/secret-scanning-queue/secret-scanning-queue.ts index 1b19fd7f55..1907ddd9a1 100644 --- a/backend/src/ee/services/secret-scanning/secret-scanning-queue/secret-scanning-queue.ts +++ b/backend/src/ee/services/secret-scanning/secret-scanning-queue/secret-scanning-queue.ts @@ -1,6 +1,6 @@ import { ProbotOctokit } from "probot"; -import { OrgMembershipRole } from "@app/db/schemas"; +import { OrgMembershipRole, TableName } from "@app/db/schemas"; import { getConfig } from "@app/lib/config/env"; import { logger } from "@app/lib/logger"; import { QueueJobs, QueueName, TQueueServiceFactory } from "@app/queue"; @@ -61,7 +61,7 @@ export const secretScanningQueueFactory = ({ const getOrgAdminEmails = async (organizationId: string) => { // get emails of admins const adminsOfWork = await orgMemberDAL.findMembership({ - orgId: organizationId, + [`${TableName.Organization}.id` as string]: organizationId, role: OrgMembershipRole.Admin }); return adminsOfWork.filter((userObject) => userObject.email).map((userObject) => userObject.email as string); diff --git a/backend/src/ee/services/secret-scanning/secret-scanning-service.ts b/backend/src/ee/services/secret-scanning/secret-scanning-service.ts index ef511deb8d..945164094c 100644 --- a/backend/src/ee/services/secret-scanning/secret-scanning-service.ts +++ b/backend/src/ee/services/secret-scanning/secret-scanning-service.ts @@ -7,7 +7,7 @@ import { ProbotOctokit } from "probot"; import { OrgPermissionActions, OrgPermissionSubjects } from "@app/ee/services/permission/org-permission"; import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service"; import { getConfig } from "@app/lib/config/env"; -import { UnauthorizedError } from "@app/lib/errors"; +import { NotFoundError } from "@app/lib/errors"; import { TGitAppDALFactory } from "./git-app-dal"; import { TGitAppInstallSessionDALFactory } from "./git-app-install-session-dal"; @@ -63,7 +63,7 @@ export const secretScanningServiceFactory = ({ actorOrgId }: TLinkInstallSessionDTO) => { const session = await gitAppInstallSessionDAL.findOne({ sessionId }); - if (!session) throw new UnauthorizedError({ message: "Session not found" }); + if (!session) throw new NotFoundError({ message: "Session was not found" }); const { permission } = await permissionService.getOrgPermission( actor, @@ -90,7 +90,7 @@ export const secretScanningServiceFactory = ({ const { data: { repositories } } = await octokit.apps.listReposAccessibleToInstallation(); - if (!appCfg.DISABLE_SECRET_SCANNING) { + if (appCfg.SECRET_SCANNING_ORG_WHITELIST?.includes(actorOrgId)) { await Promise.all( repositories.map(({ id, full_name }) => secretScanningQueue.startFullRepoScan({ @@ -164,7 +164,7 @@ export const secretScanningServiceFactory = ({ }); if (!installationLink) return; - if (!appCfg.DISABLE_SECRET_SCANNING) { + if (appCfg.SECRET_SCANNING_ORG_WHITELIST?.includes(installationLink.orgId)) { await secretScanningQueue.startPushEventScan({ commits, pusher: { name: pusher.name, email: pusher.email }, diff --git a/backend/src/ee/services/secret-snapshot/secret-snapshot-service.ts b/backend/src/ee/services/secret-snapshot/secret-snapshot-service.ts index bd87505776..481123896b 100644 --- a/backend/src/ee/services/secret-snapshot/secret-snapshot-service.ts +++ b/backend/src/ee/services/secret-snapshot/secret-snapshot-service.ts @@ -1,15 +1,22 @@ import { ForbiddenError, subject } from "@casl/ability"; -import { TableName, TSecretTagJunctionInsert } from "@app/db/schemas"; -import { 
BadRequestError, InternalServerError } from "@app/lib/errors"; +import { TableName, TSecretTagJunctionInsert, TSecretV2TagJunctionInsert } from "@app/db/schemas"; +import { decryptSymmetric128BitHexKeyUTF8 } from "@app/lib/crypto"; +import { InternalServerError, NotFoundError } from "@app/lib/errors"; import { groupBy } from "@app/lib/fn"; import { logger } from "@app/lib/logger"; +import { TKmsServiceFactory } from "@app/services/kms/kms-service"; +import { KmsDataKey } from "@app/services/kms/kms-types"; +import { TProjectBotServiceFactory } from "@app/services/project-bot/project-bot-service"; import { TSecretDALFactory } from "@app/services/secret/secret-dal"; import { TSecretVersionDALFactory } from "@app/services/secret/secret-version-dal"; import { TSecretVersionTagDALFactory } from "@app/services/secret/secret-version-tag-dal"; import { TSecretFolderDALFactory } from "@app/services/secret-folder/secret-folder-dal"; import { TSecretFolderVersionDALFactory } from "@app/services/secret-folder/secret-folder-version-dal"; import { TSecretTagDALFactory } from "@app/services/secret-tag/secret-tag-dal"; +import { TSecretV2BridgeDALFactory } from "@app/services/secret-v2-bridge/secret-v2-bridge-dal"; +import { TSecretVersionV2DALFactory } from "@app/services/secret-v2-bridge/secret-version-dal"; +import { TSecretVersionV2TagDALFactory } from "@app/services/secret-v2-bridge/secret-version-tag-dal"; import { TLicenseServiceFactory } from "../license/license-service"; import { TPermissionServiceFactory } from "../permission/permission-service"; @@ -23,20 +30,27 @@ import { import { TSnapshotDALFactory } from "./snapshot-dal"; import { TSnapshotFolderDALFactory } from "./snapshot-folder-dal"; import { TSnapshotSecretDALFactory } from "./snapshot-secret-dal"; +import { TSnapshotSecretV2DALFactory } from "./snapshot-secret-v2-dal"; import { getFullFolderPath } from "./snapshot-service-fns"; type TSecretSnapshotServiceFactoryDep = { snapshotDAL: TSnapshotDALFactory; snapshotSecretDAL: TSnapshotSecretDALFactory; + snapshotSecretV2BridgeDAL: TSnapshotSecretV2DALFactory; snapshotFolderDAL: TSnapshotFolderDALFactory; secretVersionDAL: Pick; + secretVersionV2BridgeDAL: Pick; folderVersionDAL: Pick; secretDAL: Pick; - secretTagDAL: Pick; + secretV2BridgeDAL: Pick; + secretTagDAL: Pick; secretVersionTagDAL: Pick; + secretVersionV2TagBridgeDAL: Pick; folderDAL: Pick; permissionService: Pick; licenseService: Pick; + kmsService: Pick; + projectBotService: Pick; }; export type TSecretSnapshotServiceFactory = ReturnType; @@ -52,7 +66,13 @@ export const secretSnapshotServiceFactory = ({ permissionService, licenseService, secretTagDAL, - secretVersionTagDAL + secretVersionTagDAL, + secretVersionV2BridgeDAL, + secretV2BridgeDAL, + snapshotSecretV2BridgeDAL, + secretVersionV2TagBridgeDAL, + kmsService, + projectBotService }: TSecretSnapshotServiceFactoryDep) => { const projectSecretSnapshotCount = async ({ environment, @@ -79,10 +99,13 @@ export const secretSnapshotServiceFactory = ({ ); const folder = await folderDAL.findBySecretPath(projectId, environment, path); - if (!folder) throw new BadRequestError({ message: "Folder not found" }); + if (!folder) { + throw new NotFoundError({ + message: `Folder with path '${path}' not found in environment with slug '${environment}'` + }); + } - const count = await snapshotDAL.countOfSnapshotsByFolderId(folder.id); - return count; + return snapshotDAL.countOfSnapshotsByFolderId(folder.id); }; const listSnapshots = async ({ @@ -112,15 +135,18 @@ export const 
secretSnapshotServiceFactory = ({ ); const folder = await folderDAL.findBySecretPath(projectId, environment, path); - if (!folder) throw new BadRequestError({ message: "Folder not found" }); + if (!folder) + throw new NotFoundError({ + message: `Folder with path '${path}' not found in environment with slug '${environment}'` + }); const snapshots = await snapshotDAL.find({ folderId: folder.id }, { limit, offset, sort: [["createdAt", "desc"]] }); return snapshots; }; const getSnapshotData = async ({ actorId, actor, actorOrgId, actorAuthMethod, id }: TGetSnapshotDataDTO) => { - const snapshot = await snapshotDAL.findSecretSnapshotDataById(id); - if (!snapshot) throw new BadRequestError({ message: "Snapshot not found" }); + const snapshot = await snapshotDAL.findById(id); + if (!snapshot) throw new NotFoundError({ message: `Snapshot with ID '${id}' not found` }); const { permission } = await permissionService.getProjectPermission( actor, actorId, @@ -128,31 +154,125 @@ export const secretSnapshotServiceFactory = ({ actorAuthMethod, actorOrgId ); + ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.SecretRollback); + const shouldUseBridge = snapshot.projectVersion === 3; + let snapshotDetails; + if (shouldUseBridge) { + const { decryptor: secretManagerDecryptor } = await kmsService.createCipherPairWithDataKey({ + type: KmsDataKey.SecretManager, + projectId: snapshot.projectId + }); + const encryptedSnapshotDetails = await snapshotDAL.findSecretSnapshotV2DataById(id); + snapshotDetails = { + ...encryptedSnapshotDetails, + secretVersions: encryptedSnapshotDetails.secretVersions.map((el) => ({ + ...el, + secretKey: el.key, + secretValue: el.encryptedValue + ? secretManagerDecryptor({ cipherTextBlob: el.encryptedValue }).toString() + : "", + secretComment: el.encryptedComment + ? secretManagerDecryptor({ cipherTextBlob: el.encryptedComment }).toString() + : "" + })) + }; + } else { + const encryptedSnapshotDetails = await snapshotDAL.findSecretSnapshotDataById(id); + const { botKey } = await projectBotService.getBotKey(snapshot.projectId); + if (!botKey) + throw new NotFoundError({ message: `Project bot key not found for project with ID '${snapshot.projectId}'` }); + snapshotDetails = { + ...encryptedSnapshotDetails, + secretVersions: encryptedSnapshotDetails.secretVersions.map((el) => ({ + ...el, + secretKey: decryptSymmetric128BitHexKeyUTF8({ + ciphertext: el.secretKeyCiphertext, + iv: el.secretKeyIV, + tag: el.secretKeyTag, + key: botKey + }), + secretValue: decryptSymmetric128BitHexKeyUTF8({ + ciphertext: el.secretValueCiphertext, + iv: el.secretValueIV, + tag: el.secretValueTag, + key: botKey + }), + secretComment: + el.secretCommentTag && el.secretCommentIV && el.secretCommentCiphertext + ? decryptSymmetric128BitHexKeyUTF8({ + ciphertext: el.secretCommentCiphertext, + iv: el.secretCommentIV, + tag: el.secretCommentTag, + key: botKey + }) + : "" + })) + }; + } const fullFolderPath = await getFullFolderPath({ folderDAL, - folderId: snapshot.folderId, - envId: snapshot.environment.id + folderId: snapshotDetails.folderId, + envId: snapshotDetails.environment.id }); // We need to check if the user has access to the secrets in the folder. If we don't do this, a user could theoretically access snapshot secret values even if they don't have read access to the secrets in the folder. 
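Since `getSnapshotData` above must serve both storage generations, secret material is decrypted with the project KMS data key on v3 projects and with the legacy bot key otherwise. A simplified sketch of the per-secret branch, with stand-in decryptor shapes:

```ts
// Stand-in row and decryptor shapes; projectVersion === 3 marks
// secret-v2-bridge projects as in the service above.
type TVersionedSecretRow = {
  encryptedValue?: Buffer; // v2 bridge blob column
  secretValueCiphertext?: string; // legacy columns
  secretValueIV?: string;
  secretValueTag?: string;
};

const readSecretValue = (
  row: TVersionedSecretRow,
  projectVersion: number,
  kmsDecrypt: (cipherTextBlob: Buffer) => Buffer,
  botKeyDecrypt: (ciphertext: string, iv: string, tag: string) => string
): string => {
  if (projectVersion === 3) {
    // v2 bridge: single KMS-wrapped blob, empty string when the column is null
    return row.encryptedValue ? kmsDecrypt(row.encryptedValue).toString() : "";
  }
  if (!row.secretValueCiphertext || !row.secretValueIV || !row.secretValueTag) return "";
  return botKeyDecrypt(row.secretValueCiphertext, row.secretValueIV, row.secretValueTag);
};
```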
ForbiddenError.from(permission).throwUnlessCan( ProjectPermissionActions.Read, - subject(ProjectPermissionSub.Secrets, { environment: snapshot.environment.slug, secretPath: fullFolderPath }) + subject(ProjectPermissionSub.Secrets, { + environment: snapshotDetails.environment.slug, + secretPath: fullFolderPath + }) ); - return snapshot; + return snapshotDetails; }; const performSnapshot = async (folderId: string) => { try { if (!licenseService.isValidLicense) throw new InternalServerError({ message: "Invalid license" }); + const folder = await folderDAL.findById(folderId); + if (!folder) throw new NotFoundError({ message: `Folder with ID '${folderId}' not found` }); + const shouldUseSecretV2Bridge = folder.projectVersion === 3; + + if (shouldUseSecretV2Bridge) { + const snapshot = await snapshotDAL.transaction(async (tx) => { + const secretVersions = await secretVersionV2BridgeDAL.findLatestVersionByFolderId(folderId, tx); + const folderVersions = await folderVersionDAL.findLatestVersionByFolderId(folderId, tx); + const newSnapshot = await snapshotDAL.create( + { + folderId, + envId: folder.environment.envId, + parentFolderId: folder.parentId + }, + tx + ); + + const snapshotSecrets = await snapshotSecretV2BridgeDAL.batchInsert( + secretVersions.map(({ id }) => ({ + secretVersionId: id, + envId: folder.environment.envId, + snapshotId: newSnapshot.id + })), + tx + ); + + const snapshotFolders = await snapshotFolderDAL.batchInsert( + folderVersions.map(({ id }) => ({ + folderVersionId: id, + envId: folder.environment.envId, + snapshotId: newSnapshot.id + })), + tx + ); + + return { ...newSnapshot, secrets: snapshotSecrets, folder: snapshotFolders }; + }); + return snapshot; + } const snapshot = await snapshotDAL.transaction(async (tx) => { - const folder = await folderDAL.findById(folderId, tx); - if (!folder) throw new BadRequestError({ message: "Folder not found" }); - const secretVersions = await secretVersionDAL.findLatestVersionByFolderId(folderId, tx); const folderVersions = await folderVersionDAL.findLatestVersionByFolderId(folderId, tx); const newSnapshot = await snapshotDAL.create( @@ -199,7 +319,8 @@ export const secretSnapshotServiceFactory = ({ actorOrgId }: TRollbackSnapshotDTO) => { const snapshot = await snapshotDAL.findById(snapshotId); - if (!snapshot) throw new BadRequestError({ message: "Snapshot not found" }); + if (!snapshot) throw new NotFoundError({ message: `Snapshot with ID '${snapshotId}' not found` }); + const shouldUseBridge = snapshot.projectVersion === 3; const { permission } = await permissionService.getProjectPermission( actor, @@ -213,6 +334,117 @@ export const secretSnapshotServiceFactory = ({ ProjectPermissionSub.SecretRollback ); + if (shouldUseBridge) { + const rollback = await snapshotDAL.transaction(async (tx) => { + const rollbackSnaps = await snapshotDAL.findRecursivelySnapshotsV2Bridge(snapshot.id, tx); + // this will remove all secrets in current folder + const deletedTopLevelSecs = await secretV2BridgeDAL.delete({ folderId: snapshot.folderId }, tx); + const deletedTopLevelSecsGroupById = groupBy(deletedTopLevelSecs, (item) => item.id); + // this will remove all secrets and folders on child + // due to sql foreign key and link list connection removing the folders removes everything below too + const deletedFolders = await folderDAL.delete({ parentId: snapshot.folderId, isReserved: false }, tx); + const deletedTopLevelFolders = groupBy( + deletedFolders.filter(({ parentId }) => parentId === snapshot.folderId), + (item) => item.id + ); + const folders 
= await folderDAL.insertMany( + rollbackSnaps.flatMap(({ folderVersion, folderId }) => + folderVersion.map(({ name, id, latestFolderVersion }) => ({ + envId: snapshot.envId, + id, + // this means don't bump up the version if not root folder + // because below ones can be same version as nothing changed + version: deletedTopLevelFolders[folderId] ? latestFolderVersion + 1 : latestFolderVersion, + name, + parentId: folderId + })) + ), + tx + ); + const secrets = await secretV2BridgeDAL.insertMany( + rollbackSnaps.flatMap(({ secretVersions, folderId }) => + secretVersions.map( + ({ latestSecretVersion, version, updatedAt, createdAt, secretId, envId, id, tags, ...el }) => ({ + ...el, + id: secretId, + version: deletedTopLevelSecsGroupById[secretId] ? latestSecretVersion + 1 : latestSecretVersion, + folderId + }) + ) + ), + tx + ); + const secretTagsToBeInsert: TSecretV2TagJunctionInsert[] = []; + const secretVerTagToBeInsert: Record = {}; + rollbackSnaps.forEach(({ secretVersions }) => { + secretVersions.forEach((secVer) => { + secVer.tags.forEach((tag) => { + secretTagsToBeInsert.push({ secrets_v2Id: secVer.secretId, secret_tagsId: tag.id }); + if (!secretVerTagToBeInsert?.[secVer.secretId]) secretVerTagToBeInsert[secVer.secretId] = []; + secretVerTagToBeInsert[secVer.secretId].push(tag.id); + }); + }); + }); + await secretTagDAL.saveTagsToSecretV2(secretTagsToBeInsert, tx); + const folderVersions = await folderVersionDAL.insertMany( + folders.map(({ version, name, id, envId }) => ({ + name, + version, + folderId: id, + envId + })), + tx + ); + const secretVersions = await secretVersionV2BridgeDAL.insertMany( + secrets.map(({ id, updatedAt, createdAt, ...el }) => ({ ...el, secretId: id })), + tx + ); + await secretVersionV2TagBridgeDAL.insertMany( + secretVersions.flatMap(({ secretId, id }) => + secretVerTagToBeInsert?.[secretId]?.length + ? 
secretVerTagToBeInsert[secretId].map((tagId) => ({ + [`${TableName.SecretTag}Id` as const]: tagId, + [`${TableName.SecretVersionV2}Id` as const]: id + })) + : [] + ), + tx + ); + const newSnapshot = await snapshotDAL.create( + { + folderId: snapshot.folderId, + envId: snapshot.envId, + parentFolderId: snapshot.parentFolderId + }, + tx + ); + const snapshotSecrets = await snapshotSecretV2BridgeDAL.insertMany( + secretVersions + .filter(({ secretId }) => Boolean(deletedTopLevelSecsGroupById?.[secretId])) + .map(({ id }) => ({ + secretVersionId: id, + envId: newSnapshot.envId, + snapshotId: newSnapshot.id + })), + tx + ); + const snapshotFolders = await snapshotFolderDAL.insertMany( + folderVersions + .filter(({ folderId }) => Boolean(deletedTopLevelFolders?.[folderId])) + .map(({ id }) => ({ + folderVersionId: id, + envId: newSnapshot.envId, + snapshotId: newSnapshot.id + })), + tx + ); + + return { ...newSnapshot, snapshotSecrets, snapshotFolders }; + }); + + return rollback; + } + const rollback = await snapshotDAL.transaction(async (tx) => { const rollbackSnaps = await snapshotDAL.findRecursivelySnapshots(snapshot.id, tx); // this will remove all secrets in current folder diff --git a/backend/src/ee/services/secret-snapshot/snapshot-dal.ts b/backend/src/ee/services/secret-snapshot/snapshot-dal.ts index cdd5a999b8..8a9eeab8ce 100644 --- a/backend/src/ee/services/secret-snapshot/snapshot-dal.ts +++ b/backend/src/ee/services/secret-snapshot/snapshot-dal.ts @@ -1,16 +1,22 @@ +/* eslint-disable no-await-in-loop */ import { Knex } from "knex"; +import { z } from "zod"; import { TDbClient } from "@app/db"; import { SecretVersionsSchema, + SecretVersionsV2Schema, TableName, TSecretFolderVersions, TSecretSnapshotFolders, TSecretSnapshots, - TSecretVersions + TSecretVersions, + TSecretVersionsV2 } from "@app/db/schemas"; import { DatabaseError } from "@app/lib/errors"; import { ormify, selectAllTableCols, sqlNestRelationships } from "@app/lib/knex"; +import { logger } from "@app/lib/logger"; +import { QueueName } from "@app/queue"; export type TSnapshotDALFactory = ReturnType; @@ -19,15 +25,17 @@ export const snapshotDALFactory = (db: TDbClient) => { const findById = async (id: string, tx?: Knex) => { try { - const data = await (tx || db)(TableName.Snapshot) + const data = await (tx || db.replicaNode())(TableName.Snapshot) .where(`${TableName.Snapshot}.id`, id) .join(TableName.Environment, `${TableName.Snapshot}.envId`, `${TableName.Environment}.id`) + .join(TableName.Project, `${TableName.Environment}.projectId`, `${TableName.Project}.id`) .select(selectAllTableCols(TableName.Snapshot)) .select( db.ref("id").withSchema(TableName.Environment).as("envId"), db.ref("projectId").withSchema(TableName.Environment), db.ref("name").withSchema(TableName.Environment).as("envName"), - db.ref("slug").withSchema(TableName.Environment).as("envSlug") + db.ref("slug").withSchema(TableName.Environment).as("envSlug"), + db.ref("version").withSchema(TableName.Project).as("projectVersion") ) .first(); if (data) { @@ -41,7 +49,7 @@ export const snapshotDALFactory = (db: TDbClient) => { const countOfSnapshotsByFolderId = async (folderId: string, tx?: Knex) => { try { - const doc = await (tx || db)(TableName.Snapshot) + const doc = await (tx || db.replicaNode())(TableName.Snapshot) .where({ folderId }) .groupBy(["folderId"]) .count("folderId") @@ -54,7 +62,7 @@ export const snapshotDALFactory = (db: TDbClient) => { const findSecretSnapshotDataById = async (snapshotId: string, tx?: Knex) => { try { - const data = await 
(tx || db)(TableName.Snapshot) + const data = await (tx || db.replicaNode())(TableName.Snapshot) .where(`${TableName.Snapshot}.id`, snapshotId) .join(TableName.Environment, `${TableName.Snapshot}.envId`, `${TableName.Environment}.id`) .leftJoin(TableName.SnapshotSecret, `${TableName.Snapshot}.id`, `${TableName.SnapshotSecret}.snapshotId`) @@ -93,8 +101,7 @@ export const snapshotDALFactory = (db: TDbClient) => { db.ref("id").withSchema(TableName.SecretTag).as("tagId"), db.ref("id").withSchema(TableName.SecretVersionTag).as("tagVersionId"), db.ref("color").withSchema(TableName.SecretTag).as("tagColor"), - db.ref("slug").withSchema(TableName.SecretTag).as("tagSlug"), - db.ref("name").withSchema(TableName.SecretTag).as("tagName") + db.ref("slug").withSchema(TableName.SecretTag).as("tagSlug") ); return sqlNestRelationships({ data, @@ -125,9 +132,103 @@ export const snapshotDALFactory = (db: TDbClient) => { { key: "tagVersionId", label: "tags" as const, - mapper: ({ tagId: id, tagName: name, tagSlug: slug, tagColor: color, tagVersionId: vId }) => ({ + mapper: ({ tagId: id, tagSlug: slug, tagColor: color, tagVersionId: vId }) => ({ id, - name, + name: slug, + slug, + color, + vId + }) + } + ] + }, + { + key: "folderVerId", + label: "folderVersion" as const, + mapper: ({ folderVerId: id, folderVerName: name }) => ({ id, name }) + } + ] + })?.[0]; + } catch (error) { + throw new DatabaseError({ error, name: "FindSecretSnapshotDataById" }); + } + }; + + const findSecretSnapshotV2DataById = async (snapshotId: string, tx?: Knex) => { + try { + const data = await (tx || db.replicaNode())(TableName.Snapshot) + .where(`${TableName.Snapshot}.id`, snapshotId) + .join(TableName.Environment, `${TableName.Snapshot}.envId`, `${TableName.Environment}.id`) + .leftJoin(TableName.SnapshotSecretV2, `${TableName.Snapshot}.id`, `${TableName.SnapshotSecretV2}.snapshotId`) + .leftJoin( + TableName.SecretVersionV2, + `${TableName.SnapshotSecretV2}.secretVersionId`, + `${TableName.SecretVersionV2}.id` + ) + .leftJoin( + TableName.SecretVersionV2Tag, + `${TableName.SecretVersionV2Tag}.${TableName.SecretVersionV2}Id`, + `${TableName.SecretVersionV2}.id` + ) + .leftJoin( + TableName.SecretTag, + `${TableName.SecretVersionV2Tag}.${TableName.SecretTag}Id`, + `${TableName.SecretTag}.id` + ) + .leftJoin(TableName.SnapshotFolder, `${TableName.SnapshotFolder}.snapshotId`, `${TableName.Snapshot}.id`) + .leftJoin( + TableName.SecretFolderVersion, + `${TableName.SnapshotFolder}.folderVersionId`, + `${TableName.SecretFolderVersion}.id` + ) + .select(selectAllTableCols(TableName.SecretVersionV2)) + .select( + db.ref("id").withSchema(TableName.Snapshot).as("snapshotId"), + db.ref("createdAt").withSchema(TableName.Snapshot).as("snapshotCreatedAt"), + db.ref("updatedAt").withSchema(TableName.Snapshot).as("snapshotUpdatedAt"), + db.ref("id").withSchema(TableName.Environment).as("envId"), + db.ref("name").withSchema(TableName.Environment).as("envName"), + db.ref("slug").withSchema(TableName.Environment).as("envSlug"), + db.ref("projectId").withSchema(TableName.Environment), + db.ref("name").withSchema(TableName.SecretFolderVersion).as("folderVerName"), + db.ref("folderId").withSchema(TableName.SecretFolderVersion).as("folderVerId"), + db.ref("id").withSchema(TableName.SecretTag).as("tagId"), + db.ref("id").withSchema(TableName.SecretVersionV2Tag).as("tagVersionId"), + db.ref("color").withSchema(TableName.SecretTag).as("tagColor"), + db.ref("slug").withSchema(TableName.SecretTag).as("tagSlug") + ); + return sqlNestRelationships({ + data, + 
key: "snapshotId", + parentMapper: ({ + snapshotId: id, + folderId, + projectId, + envId, + envSlug, + envName, + snapshotCreatedAt: createdAt, + snapshotUpdatedAt: updatedAt + }) => ({ + id, + folderId, + projectId, + createdAt, + updatedAt, + environment: { id: envId, slug: envSlug, name: envName } + }), + childrenMapper: [ + { + key: "id", + label: "secretVersions" as const, + mapper: (el) => SecretVersionsV2Schema.parse(el), + childrenMapper: [ + { + key: "tagVersionId", + label: "tags" as const, + mapper: ({ tagId: id, tagSlug: slug, tagColor: color, tagVersionId: vId }) => ({ + id, + name: slug, slug, color, vId @@ -251,8 +352,7 @@ export const snapshotDALFactory = (db: TDbClient) => { db.ref("id").withSchema(TableName.SecretTag).as("tagId"), db.ref("id").withSchema(TableName.SecretVersionTag).as("tagVersionId"), db.ref("color").withSchema(TableName.SecretTag).as("tagColor"), - db.ref("slug").withSchema(TableName.SecretTag).as("tagSlug"), - db.ref("name").withSchema(TableName.SecretTag).as("tagName") + db.ref("slug").withSchema(TableName.SecretTag).as("tagSlug") ); const formated = sqlNestRelationships({ @@ -275,9 +375,163 @@ export const snapshotDALFactory = (db: TDbClient) => { { key: "tagVersionId", label: "tags" as const, - mapper: ({ tagId: id, tagName: name, tagSlug: slug, tagColor: color, tagVersionId: vId }) => ({ + mapper: ({ tagId: id, tagSlug: slug, tagColor: color, tagVersionId: vId }) => ({ id, - name, + name: slug, + slug, + color, + vId + }) + } + ] + }, + { + key: "folderVerId", + label: "folderVersion" as const, + mapper: ({ folderVerId: id, folderVerName: name, latestFolderVersion }) => ({ + id, + name, + latestFolderVersion: latestFolderVersion as number + }) + } + ] + }); + return formated; + } catch (error) { + throw new DatabaseError({ error, name: "FindRecursivelySnapshots" }); + } + }; + + // this is used for rollback + // from a starting snapshot it will collect all the secrets and folder of that + // then it will start go through recursively the below folders latest snapshots then their child folder snapshot until leaf node + // the recursive part find all snapshot id + // then joins with respective secrets and folder + const findRecursivelySnapshotsV2Bridge = async (snapshotId: string, tx?: Knex) => { + try { + const data = await (tx || db) + .withRecursive("parent", (qb) => { + void qb + .from(TableName.Snapshot) + .leftJoin( + TableName.SnapshotFolder, + `${TableName.SnapshotFolder}.snapshotId`, + `${TableName.Snapshot}.id` + ) + .leftJoin( + TableName.SecretFolderVersion, + `${TableName.SnapshotFolder}.folderVersionId`, + `${TableName.SecretFolderVersion}.id` + ) + .select(selectAllTableCols(TableName.Snapshot)) + .select({ depth: 1 }) + .select( + db.ref("name").withSchema(TableName.SecretFolderVersion).as("folderVerName"), + db.ref("folderId").withSchema(TableName.SecretFolderVersion).as("folderVerId") + ) + .where(`${TableName.Snapshot}.id`, snapshotId) + .union( + (cb) => + void cb + .select(selectAllTableCols(TableName.Snapshot)) + .select({ depth: db.raw("parent.depth + 1") }) + .select( + db.ref("name").withSchema(TableName.SecretFolderVersion).as("folderVerName"), + db.ref("folderId").withSchema(TableName.SecretFolderVersion).as("folderVerId") + ) + .from(TableName.Snapshot) + .join( + db(TableName.Snapshot).groupBy("folderId").max("createdAt").select("folderId").as("latestVersion"), + `${TableName.Snapshot}.createdAt`, + "latestVersion.max" + ) + .leftJoin( + TableName.SnapshotFolder, + `${TableName.SnapshotFolder}.snapshotId`, + 
`${TableName.Snapshot}.id` + ) + .leftJoin( + TableName.SecretFolderVersion, + `${TableName.SnapshotFolder}.folderVersionId`, + `${TableName.SecretFolderVersion}.id` + ) + .join("parent", "parent.folderVerId", `${TableName.Snapshot}.folderId`) + ); + }) + .orderBy("depth", "asc") + .from("parent") + .leftJoin(TableName.SnapshotSecretV2, `parent.id`, `${TableName.SnapshotSecretV2}.snapshotId`) + .leftJoin( + TableName.SecretVersionV2, + `${TableName.SnapshotSecretV2}.secretVersionId`, + `${TableName.SecretVersionV2}.id` + ) + .leftJoin( + TableName.SecretVersionV2Tag, + `${TableName.SecretVersionV2Tag}.${TableName.SecretVersionV2}Id`, + `${TableName.SecretVersionV2}.id` + ) + .leftJoin( + TableName.SecretTag, + `${TableName.SecretVersionV2Tag}.${TableName.SecretTag}Id`, + `${TableName.SecretTag}.id` + ) + .leftJoin<{ latestSecretVersion: number }>( + (tx || db)(TableName.SecretVersionV2) + .groupBy("secretId") + .select("secretId") + .max("version") + .as("secGroupByMaxVersion"), + `${TableName.SecretVersionV2}.secretId`, + "secGroupByMaxVersion.secretId" + ) + .leftJoin<{ latestFolderVersion: number }>( + (tx || db)(TableName.SecretFolderVersion) + .groupBy("folderId") + .select("folderId") + .max("version") + .as("folderGroupByMaxVersion"), + `parent.folderId`, + "folderGroupByMaxVersion.folderId" + ) + .select(selectAllTableCols(TableName.SecretVersionV2)) + .select( + db.ref("id").withSchema("parent").as("snapshotId"), + db.ref("folderId").withSchema("parent").as("snapshotFolderId"), + db.ref("parentFolderId").withSchema("parent").as("snapshotParentFolderId"), + db.ref("folderVerName").withSchema("parent"), + db.ref("folderVerId").withSchema("parent"), + db.ref("max").withSchema("secGroupByMaxVersion").as("latestSecretVersion"), + db.ref("max").withSchema("folderGroupByMaxVersion").as("latestFolderVersion"), + db.ref("id").withSchema(TableName.SecretTag).as("tagId"), + db.ref("id").withSchema(TableName.SecretVersionV2Tag).as("tagVersionId"), + db.ref("color").withSchema(TableName.SecretTag).as("tagColor"), + db.ref("slug").withSchema(TableName.SecretTag).as("tagSlug") + ); + + const formated = sqlNestRelationships({ + data, + key: "snapshotId", + parentMapper: ({ snapshotId: id, snapshotFolderId: folderId, snapshotParentFolderId: parentFolderId }) => ({ + id, + folderId, + parentFolderId + }), + childrenMapper: [ + { + key: "id", + label: "secretVersions" as const, + mapper: (el) => ({ + ...SecretVersionsV2Schema.parse(el), + latestSecretVersion: el.latestSecretVersion as number + }), + childrenMapper: [ + { + key: "tagVersionId", + label: "tags" as const, + mapper: ({ tagId: id, tagSlug: slug, tagColor: color, tagVersionId: vId }) => ({ + id, + name: slug, slug, color, vId @@ -307,7 +561,7 @@ export const snapshotDALFactory = (db: TDbClient) => { // when we need to rollback we will pull from these snapshots const findLatestSnapshotByFolderId = async (folderId: string, tx?: Knex) => { try { - const docs = await (tx || db)(TableName.Snapshot) + const docs = await (tx || db.replicaNode())(TableName.Snapshot) .where(`${TableName.Snapshot}.folderId`, folderId) .join( (tx || db)(TableName.Snapshot).groupBy("folderId").max("createdAt").select("folderId").as("latestVersion"), @@ -325,12 +579,248 @@ export const snapshotDALFactory = (db: TDbClient) => { } }; + /** + * Prunes excess snapshots from the database to ensure only a specified number of recent snapshots are retained for each folder. + * + * This function operates in three main steps: + * 1. Pruning snapshots from current folders. 
+ * 2. Pruning snapshots from non-current folders (versioned ones). + * 3. Removing orphaned snapshots that do not belong to any existing folder or folder version. + * + * The function processes snapshots in batches, determined by the `PRUNE_FOLDER_BATCH_SIZE` constant, + * to manage the large datasets without overwhelming the DB. + * + * Steps: + * - Fetch a batch of folder IDs. + * - For each batch, use a Common Table Expression (CTE) to rank snapshots within each folder by their creation date. + * - Identify and delete snapshots that exceed the project's point-in-time version limit (`pitVersionLimit`). + * - Repeat the process for versioned folders. + * - Finally, delete orphaned snapshots that do not have an associated folder. + */ + const pruneExcessSnapshots = async () => { + const PRUNE_FOLDER_BATCH_SIZE = 10000; + + logger.info(`${QueueName.DailyResourceCleanUp}: pruning secret snapshots started`); + try { + let uuidOffset = "00000000-0000-0000-0000-000000000000"; + // cleanup snapshots from current folders + // eslint-disable-next-line no-constant-condition, no-unreachable-loop + while (true) { + const folderBatch = await db(TableName.SecretFolder) + .where("id", ">", uuidOffset) + .where("isReserved", false) + .orderBy("id", "asc") + .limit(PRUNE_FOLDER_BATCH_SIZE) + .select("id"); + + const batchEntries = folderBatch.map((folder) => folder.id); + + if (folderBatch.length) { + try { + logger.info(`Pruning snapshots in [range=${batchEntries[0]}:${batchEntries[batchEntries.length - 1]}]`); + await db(TableName.Snapshot) + .with("snapshot_cte", (qb) => { + void qb + .from(TableName.Snapshot) + .whereIn(`${TableName.Snapshot}.folderId`, batchEntries) + .select( + "folderId", + `${TableName.Snapshot}.id as id`, + db.raw( + `ROW_NUMBER() OVER (PARTITION BY ${TableName.Snapshot}."folderId" ORDER BY ${TableName.Snapshot}."createdAt" DESC) AS row_num` + ) + ); + }) + .join(TableName.SecretFolder, `${TableName.SecretFolder}.id`, `${TableName.Snapshot}.folderId`) + .join(TableName.Environment, `${TableName.Environment}.id`, `${TableName.SecretFolder}.envId`) + .join(TableName.Project, `${TableName.Project}.id`, `${TableName.Environment}.projectId`) + .join("snapshot_cte", "snapshot_cte.id", `${TableName.Snapshot}.id`) + .whereRaw(`snapshot_cte.row_num > ${TableName.Project}."pitVersionLimit"`) + .delete(); + } catch (err) { + logger.error( + `Failed to prune snapshots from current folders in range ${batchEntries[0]}:${ + batchEntries[batchEntries.length - 1] + }` + ); + } finally { + uuidOffset = batchEntries[batchEntries.length - 1]; + } + } else { + break; + } + } + + // cleanup snapshots from non-current folders + uuidOffset = "00000000-0000-0000-0000-000000000000"; + // eslint-disable-next-line no-constant-condition + while (true) { + const folderBatch = await db(TableName.SecretFolderVersion) + .select("folderId") + .distinct("folderId") + .where("folderId", ">", uuidOffset) + .orderBy("folderId", "asc") + .limit(PRUNE_FOLDER_BATCH_SIZE); + + const batchEntries = folderBatch.map((folder) => folder.folderId); + + if (folderBatch.length) { + try { + logger.info(`Pruning snapshots in range ${batchEntries[0]}:${batchEntries[batchEntries.length - 1]}`); + await db(TableName.Snapshot) + .with("snapshot_cte", (qb) => { + void qb + .from(TableName.Snapshot) + .whereIn(`${TableName.Snapshot}.folderId`, batchEntries) + .select( + "folderId", + `${TableName.Snapshot}.id as id`, + db.raw( + `ROW_NUMBER() OVER (PARTITION BY ${TableName.Snapshot}."folderId" ORDER BY ${TableName.Snapshot}."createdAt" 
DESC) AS row_num` + ) + ); + }) + .join( + TableName.SecretFolderVersion, + `${TableName.SecretFolderVersion}.folderId`, + `${TableName.Snapshot}.folderId` + ) + .join(TableName.Environment, `${TableName.Environment}.id`, `${TableName.SecretFolderVersion}.envId`) + .join(TableName.Project, `${TableName.Project}.id`, `${TableName.Environment}.projectId`) + .join("snapshot_cte", "snapshot_cte.id", `${TableName.Snapshot}.id`) + .whereRaw(`snapshot_cte.row_num > ${TableName.Project}."pitVersionLimit"`) + .delete(); + } catch (err) { + logger.error( + `Failed to prune snapshots from non-current folders in range ${batchEntries[0]}:${ + batchEntries[batchEntries.length - 1] + }` + ); + } finally { + uuidOffset = batchEntries[batchEntries.length - 1]; + } + } else { + break; + } + } + + // cleanup orphaned snapshots (those that don't belong to an existing folder and folder version) + await db(TableName.Snapshot) + .whereNotIn("folderId", (qb) => { + void qb + .select("folderId") + .from(TableName.SecretFolderVersion) + .union((qb1) => void qb1.select("id").from(TableName.SecretFolder)); + }) + .delete(); + } catch (error) { + throw new DatabaseError({ error, name: "SnapshotPrune" }); + } + logger.info(`${QueueName.DailyResourceCleanUp}: pruning secret snapshots completed`); + }; + + // special query for migration for secret v2 + const findNSecretV1SnapshotByFolderId = async (folderId: string, n = 15, tx?: Knex) => { + try { + const query = (tx || db.replicaNode())(TableName.Snapshot) + .leftJoin(TableName.SnapshotSecret, `${TableName.Snapshot}.id`, `${TableName.SnapshotSecret}.snapshotId`) + .leftJoin( + TableName.SecretVersion, + `${TableName.SnapshotSecret}.secretVersionId`, + `${TableName.SecretVersion}.id` + ) + .leftJoin( + TableName.SecretVersionTag, + `${TableName.SecretVersionTag}.${TableName.SecretVersion}Id`, + `${TableName.SecretVersion}.id` + ) + .select(selectAllTableCols(TableName.SecretVersion)) + .select( + db.ref("id").withSchema(TableName.Snapshot).as("snapshotId"), + db.ref("createdAt").withSchema(TableName.Snapshot).as("snapshotCreatedAt"), + db.ref("updatedAt").withSchema(TableName.Snapshot).as("snapshotUpdatedAt"), + db.ref("envId").withSchema(TableName.SnapshotSecret).as("snapshotEnvId"), + db.ref("id").withSchema(TableName.SecretVersionTag).as("secretVersionTagId"), + db.ref("secret_versionsId").withSchema(TableName.SecretVersionTag).as("secretVersionTagSecretId"), + db.ref("secret_tagsId").withSchema(TableName.SecretVersionTag).as("secretVersionTagSecretTagId"), + db.raw( + `DENSE_RANK() OVER (partition by ${TableName.Snapshot}."id" ORDER BY ${TableName.SecretVersion}."createdAt") as rank` + ) + ) + .orderBy(`${TableName.Snapshot}.createdAt`, "desc") + .where(`${TableName.Snapshot}.folderId`, folderId); + const data = await (tx || db) + .with("w", query) + .select("*") + .from[number]>("w") + .andWhere("w.rank", "<", n); + + return sqlNestRelationships({ + data, + key: "snapshotId", + parentMapper: ({ snapshotId: id, snapshotCreatedAt: createdAt, snapshotUpdatedAt: updatedAt }) => ({ + id, + folderId, + createdAt, + updatedAt + }), + childrenMapper: [ + { + key: "id", + label: "secretVersions" as const, + mapper: (el) => SecretVersionsSchema.extend({ snapshotEnvId: z.string() }).parse(el), + childrenMapper: [ + { + key: "secretVersionTagId", + label: "tags" as const, + mapper: ({ secretVersionTagId, secretVersionTagSecretId, secretVersionTagSecretTagId }) => ({ + id: secretVersionTagId, + secretVersionId: secretVersionTagSecretId, + secretTagId: secretVersionTagSecretTagId 
+ }) + } + ] + } + ] + }); + } catch (error) { + throw new DatabaseError({ error, name: "FindNSecretV1SnapshotByFolderId" }); + } + }; + + const deleteSnapshotsAboveLimit = async (folderId: string, n = 15, tx?: Knex) => { + try { + const query = await (tx || db) + .with("to_delete", (qb) => { + void qb + .select("id") + .from(TableName.Snapshot) + .where("folderId", folderId) + .orderBy("createdAt", "desc") + .offset(n); + }) + .from(TableName.Snapshot) + .whereIn("id", (qb) => { + void qb.select("id").from("to_delete"); + }) + .delete(); + return query; + } catch (error) { + throw new DatabaseError({ error, name: "DeleteSnapshotsAboveLimit" }); + } + }; + return { ...secretSnapshotOrm, findById, findLatestSnapshotByFolderId, findRecursivelySnapshots, + findRecursivelySnapshotsV2Bridge, countOfSnapshotsByFolderId, - findSecretSnapshotDataById + findSecretSnapshotDataById, + findSecretSnapshotV2DataById, + pruneExcessSnapshots, + findNSecretV1SnapshotByFolderId, + deleteSnapshotsAboveLimit }; }; diff --git a/backend/src/ee/services/secret-snapshot/snapshot-secret-v2-dal.ts b/backend/src/ee/services/secret-snapshot/snapshot-secret-v2-dal.ts new file mode 100644 index 0000000000..6d5f5896dc --- /dev/null +++ b/backend/src/ee/services/secret-snapshot/snapshot-secret-v2-dal.ts @@ -0,0 +1,10 @@ +import { TDbClient } from "@app/db"; +import { TableName } from "@app/db/schemas"; +import { ormify } from "@app/lib/knex"; + +export type TSnapshotSecretV2DALFactory = ReturnType; + +export const snapshotSecretV2DALFactory = (db: TDbClient) => { + const snapshotSecretOrm = ormify(db, TableName.SnapshotSecretV2); + return snapshotSecretOrm; +};
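For reference, the batched delete inside `pruneExcessSnapshots` above boils down to a window-function query: rank each folder's snapshots newest-first and drop every row ranked past the project's `pitVersionLimit`. Roughly the following SQL, where the physical table names are assumptions inferred from the `TableName` identifiers in the hunk:

```ts
// Approximate SQL for one prune batch; names like "secret_snapshots" are
// assumed mappings of the TableName enum, not verified identifiers.
const pruneBatchSql = `
WITH snapshot_cte AS (
  SELECT s.id,
         ROW_NUMBER() OVER (
           PARTITION BY s."folderId"
           ORDER BY s."createdAt" DESC
         ) AS row_num
  FROM secret_snapshots s
  WHERE s."folderId" = ANY ($1) -- the current batch of folder IDs
)
DELETE FROM secret_snapshots ss
USING snapshot_cte,
      secret_folders f,
      project_environments e,
      projects p
WHERE ss.id = snapshot_cte.id
  AND f.id = ss."folderId"
  AND e.id = f."envId"
  AND p.id = e."projectId"
  AND snapshot_cte.row_num > p."pitVersionLimit"; -- keep only the newest N per folder
`;

console.log(pruneBatchSql);
```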
(serviceTokenId: string) => `service-token-status:${serviceTokenId}` +}; + +export const KeyStoreTtls = { + SetSyncSecretIntegrationLastRunTimestampInSeconds: 60, + AccessTokenStatusUpdateInSeconds: 120 +}; type TWaitTillReady = { key: string; @@ -29,10 +53,10 @@ export const keyStoreFactory = (redisUrl: string) => { const setItemWithExpiry = async ( key: string, - exp: number | string, + expiryInSeconds: number | string, value: string | number | Buffer, prefix?: string - ) => redis.setex(prefix ? `${prefix}:${key}` : key, exp, value); + ) => redis.set(prefix ? `${prefix}:${key}` : key, value, "EX", expiryInSeconds); const deleteItem = async (key: string) => redis.del(key); @@ -57,7 +81,7 @@ export const keyStoreFactory = (redisUrl: string) => { }); attempts += 1; // eslint-disable-next-line - isReady = keyCheckCb(await getItem(key, "wait_till_ready")); + isReady = keyCheckCb(await getItem(key)); } }; diff --git a/backend/src/lib/api-docs/constants.ts b/backend/src/lib/api-docs/constants.ts index 1637b266a8..136a4db29b 100644 --- a/backend/src/lib/api-docs/constants.ts +++ b/backend/src/lib/api-docs/constants.ts @@ -5,26 +5,31 @@ export const GROUPS = { role: "The role of the group to create." }, UPDATE: { - currentSlug: "The current slug of the group to update.", + id: "The ID of the group to update.", name: "The new name of the group to update to.", slug: "The new slug of the group to update to.", role: "The new role of the group to update to." }, DELETE: { - slug: "The slug of the group to delete" + id: "The ID of the group to delete.", + slug: "The slug of the group to delete." }, LIST_USERS: { - slug: "The slug of the group to list users for", + id: "The ID of the group to list users for.", offset: "The offset to start from. If you enter 10, it will start from the 10th user.", limit: "The number of users to return.", - username: "The username to search for." + username: "The username to search for.", + search: "The text string that user email or name will be filtered by." }, ADD_USER: { - slug: "The slug of the group to add the user to.", + id: "The ID of the group to add the user to.", username: "The username of the user to add to the group." }, + GET_BY_ID: { + id: "The ID of the group to fetch." + }, DELETE_USER: { - slug: "The slug of the group to remove the user from.", + id: "The ID of the group to remove the user from.", username: "The username of the user to remove from the group." } } as const; @@ -42,6 +47,13 @@ export const IDENTITIES = { }, DELETE: { identityId: "The ID of the identity to delete." + }, + GET_BY_ID: { + identityId: "The ID of the identity to get details.", + orgId: "The ID of the organization of the identity." + }, + LIST: { + orgId: "The ID of the organization to list identities." } } as const; @@ -63,10 +75,13 @@ export const UNIVERSAL_AUTH = { "The maximum number of times that an access token can be used; a value of 0 implies infinite number of uses." }, RETRIEVE: { - identityId: "The ID of the identity to retrieve." + identityId: "The ID of the identity to retrieve the auth method for." + }, + REVOKE: { + identityId: "The ID of the identity to revoke the auth method for."
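The `setItemWithExpiry` change above swaps `SETEX` for `SET` with the `EX` option; both exist in ioredis and behave identically, so the gain is the clearer parameter name and call shape. A small sketch, assuming a local Redis instance:

```ts
import Redis from "ioredis";

const demo = async () => {
  const redis = new Redis("redis://localhost:6379");

  // Equivalent ioredis calls; the PR standardizes on SET ... EX and renames the
  // parameter to expiryInSeconds so the unit is explicit at every call site.
  await redis.setex("service-token-status:abc", 120, "ok");
  await redis.set("service-token-status:abc", "ok", "EX", 120);

  // Keys would normally be built through the KeyStorePrefixes factories above,
  // e.g. KeyStorePrefixes.ServiceTokenStatusUpdate(tokenId), paired with
  // KeyStoreTtls.AccessTokenStatusUpdateInSeconds as the expiry.
  await redis.quit();
};

void demo();
```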
}, UPDATE: { - identityId: "The ID of the identity to update.", + identityId: "The ID of the identity to update the auth method for.", clientSecretTrustedIps: "The new list of IPs or CIDR ranges that the Client Secret can be used from.", accessTokenTrustedIps: "The new list of IPs or CIDR ranges that access tokens can be used from.", accessTokenTTL: "The new lifetime for an access token in seconds.", @@ -83,6 +98,10 @@ export const UNIVERSAL_AUTH = { LIST_CLIENT_SECRETS: { identityId: "The ID of the identity to list client secrets for." }, + GET_CLIENT_SECRET: { + identityId: "The ID of the identity to get the client secret from.", + clientSecretId: "The ID of the client secret to get details." + }, REVOKE_CLIENT_SECRET: { identityId: "The ID of the identity to revoke the client secret from.", clientSecretId: "The ID of the client secret to revoke." @@ -100,10 +119,233 @@ export const AWS_AUTH = { identityId: "The ID of the identity to login.", iamHttpRequestMethod: "The HTTP request method used in the signed request.", iamRequestUrl: - "The base64-encoded HTTP URL used in the signed request. Most likely, the base64-encoding of https://sts.amazonaws.com/", + "The base64-encoded HTTP URL used in the signed request. Most likely, the base64-encoding of https://sts.amazonaws.com/.", iamRequestBody: "The base64-encoded body of the signed request. Most likely, the base64-encoding of Action=GetCallerIdentity&Version=2011-06-15.", iamRequestHeaders: "The base64-encoded headers of the sts:GetCallerIdentity signed request." + }, + ATTACH: { + identityId: "The ID of the identity to attach the configuration onto.", + allowedPrincipalArns: + "The comma-separated list of trusted IAM principal ARNs that are allowed to authenticate with Infisical.", + allowedAccountIds: + "The comma-separated list of trusted AWS account IDs that are allowed to authenticate with Infisical.", + accessTokenTTL: "The lifetime for an access token in seconds.", + accessTokenMaxTTL: "The maximum lifetime for an access token in seconds.", + stsEndpoint: "The endpoint URL for the AWS STS API.", + accessTokenNumUsesLimit: "The maximum number of times that an access token can be used.", + accessTokenTrustedIps: "The IPs or CIDR ranges that access tokens can be used from." + }, + UPDATE: { + identityId: "The ID of the identity to update the auth method for.", + allowedPrincipalArns: + "The new comma-separated list of trusted IAM principal ARNs that are allowed to authenticate with Infisical.", + allowedAccountIds: + "The new comma-separated list of trusted AWS account IDs that are allowed to authenticate with Infisical.", + accessTokenTTL: "The new lifetime for an access token in seconds.", + accessTokenMaxTTL: "The new maximum lifetime for an access token in seconds.", + stsEndpoint: "The new endpoint URL for the AWS STS API.", + accessTokenNumUsesLimit: "The new maximum number of times that an access token can be used.", + accessTokenTrustedIps: "The new IPs or CIDR ranges that access tokens can be used from." + }, + RETRIEVE: { + identityId: "The ID of the identity to retrieve the auth method for." + }, + REVOKE: { + identityId: "The ID of the identity to revoke the auth method for." + } +} as const; + +export const AZURE_AUTH = { + LOGIN: { + identityId: "The ID of the identity to login." 
+ }, + ATTACH: { + identityId: "The ID of the identity to attach the configuration onto.", + tenantId: "The tenant ID for the Azure AD organization.", + resource: "The resource URL for the application registered in Azure AD.", + allowedServicePrincipalIds: + "The comma-separated list of Azure AD service principal IDs that are allowed to authenticate with Infisical.", + accessTokenTrustedIps: "The IPs or CIDR ranges that access tokens can be used from.", + accessTokenTTL: "The lifetime for an access token in seconds.", + accessTokenMaxTTL: "The maximum lifetime for an access token in seconds.", + accessTokenNumUsesLimit: "The maximum number of times that an access token can be used." + }, + UPDATE: { + identityId: "The ID of the identity to update the auth method for.", + tenantId: "The new tenant ID for the Azure AD organization.", + resource: "The new resource URL for the application registered in Azure AD.", + allowedServicePrincipalIds: + "The new comma-separated list of Azure AD service principal IDs that are allowed to authenticate with Infisical.", + accessTokenTrustedIps: "The new IPs or CIDR ranges that access tokens can be used from.", + accessTokenTTL: "The new lifetime for an access token in seconds.", + accessTokenMaxTTL: "The new maximum lifetime for an access token in seconds.", + accessTokenNumUsesLimit: "The new maximum number of times that an access token can be used." + }, + RETRIEVE: { + identityId: "The ID of the identity to retrieve the auth method for." + }, + REVOKE: { + identityId: "The ID of the identity to revoke the auth method for." + } +} as const; + +export const GCP_AUTH = { + LOGIN: { + identityId: "The ID of the identity to login." + }, + ATTACH: { + identityId: "The ID of the identity to attach the configuration onto.", + allowedServiceAccounts: + "The comma-separated list of trusted service account emails corresponding to the GCE resource(s) allowed to authenticate with Infisical.", + allowedProjects: + "The comma-separated list of trusted GCP projects that the GCE instance must belong to in order to authenticate with Infisical.", + allowedZones: + "The comma-separated list of trusted zones that the GCE instances must belong to in order to authenticate with Infisical.", + accessTokenTrustedIps: "The IPs or CIDR ranges that access tokens can be used from.", + accessTokenTTL: "The lifetime for an access token in seconds.", + accessTokenMaxTTL: "The maximum lifetime for an access token in seconds.", + accessTokenNumUsesLimit: "The maximum number of times that an access token can be used." + }, + UPDATE: { + identityId: "The ID of the identity to update the auth method for.", + allowedServiceAccounts: + "The new comma-separated list of trusted service account emails corresponding to the GCE resource(s) allowed to authenticate with Infisical.", + allowedProjects: + "The new comma-separated list of trusted GCP projects that the GCE instance must belong to in order to authenticate with Infisical.", + allowedZones: + "The new comma-separated list of trusted zones that the GCE instances must belong to in order to authenticate with Infisical.", + accessTokenTrustedIps: "The new IPs or CIDR ranges that access tokens can be used from.", + accessTokenTTL: "The new lifetime for an access token in seconds.", + accessTokenMaxTTL: "The new maximum lifetime for an access token in seconds.", + accessTokenNumUsesLimit: "The new maximum number of times that an access token can be used." + }, + RETRIEVE: { + identityId: "The ID of the identity to retrieve the auth method for."
+ }, + REVOKE: { + identityId: "The ID of the identity to revoke the auth method for." + } +} as const; + +export const KUBERNETES_AUTH = { + LOGIN: { + identityId: "The ID of the identity to login." + }, + ATTACH: { + identityId: "The ID of the identity to attach the configuration onto.", + kubernetesHost: "The host string, host:port pair, or URL to the base of the Kubernetes API server.", + caCert: "The PEM-encoded CA cert for the Kubernetes API server.", + tokenReviewerJwt: + "The long-lived service account JWT token for Infisical to access the TokenReview API to validate other service account JWT tokens submitted by applications/pods.", + allowedNamespaces: + "The comma-separated list of trusted namespaces that service accounts must belong to in order to authenticate with Infisical.", + allowedNames: "The comma-separated list of trusted service account names that can authenticate with Infisical.", + allowedAudience: + "The optional audience claim that the service account JWT token must have to authenticate with Infisical.", + accessTokenTrustedIps: "The IPs or CIDR ranges that access tokens can be used from.", + accessTokenTTL: "The lifetime for an access token in seconds.", + accessTokenMaxTTL: "The maximum lifetime for an access token in seconds.", + accessTokenNumUsesLimit: "The maximum number of times that an access token can be used." + }, + UPDATE: { + identityId: "The ID of the identity to update the auth method for.", + kubernetesHost: "The new host string, host:port pair, or URL to the base of the Kubernetes API server.", + caCert: "The new PEM-encoded CA cert for the Kubernetes API server.", + tokenReviewerJwt: + "The new long-lived service account JWT token for Infisical to access the TokenReview API to validate other service account JWT tokens submitted by applications/pods.", + allowedNamespaces: + "The new comma-separated list of trusted namespaces that service accounts must belong to in order to authenticate with Infisical.", + allowedNames: "The new comma-separated list of trusted service account names that can authenticate with Infisical.", + allowedAudience: + "The new optional audience claim that the service account JWT token must have to authenticate with Infisical.", + accessTokenTrustedIps: "The new IPs or CIDR ranges that access tokens can be used from.", + accessTokenTTL: "The new lifetime for an access token in seconds.", + accessTokenMaxTTL: "The new maximum lifetime for an access token in seconds.", + accessTokenNumUsesLimit: "The new maximum number of times that an access token can be used." + }, + RETRIEVE: { + identityId: "The ID of the identity to retrieve the auth method for." + }, + REVOKE: { + identityId: "The ID of the identity to revoke the auth method for." + } +} as const; + +export const TOKEN_AUTH = { + ATTACH: { + identityId: "The ID of the identity to attach the configuration onto.", + accessTokenTrustedIps: "The IPs or CIDR ranges that access tokens can be used from.", + accessTokenTTL: "The lifetime for an access token in seconds.", + accessTokenMaxTTL: "The maximum lifetime for an access token in seconds.", + accessTokenNumUsesLimit: "The maximum number of times that an access token can be used."
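The `tokenReviewerJwt` described above exists so Infisical can call the standard Kubernetes TokenReview API. A hedged sketch of that call; the endpoint and payload follow the upstream Kubernetes API, and `reviewServiceAccountJwt` is a hypothetical helper, not Infisical's actual client:

```ts
const reviewServiceAccountJwt = async (
  kubernetesHost: string, // e.g. "https://10.0.0.1:6443"
  tokenReviewerJwt: string,
  subjectJwt: string,
  audience?: string
) => {
  const res = await fetch(`${kubernetesHost}/apis/authentication.k8s.io/v1/tokenreviews`, {
    method: "POST",
    headers: {
      "content-type": "application/json",
      authorization: `Bearer ${tokenReviewerJwt}`
    },
    body: JSON.stringify({
      apiVersion: "authentication.k8s.io/v1",
      kind: "TokenReview",
      spec: { token: subjectJwt, ...(audience ? { audiences: [audience] } : {}) }
    })
  });
  const body = (await res.json()) as {
    status?: { authenticated?: boolean; user?: { username?: string } };
  };
  // For service accounts the username comes back as
  // "system:serviceaccount:<namespace>:<name>", which is the value the
  // allowedNamespaces / allowedNames checks above would be applied to.
  return body.status;
};
```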
+ }, + UPDATE: { + identityId: "The ID of the identity to update the auth method for.", + accessTokenTrustedIps: "The new IPs or CIDR ranges that access tokens can be used from.", + accessTokenTTL: "The new lifetime for an access token in seconds.", + accessTokenMaxTTL: "The new maximum lifetime for an access token in seconds.", + accessTokenNumUsesLimit: "The new maximum number of times that an access token can be used." + }, + RETRIEVE: { + identityId: "The ID of the identity to retrieve the auth method for." + }, + REVOKE: { + identityId: "The ID of the identity to revoke the auth method for." + }, + GET_TOKENS: { + identityId: "The ID of the identity to list token metadata for.", + offset: "The offset to start from. If you enter 10, it will start from the 10th token.", + limit: "The number of tokens to return." + }, + CREATE_TOKEN: { + identityId: "The ID of the identity to create the token for.", + name: "The name of the token to create." + }, + UPDATE_TOKEN: { + tokenId: "The ID of the token to update metadata for.", + name: "The name of the token to update to." + }, + REVOKE_TOKEN: { + tokenId: "The ID of the token to revoke." + } +} as const; + +export const OIDC_AUTH = { + LOGIN: { + identityId: "The ID of the identity to login." + }, + ATTACH: { + identityId: "The ID of the identity to attach the configuration onto.", + oidcDiscoveryUrl: "The URL used to retrieve the OpenID Connect configuration from the identity provider.", + caCert: "The PEM-encoded CA cert for establishing secure communication with the Identity Provider endpoints.", + boundIssuer: "The unique identifier of the identity provider issuing the JWT.", + boundAudiences: "The list of intended recipients.", + boundClaims: "The attributes that should be present in the JWT for it to be valid.", + boundSubject: "The expected principal that is the subject of the JWT.", + accessTokenTrustedIps: "The IPs or CIDR ranges that access tokens can be used from.", + accessTokenTTL: "The lifetime for an access token in seconds.", + accessTokenMaxTTL: "The maximum lifetime for an access token in seconds.", + accessTokenNumUsesLimit: "The maximum number of times that an access token can be used." + }, + UPDATE: { + identityId: "The ID of the identity to update the auth method for.", + oidcDiscoveryUrl: "The new URL used to retrieve the OpenID Connect configuration from the identity provider.", + caCert: "The new PEM-encoded CA cert for establishing secure communication with the Identity Provider endpoints.", + boundIssuer: "The new unique identifier of the identity provider issuing the JWT.", + boundAudiences: "The new list of intended recipients.", + boundClaims: "The new attributes that should be present in the JWT for it to be valid.", + boundSubject: "The new expected principal that is the subject of the JWT.", + accessTokenTrustedIps: "The new IPs or CIDR ranges that access tokens can be used from.", + accessTokenTTL: "The new lifetime for an access token in seconds.", + accessTokenMaxTTL: "The new maximum lifetime for an access token in seconds.", + accessTokenNumUsesLimit: "The new maximum number of times that an access token can be used." + }, + RETRIEVE: { + identityId: "The ID of the identity to retrieve the auth method for." + }, + REVOKE: { + identityId: "The ID of the identity to revoke the auth method for." } } as const; @@ -111,17 +353,31 @@ export const ORGANIZATIONS = { LIST_USER_MEMBERSHIPS: { organizationId: "The ID of the organization to get memberships from." 
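For context on how these `*_AUTH` constants are consumed: in this codebase they typically end up as OpenAPI descriptions on zod route schemas via `.describe()`. A minimal self-contained sketch, not a real Infisical router:

```ts
import { z } from "zod";

const TOKEN_AUTH = {
  CREATE_TOKEN: {
    identityId: "The ID of the identity to create the token for.",
    name: "The name of the token to create."
  }
} as const;

// .describe() attaches the text to the schema, so generated API docs pick it up.
const createTokenBodySchema = z.object({
  identityId: z.string().trim().describe(TOKEN_AUTH.CREATE_TOKEN.identityId),
  name: z.string().trim().optional().describe(TOKEN_AUTH.CREATE_TOKEN.name)
});

type TCreateTokenBody = z.infer<typeof createTokenBodySchema>; // { identityId: string; name?: string }
```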
}, + GET_USER_MEMBERSHIP: { + organizationId: "The ID of the organization to get the membership for.", + membershipId: "The ID of the membership to get." + }, UPDATE_USER_MEMBERSHIP: { organizationId: "The ID of the organization to update the membership for.", membershipId: "The ID of the membership to update.", - role: "The new role of the membership." + role: "The new role of the membership.", + isActive: "The active status of the membership.", + metadata: { + key: "The key for user metadata tag.", + value: "The value for user metadata tag." + } }, DELETE_USER_MEMBERSHIP: { organizationId: "The ID of the organization to delete the membership from.", membershipId: "The ID of the membership to delete." }, LIST_IDENTITY_MEMBERSHIPS: { - orgId: "The ID of the organization to get identity memberships from." + orgId: "The ID of the organization to get identity memberships from.", + offset: "The offset to start from. If you enter 10, it will start from the 10th identity membership.", + limit: "The number of identity memberships to return.", + orderBy: "The column to order identity memberships by.", + orderDirection: "The direction identity memberships will be sorted in.", + search: "The text string that identity membership names will be filtered by." }, GET_PROJECTS: { organizationId: "The ID of the organization to get projects from." @@ -135,7 +391,8 @@ export const PROJECTS = { CREATE: { organizationSlug: "The slug of the organization to create the project in.", projectName: "The name of the project to create.", - slug: "An optional slug for the project." + slug: "An optional slug for the project.", + template: "The name of the project template, if specified, to apply to this project." }, DELETE: { workspaceId: "The ID of the project to delete." @@ -162,27 +419,42 @@ secretSnapshotId: "The ID of the snapshot to rollback to." }, ADD_GROUP_TO_PROJECT: { - projectSlug: "The slug of the project to add the group to.", - groupSlug: "The slug of the group to add to the project.", + projectId: "The ID of the project to add the group to.", + groupId: "The ID of the group to add to the project.", role: "The role for the group to assume in the project." }, UPDATE_GROUP_IN_PROJECT: { - projectSlug: "The slug of the project to update the group in.", - groupSlug: "The slug of the group to update in the project.", + projectId: "The ID of the project to update the group in.", + groupId: "The ID of the group to update in the project.", roles: "A list of roles to update the group to." }, REMOVE_GROUP_FROM_PROJECT: { - projectSlug: "The slug of the project to delete the group from.", - groupSlug: "The slug of the group to delete from the project." + projectId: "The ID of the project to delete the group from.", + groupId: "The ID of the group to delete from the project." }, LIST_GROUPS_IN_PROJECT: { - projectSlug: "The slug of the project to list groups for." + projectId: "The ID of the project to list groups for." }, LIST_INTEGRATION: { workspaceId: "The ID of the project to list integrations for." }, LIST_INTEGRATION_AUTHORIZATION: { workspaceId: "The ID of the project to list integration auths for." + }, + LIST_CAS: { + slug: "The slug of the project to list CAs for.", + status: "The status of the CA to filter by.", + friendlyName: "The friendly name of the CA to filter by.", + commonName: "The common name of the CA to filter by.", + offset: "The offset to start from. If you enter 10, it will start from the 10th CA.", + limit: "The number of CAs to return."
+ }, + LIST_CERTIFICATES: { + slug: "The slug of the project to list certificates for.", + friendlyName: "The friendly name of the certificate to filter by.", + commonName: "The common name of the certificate to filter by.", + offset: "The offset to start from. If you enter 10, it will start from the 10th certificate.", + limit: "The number of certificates to return." } } as const; @@ -190,7 +462,9 @@ export const PROJECT_USERS = { INVITE_MEMBER: { projectId: "The ID of the project to invite the member to.", emails: "A list of organization member emails to invite to the project.", - usernames: "A list of usernames to invite to the project." + usernames: "A list of usernames to invite to the project.", + roleSlugs: + "A list of role slugs to assign to the newly created project membership. If nothing is provided, it will default to the Member role." }, REMOVE_MEMBER: { projectId: "The ID of the project to remove the member from.", @@ -202,6 +476,7 @@ }, GET_USER_MEMBERSHIP: { workspaceId: "The ID of the project to get memberships from.", + membershipId: "The ID of the user's project membership.", username: "The username to get project membership of. Email is the default username." }, UPDATE_USER_MEMBERSHIP: { @@ -213,7 +488,12 @@ export const PROJECT_IDENTITIES = { LIST_IDENTITY_MEMBERSHIPS: { - projectId: "The ID of the project to get identity memberships from." + projectId: "The ID of the project to get identity memberships from.", + offset: "The offset to start from. If you enter 10, it will start from the 10th identity membership.", + limit: "The number of identity memberships to return.", + orderBy: "The column to order identity memberships by.", + orderDirection: "The direction identity memberships will be sorted in.", + search: "The text string that identity membership names will be filtered by." }, GET_IDENTITY_MEMBERSHIP_BY_ID: { identityId: "The ID of the identity to get the membership for.", @@ -228,8 +508,8 @@ isTemporary: "Whether the assigned role is temporary. If isTemporary is set true, must provide temporaryMode, temporaryRange and temporaryAccessStartTime.", temporaryMode: "Type of temporary expiry.", - temporaryRange: "Expiry time for temporary access. In relative mode it could be 1s,2m,3h", - temporaryAccessStartTime: "Time to which the temporary access starts" + temporaryRange: "Expiry time for temporary access. In relative mode it could be 1s, 2m, 3h, etc.", + temporaryAccessStartTime: "Time to which the temporary access starts." } }, DELETE_IDENTITY_MEMBERSHIP: { @@ -246,8 +526,8 @@ isTemporary: "Whether the assigned role is temporary. If isTemporary is set true, must provide temporaryMode, temporaryRange and temporaryAccessStartTime.", temporaryMode: "Type of temporary expiry.", - temporaryRange: "Expiry time for temporary access. In relative mode it could be 1s,2m,3h", - temporaryAccessStartTime: "Time to which the temporary access starts" + temporaryRange: "Expiry time for temporary access. In relative mode it could be 1s, 2m, 3h, etc.", + temporaryAccessStartTime: "Time to which the temporary access starts." } } }; @@ -256,7 +536,8 @@ export const ENVIRONMENTS = { CREATE: { workspaceId: "The ID of the project to create the environment in.", name: "The name of the environment to create.", - slug: "The slug of the environment to create." + slug: "The slug of the environment to create.", + position: "The position of the environment.
The lowest number will be displayed as the first environment." }, UPDATE: { workspaceId: "The ID of the project to update the environment in.", @@ -268,6 +549,10 @@ DELETE: { workspaceId: "The ID of the project to delete the environment from.", id: "The ID of the environment to delete." + }, + GET: { + workspaceId: "The ID of the project the environment belongs to.", + id: "The ID of the environment to fetch." } } as const; @@ -278,6 +563,9 @@ path: "The path to list folders from.", directory: "The directory to list folders from. (Deprecated in favor of path)" }, + GET_BY_ID: { + folderId: "The ID of the folder to get details." + }, CREATE: { workspaceId: "The ID of the project to create the folder in.", environment: "The slug of the environment to create the folder in.", @@ -309,22 +597,22 @@ export const SECRETS = { secretPath: "The path of the secret to attach tags to.", type: "The type of the secret to attach tags to. (shared/personal)", environment: "The slug of the environment where the secret is located", - projectSlug: "The slug of the project where the secret is located", + projectSlug: "The slug of the project where the secret is located.", tagSlugs: "An array of existing tag slugs to attach to the secret." }, DETACH_TAGS: { secretName: "The name of the secret to detach tags from.", secretPath: "The path of the secret to detach tags from.", type: "The type of the secret to attach tags to. (shared/personal)", - environment: "The slug of the environment where the secret is located", - projectSlug: "The slug of the project where the secret is located", + environment: "The slug of the environment where the secret is located.", + projectSlug: "The slug of the project where the secret is located.", tagSlugs: "An array of existing tag slugs to detach from the secret." } } as const; export const RAW_SECRETS = { LIST: { - expand: "Whether or not to expand secret references", + expand: "Whether or not to expand secret references.", recursive: "Whether or not to fetch all secrets from the specified base path, and all of its subdirectories. Note, the max depth is 20 deep.", workspaceId: "The ID of the project to list secrets from.", @@ -332,7 +620,8 @@ "The slug of the project to list secrets from. This parameter is only applicable by machine identities.", environment: "The slug of the environment to list secrets from.", secretPath: "The secret path to list secrets from.", - includeImports: "Weather to include imported secrets or not." + includeImports: "Whether to include imported secrets or not.", + tagSlugs: "The comma separated tag slugs to filter secrets." }, CREATE: { secretName: "The name of the secret to create.", @@ -343,9 +632,13 @@ secretValue: "The value of the secret to create.", skipMultilineEncoding: "Skip multiline encoding for the secret value.", type: "The type of the secret to create.", - workspaceId: "The ID of the project to create the secret in." + workspaceId: "The ID of the project to create the secret in.", + tagIds: "The IDs of the tags to be attached to the created secret.", + secretReminderRepeatDays: "Interval for secret rotation notifications, measured in days.", + secretReminderNote: "Note to be attached in notification email."
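To make the new `tagIds` / `secretReminder*` fields concrete, here is a hedged client-side sketch of creating a raw secret. The route path and auth header are assumptions for illustration; the field names come from the constants above:

```ts
const createRawSecret = async (baseUrl: string, token: string) => {
  // Assumed endpoint shape for illustration only.
  const res = await fetch(`${baseUrl}/api/v3/secrets/raw/DB_PASSWORD`, {
    method: "POST",
    headers: { authorization: `Bearer ${token}`, "content-type": "application/json" },
    body: JSON.stringify({
      workspaceId: "<project-id>",
      environment: "dev",
      secretPath: "/",
      type: "shared",
      secretValue: "hunter2",
      tagIds: ["<tag-id>"], // IDs of existing tags to attach to the created secret
      secretReminderRepeatDays: 30, // reminder interval, in days
      secretReminderNote: "Rotate the database password."
    })
  });
  if (!res.ok) throw new Error(`createRawSecret failed: ${res.status}`);
  return res.json();
};
```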
}, GET: { + expand: "Whether or not to expand secret references.", secretName: "The name of the secret to get.", workspaceId: "The ID of the project to get the secret from.", workspaceSlug: "The slug of the project to get the secret from.", @@ -359,12 +652,16 @@ secretName: "The name of the secret to update.", secretComment: "Update comment to the secret.", environment: "The slug of the environment where the secret is located.", - secretPath: "The path of the secret to update", + secretPath: "The path of the secret to update.", secretValue: "The new value of the secret.", skipMultilineEncoding: "Skip multiline encoding for the secret value.", type: "The type of the secret to update.", projectSlug: "The slug of the project to update the secret in.", - workspaceId: "The ID of the project to update the secret in." + workspaceId: "The ID of the project to update the secret in.", + tagIds: "The IDs of the tags to be attached to the updated secret.", + secretReminderRepeatDays: "Interval for secret rotation notifications, measured in days.", + secretReminderNote: "Note to be attached in notification email.", + newSecretName: "The new name for the secret." }, DELETE: { secretName: "The name of the secret to delete.", @@ -373,6 +670,12 @@ type: "The type of the secret to delete.", projectSlug: "The slug of the project to delete the secret in.", workspaceId: "The ID of the project where the secret is located." + }, + GET_REFERENCE_TREE: { + secretName: "The name of the secret to get the reference tree for.", + workspaceId: "The ID of the project where the secret is located.", + environment: "The slug of the environment where the secret is located.", + secretPath: "The folder path where the secret is located." } } as const; @@ -382,6 +685,9 @@ export const SECRET_IMPORTS = { environment: "The slug of the environment to list secret imports from.", path: "The path to list secret imports from." }, + GET: { + secretImportId: "The ID of the secret import to fetch." + }, CREATE: { environment: "The slug of the environment to import into.", path: "The path to import into.", @@ -412,11 +718,46 @@ } } as const; +export const DASHBOARD = { + SECRET_OVERVIEW_LIST: { + projectId: "The ID of the project to list secrets/folders from.", + environments: + "The slugs of the environments to list secrets/folders from (comma separated, ie 'environments=dev,staging,prod').", + secretPath: "The secret path to list secrets/folders from.", + offset: "The offset to start from. If you enter 10, it will start from the 10th secret/folder.", + limit: "The number of secrets/folders to return.", + orderBy: "The column to order secrets/folders by.", + orderDirection: "The direction to order secrets/folders in.", + search: "The text string to filter secret keys and folder names by.", + includeSecrets: "Whether to include project secrets in the response.", + includeFolders: "Whether to include project folders in the response.", + includeDynamicSecrets: "Whether to include dynamic project secrets in the response." + }, + SECRET_DETAILS_LIST: { + projectId: "The ID of the project to list secrets/folders from.", + environment: "The slug of the environment to list secrets/folders from.", + secretPath: "The secret path to list secrets/folders from.", + offset: "The offset to start from.
If you enter 10, it will start from the 10th secret/folder.", + limit: "The number of secrets/folders to return.", + orderBy: "The column to order secrets/folders by.", + orderDirection: "The direction to order secrets/folders in.", + search: "The text string to filter secret keys and folder names by.", + tags: "The tags to filter secrets by (comma separated, ie 'tags=billing,engineering').", + includeSecrets: "Whether to include project secrets in the response.", + includeFolders: "Whether to include project folders in the response.", + includeImports: "Whether to include project secret imports in the response.", + includeDynamicSecrets: "Whether to include dynamic project secrets in the response." + } +} as const; + export const AUDIT_LOGS = { EXPORT: { - workspaceId: "The ID of the project to export audit logs from.", + projectId: + "Optionally filter logs by project ID. If not provided, logs from the entire organization will be returned.", eventType: "The type of the event to export.", userAgentType: "Choose which consuming application to export audit logs for.", + eventMetadata: + "Filter by event metadata key-value pairs. Formatted as `key1=value1,key2=value2`, with comma-separation.", startDate: "The date to start the export from.", endDate: "The date to end the export at.", offset: "The offset to start from. If you enter 10, it will start from the 10th audit log.", @@ -457,7 +798,7 @@ export const DYNAMIC_SECRETS = { environmentSlug: "The slug of the environment to update the dynamic secret in.", path: "The path to update the dynamic secret in.", name: "The name of the dynamic secret.", - inputs: "The new partial values for the configurated provider of the dynamic secret", + inputs: "The new partial values for the configured provider of the dynamic secret.", defaultTTL: "The default TTL that will be applied for all the leases.", maxTTL: "The maximum limit a TTL can be leases or renewed.", newName: "The new name for the dynamic secret." }, @@ -468,7 +809,7 @@ path: "The path to delete the dynamic secret in.", name: "The name of the dynamic secret.", isForced: - "A boolean flag to delete the the dynamic secret from infisical without trying to remove it from external provider. Used when the dynamic secret got modified externally." + "A boolean flag to delete the dynamic secret from Infisical without trying to remove it from the external provider. Used when the dynamic secret got modified externally." } } as const; @@ -484,7 +825,7 @@ export const DYNAMIC_SECRET_LEASES = { environmentSlug: "The slug of the environment of the dynamic secret in.", path: "The path of the dynamic secret in.", dynamicSecretName: "The name of the dynamic secret.", - ttl: "The lease lifetime ttl. If not provided the default TTL of dynamic secret will be used." + ttl: "The lease lifetime TTL. If not provided, the default TTL of the dynamic secret will be used." }, RENEW: { projectSlug: "The slug of the project of the dynamic secret in.", @@ -499,19 +840,34 @@ path: "The path of the dynamic secret in.", leaseId: "The ID of the dynamic secret lease.", isForced: - "A boolean flag to delete the the dynamic secret from infisical without trying to remove it from external provider. Used when the dynamic secret got modified externally." + "A boolean flag to delete the dynamic secret from Infisical without trying to remove it from the external provider. Used when the dynamic secret got modified externally."
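The `eventMetadata` filter above is documented as `key1=value1,key2=value2`. A small standalone parser pinning down that format (assumption: keys contain no `=` and values no commas; this is not Infisical's actual parsing code):

```ts
const parseEventMetadata = (raw: string): Record<string, string> =>
  Object.fromEntries(
    raw
      .split(",")
      .map((pair) => pair.trim())
      .filter(Boolean)
      .map((pair) => {
        const [key, ...rest] = pair.split("=");
        return [key, rest.join("=")] as const; // tolerate '=' inside the value
      })
  );

// parseEventMetadata("secretPath=/api,environment=prod")
// => { secretPath: "/api", environment: "prod" }
```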
} } as const; export const SECRET_TAGS = { LIST: { projectId: "The ID of the project to list tags from." }, + GET_TAG_BY_ID: { + projectId: "The ID of the project to get tags from.", + tagId: "The ID of the tag to get details." + }, + GET_TAG_BY_SLUG: { + projectId: "The ID of the project to get tags from.", + tagSlug: "The slug of the tag to get details." + }, CREATE: { projectId: "The ID of the project to create the tag in.", name: "The name of the tag to create.", slug: "The slug of the tag to create.", color: "The color of the tag to create." }, + UPDATE: { + projectId: "The ID of the project to update the tag in.", + tagId: "The ID of the tag to update.", + name: "The name of the tag to update.", + slug: "The slug of the tag to update.", + color: "The color of the tag to update." + }, DELETE: { tagId: "The ID of the tag to delete.", projectId: "The ID of the project to delete the tag from." @@ -541,8 +897,8 @@ The permission object for the privilege. privilegePermission: "The permission object for the privilege.", isPackPermission: "Whether the server should pack(compact) the permission object.", isTemporary: "Whether the privilege is temporary.", - temporaryMode: "Type of temporary access given. Types: relative", - temporaryRange: "TTL for the temporay time. Eg: 1m, 1h, 1d", + temporaryMode: "Type of temporary access given. Types: relative.", + temporaryRange: "TTL for the temporary time. Eg: 1m, 1h, 1d.", temporaryAccessStartTime: "ISO time for which temporary access should begin." }, UPDATE: { @@ -567,8 +923,8 @@ The permission object for the privilege. `, privilegePermission: "The permission object for the privilege.", isTemporary: "Whether the privilege is temporary.", - temporaryMode: "Type of temporary access given. Types: relative", - temporaryRange: "TTL for the temporay time. Eg: 1m, 1h, 1d", + temporaryMode: "Type of temporary access given. Types: relative.", + temporaryRange: "TTL for the temporary time. Eg: 1m, 1h, 1d.", temporaryAccessStartTime: "ISO time for which temporary access should begin." }, DELETE: { @@ -584,61 +940,102 @@ The permission object for the privilege. LIST: { projectSlug: "The slug of the project of the identity in.", identityId: "The ID of the identity to list.", - unpacked: "Whether the system should send the permissions as unpacked" + unpacked: "Whether the system should send the permissions as unpacked." } }; export const PROJECT_USER_ADDITIONAL_PRIVILEGE = { CREATE: { - projectMembershipId: "Project membership id of user", + projectMembershipId: "The project membership ID of the user.", slug: "The slug of the privilege to create.", permissions: - "The permission object for the privilege. Refer https://casl.js.org/v6/en/guide/define-rules#the-shape-of-raw-rule to understand the shape", - isPackPermission: "Whether the server should pack(compact) the permission object.", + "The permission object for the privilege. Refer https://casl.js.org/v6/en/guide/define-rules#the-shape-of-raw-rule to understand the shape.", + isPackPermission: "Whether the server should pack (compact) the permission object.", isTemporary: "Whether the privilege is temporary.", - temporaryMode: "Type of temporary access given. Types: relative", - temporaryRange: "TTL for the temporay time. Eg: 1m, 1h, 1d", + temporaryMode: "Type of temporary access given. Types: relative.", + temporaryRange: "TTL for the temporary time. Eg: 1m, 1h, 1d.", temporaryAccessStartTime: "ISO time for which temporary access should begin."
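`temporaryRange` values such as "1m, 1h, 1d" above are relative durations; Infisical itself presumably delegates to a duration library, so this standalone parser only pins down the format for illustration:

```ts
const RELATIVE_UNIT_MS: Record<string, number> = {
  s: 1_000,
  m: 60_000,
  h: 3_600_000,
  d: 86_400_000
};

const parseTemporaryRange = (range: string): number => {
  const match = /^(\d+)([smhd])$/.exec(range.trim());
  if (!match) throw new Error(`Invalid temporary range: ${range}`);
  return Number(match[1]) * RELATIVE_UNIT_MS[match[2]];
};

// The access window is [temporaryAccessStartTime, temporaryAccessStartTime + range).
const accessEndTime = (startIso: string, range: string) =>
  new Date(new Date(startIso).getTime() + parseTemporaryRange(range));
```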
}, UPDATE: { - privilegeId: "The id of privilege object", + privilegeId: "The ID of the privilege object.", slug: "The slug of the privilege to create.", newSlug: "The new slug of the privilege to create.", permissions: - "The permission object for the privilege. Refer https://casl.js.org/v6/en/guide/define-rules#the-shape-of-raw-rule to understand the shape", - isPackPermission: "Whether the server should pack(compact) the permission object.", + "The permission object for the privilege. Refer https://casl.js.org/v6/en/guide/define-rules#the-shape-of-raw-rule to understand the shape.", + isPackPermission: "Whether the server should pack (compact) the permission object.", isTemporary: "Whether the privilege is temporary.", - temporaryMode: "Type of temporary access given. Types: relative", - temporaryRange: "TTL for the temporay time. Eg: 1m, 1h, 1d", + temporaryMode: "Type of temporary access given. Types: relative.", + temporaryRange: "TTL for the temporary time. Eg: 1m, 1h, 1d.", temporaryAccessStartTime: "ISO time for which temporary access should begin." }, DELETE: { - privilegeId: "The id of privilege object" + privilegeId: "The ID of the privilege object." }, - GET_BY_PRIVILEGEID: { - privilegeId: "The id of privilege object" + GET_BY_PRIVILEGE_ID: { + privilegeId: "The ID of the privilege object." }, LIST: { - projectMembershipId: "Project membership id of user" + projectMembershipId: "The project membership ID of the user." + } +}; + +export const IDENTITY_ADDITIONAL_PRIVILEGE_V2 = { + CREATE: { + identityId: "The ID of the identity to create the privilege for.", + projectId: "The ID of the project the identity is in.", + slug: "The slug of the privilege to create.", + permission: "The permission for the privilege.", + isTemporary: "Whether the privilege is temporary or permanent.", + temporaryMode: "Type of temporary access given. Types: relative.", + temporaryRange: "The TTL for the temporary access given. Eg: 1m, 1h, 1d.", + temporaryAccessStartTime: "The start time in ISO format when the temporary access should begin." + }, + UPDATE: { + id: "The ID of the identity privilege.", + identityId: "The ID of the identity to update the privilege for.", + slug: "The slug of the privilege to update.", + privilegePermission: "The permission for the privilege.", + isTemporary: "Whether the privilege is temporary.", + temporaryMode: "Type of temporary access given. Types: relative.", + temporaryRange: "The TTL for the temporary access given. Eg: 1m, 1h, 1d.", + temporaryAccessStartTime: "The start time in ISO format when the temporary access should begin." + }, + DELETE: { + id: "The ID of the identity privilege.", + identityId: "The ID of the identity to delete the privilege from.", + slug: "The slug of the privilege to delete." + }, + GET_BY_SLUG: { + projectSlug: "The slug of the project the identity is in.", + identityId: "The ID of the identity to get the privilege for.", + slug: "The slug of the privilege." + }, + GET_BY_ID: { + id: "The ID of the identity privilege." + }, + LIST: { + projectId: "The ID of the project that the identity is in.", + identityId: "The ID of the identity to list privileges for." + } +}; export const INTEGRATION_AUTH = { GET: { - integrationAuthId: "The id of integration authentication object." + integrationAuthId: "The ID of the integration authentication object." }, DELETE: { integration: "The slug of the integration to be unauthorized.", projectId: "The ID of the project to delete the integration auth from." }, DELETE_BY_ID: { - integrationAuthId: "The id of integration authentication object to delete."
+ integrationAuthId: "The ID of the integration authentication object to delete." }, CREATE_ACCESS_TOKEN: { workspaceId: "The ID of the project to create the integration auth for.", integration: "The slug of integration for the auth object.", - accessId: "The unique authorized access id of the external integration provider.", + accessId: "The unique authorized access ID of the external integration provider.", accessToken: "The unique authorized access token of the external integration provider.", + awsAssumeIamRoleArn: "The AWS IAM Role to be assumed by Infisical.", url: "", namespace: "", refreshToken: "The refresh token for integration authorization." @@ -657,16 +1054,16 @@ export const INTEGRATION = { targetEnvironment: "The target environment of the integration provider. Used in cloudflare pages, TeamCity, Gitlab integrations.", targetEnvironmentId: - "The target environment id of the integration provider. Used in cloudflare pages, teamcity, gitlab integrations.", + "The target environment ID of the integration provider. Used in Cloudflare Pages, TeamCity, GitLab integrations.", targetService: - "The service based grouping identifier of the external provider. Used in Terraform cloud, Checkly, Railway and NorthFlank", + "The service based grouping identifier of the external provider. Used in Terraform cloud, Checkly, Railway and NorthFlank.", targetServiceId: - "The service based grouping identifier ID of the external provider. Used in Terraform cloud, Checkly, Railway and NorthFlank", + "The service based grouping identifier ID of the external provider. Used in Terraform cloud, Checkly, Railway and NorthFlank.", owner: "External integration providers service entity owner. Used in Github.", - url: "The self-hosted URL of the platform to integrate with", - path: "Path to save the synced secrets. Used by Gitlab, AWS Parameter Store, Vault", + url: "The self-hosted URL of the platform to integrate with.", + path: "Path to save the synced secrets. Used by Gitlab, AWS Parameter Store, Vault.", region: "AWS region to sync secrets to.", - scope: "Scope of the provider. Used by Github, Qovery", + scope: "Scope of the provider. Used by Github, Qovery.", metadata: { secretPrefix: "The prefix for the saved secret. Used by GCP.", secretSuffix: "The suffix for the saved secret. Used by GCP.", @@ -675,11 +1072,15 @@ shouldAutoRedeploy: "Used by Render to trigger auto deploy.", secretGCPLabel: "The label for GCP secrets.", secretAWSTag: "The tags for AWS secrets.", + githubVisibility: + "Define where the secrets from the Github Integration should be visible. Option 'selected' lets you directly define which repositories to sync secrets to.", + githubVisibilityRepoIds: + "The repository IDs to sync secrets to when using the Github Integration. Only applicable when using Organization scope, and visibility is set to 'selected'.", kmsKeyId: "The ID of the encryption key from AWS KMS.", shouldDisableDelete: "The flag to disable deletion of secrets in AWS Parameter Store.", shouldMaskSecrets: "Specifies if the secrets synced from Infisical to Gitlab should be marked as 'Masked'.", shouldProtectSecrets: "Specifies if the secrets synced from Infisical to Gitlab should be marked as 'Protected'.", - shouldEnableDelete: "The flag to enable deletion of secrets" + shouldEnableDelete: "The flag to enable deletion of secrets." } }, UPDATE: { @@ -698,7 +1099,7 @@ integrationId: "The ID of the integration object."
}, SYNC: { - integrationId: "The ID of the integration object to manually sync" + integrationId: "The ID of the integration object to manually sync." } }; @@ -706,7 +1107,7 @@ export const AUDIT_LOG_STREAMS = { CREATE: { url: "The HTTP URL to push logs to.", headers: { - desc: "The HTTP headers attached for the external prrovider requests.", + desc: "The HTTP headers attached for the external provider requests.", key: "The HTTP header key name.", value: "The HTTP header value." } @@ -715,7 +1116,7 @@ id: "The ID of the audit log stream to update.", url: "The HTTP URL to push logs to.", headers: { - desc: "The HTTP headers attached for the external prrovider requests.", + desc: "The HTTP headers attached for the external provider requests.", key: "The HTTP header key name.", value: "The HTTP header value." } @@ -728,16 +1129,261 @@ } }; +export const CERTIFICATE_AUTHORITIES = { + CREATE: { + projectSlug: "Slug of the project to create the CA in.", + type: "The type of CA to create.", + friendlyName: "A friendly name for the CA.", + organization: "The organization (O) for the CA.", + ou: "The organizational unit (OU) for the CA.", + country: "The country name (C) for the CA.", + province: "The state or province name for the CA.", + locality: "The locality name for the CA.", + commonName: "The common name (CN) for the CA.", + notBefore: "The date and time when the CA becomes valid in YYYY-MM-DDTHH:mm:ss.sssZ format.", + notAfter: "The date and time when the CA expires in YYYY-MM-DDTHH:mm:ss.sssZ format.", + maxPathLength: + "The maximum number of intermediate CAs that may follow this CA in the certificate / CA chain. A maxPathLength of -1 implies no path limit on the chain.", + keyAlgorithm: + "The type of public key algorithm and size, in bits, of the key pair for the CA; when you create an intermediate CA, you must use a key algorithm supported by the parent CA.", + requireTemplateForIssuance: + "Whether or not certificates for this CA can only be issued through certificate templates." + }, + GET: { + caId: "The ID of the CA to get." + }, + UPDATE: { + caId: "The ID of the CA to update.", + status: "The status of the CA to update to. This can be one of active or disabled.", + requireTemplateForIssuance: + "Whether or not certificates for this CA can only be issued through certificate templates." + }, + DELETE: { + caId: "The ID of the CA to delete." + }, + GET_CSR: { + caId: "The ID of the CA to generate the CSR from.", + csr: "The generated CSR from the CA." + }, + RENEW_CA_CERT: { + caId: "The ID of the CA to renew the CA certificate for.", + type: "The type of behavior to use for the renewal operation. Currently Infisical is only able to renew a CA certificate with the same key pair.", + notAfter: "The expiry date and time for the renewed CA certificate in YYYY-MM-DDTHH:mm:ss.sssZ format.", + certificate: "The renewed CA certificate body.", + certificateChain: "The certificate chain of the CA.", + serialNumber: "The serial number of the renewed CA certificate." + }, + GET_CERT: { + caId: "The ID of the CA to get the certificate body and certificate chain from.", + certificate: "The certificate body of the CA.", + certificateChain: "The certificate chain of the CA.", + serialNumber: "The serial number of the CA certificate." + }, + GET_CERT_BY_ID: { + caId: "The ID of the CA to get the CA certificate from.", + caCertId: "The ID of the CA certificate to get."
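The `notBefore` / `notAfter` fields above use exactly the format produced by `Date#toISOString()` ("YYYY-MM-DDTHH:mm:ss.sssZ"). A sketch of building a ten-year CA validity window on the caller's side:

```ts
const notBefore = new Date();
const notAfter = new Date(notBefore);
notAfter.setFullYear(notAfter.getFullYear() + 10);

const caValidity = {
  notBefore: notBefore.toISOString(), // e.g. "2025-01-01T00:00:00.000Z"
  notAfter: notAfter.toISOString(),
  maxPathLength: -1 // -1 means no limit on intermediate CAs below this one
};
```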
+ }, + GET_CA_CERTS: { + caId: "The ID of the CA to get the CA certificates for.", + certificate: "The certificate body of the CA certificate.", + certificateChain: "The certificate chain of the CA certificate.", + serialNumber: "The serial number of the CA certificate.", + version: "The version of the CA certificate. The version is incremented for each CA renewal operation." + }, + SIGN_INTERMEDIATE: { + caId: "The ID of the CA to sign the intermediate certificate with.", + csr: "The pem-encoded CSR to sign with the CA.", + notBefore: "The date and time when the intermediate CA becomes valid in YYYY-MM-DDTHH:mm:ss.sssZ format.", + notAfter: "The date and time when the intermediate CA expires in YYYY-MM-DDTHH:mm:ss.sssZ format.", + maxPathLength: + "The maximum number of intermediate CAs that may follow this CA in the certificate / CA chain. A maxPathLength of -1 implies no path limit on the chain.", + certificate: "The signed intermediate certificate.", + certificateChain: "The certificate chain of the intermediate certificate.", + issuingCaCertificate: "The certificate of the issuing CA.", + serialNumber: "The serial number of the intermediate certificate." + }, + IMPORT_CERT: { + caId: "The ID of the CA to import the certificate for.", + certificate: "The certificate body to import.", + certificateChain: "The certificate chain to import." + }, + ISSUE_CERT: { + caId: "The ID of the CA to issue the certificate from.", + certificateTemplateId: "The ID of the certificate template to issue the certificate from.", + pkiCollectionId: "The ID of the PKI collection to add the certificate to.", + friendlyName: "A friendly name for the certificate.", + commonName: "The common name (CN) for the certificate.", + altNames: + "A comma-delimited list of Subject Alternative Names (SANs) for the certificate; these can be host names or email addresses.", + ttl: "The time to live for the certificate such as 1m, 1h, 1d, 1y, ...", + notBefore: "The date and time when the certificate becomes valid in YYYY-MM-DDTHH:mm:ss.sssZ format.", + notAfter: "The date and time when the certificate expires in YYYY-MM-DDTHH:mm:ss.sssZ format.", + certificate: "The issued certificate.", + issuingCaCertificate: "The certificate of the issuing CA.", + certificateChain: "The certificate chain of the issued certificate.", + privateKey: "The private key of the issued certificate.", + serialNumber: "The serial number of the issued certificate.", + keyUsages: "The key usage extension of the certificate.", + extendedKeyUsages: "The extended key usage extension of the certificate." 
+ }, + SIGN_CERT: { + caId: "The ID of the CA to issue the certificate from.", + pkiCollectionId: "The ID of the PKI collection to add the certificate to.", + keyUsages: "The key usage extension of the certificate.", + extendedKeyUsages: "The extended key usage extension of the certificate.", + csr: "The pem-encoded CSR to sign with the CA to be used for certificate issuance.", + friendlyName: "A friendly name for the certificate.", + commonName: "The common name (CN) for the certificate.", + altNames: + "A comma-delimited list of Subject Alternative Names (SANs) for the certificate; these can be host names or email addresses.", + ttl: "The time to live for the certificate such as 1m, 1h, 1d, 1y, ...", + notBefore: "The date and time when the certificate becomes valid in YYYY-MM-DDTHH:mm:ss.sssZ format.", + notAfter: "The date and time when the certificate expires in YYYY-MM-DDTHH:mm:ss.sssZ format.", + certificate: "The issued certificate.", + issuingCaCertificate: "The certificate of the issuing CA.", + certificateChain: "The certificate chain of the issued certificate.", + serialNumber: "The serial number of the issued certificate." + }, + GET_CRLS: { + caId: "The ID of the CA to get the certificate revocation lists (CRLs) for.", + id: "The ID of certificate revocation list (CRL).", + crl: "The certificate revocation list (CRL)." + } +}; + +export const CERTIFICATES = { + GET: { + serialNumber: "The serial number of the certificate to get." + }, + REVOKE: { + serialNumber: + "The serial number of the certificate to revoke. The revoked certificate will be added to the certificate revocation list (CRL) of the CA.", + revocationReason: "The reason for revoking the certificate.", + revokedAt: "The date and time when the certificate was revoked.", + serialNumberRes: "The serial number of the revoked certificate." + }, + DELETE: { + serialNumber: "The serial number of the certificate to delete." + }, + GET_CERT: { + serialNumber: "The serial number of the certificate to get the certificate body and certificate chain for.", + certificate: "The certificate body of the certificate.", + certificateChain: "The certificate chain of the certificate.", + serialNumberRes: "The serial number of the certificate." + } +}; + +export const CERTIFICATE_TEMPLATES = { + CREATE: { + caId: "The ID of the certificate authority to associate the template with.", + pkiCollectionId: "The ID of the PKI collection to bind to the template.", + name: "The name of the template.", + commonName: "The regular expression string to use for validating common names.", + subjectAlternativeName: "The regular expression string to use for validating subject alternative names.", + ttl: "The max TTL for the template.", + keyUsages: "The key usage constraint or default value for when template is used during certificate issuance.", + extendedKeyUsages: + "The extended key usage constraint or default value for when template is used during certificate issuance." + }, + GET: { + certificateTemplateId: "The ID of the certificate template to get." 
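The `commonName` / `subjectAlternativeName` fields on certificate templates are regular-expression strings. A hedged sketch of how issuance-time enforcement could look; the template values and `validateAgainstTemplate` are illustrative, not Infisical's actual validation code:

```ts
const template = {
  commonName: String.raw`^[a-z0-9-]+\.internal\.example\.com$`,
  subjectAlternativeName: String.raw`^[a-z0-9-]+\.internal\.example\.com$`
};

const validateAgainstTemplate = (commonName: string, altNames: string[]) => {
  const cnRe = new RegExp(template.commonName);
  const sanRe = new RegExp(template.subjectAlternativeName);
  if (!cnRe.test(commonName)) throw new Error(`CN ${commonName} not allowed by template`);
  for (const san of altNames) {
    if (!sanRe.test(san)) throw new Error(`SAN ${san} not allowed by template`);
  }
};

// validateAgainstTemplate("api.internal.example.com", ["web.internal.example.com"]);
```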
+ }, + UPDATE: { + certificateTemplateId: "The ID of the certificate template to update.", + caId: "The ID of the certificate authority to update the association with the template.", + pkiCollectionId: "The ID of the PKI collection to update the binding to the template.", + name: "The updated name of the template.", + commonName: "The updated regular expression string for validating common names.", + subjectAlternativeName: "The updated regular expression string for validating subject alternative names.", + ttl: "The updated max TTL for the template.", + keyUsages: + "The updated key usage constraint or default value for when template is used during certificate issuance.", + extendedKeyUsages: + "The updated extended key usage constraint or default value for when template is used during certificate issuance." + }, + DELETE: { + certificateTemplateId: "The ID of the certificate template to delete." + } +}; + +export const CA_CRLS = { + GET: { + crlId: "The ID of the certificate revocation list (CRL) to get.", + crl: "The certificate revocation list (CRL)." + } +}; + +export const ALERTS = { + CREATE: { + projectId: "The ID of the project to create the alert in.", + pkiCollectionId: "The ID of the PKI collection to bind to the alert.", + name: "The name of the alert.", + alertBeforeDays: "The number of days before the certificate expires to trigger the alert.", + emails: "The email addresses to send the alert email to." + }, + GET: { + alertId: "The ID of the alert to get." + }, + UPDATE: { + alertId: "The ID of the alert to update.", + name: "The name of the alert to update to.", + alertBeforeDays: "The number of days before the certificate expires to trigger the alert to update to.", + pkiCollectionId: "The ID of the PKI collection to bind to the alert to update to.", + emails: "The email addresses to send the alert email to update to." + }, + DELETE: { + alertId: "The ID of the alert to delete." + } +}; + +export const PKI_COLLECTIONS = { + CREATE: { + projectId: "The ID of the project to create the PKI collection in.", + name: "The name of the PKI collection.", + description: "A description for the PKI collection." + }, + GET: { + collectionId: "The ID of the PKI collection to get." + }, + UPDATE: { + collectionId: "The ID of the PKI collection to update.", + name: "The name of the PKI collection to update to.", + description: "The description for the PKI collection to update to." + }, + DELETE: { + collectionId: "The ID of the PKI collection to delete." + }, + LIST_ITEMS: { + collectionId: "The ID of the PKI collection to list items from.", + type: "The type of the PKI collection item to list.", + offset: "The offset to start from.", + limit: "The number of items to return." + }, + ADD_ITEM: { + collectionId: "The ID of the PKI collection to add the item to.", + type: "The type of the PKI collection item to add.", + itemId: "The resource ID of the PKI collection item to add." + }, + DELETE_ITEM: { + collectionId: "The ID of the PKI collection to delete the item from.", + collectionItemId: "The ID of the PKI collection item to delete.", + type: "The type of the deleted PKI collection item.", + itemId: "The resource ID of the deleted PKI collection item." + } +}; + export const PROJECT_ROLE = { CREATE: { projectSlug: "Slug of the project to create the role for.", + projectId: "The ID of the project to create the role for.", slug: "The slug of the role.", name: "The name of the role.", description: "The description for the role.", permissions: "The permissions assigned to the role."
  },
  UPDATE: {
-    projectSlug: "Slug of the project to update the role for.",
+    projectSlug: "The slug of the project to update the role for.",
+    projectId: "The ID of the project to update the role for.",
    roleId: "The ID of the role to update.",
    slug: "The slug of the role.",
    name: "The name of the role.",
@@ -745,14 +1391,70 @@ export const PROJECT_ROLE = {
    permissions: "The permissions assigned to the role."
  },
  DELETE: {
-    projectSlug: "Slug of the project to delete this role for.",
+    projectSlug: "The slug of the project to delete this role for.",
+    projectId: "The ID of the project to delete the role for.",
    roleId: "The ID of the role to delete."
  },
  GET_ROLE_BY_SLUG: {
    projectSlug: "The slug of the project.",
-    roleSlug: "The slug of the role to get details"
+    projectId: "The ID of the project.",
+    roleSlug: "The slug of the role to get details."
  },
  LIST: {
-    projectSlug: "The slug of the project to list the roles of."
+    projectSlug: "The slug of the project to list the roles of.",
+    projectId: "The ID of the project."
+  }
+};
+
+export const KMS = {
+  CREATE_KEY: {
+    projectId: "The ID of the project to create the key in.",
+    name: "The name of the key to be created. Must be slug-friendly.",
+    description: "An optional description of the key.",
+    encryptionAlgorithm: "The algorithm to use when performing cryptographic operations with the key."
+  },
+  UPDATE_KEY: {
+    keyId: "The ID of the key to be updated.",
+    name: "The updated name of this key. Must be slug-friendly.",
+    description: "The updated description of this key.",
+    isDisabled: "The flag to enable or disable this key."
+  },
+  DELETE_KEY: {
+    keyId: "The ID of the key to be deleted."
+  },
+  LIST_KEYS: {
+    projectId: "The ID of the project to list keys from.",
+    offset: "The offset to start from. If you enter 10, it will start from the 10th key.",
+    limit: "The number of keys to return.",
+    orderBy: "The column to order keys by.",
+    orderDirection: "The direction to order keys in.",
+    search: "The text string to filter key names by."
+  },
+  ENCRYPT: {
+    keyId: "The ID of the key to encrypt the data with.",
+    plaintext: "The plaintext to be encrypted (base64 encoded)."
+  },
+  DECRYPT: {
+    keyId: "The ID of the key to decrypt the data with.",
+    ciphertext: "The ciphertext to be decrypted (base64 encoded)."
+  }
+};
+
+export const ProjectTemplates = {
+  CREATE: {
+    name: "The name of the project template to be created. Must be slug-friendly.",
+    description: "An optional description of the project template.",
+    roles: "The roles to be created when the template is applied to a project.",
+    environments: "The environments to be created when the template is applied to a project."
+  },
+  UPDATE: {
+    templateId: "The ID of the project template to be updated.",
+    name: "The updated name of the project template. Must be slug-friendly.",
+    description: "The updated description of the project template.",
+    roles: "The updated roles to be created when the template is applied to a project.",
+    environments: "The updated environments to be created when the template is applied to a project."
+  },
+  DELETE: {
+    templateId: "The ID of the project template to be deleted."
} }; diff --git a/backend/src/lib/axios/digest-auth.ts b/backend/src/lib/axios/digest-auth.ts new file mode 100644 index 0000000000..ee9dbd79b1 --- /dev/null +++ b/backend/src/lib/axios/digest-auth.ts @@ -0,0 +1,57 @@ +import crypto from "node:crypto"; + +import { AxiosError, AxiosInstance, AxiosRequestConfig } from "axios"; + +export const createDigestAuthRequestInterceptor = ( + axiosInstance: AxiosInstance, + username: string, + password: string +) => { + let nc = 0; + + return async (opts: AxiosRequestConfig) => { + try { + return await axiosInstance.request(opts); + } catch (err) { + const error = err as AxiosError; + const authHeader = (error?.response?.headers?.["www-authenticate"] as string) || ""; + + if (error?.response?.status !== 401 || !authHeader?.includes("nonce")) { + return Promise.reject(error.message); + } + + if (!error.config) { + return Promise.reject(error); + } + + const authDetails = authHeader.split(",").map((el) => el.split("=")); + nc += 1; + const nonceCount = nc.toString(16).padStart(8, "0"); + const cnonce = crypto.randomBytes(24).toString("hex"); + const realm = authDetails.find((el) => el[0].toLowerCase().indexOf("realm") > -1)?.[1].replace(/"/g, ""); + const nonce = authDetails.find((el) => el[0].toLowerCase().indexOf("nonce") > -1)?.[1].replace(/"/g, ""); + const ha1 = crypto.createHash("md5").update(`${username}:${realm}:${password}`).digest("hex"); + const path = opts.url; + + const ha2 = crypto + .createHash("md5") + .update(`${opts.method ?? "GET"}:${path}`) + .digest("hex"); + + const response = crypto + .createHash("md5") + .update(`${ha1}:${nonce}:${nonceCount}:${cnonce}:auth:${ha2}`) + .digest("hex"); + const authorization = `Digest username="${username}",realm="${realm}",nonce="${nonce}",uri="${path}",qop="auth",algorithm="MD5",response="${response}",nc="${nonceCount}",cnonce="${cnonce}"`; + + if (opts.headers) { + // eslint-disable-next-line + opts.headers.authorization = authorization; + } else { + // eslint-disable-next-line + opts.headers = { authorization }; + } + return axiosInstance.request(opts); + } + }; +}; diff --git a/backend/src/lib/base64/index.ts b/backend/src/lib/base64/index.ts new file mode 100644 index 0000000000..cfc0fde3f1 --- /dev/null +++ b/backend/src/lib/base64/index.ts @@ -0,0 +1,28 @@ +// Credit: https://github.com/miguelmota/is-base64 +export const isBase64 = ( + v: string, + opts = { allowEmpty: false, mimeRequired: false, allowMime: true, paddingRequired: false } +) => { + if (opts.allowEmpty === false && v === "") { + return false; + } + + let regex = "(?:[A-Za-z0-9+\\/]{4})*(?:[A-Za-z0-9+\\/]{2}==|[A-Za-z0-9+/]{3}=)?"; + const mimeRegex = "(data:\\w+\\/[a-zA-Z\\+\\-\\.]+;base64,)"; + + if (opts.mimeRequired === true) { + regex = mimeRegex + regex; + } else if (opts.allowMime === true) { + regex = `${mimeRegex}?${regex}`; + } + + if (opts.paddingRequired === false) { + regex = "(?:[A-Za-z0-9+\\/]{4})*(?:[A-Za-z0-9+\\/]{2}(==)?|[A-Za-z0-9+\\/]{3}=?)?"; + } + + return new RegExp(`^${regex}$`, "gi").test(v); +}; + +export const getBase64SizeInBytes = (base64String: string) => { + return Buffer.from(base64String, "base64").length; +}; diff --git a/backend/src/lib/casl/index.ts b/backend/src/lib/casl/index.ts index 9e5cb29d3d..71625e181c 100644 --- a/backend/src/lib/casl/index.ts +++ b/backend/src/lib/casl/index.ts @@ -23,8 +23,19 @@ export const conditionsMatcher = buildMongoQueryMatcher({ $glob }, { glob }); /** * Extracts and formats permissions from a CASL Ability object or a raw permission set. 
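 * For example, a CASL rule { action: ["read", "edit"], subject: "Secrets" } is
 * flattened into the strings "read_Secrets" and "edit_Secrets", as the updated
 * implementation below handles both string and array actions.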
 */
-const extractPermissions = (ability: MongoAbility) =>
-  ability.rules.map((permission) => `${permission.action as string}_${permission.subject as string}`);
+const extractPermissions = (ability: MongoAbility) => {
+  const permissions: string[] = [];
+  ability.rules.forEach((permission) => {
+    if (typeof permission.action === "string") {
+      permissions.push(`${permission.action}_${permission.subject as string}`);
+    } else {
+      permission.action.forEach((permissionAction) => {
+        permissions.push(`${permissionAction}_${permission.subject as string}`);
+      });
+    }
+  });
+  return permissions;
+};

 /**
  * Compares two sets of permissions to determine if the first set is at least as privileged as the second set.
diff --git a/backend/src/lib/config/env.ts b/backend/src/lib/config/env.ts
index 76defa597c..74a88a73cd 100644
--- a/backend/src/lib/config/env.ts
+++ b/backend/src/lib/config/env.ts
@@ -1,18 +1,30 @@
 import { Logger } from "pino";
 import { z } from "zod";

+import { removeTrailingSlash } from "../fn";
 import { zpStr } from "../zod";

 export const GITLAB_URL = "https://gitlab.com";

+// eslint-disable-next-line @typescript-eslint/no-unsafe-member-access, @typescript-eslint/no-explicit-any -- If `process.pkg` is defined, the app is currently running in a packaged environment (a binary)
+export const IS_PACKAGED = (process as any)?.pkg !== undefined;
+
 const zodStrBool = z
   .enum(["true", "false"])
   .optional()
   .transform((val) => val === "true");

+const databaseReadReplicaSchema = z
+  .object({
+    DB_CONNECTION_URI: z.string().describe("Postgres read replica database connection string"),
+    DB_ROOT_CERT: zpStr(z.string().optional().describe("Postgres read replica database certificate string"))
+  })
+  .array()
+  .optional();
+
 const envSchema = z
   .object({
-    PORT: z.coerce.number().default(4000),
+    PORT: z.coerce.number().default(IS_PACKAGED ?
8080 : 4000),
    DISABLE_SECRET_SCANNING: z
      .enum(["true", "false"])
      .default("false")
@@ -22,6 +34,12 @@ const envSchema = z
    DB_CONNECTION_URI: zpStr(z.string().describe("Postgres database connection string")).default(
      `postgresql://${process.env.DB_USER}:${process.env.DB_PASSWORD}@${process.env.DB_HOST}:${process.env.DB_PORT}/${process.env.DB_NAME}`
    ),
+    AUDIT_LOGS_DB_CONNECTION_URI: zpStr(
+      z.string().describe("Postgres database connection string for Audit logs").optional()
+    ),
+    AUDIT_LOGS_DB_ROOT_CERT: zpStr(
+      z.string().describe("Postgres database base64-encoded CA cert for Audit logs").optional()
+    ),
    MAX_LEASE_LIMIT: z.coerce.number().default(10000),
    DB_ROOT_CERT: zpStr(z.string().describe("Postgres database base64-encoded CA cert").optional()),
    DB_HOST: zpStr(z.string().describe("Postgres database host").optional()),
@@ -29,7 +47,8 @@ const envSchema = z
    DB_USER: zpStr(z.string().describe("Postgres database username").optional()),
    DB_PASSWORD: zpStr(z.string().describe("Postgres database password").optional()),
    DB_NAME: zpStr(z.string().describe("Postgres database name").optional()),
-
+    DB_READ_REPLICAS: zpStr(z.string().describe("Postgres read replicas").optional()),
+    BCRYPT_SALT_ROUND: z.coerce.number().default(12),
    NODE_ENV: z.enum(["development", "test", "production"]).default("production"),
    SALT_ROUNDS: z.coerce.number().default(10),
    INITIAL_ORGANIZATION_NAME: zpStr(z.string().optional()),
@@ -51,7 +70,9 @@ const envSchema = z
      .string()
      .min(32)
      .default("#5VihU%rbXHcHwWwCot5L3vyPsx$7dWYw^iGk!EJg2bC*f$PD$%KCqx^R@#^LSEf"),
-    SITE_URL: zpStr(z.string().optional()),
+
+    // Ensure that the SITE_URL never ends with a trailing slash
+    SITE_URL: zpStr(z.string().transform((val) => (val ? removeTrailingSlash(val) : val))).optional(),
    // Telemetry
    TELEMETRY_ENABLED: zodStrBool.default("true"),
    POSTHOG_HOST: zpStr(z.string().optional().default("https://app.posthog.com")),
@@ -62,6 +83,7 @@ const envSchema = z
    JWT_AUTH_LIFETIME: zpStr(z.string().default("10d")),
    JWT_SIGNUP_LIFETIME: zpStr(z.string().default("15m")),
    JWT_REFRESH_LIFETIME: zpStr(z.string().default("90d")),
+    JWT_INVITE_LIFETIME: zpStr(z.string().default("1d")),
    JWT_MFA_LIFETIME: zpStr(z.string().default("5m")),
    JWT_PROVIDER_AUTH_LIFETIME: zpStr(z.string().default("15m")),
    // Oauth
@@ -95,12 +117,22 @@ const envSchema = z
    // gcp secret manager
    CLIENT_ID_GCP_SECRET_MANAGER: zpStr(z.string().optional()),
    CLIENT_SECRET_GCP_SECRET_MANAGER: zpStr(z.string().optional()),
-    // github
+    // github oauth
    CLIENT_ID_GITHUB: zpStr(z.string().optional()),
    CLIENT_SECRET_GITHUB: zpStr(z.string().optional()),
+    // github app
+    CLIENT_ID_GITHUB_APP: zpStr(z.string().optional()),
+    CLIENT_SECRET_GITHUB_APP: zpStr(z.string().optional()),
+    CLIENT_PRIVATE_KEY_GITHUB_APP: zpStr(z.string().optional()),
+    CLIENT_APP_ID_GITHUB_APP: z.coerce.number().optional(),
+    CLIENT_SLUG_GITHUB_APP: zpStr(z.string().optional()),
+    // azure
    CLIENT_ID_AZURE: zpStr(z.string().optional()),
    CLIENT_SECRET_AZURE: zpStr(z.string().optional()),
+    // aws
+    CLIENT_ID_AWS_INTEGRATION: zpStr(z.string().optional()),
+    CLIENT_SECRET_AWS_INTEGRATION: zpStr(z.string().optional()),
    // gitlab
    CLIENT_ID_GITLAB: zpStr(z.string().optional()),
    CLIENT_SECRET_GITLAB: zpStr(z.string().optional()),
@@ -110,6 +142,7 @@ const envSchema = z
    SECRET_SCANNING_WEBHOOK_SECRET: zpStr(z.string().optional()),
    SECRET_SCANNING_GIT_APP_ID: zpStr(z.string().optional()),
    SECRET_SCANNING_PRIVATE_KEY: zpStr(z.string().optional()),
+    SECRET_SCANNING_ORG_WHITELIST: zpStr(z.string().optional()),
    // LICENSE
LICENSE_SERVER_URL: zpStr(z.string().optional().default("https://portal.infisical.com")), LICENSE_SERVER_KEY: zpStr(z.string().optional()), @@ -119,30 +152,60 @@ const envSchema = z // GENERIC STANDALONE_MODE: z .enum(["true", "false"]) - .transform((val) => val === "true") + .transform((val) => val === "true" || IS_PACKAGED) .optional(), INFISICAL_CLOUD: zodStrBool.default("false"), MAINTENANCE_MODE: zodStrBool.default("false"), CAPTCHA_SECRET: zpStr(z.string().optional()), + + // TELEMETRY OTEL_TELEMETRY_COLLECTION_ENABLED: zodStrBool.default("false"), OTEL_EXPORT_OTLP_ENDPOINT: zpStr(z.string().optional()), OTEL_OTLP_PUSH_INTERVAL: z.coerce.number().default(30000), OTEL_COLLECTOR_BASIC_AUTH_USERNAME: zpStr(z.string().optional()), OTEL_COLLECTOR_BASIC_AUTH_PASSWORD: zpStr(z.string().optional()), - OTEL_EXPORT_TYPE: z.enum(["prometheus", "otlp"]).optional() + OTEL_EXPORT_TYPE: z.enum(["prometheus", "otlp"]).optional(), + + PLAIN_API_KEY: zpStr(z.string().optional()), + PLAIN_WISH_LABEL_IDS: zpStr(z.string().optional()), + DISABLE_AUDIT_LOG_GENERATION: zodStrBool.default("false"), + SSL_CLIENT_CERTIFICATE_HEADER_KEY: zpStr(z.string().optional()).default("x-ssl-client-cert"), + WORKFLOW_SLACK_CLIENT_ID: zpStr(z.string().optional()), + WORKFLOW_SLACK_CLIENT_SECRET: zpStr(z.string().optional()), + ENABLE_MSSQL_SECRET_ROTATION_ENCRYPT: zodStrBool.default("true"), + + // HSM + HSM_LIB_PATH: zpStr(z.string().optional()), + HSM_PIN: zpStr(z.string().optional()), + HSM_KEY_LABEL: zpStr(z.string().optional()), + HSM_SLOT: z.coerce.number().optional().default(0) }) + // To ensure that basic encryption is always possible. + .refine( + (data) => Boolean(data.ENCRYPTION_KEY) || Boolean(data.ROOT_ENCRYPTION_KEY), + "Either ENCRYPTION_KEY or ROOT_ENCRYPTION_KEY must be defined." + ) .transform((data) => ({ ...data, + + DB_READ_REPLICAS: data.DB_READ_REPLICAS + ? 
databaseReadReplicaSchema.parse(JSON.parse(data.DB_READ_REPLICAS))
      : undefined,
    isCloud: Boolean(data.LICENSE_SERVER_KEY),
    isSmtpConfigured: Boolean(data.SMTP_HOST),
    isRedisConfigured: Boolean(data.REDIS_URL),
    isDevelopmentMode: data.NODE_ENV === "development",
-    isProductionMode: data.NODE_ENV === "production",
+    isProductionMode: data.NODE_ENV === "production" || IS_PACKAGED,
+
    isSecretScanningConfigured:
      Boolean(data.SECRET_SCANNING_GIT_APP_ID) &&
      Boolean(data.SECRET_SCANNING_PRIVATE_KEY) &&
      Boolean(data.SECRET_SCANNING_WEBHOOK_SECRET),
-    samlDefaultOrgSlug: data.DEFAULT_SAML_ORG_SLUG
+    isHsmConfigured:
+      Boolean(data.HSM_LIB_PATH) && Boolean(data.HSM_PIN) && Boolean(data.HSM_KEY_LABEL) && data.HSM_SLOT !== undefined,
+
+    samlDefaultOrgSlug: data.DEFAULT_SAML_ORG_SLUG,
+    SECRET_SCANNING_ORG_WHITELIST: data.SECRET_SCANNING_ORG_WHITELIST?.split(",")
  }));

let envCfg: Readonly<z.infer<typeof envSchema>>;
diff --git a/backend/src/lib/crypto/encryption.ts b/backend/src/lib/crypto/encryption.ts
index 6af20862b2..258a6d2851 100644
--- a/backend/src/lib/crypto/encryption.ts
+++ b/backend/src/lib/crypto/encryption.ts
@@ -116,6 +116,8 @@ export const decryptAsymmetric = ({ ciphertext, nonce, publicKey, privateKey }:
 export const generateSymmetricKey = (size = 32) => crypto.randomBytes(size).toString("base64");

+export const generateHash = (value: string) => crypto.createHash("sha256").update(value).digest("hex");
+
 export const generateAsymmetricKeyPair = () => {
   const pair = nacl.box.keyPair();

@@ -224,8 +226,9 @@ export const infisicalSymmetricDecrypt = <T = string>({
   keyEncoding: SecretKeyEncoding;
 }) => {
   const appCfg = getConfig();
-  const rootEncryptionKey = appCfg.ROOT_ENCRYPTION_KEY;
-  const encryptionKey = appCfg.ENCRYPTION_KEY;
+  // the OR fallback to process.env is used during migrations
+  const rootEncryptionKey = appCfg?.ROOT_ENCRYPTION_KEY || process.env.ROOT_ENCRYPTION_KEY;
+  const encryptionKey = appCfg?.ENCRYPTION_KEY || process.env.ENCRYPTION_KEY;
   if (rootEncryptionKey && keyEncoding === SecretKeyEncoding.BASE64) {
     const data = decryptSymmetric({ key: rootEncryptionKey, iv, tag, ciphertext });
     return data as T;
diff --git a/backend/src/lib/crypto/srp.ts b/backend/src/lib/crypto/srp.ts
index bc29cdb3f7..e6afd0f99d 100644
--- a/backend/src/lib/crypto/srp.ts
+++ b/backend/src/lib/crypto/srp.ts
@@ -5,8 +5,9 @@ import nacl from "tweetnacl";
 import tweetnacl from "tweetnacl-util";

 import { TUserEncryptionKeys } from "@app/db/schemas";
+import { UserEncryption } from "@app/services/user/user-types";

-import { decryptSymmetric, encryptAsymmetric, encryptSymmetric } from "./encryption";
+import { decryptSymmetric128BitHexKeyUTF8, encryptAsymmetric, encryptSymmetric } from "./encryption";

 export const generateSrpServerKey = async (salt: string, verifier: string) => {
   // eslint-disable-next-line new-cap
@@ -36,12 +37,16 @@ export const srpCheckClientProof = async (
 // Ghost user related:
 // This functionality is intended for ghost user logic. This happens on the frontend when a user is being created.
 // We replicate the same functionality on the backend when creating a ghost user.
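 // The optional customKeys parameter introduced in this change lets such flows supply a
 // pre-generated nacl box key pair instead of always generating a fresh one.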
-export const generateUserSrpKeys = async (email: string, password: string) => { +export const generateUserSrpKeys = async ( + email: string, + password: string, + customKeys?: { publicKey: string; privateKey: string } +) => { const pair = nacl.box.keyPair(); const secretKeyUint8Array = pair.secretKey; const publicKeyUint8Array = pair.publicKey; - const privateKey = tweetnacl.encodeBase64(secretKeyUint8Array); - const publicKey = tweetnacl.encodeBase64(publicKeyUint8Array); + const privateKey = customKeys?.privateKey || tweetnacl.encodeBase64(secretKeyUint8Array); + const publicKey = customKeys?.publicKey || tweetnacl.encodeBase64(publicKeyUint8Array); // eslint-disable-next-line const client = new jsrp.client(); @@ -97,30 +102,60 @@ export const generateUserSrpKeys = async (email: string, password: string) => { }; }; -export const getUserPrivateKey = async (password: string, user: TUserEncryptionKeys) => { - const derivedKey = await argon2.hash(password, { - salt: Buffer.from(user.salt), - memoryCost: 65536, - timeCost: 3, - parallelism: 1, - hashLength: 32, - type: argon2.argon2id, - raw: true - }); - if (!derivedKey) throw new Error("Failed to derive key from password"); - const key = decryptSymmetric({ - ciphertext: user.protectedKey!, - iv: user.protectedKeyIV!, - tag: user.protectedKeyTag!, - key: derivedKey.toString("base64") - }); - const privateKey = decryptSymmetric({ - ciphertext: user.encryptedPrivateKey, - iv: user.iv, - tag: user.tag, - key - }); - return privateKey; +export const getUserPrivateKey = async ( + password: string, + user: Pick< + TUserEncryptionKeys, + | "protectedKeyTag" + | "protectedKey" + | "protectedKeyIV" + | "encryptedPrivateKey" + | "iv" + | "salt" + | "tag" + | "encryptionVersion" + > +) => { + if (user.encryptionVersion === UserEncryption.V1) { + return decryptSymmetric128BitHexKeyUTF8({ + ciphertext: user.encryptedPrivateKey, + iv: user.iv, + tag: user.tag, + key: password.slice(0, 32).padStart(32 + (password.slice(0, 32).length - new Blob([password]).size), "0") + }); + } + if ( + user.encryptionVersion === UserEncryption.V2 && + user.protectedKey && + user.protectedKeyIV && + user.protectedKeyTag + ) { + const derivedKey = await argon2.hash(password, { + salt: Buffer.from(user.salt), + memoryCost: 65536, + timeCost: 3, + parallelism: 1, + hashLength: 32, + type: argon2.argon2id, + raw: true + }); + if (!derivedKey) throw new Error("Failed to derive key from password"); + const key = decryptSymmetric128BitHexKeyUTF8({ + ciphertext: user.protectedKey, + iv: user.protectedKeyIV, + tag: user.protectedKeyTag, + key: derivedKey + }); + + const privateKey = decryptSymmetric128BitHexKeyUTF8({ + ciphertext: user.encryptedPrivateKey, + iv: user.iv, + tag: user.tag, + key: Buffer.from(key, "hex") + }); + return privateKey; + } + throw new Error(`GetUserPrivateKey: Encryption version not found`); }; export const buildUserProjectKey = async (privateKey: string, publickey: string) => { diff --git a/backend/src/lib/dates/index.ts b/backend/src/lib/dates/index.ts index 169203e5b3..1b6e5dec05 100644 --- a/backend/src/lib/dates/index.ts +++ b/backend/src/lib/dates/index.ts @@ -1,3 +1,8 @@ export const daysToMillisecond = (days: number) => days * 24 * 60 * 60 * 1000; export const secondsToMillis = (seconds: number) => seconds * 1000; + +export const applyJitter = (delayMs: number, jitterMs: number) => { + const jitter = Math.floor(Math.random() * (2 * jitterMs)) - jitterMs; + return delayMs + jitter; +}; diff --git a/backend/src/lib/errors/index.ts 
b/backend/src/lib/errors/index.ts
index 18b40acfd8..0818cfe7d9 100644
--- a/backend/src/lib/errors/index.ts
+++ b/backend/src/lib/errors/index.ts
@@ -23,6 +23,18 @@ export class InternalServerError extends Error {
   }
 }

+export class GatewayTimeoutError extends Error {
+  name: string;
+
+  error: unknown;
+
+  constructor({ name, error, message }: { message?: string; name?: string; error?: unknown }) {
+    super(message || "Timeout error");
+    this.name = name || "GatewayTimeoutError";
+    this.error = error;
+  }
+}
+
 export class UnauthorizedError extends Error {
   name: string;

@@ -40,9 +52,9 @@ export class ForbiddenRequestError extends Error {

   error: unknown;

-  constructor({ name, error, message }: { message?: string; name?: string; error?: unknown }) {
+  constructor({ name, error, message }: { message?: string; name?: string; error?: unknown } = {}) {
     super(message ?? "You are not allowed to access this resource");
-    this.name = name || "ForbideenError";
+    this.name = name || "ForbiddenError";
     this.error = error;
   }
 }
@@ -59,6 +71,25 @@ export class BadRequestError extends Error {
   }
 }

+export class RateLimitError extends Error {
+  constructor({ message }: { message?: string }) {
+    super(message || "Rate limit exceeded");
+    this.name = "RateLimitExceeded";
+  }
+}
+
+export class NotFoundError extends Error {
+  name: string;
+
+  error: unknown;
+
+  constructor({ name, error, message }: { message?: string; name?: string; error?: unknown }) {
+    super(message ?? "The requested entity is not found");
+    this.name = name || "NotFound";
+    this.error = error;
+  }
+}
+
 export class DisableRotationErrors extends Error {
   name: string;

diff --git a/backend/src/lib/fn/argv.ts b/backend/src/lib/fn/argv.ts
new file mode 100644
index 0000000000..174573414a
--- /dev/null
+++ b/backend/src/lib/fn/argv.ts
@@ -0,0 +1 @@
+export const isMigrationMode = () => !!process.argv.slice(2).find((arg) => arg === "migration:latest"); // example -> ./binary migration:latest
diff --git a/backend/src/lib/fn/array.ts b/backend/src/lib/fn/array.ts
index 1e075101b2..760317bad1 100644
--- a/backend/src/lib/fn/array.ts
+++ b/backend/src/lib/fn/array.ts
@@ -17,6 +17,23 @@ export const groupBy = (
     {} as Record<Key, T[]>
   );

+/**
+ * Sorts an array of items into groups. The return value is a map where the keys are
+ * the group IDs the given getGroupId function produced; the value for each key is the
+ * last item found for that group.
+ */
+export const groupByUnique = <T, Key extends string | number | symbol>(
+  array: readonly T[],
+  getGroupId: (item: T) => Key
+): Record<Key, T> =>
+  array.reduce(
+    (acc, item) => {
+      const groupId = getGroupId(item);
+      acc[groupId] = item;
+      return acc;
+    },
+    {} as Record<Key, T>
+  );
+
 /**
  * Given a list of items returns a new list with only
  * unique items. Accepts an optional identity function
@@ -35,3 +52,54 @@ export const unique = <T, K extends string | number | symbol>(array: readonly T[
   );
   return Object.values(valueMap);
 };
+
+/**
+ * Convert an array to a dictionary by mapping each item
+ * into a dictionary key & value
+ */
+export const objectify = <T, Key extends string | number | symbol, Value = T>(
+  array: readonly T[],
+  getKey: (item: T) => Key,
+  getValue: (item: T) => Value = (item) => item as unknown as Value
+): Record<Key, Value> => {
+  return array.reduce(
+    (acc, item) => {
+      acc[getKey(item)] = getValue(item);
+      return acc;
+    },
+    {} as Record<Key, Value>
+  );
+};
+
+/**
+ * Chunks an array into smaller arrays of the given size.
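+ * e.g. chunkArray([1, 2, 3, 4, 5], 2) -> [[1, 2], [3, 4], [5]]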
+ */
+export const chunkArray = <T>(array: T[], chunkSize: number): T[][] => {
+  const chunks: T[][] = [];
+  for (let i = 0; i < array.length; i += chunkSize) {
+    chunks.push(array.slice(i, i + chunkSize));
+  }
+  return chunks;
+};
+
+/*
+ * Returns all items from the first list that
+ * do not exist in the second list.
+ */
+export const diff = <T>(
+  root: readonly T[],
+  other: readonly T[],
+  identity: (item: T) => string | number | symbol = (t: T) => t as unknown as string | number | symbol
+): T[] => {
+  if (!root?.length && !other?.length) return [];
+  if (root?.length === undefined) return [...other];
+  if (!other?.length) return [...root];
+  const bKeys = other.reduce(
+    (acc, item) => {
+      acc[identity(item)] = true;
+      return acc;
+    },
+    {} as Record<string | number | symbol, boolean>
+  );
+  return root.filter((a) => !bKeys[identity(a)]);
+};
diff --git a/backend/src/lib/fn/dates.ts b/backend/src/lib/fn/dates.ts
index f9ea4db10a..cd5ca5c12c 100644
--- a/backend/src/lib/fn/dates.ts
+++ b/backend/src/lib/fn/dates.ts
@@ -1,2 +1,8 @@
 export const getLastMidnightDateISO = (last = 1) =>
   `${new Date(new Date().setDate(new Date().getDate() - last)).toISOString().slice(0, 10)}T00:00:00Z`;
+
+export const getTimeDifferenceInSeconds = (lhsTimestamp: string, rhsTimestamp: string) => {
+  const lhs = new Date(lhsTimestamp);
+  const rhs = new Date(rhsTimestamp);
+  return Math.floor((Number(lhs) - Number(rhs)) / 1000);
+};
diff --git a/backend/src/lib/fn/index.ts b/backend/src/lib/fn/index.ts
index 0d0f07e45f..82a4c49149 100644
--- a/backend/src/lib/fn/index.ts
+++ b/backend/src/lib/fn/index.ts
@@ -1,7 +1,9 @@
 // Some of the functions are taken from https://github.com/rayepps/radash
 // Full credit goes to https://github.com/rayepps for those functions
 // Code taken to keep it in house and to adjust some things for our needs
+export * from "./argv";
 export * from "./array";
 export * from "./dates";
 export * from "./object";
 export * from "./string";
+export * from "./undefined";
diff --git a/backend/src/lib/fn/string.ts b/backend/src/lib/fn/string.ts
index c3651fd4e2..26e8f27df1 100644
--- a/backend/src/lib/fn/string.ts
+++ b/backend/src/lib/fn/string.ts
@@ -9,3 +9,8 @@ export const removeTrailingSlash = (str: string) => {
   return str.endsWith("/") ? str.slice(0, -1) : str;
 };
+
+export const prefixWithSlash = (str: string) => {
+  if (str.startsWith("/")) return str;
+  return `/${str}`;
+};
diff --git a/backend/src/lib/fn/undefined.ts b/backend/src/lib/fn/undefined.ts
new file mode 100644
index 0000000000..00f9d8e51e
--- /dev/null
+++ b/backend/src/lib/fn/undefined.ts
@@ -0,0 +1,3 @@
+export const executeIfDefined = <T, R>(func: (input: T) => R, input: T | undefined): R | undefined => {
+  return input === undefined ? undefined : func(input);
+};
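+// e.g. executeIfDefined((s: string) => s.length, "abc") returns 3, while
+// executeIfDefined((s: string) => s.length, undefined) returns undefined.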
diff --git a/backend/src/lib/ip/index.ts b/backend/src/lib/ip/index.ts
index 30f710d19b..6503165f66 100644
--- a/backend/src/lib/ip/index.ts
+++ b/backend/src/lib/ip/index.ts
@@ -1,6 +1,6 @@
 import net from "node:net";

-import { UnauthorizedError } from "../errors";
+import { ForbiddenRequestError } from "../errors";

 export enum IPType {
   IPV4 = "ipv4",
@@ -126,7 +126,7 @@ export const checkIPAgainstBlocklist = ({ ipAddress, trustedIps }: { ipAddress:
   const check = blockList.check(ipAddress, type);

   if (!check)
-    throw new UnauthorizedError({
-      message: "Failed to authenticate"
+    throw new ForbiddenRequestError({
+      message: "You are not allowed to access this resource from the current IP address"
     });
 };
diff --git a/backend/src/lib/knex/dynamic.ts b/backend/src/lib/knex/dynamic.ts
new file mode 100644
index 0000000000..b8bc8ab57b
--- /dev/null
+++ b/backend/src/lib/knex/dynamic.ts
@@ -0,0 +1,79 @@
+import { Knex } from "knex";
+
+import { UnauthorizedError } from "../errors";
+
+type TKnexDynamicPrimitiveOperator<T extends object> = {
+  operator: "eq" | "ne" | "startsWith" | "endsWith";
+  value: string;
+  field: Extract<keyof T, string>;
+};
+
+type TKnexDynamicInOperator<T extends object> = {
+  operator: "in";
+  value: string[] | number[];
+  field: Extract<keyof T, string>;
+};
+
+type TKnexNonGroupOperator<T extends object> = TKnexDynamicInOperator<T> | TKnexDynamicPrimitiveOperator<T>;
+
+type TKnexGroupOperator<T extends object> = {
+  operator: "and" | "or" | "not";
+  value: (TKnexNonGroupOperator<T> | TKnexGroupOperator<T>)[];
+};
+
+export type TKnexDynamicOperator<T extends object> = TKnexGroupOperator<T> | TKnexNonGroupOperator<T>;
+
+export const buildDynamicKnexQuery = <T extends object>(
+  rootQueryBuild: Knex.QueryBuilder,
+  dynamicQuery: TKnexDynamicOperator<T>
+) => {
+  const stack = [{ filterAst: dynamicQuery, queryBuilder: rootQueryBuild }];
+
+  while (stack.length) {
+    const { filterAst, queryBuilder } = stack.pop()!;
+    switch (filterAst.operator) {
+      case "eq": {
+        void queryBuilder.where(filterAst.field, "=", filterAst.value);
+        break;
+      }
+      case "ne": {
+        void queryBuilder.whereNot(filterAst.field, filterAst.value);
+        break;
+      }
+      case "startsWith": {
+        void queryBuilder.whereILike(filterAst.field, `${filterAst.value}%`);
+        break;
+      }
+      case "endsWith": {
+        void queryBuilder.whereILike(filterAst.field, `%${filterAst.value}`);
+        break;
+      }
+      case "and": {
+        filterAst.value.forEach((el) => {
+          void queryBuilder.andWhere((subQueryBuilder) => {
+            buildDynamicKnexQuery(subQueryBuilder, el);
+          });
+        });
+        break;
+      }
+      case "or": {
+        filterAst.value.forEach((el) => {
+          void queryBuilder.orWhere((subQueryBuilder) => {
+            buildDynamicKnexQuery(subQueryBuilder, el);
+          });
+        });
+        break;
+      }
+      case "not": {
+        filterAst.value.forEach((el) => {
+          void queryBuilder.whereNot((subQueryBuilder) => {
+            buildDynamicKnexQuery(subQueryBuilder, el);
+          });
+        });
+        break;
+      }
+      default:
+        throw new UnauthorizedError({ message: `Invalid knex dynamic operator: ${filterAst.operator}` });
+    }
+  }
+};
diff --git a/backend/src/lib/knex/index.ts b/backend/src/lib/knex/index.ts
index 0faeba290b..f55d8e6e61 100644
--- a/backend/src/lib/knex/index.ts
+++ b/backend/src/lib/knex/index.ts
@@ -3,6 +3,7 @@ import { Knex } from "knex";
 import { Tables } from "knex/types/tables";

 import { DatabaseError } from "../errors";
+import { buildDynamicKnexQuery, TKnexDynamicOperator } from "./dynamic";

 export * from "./connection";
 export * from "./join";
@@ -19,23 +20,53 @@ export const withTransaction = <K extends object>(db: Knex, dal: K) => ({

 export type TFindFilter<R extends object = object> = Partial<R> & {
   $in?: Partial<{ [k in keyof R]: R[k][] }>;
+  $search?: Partial<{ [k in keyof R]: R[k] }>;
+  $complex?: TKnexDynamicOperator<R>;
 };

 export const buildFindFilter =
-  <R extends object = object>({ $in, ...filter }: TFindFilter<R>) =>
+  <R extends object = object>({ $in, $search, $complex, ...filter }: TFindFilter<R>) =>
   (bd: Knex.QueryBuilder) => {
     void bd.where(filter);
     if ($in) {
       Object.entries($in).forEach(([key, val]) => {
-        void bd.whereIn(key as never, val as never);
+        if (val) {
+          void bd.whereIn(key as never, val as never);
+        }
       });
     }
+    if ($search) {
+      Object.entries($search).forEach(([key, val]) => {
+        if (val) {
+          void bd.whereILike(key as never, val as never);
+        }
+      });
+    }
+    if ($complex) {
+      return buildDynamicKnexQuery(bd, $complex);
+    }
     return bd;
   };

-export type TFindOpt<R extends object = object> = {
+export type TFindReturn<TQuery extends Knex.QueryBuilder, TCount extends boolean = boolean> = Array<
+  Awaited<TQuery>[0] &
+    (TCount extends true
+      ? {
+          count: string;
+        }
+      : unknown)
+>;
+
+export type TFindOpt<
+  R extends object = object,
+  TCount extends boolean = boolean,
+  TCountDistinct extends keyof R | undefined = undefined
+> = {
   limit?: number;
   offset?: number;
   sort?: Array<[keyof R, "asc" | "desc"] | [keyof R, "asc" | "desc", "first" | "last"]>;
+  groupBy?: keyof R;
+  count?: TCount;
+  countDistinct?: TCountDistinct;
   tx?: Knex;
 };

@@ -50,7 +81,7 @@ export const ormify = (db: Kne
   }),
   findById: async (id: string, tx?: Knex) => {
     try {
-      const result = await (tx || db)(tableName)
+      const result = await (tx || db.replicaNode())(tableName)
         .where({ id } as never)
         .first("*");
       return result;
@@ -60,24 +91,34 @@ export const ormify = (db: Kne
   },
   findOne: async (filter: Partial<Tables[Tname]["base"]>, tx?: Knex) => {
     try {
-      const res = await (tx || db)(tableName).where(filter).first("*");
+      const res = await (tx || db.replicaNode())(tableName).where(filter).first("*");
       return res;
     } catch (error) {
       throw new DatabaseError({ error, name: "Find one" });
     }
   },
-  find: async (
+  find: async <
+    TCount extends boolean = false,
+    TCountDistinct extends keyof Tables[Tname]["base"] | undefined = undefined
+  >(
     filter: TFindFilter<Tables[Tname]["base"]>,
-    { offset, limit, sort, tx }: TFindOpt<Tables[Tname]["base"]> = {}
+    { offset, limit, sort, count, tx, countDistinct }: TFindOpt<Tables[Tname]["base"], TCount, TCountDistinct> = {}
   ) => {
     try {
-      const query = (tx || db)(tableName).where(buildFindFilter(filter));
+      const query = (tx || db.replicaNode())(tableName).where(buildFindFilter(filter));
+      if (countDistinct) {
+        void query.countDistinct(countDistinct);
+      } else if (count) {
+        void query.select(db.raw("COUNT(*) OVER() AS count"));
+        void query.select("*");
+      }
       if (limit) void query.limit(limit);
       if (offset) void query.offset(offset);
       if (sort) {
         void query.orderBy(sort.map(([column, order, nulls]) => ({ column: column as string, order, nulls })));
       }
-      const res = await query;
+
+      const res = (await query) as TFindReturn<typeof query, TCount>;
       return res;
     } catch (error) {
       throw new DatabaseError({ error, name: "Find" });
     }
   },
@@ -104,6 +145,29 @@ export const ormify = (db: Kne
       throw new DatabaseError({ error, name: "Create" });
     }
   },
+  // This splits the insert into multiple chunks
+  batchInsert: async (data: readonly Tables[Tname]["insert"][], tx?: Knex) => {
+    try {
+      if (!data.length) return [];
+      const res = await (tx || db).batchInsert(tableName, data as never).returning("*");
+      return res as Tables[Tname]["base"][];
+    } catch (error) {
+      throw new DatabaseError({ error, name: "batchInsert" });
+    }
+  },
+  upsert: async (data: readonly Tables[Tname]["insert"][], onConflictField: keyof Tables[Tname]["base"], tx?: Knex) => {
+    try {
+      if (!data.length) return [];
+      const res = await (tx || db)(tableName)
+        .insert(data as never)
+        .onConflict(onConflictField as never)
+        .merge()
+        .returning("*");
+      return res;
+    } catch (error) {
+      throw new DatabaseError({ error, name: "Upsert" });
+    }
+  },
   updateById: async (
     id: string,
     {
diff --git a/backend/src/lib/knex/scim.ts b/backend/src/lib/knex/scim.ts
new file mode 100644
index 0000000000..64f7fc2f61
--- /dev/null
+++ b/backend/src/lib/knex/scim.ts
@@ -0,0 +1,125 @@
+import { Knex } from "knex";
+import { Compare, Filter, parse } from "scim2-parse-filter";
+
+const appendParentToGroupingOperator = (parentPath: string, filter: Filter) => {
+  if (filter.op !== "[]" && filter.op !== "and" && filter.op !== "or" && filter.op !== "not") {
+    return { ...filter, attrPath: `${parentPath}.${(filter as Compare).attrPath}` };
+  }
+  return filter;
+};
+
+const processDynamicQuery = (
+  rootQuery: Knex.QueryBuilder,
+  scimRootFilterAst: Filter,
+  getAttributeField: (attr: string) => string | null,
+  depth = 0
+) => {
+  if (depth > 20) return;
+
+  const stack = [
+    {
+      scimFilterAst: scimRootFilterAst,
+      query: rootQuery
+    }
+  ];
+
+  while (stack.length) {
+    const { scimFilterAst, query } = stack.pop()!;
+    switch (scimFilterAst.op) {
+      case "eq": {
+        const attrPath = getAttributeField(scimFilterAst.attrPath);
+        if (attrPath) void query.where(attrPath, scimFilterAst.compValue);
+        break;
+      }
+      case "pr": {
+        const attrPath = getAttributeField(scimFilterAst.attrPath);
+        if (attrPath) void query.whereNotNull(attrPath);
+        break;
+      }
+      case "gt": {
+        const attrPath = getAttributeField(scimFilterAst.attrPath);
+        if (attrPath) void query.where(attrPath, ">", scimFilterAst.compValue);
+        break;
+      }
+      case "ge": {
+        const attrPath = getAttributeField(scimFilterAst.attrPath);
+        if (attrPath) void query.where(attrPath, ">=", scimFilterAst.compValue);
+        break;
+      }
+      case "lt": {
+        const attrPath = getAttributeField(scimFilterAst.attrPath);
+        if (attrPath) void query.where(attrPath, "<", scimFilterAst.compValue);
+        break;
+      }
+      case "le": {
+        const attrPath = getAttributeField(scimFilterAst.attrPath);
+        if (attrPath) void query.where(attrPath, "<=", scimFilterAst.compValue);
+        break;
+      }
+      case "sw": {
+        const attrPath = getAttributeField(scimFilterAst.attrPath);
+        if (attrPath) void query.whereILike(attrPath, `${scimFilterAst.compValue}%`);
+        break;
+      }
+      case "ew": {
+        const attrPath = getAttributeField(scimFilterAst.attrPath);
+        if (attrPath) void query.whereILike(attrPath, `%${scimFilterAst.compValue}`);
+        break;
+      }
+      case "co": {
+        const attrPath = getAttributeField(scimFilterAst.attrPath);
+        if (attrPath) void query.whereILike(attrPath, `%${scimFilterAst.compValue}%`);
+        break;
+      }
+      case "ne": {
+        const attrPath = getAttributeField(scimFilterAst.attrPath);
+        if (attrPath) void query.whereNot(attrPath, "=", scimFilterAst.compValue);
+        break;
+      }
+      case "and": {
+        scimFilterAst.filters.forEach((el) => {
+          void query.andWhere((subQueryBuilder) => {
+            processDynamicQuery(subQueryBuilder, el, getAttributeField, depth + 1);
+          });
+        });
+        break;
+      }
+      case "or": {
+        scimFilterAst.filters.forEach((el) => {
+          void query.orWhere((subQueryBuilder) => {
+            processDynamicQuery(subQueryBuilder, el, getAttributeField, depth + 1);
+          });
+        });
+        break;
+      }
+      case "not": {
+        void query.whereNot((subQueryBuilder) => {
+          processDynamicQuery(subQueryBuilder, scimFilterAst.filter, getAttributeField, depth + 1);
+        });
+        break;
+      }
+      case "[]": {
+        void query.where((subQueryBuilder) => {
+          processDynamicQuery(
+            subQueryBuilder,
+            appendParentToGroupingOperator(scimFilterAst.attrPath, scimFilterAst.valFilter),
+            getAttributeField,
+            depth + 1
+          );
+        });
+        break;
+      }
+      default:
+        break;
+    }
+  }
+};
+
+export const generateKnexQueryFromScim = (
+  rootQuery: Knex.QueryBuilder,
+  rootScimFilter: string,
+  getAttributeField: (attr: string) => string | null
+) => {
+  const scimRootFilterAst = parse(rootScimFilter);
+  return processDynamicQuery(rootQuery, scimRootFilterAst, getAttributeField);
+};
diff --git a/backend/src/lib/knex/select.ts b/backend/src/lib/knex/select.ts
index d7dfa77f0a..feccad4e38 100644
--- a/backend/src/lib/knex/select.ts
+++ b/backend/src/lib/knex/select.ts
@@ -12,3 +12,12 @@ export const stripUndefinedInWhere = <T extends object>(val: T): Exclude<T, undefined> => {
   ) as Exclude<T, undefined>;
 };
+
+// If it's undefined, it's skipped by knex.
+// If it's an empty string or null, it's set to null.
+// Otherwise, it's passed through the provided callback.
+export const setKnexStringValue = <T>(value: string | null | undefined, cb: (arg: string) => T) => {
+  if (typeof value === "undefined") return;
+  if (value === "" || value === null) return null;
+  return cb(value);
+};
diff --git a/backend/src/lib/logger/logger.ts b/backend/src/lib/logger/logger.ts
index 5d1a63fc89..942efc40aa 100644
--- a/backend/src/lib/logger/logger.ts
+++ b/backend/src/lib/logger/logger.ts
@@ -58,7 +58,8 @@ const redactedKeys = [
   "decryptedSecret",
   "secrets",
   "key",
-  "password"
+  "password",
+  "config"
 ];

 export const initLogger = async () => {
diff --git a/backend/src/lib/types/index.ts b/backend/src/lib/types/index.ts
index 2c41f4d238..6ebf91f36f 100644
--- a/backend/src/lib/types/index.ts
+++ b/backend/src/lib/types/index.ts
@@ -42,3 +42,25 @@ export type RequiredKeys<T> = {
 }[keyof T];

 export type PickRequired<T> = Pick<T, RequiredKeys<T>>;
+
+export enum EnforcementLevel {
+  Hard = "hard",
+  Soft = "soft"
+}
+
+export enum SecretSharingAccessType {
+  Anyone = "anyone",
+  Organization = "organization"
+}
+
+export enum OrderByDirection {
+  ASC = "asc",
+  DESC = "desc"
+}
+
+export type OrgServiceActor = {
+  type: ActorType;
+  id: string;
+  authMethod: ActorAuthMethod;
+  orgId: string;
+};
diff --git a/backend/src/lib/validator/index.ts b/backend/src/lib/validator/index.ts
index 6a70d85713..68cec8f4b6 100644
--- a/backend/src/lib/validator/index.ts
+++ b/backend/src/lib/validator/index.ts
@@ -1,2 +1,3 @@
 export { isDisposableEmail } from "./validate-email";
-export { validateLocalIps } from "./validate-url";
+export { isValidFolderName, isValidSecretPath } from "./validate-folder-name";
+export { blockLocalAndPrivateIpAddresses } from "./validate-url";
diff --git a/backend/src/lib/validator/validate-email.ts b/backend/src/lib/validator/validate-email.ts
index ec3ef3976f..c0c50155bc 100644
--- a/backend/src/lib/validator/validate-email.ts
+++ b/backend/src/lib/validator/validate-email.ts
@@ -1,10 +1,16 @@
 import fs from "fs/promises";
 import path from "path";

-export const isDisposableEmail = async (email: string) => {
-  const emailDomain = email.split("@")[1];
+export const isDisposableEmail = async (emails: string | string[]) => {
   const disposableEmails = await fs.readFile(path.join(__dirname, "disposable_emails.txt"), "utf8");

+  if (Array.isArray(emails)) {
+    return emails.some((email) => {
+      const emailDomain = email.split("@")[1];
+      return disposableEmails.split("\n").includes(emailDomain);
+    });
+  }
+
+  const emailDomain = emails.split("@")[1];
   if (disposableEmails.split("\n").includes(emailDomain)) return true;
   return false;
 };
diff --git a/backend/src/lib/validator/validate-folder-name.ts b/backend/src/lib/validator/validate-folder-name.ts
new file mode 100644
index 0000000000..1fce780f0e
--- /dev/null
+++ b/backend/src/lib/validator/validate-folder-name.ts
@@ -0,0 +1,8 @@
+// Regex that allows only alphanumeric characters, dashes, and underscores
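+// e.g. isValidFolderName("my-folder_1") === true, isValidFolderName("bad/name") === false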
+export const isValidFolderName = (name: string) => /^[a-zA-Z0-9-_]+$/.test(name); + +export const isValidSecretPath = (path: string) => + path + .split("/") + .filter((el) => el.length) + .every((name) => isValidFolderName(name)); diff --git a/backend/src/lib/validator/validate-url.ts b/backend/src/lib/validator/validate-url.ts index 9a953be1ae..fccebf47b2 100644 --- a/backend/src/lib/validator/validate-url.ts +++ b/backend/src/lib/validator/validate-url.ts @@ -1,7 +1,7 @@ import { getConfig } from "../config/env"; import { BadRequestError } from "../errors"; -export const validateLocalIps = (url: string) => { +export const blockLocalAndPrivateIpAddresses = (url: string) => { const validUrl = new URL(url); const appCfg = getConfig(); // on cloud local ips are not allowed diff --git a/backend/src/main.ts b/backend/src/main.ts index c0b9b24c5e..e47adcd90c 100644 --- a/backend/src/main.ts +++ b/backend/src/main.ts @@ -1,8 +1,12 @@ import dotenv from "dotenv"; +import path from "path"; -import { initDbConnection } from "./db"; +import { initializeHsmModule } from "@app/ee/services/hsm/hsm-fns"; + +import { initAuditLogDbConnection, initDbConnection } from "./db"; import { keyStoreFactory } from "./keystore/keystore"; -import { formatSmtpConfig, initEnvConfig } from "./lib/config/env"; +import { formatSmtpConfig, initEnvConfig, IS_PACKAGED } from "./lib/config/env"; +import { isMigrationMode } from "./lib/fn"; import { initLogger } from "./lib/logger"; import { initTelemetryInstrumentation } from "./lib/telemetry/instrumentation"; import { queueServiceFactory } from "./queue"; @@ -11,6 +15,7 @@ import { bootstrapCheck } from "./server/boot-strap-check"; import { smtpServiceFactory } from "./services/smtp/smtp-service"; dotenv.config(); + const run = async () => { const logger = await initLogger(); const appCfg = initEnvConfig(logger); @@ -27,19 +32,52 @@ const run = async () => { const db = initDbConnection({ dbConnectionUri: appCfg.DB_CONNECTION_URI, - dbRootCert: appCfg.DB_ROOT_CERT + dbRootCert: appCfg.DB_ROOT_CERT, + readReplicas: appCfg.DB_READ_REPLICAS?.map((el) => ({ + dbRootCert: el.DB_ROOT_CERT, + dbConnectionUri: el.DB_CONNECTION_URI + })) }); + const auditLogDb = appCfg.AUDIT_LOGS_DB_CONNECTION_URI + ? initAuditLogDbConnection({ + dbConnectionUri: appCfg.AUDIT_LOGS_DB_CONNECTION_URI, + dbRootCert: appCfg.AUDIT_LOGS_DB_ROOT_CERT + }) + : undefined; + + // Case: App is running in packaged mode (binary), and migration mode is enabled. + // Run the migrations and exit the process after completion. 
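+  // e.g. the packaged binary can be invoked as `./binary migration:latest`
+  // (matching isMigrationMode in lib/fn/argv.ts) to apply pending migrations and exit.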
+ if (IS_PACKAGED && isMigrationMode()) { + try { + logger.info("Running Postgres migrations.."); + await db.migrate.latest({ + directory: path.join(__dirname, "./db/migrations") + }); + logger.info("Postgres migrations completed"); + } catch (err) { + logger.error(err, "Failed to run migrations"); + process.exit(1); + } + + process.exit(0); + } + const smtp = smtpServiceFactory(formatSmtpConfig()); const queue = queueServiceFactory(appCfg.REDIS_URL); const keyStore = keyStoreFactory(appCfg.REDIS_URL); - const server = await main({ db, smtp, logger, queue, keyStore }); + const hsmModule = initializeHsmModule(); + hsmModule.initialize(); + + const server = await main({ db, auditLogDb, hsmModule: hsmModule.getModule(), smtp, logger, queue, keyStore }); const bootstrap = await bootstrapCheck({ db }); + // eslint-disable-next-line process.on("SIGINT", async () => { await server.close(); await db.destroy(); + hsmModule.finalize(); process.exit(0); }); @@ -47,6 +85,7 @@ const run = async () => { process.on("SIGTERM", async () => { await server.close(); await db.destroy(); + hsmModule.finalize(); process.exit(0); }); diff --git a/backend/src/queue/queue-service.ts b/backend/src/queue/queue-service.ts index 7046058b70..457eebcc1c 100644 --- a/backend/src/queue/queue-service.ts +++ b/backend/src/queue/queue-service.ts @@ -1,13 +1,17 @@ import { Job, JobsOptions, Queue, QueueOptions, RepeatOptions, Worker, WorkerListener } from "bullmq"; import Redis from "ioredis"; -import { SecretKeyEncoding } from "@app/db/schemas"; +import { SecretEncryptionAlgo, SecretKeyEncoding } from "@app/db/schemas"; import { TCreateAuditLogDTO } from "@app/ee/services/audit-log/audit-log-types"; import { TScanFullRepoEventPayload, TScanPushEventPayload } from "@app/ee/services/secret-scanning/secret-scanning-queue/secret-scanning-queue-types"; -import { TSyncSecretsDTO } from "@app/services/secret/secret-types"; +import { + TFailedIntegrationSyncEmailsPayload, + TIntegrationSyncPayload, + TSyncSecretsDTO +} from "@app/services/secret/secret-types"; export enum QueueName { SecretRotation = "secret-rotation", @@ -16,6 +20,7 @@ export enum QueueName { // TODO(akhilmhdh): This will get removed later. For now this is kept to stop the repeatable queue AuditLogPrune = "audit-log-prune", DailyResourceCleanUp = "daily-resource-cleanup", + DailyExpiringPkiItemAlert = "daily-expiring-pki-item-alert", TelemetryInstanceStats = "telemtry-self-hosted-stats", IntegrationSync = "sync-integrations", SecretWebhook = "secret-webhook", @@ -23,8 +28,12 @@ export enum QueueName { SecretPushEventScan = "secret-push-event-scan", UpgradeProjectToGhost = "upgrade-project-to-ghost", DynamicSecretRevocation = "dynamic-secret-revocation", + CaCrlRotation = "ca-crl-rotation", SecretReplication = "secret-replication", - SecretSync = "secret-sync" // parent queue to push integration sync, webhook, and secret replication + SecretSync = "secret-sync", // parent queue to push integration sync, webhook, and secret replication + ProjectV3Migration = "project-v3-migration", + AccessTokenStatusUpdate = "access-token-status-update", + ImportSecretsFromExternalSource = "import-secrets-from-external-source" } export enum QueueJobs { @@ -34,15 +43,22 @@ export enum QueueJobs { // TODO(akhilmhdh): This will get removed later. 
For now this is kept to stop the repeatable queue
   AuditLogPrune = "audit-log-prune-job",
   DailyResourceCleanUp = "daily-resource-cleanup-job",
+  DailyExpiringPkiItemAlert = "daily-expiring-pki-item-alert",
   SecWebhook = "secret-webhook-trigger",
   TelemetryInstanceStats = "telemetry-self-hosted-stats",
   IntegrationSync = "secret-integration-pull",
+  SendFailedIntegrationSyncEmails = "send-failed-integration-sync-emails",
   SecretScan = "secret-scan",
   UpgradeProjectToGhost = "upgrade-project-to-ghost-job",
   DynamicSecretRevocation = "dynamic-secret-revocation",
   DynamicSecretPruning = "dynamic-secret-pruning",
+  CaCrlRotation = "ca-crl-rotation-job",
   SecretReplication = "secret-replication",
-  SecretSync = "secret-sync" // parent queue to push integration sync, webhook, and secret replication
+  SecretSync = "secret-sync", // parent queue to push integration sync, webhook, and secret replication
+  ProjectV3Migration = "project-v3-migration",
+  IdentityAccessTokenStatusUpdate = "identity-access-token-status-update",
+  ServiceTokenStatusUpdate = "service-token-status-update",
+  ImportSecretsFromExternalSource = "import-secrets-from-external-source"
 }

 export type TQueueJobTypes = {
@@ -55,7 +71,6 @@ export type TQueueJobTypes = {
     };
     name: QueueJobs.SecretReminder;
   };
-
   [QueueName.SecretRotation]: {
     payload: { rotationId: string };
     name: QueueJobs.SecretRotation;
@@ -68,6 +83,10 @@ export type TQueueJobTypes = {
     name: QueueJobs.DailyResourceCleanUp;
     payload: undefined;
   };
+  [QueueName.DailyExpiringPkiItemAlert]: {
+    name: QueueJobs.DailyExpiringPkiItemAlert;
+    payload: undefined;
+  };
   [QueueName.AuditLogPrune]: {
     name: QueueJobs.AuditLogPrune;
     payload: undefined;
@@ -76,16 +95,26 @@ export type TQueueJobTypes = {
     name: QueueJobs.SecWebhook;
     payload: { projectId: string; environment: string; secretPath: string; depth?: number };
   };
-  [QueueName.IntegrationSync]: {
-    name: QueueJobs.IntegrationSync;
-    payload: {
-      projectId: string;
-      environment: string;
-      secretPath: string;
-      depth?: number;
-      deDupeQueue?: Record<string, boolean>;
-    };
-  };
+
+  [QueueName.AccessTokenStatusUpdate]:
+    | {
+        name: QueueJobs.IdentityAccessTokenStatusUpdate;
+        payload: { identityAccessTokenId: string; numberOfUses: number };
+      }
+    | {
+        name: QueueJobs.ServiceTokenStatusUpdate;
+        payload: { serviceTokenId: string };
+      };
+
+  [QueueName.IntegrationSync]:
+    | {
+        name: QueueJobs.IntegrationSync;
+        payload: TIntegrationSyncPayload;
+      }
+    | {
+        name: QueueJobs.SendFailedIntegrationSyncEmails;
+        payload: TFailedIntegrationSyncEmailsPayload;
+      };
   [QueueName.SecretFullRepoScan]: {
     name: QueueJobs.SecretScan;
     payload: TScanFullRepoEventPayload;
@@ -121,6 +150,12 @@ export type TQueueJobTypes = {
       dynamicSecretCfgId: string;
     };
   };
+  [QueueName.CaCrlRotation]: {
+    name: QueueJobs.CaCrlRotation;
+    payload: {
+      caId: string;
+    };
+  };
   [QueueName.SecretReplication]: {
     name: QueueJobs.SecretReplication;
     payload: TSyncSecretsDTO;
@@ -129,6 +164,23 @@ export type TQueueJobTypes = {
     name: QueueJobs.SecretSync;
     payload: TSyncSecretsDTO;
   };
+  [QueueName.ProjectV3Migration]: {
+    name: QueueJobs.ProjectV3Migration;
+    payload: { projectId: string };
+  };
+  [QueueName.ImportSecretsFromExternalSource]: {
+    name: QueueJobs.ImportSecretsFromExternalSource;
+    payload: {
+      actorEmail: string;
+      data: {
+        iv: string;
+        tag: string;
+        ciphertext: string;
+        algorithm: SecretEncryptionAlgo;
+        encoding: SecretKeyEncoding;
+      };
+    };
+  };
 };

 export type TQueueServiceFactory = ReturnType<typeof queueServiceFactory>;
@@ -203,6 +255,7 @@ export const queueServiceFactory = (redisUrl: string) => {
     const job = await q.getJob(jobId);
     if (!job) return true;
     if (!job.repeatJobKey) return true;
+    await job.remove();
     return q.removeRepeatableByKey(job.repeatJobKey);
   };
diff --git a/backend/src/server/app.ts b/backend/src/server/app.ts
index 4e41826eae..cf7dd622a7 100644
--- a/backend/src/server/app.ts
+++ b/backend/src/server/app.ts
@@ -10,16 +10,18 @@ import fastifyFormBody from "@fastify/formbody";
 import helmet from "@fastify/helmet";
 import type { FastifyRateLimitOptions } from "@fastify/rate-limit";
 import ratelimiter from "@fastify/rate-limit";
-import fasitfy from "fastify";
+import fastify from "fastify";
 import { Knex } from "knex";
 import { Logger } from "pino";

+import { HsmModule } from "@app/ee/services/hsm/hsm-types";
 import { TKeyStoreFactory } from "@app/keystore/keystore";
-import { getConfig } from "@app/lib/config/env";
+import { getConfig, IS_PACKAGED } from "@app/lib/config/env";
 import { TQueueServiceFactory } from "@app/queue";
 import { TSmtpService } from "@app/services/smtp/smtp-service";

 import { globalRateLimiterCfg } from "./config/rateLimiter";
+import { addErrorsToResponseSchemas } from "./plugins/add-errors-to-response-schemas";
 import { apiMetrics } from "./plugins/api-metrics";
 import { fastifyErrHandler } from "./plugins/error-handler";
 import { registerExternalNextjs } from "./plugins/external-nextjs";
@@ -30,26 +32,45 @@ import { fastifySwagger } from "./plugins/swagger";
 import { registerRoutes } from "./routes";

 type TMain = {
+  auditLogDb?: Knex;
   db: Knex;
   smtp: TSmtpService;
   logger?: Logger;
   queue: TQueueServiceFactory;
   keyStore: TKeyStoreFactory;
+  hsmModule: HsmModule;
 };

 // Run the server!
-export const main = async ({ db, smtp, logger, queue, keyStore }: TMain) => {
+export const main = async ({ db, hsmModule, auditLogDb, smtp, logger, queue, keyStore }: TMain) => {
   const appCfg = getConfig();
-  const server = fasitfy({
+
+  const server = fastify({
     logger: appCfg.NODE_ENV === "test" ? false : logger,
     trustProxy: true,
-    connectionTimeout: 30 * 1000,
-    ignoreTrailingSlash: true
+    connectionTimeout: appCfg.isHsmConfigured ? 90_000 : 30_000,
+    ignoreTrailingSlash: true,
+    pluginTimeout: 40_000
   }).withTypeProvider<ZodTypeProvider>();

   server.setValidatorCompiler(validatorCompiler);
   server.setSerializerCompiler(serializerCompiler);

+  server.addContentTypeParser("application/scim+json", { parseAs: "string" }, (_, body, done) => {
+    try {
+      const strBody = body instanceof Buffer ?
body.toString() : body; + if (!strBody) { + done(null, undefined); + return; + } + const json: unknown = JSON.parse(strBody); + done(null, json); + } catch (err) { + const error = err as Error; + done(error, undefined); + } + }); + try { await server.register(cookie, { secret: appCfg.COOKIE_SECRET_SIGN_KEY @@ -61,6 +82,8 @@ export const main = async ({ db, smtp, logger, queue, keyStore }: TMain) => { credentials: true, origin: appCfg.SITE_URL || true }); + + await server.register(addErrorsToResponseSchemas); // pull ip based on various proxy headers await server.register(fastifyIp); @@ -76,16 +99,17 @@ export const main = async ({ db, smtp, logger, queue, keyStore }: TMain) => { if (appCfg.isProductionMode) { await server.register(ratelimiter, globalRateLimiterCfg()); } + await server.register(helmet, { contentSecurityPolicy: false }); await server.register(maintenanceMode); - await server.register(registerRoutes, { smtp, queue, db, keyStore }); + await server.register(registerRoutes, { smtp, queue, db, auditLogDb, keyStore, hsmModule }); if (appCfg.isProductionMode) { await server.register(registerExternalNextjs, { - standaloneMode: appCfg.STANDALONE_MODE, - dir: path.join(__dirname, "../../"), + standaloneMode: appCfg.STANDALONE_MODE || IS_PACKAGED, + dir: path.join(__dirname, IS_PACKAGED ? "../../../" : "../../"), port: appCfg.PORT }); } diff --git a/backend/src/server/config/rateLimiter.ts b/backend/src/server/config/rateLimiter.ts index 8c41eb2faa..176d44183b 100644 --- a/backend/src/server/config/rateLimiter.ts +++ b/backend/src/server/config/rateLimiter.ts @@ -2,6 +2,7 @@ import type { RateLimitOptions, RateLimitPluginOptions } from "@fastify/rate-lim import { Redis } from "ioredis"; import { getConfig } from "@app/lib/config/env"; +import { RateLimitError } from "@app/lib/errors"; export const globalRateLimiterCfg = (): RateLimitPluginOptions => { const appCfg = getConfig(); @@ -10,6 +11,11 @@ export const globalRateLimiterCfg = (): RateLimitPluginOptions => { : null; return { + errorResponseBuilder: (_, context) => { + throw new RateLimitError({ + message: `Rate limit exceeded. 
Please try again in ${context.after}` + }); + }, timeWindow: 60 * 1000, max: 600, redis, @@ -21,14 +27,16 @@ export const globalRateLimiterCfg = (): RateLimitPluginOptions => { // GET endpoints export const readLimit: RateLimitOptions = { timeWindow: 60 * 1000, - max: 600, + hook: "preValidation", + max: (req) => req.rateLimits.readLimit, keyGenerator: (req) => req.realIp }; // POST, PATCH, PUT, DELETE endpoints export const writeLimit: RateLimitOptions = { timeWindow: 60 * 1000, - max: 200, // (too low, FA having issues so increasing it - maidul) + hook: "preValidation", + max: (req) => req.rateLimits.writeLimit, keyGenerator: (req) => req.realIp }; @@ -36,41 +44,52 @@ export const writeLimit: RateLimitOptions = { export const secretsLimit: RateLimitOptions = { // secrets, folders, secret imports timeWindow: 60 * 1000, - max: 60, + hook: "preValidation", + max: (req) => req.rateLimits.secretsLimit, keyGenerator: (req) => req.realIp }; export const authRateLimit: RateLimitOptions = { timeWindow: 60 * 1000, - max: 60, + hook: "preValidation", + max: (req) => req.rateLimits.authRateLimit, keyGenerator: (req) => req.realIp }; export const inviteUserRateLimit: RateLimitOptions = { timeWindow: 60 * 1000, - max: 30, + hook: "preValidation", + max: (req) => req.rateLimits.inviteUserRateLimit, keyGenerator: (req) => req.realIp }; export const mfaRateLimit: RateLimitOptions = { timeWindow: 60 * 1000, - max: 20, + hook: "preValidation", + max: (req) => req.rateLimits.mfaRateLimit, keyGenerator: (req) => { return req.headers.authorization?.split(" ")[1] || req.realIp; } }; -export const creationLimit: RateLimitOptions = { - // identity, project, org +// Public endpoints to avoid brute force attacks +export const publicEndpointLimit: RateLimitOptions = { + // Read Shared Secrets timeWindow: 60 * 1000, - max: 30, + hook: "preValidation", + max: (req) => req.rateLimits.publicEndpointLimit, keyGenerator: (req) => req.realIp }; -// Public endpoints to avoid brute force attacks -export const publicEndpointLimit: RateLimitOptions = { - // Shared Secrets +export const publicSecretShareCreationLimit: RateLimitOptions = { + // Create Shared Secrets timeWindow: 60 * 1000, - max: 30, + max: 5, + keyGenerator: (req) => req.realIp +}; + +export const userEngagementLimit: RateLimitOptions = { + timeWindow: 60 * 1000, + max: 5, keyGenerator: (req) => req.realIp }; diff --git a/backend/src/server/plugins/add-errors-to-response-schemas.ts b/backend/src/server/plugins/add-errors-to-response-schemas.ts new file mode 100644 index 0000000000..8eb358a1b4 --- /dev/null +++ b/backend/src/server/plugins/add-errors-to-response-schemas.ts @@ -0,0 +1,18 @@ +/* eslint-disable no-param-reassign */ +import fp from "fastify-plugin"; + +import { DefaultResponseErrorsSchema } from "../routes/sanitizedSchemas"; + +const isScimRoutes = (pathname: string) => + pathname.startsWith("/api/v1/scim/Users") || pathname.startsWith("/api/v1/scim/Groups"); + +export const addErrorsToResponseSchemas = fp(async (server) => { + server.addHook("onRoute", (routeOptions) => { + if (routeOptions.schema && routeOptions.schema.response && !isScimRoutes(routeOptions.path)) { + routeOptions.schema.response = { + ...DefaultResponseErrorsSchema, + ...routeOptions.schema.response + }; + } + }); +}); diff --git a/backend/src/server/plugins/audit-log.ts b/backend/src/server/plugins/audit-log.ts index 084b0cb54e..3f49778e85 100644 --- a/backend/src/server/plugins/audit-log.ts +++ b/backend/src/server/plugins/audit-log.ts @@ -70,7 +70,7 @@ export const 
injectAuditLogInfo = fp(async (server: FastifyZodProvider) => { metadata: {} }; } else { - throw new BadRequestError({ message: "Missing logic for other actor" }); + throw new BadRequestError({ message: "Invalid actor type provided" }); } req.auditLogInfo = payload; }); diff --git a/backend/src/server/plugins/auth/inject-identity.ts b/backend/src/server/plugins/auth/inject-identity.ts index d8814dd40c..9d239a405e 100644 --- a/backend/src/server/plugins/auth/inject-identity.ts +++ b/backend/src/server/plugins/auth/inject-identity.ts @@ -5,7 +5,7 @@ import jwt, { JwtPayload } from "jsonwebtoken"; import { TServiceTokens, TUsers } from "@app/db/schemas"; import { TScimTokenJwtPayload } from "@app/ee/services/scim/scim-types"; import { getConfig } from "@app/lib/config/env"; -import { UnauthorizedError } from "@app/lib/errors"; +import { BadRequestError } from "@app/lib/errors"; import { ActorType, AuthMethod, AuthMode, AuthModeJwtTokenPayload, AuthTokenType } from "@app/services/auth/auth-type"; import { TIdentityAccessTokenJwtPayload } from "@app/services/identity-access-token/identity-access-token-types"; @@ -18,6 +18,7 @@ export type TAuthMode = user: TUsers; orgId: string; authMethod: AuthMethod; + isMfaVerified?: boolean; } | { authMode: AuthMode.API_KEY; @@ -57,7 +58,6 @@ const extractAuth = async (req: FastifyRequest, jwtSecret: string) => { return { authMode: AuthMode.API_KEY, token: apiKey, actor: ActorType.USER } as const; } const authHeader = req.headers?.authorization; - if (!authHeader) return { authMode: null, token: null }; const authTokenValue = authHeader.slice(7); // slice of after Bearer @@ -103,12 +103,13 @@ export const injectIdentity = fp(async (server: FastifyZodProvider) => { server.decorateRequest("auth", null); server.addHook("onRequest", async (req) => { const appCfg = getConfig(); - const { authMode, token, actor } = await extractAuth(req, appCfg.AUTH_SECRET); - if (req.url.includes("/api/v3/auth/")) { + if (req.url.includes(".well-known/est") || req.url.includes("/api/v3/auth/")) { return; } + const { authMode, token, actor } = await extractAuth(req, appCfg.AUTH_SECRET); + if (!authMode) return; switch (authMode) { @@ -121,7 +122,8 @@ export const injectIdentity = fp(async (server: FastifyZodProvider) => { tokenVersionId, actor, orgId: orgId as string, - authMethod: token.authMethod + authMethod: token.authMethod, + isMfaVerified: token.isMfaVerified }; break; } @@ -167,7 +169,7 @@ export const injectIdentity = fp(async (server: FastifyZodProvider) => { break; } default: - throw new UnauthorizedError({ name: "Unknown token strategy" }); + throw new BadRequestError({ message: "Invalid token strategy provided" }); } }); }); diff --git a/backend/src/server/plugins/auth/superAdmin.ts b/backend/src/server/plugins/auth/superAdmin.ts index d5dee581b8..f5868f130d 100644 --- a/backend/src/server/plugins/auth/superAdmin.ts +++ b/backend/src/server/plugins/auth/superAdmin.ts @@ -1,6 +1,6 @@ import { FastifyReply, FastifyRequest, HookHandlerDoneFunction } from "fastify"; -import { UnauthorizedError } from "@app/lib/errors"; +import { ForbiddenRequestError } from "@app/lib/errors"; import { ActorType } from "@app/services/auth/auth-type"; export const verifySuperAdmin = ( @@ -9,9 +9,8 @@ export const verifySuperAdmin = ( done: HookHandlerDoneFunction ) => { if (req.auth.actor !== ActorType.USER || !req.auth.user.superAdmin) - throw new UnauthorizedError({ - name: "Unauthorized access", - message: "Requires superadmin access" + throw new ForbiddenRequestError({ + message: 
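For reference, guards like verifySuperAdmin compose with verifyAuth at the route level: verifyAuth authenticates the request, then the super-admin check authorizes it. A sketch of the usual chaining (the URL is illustrative; this route is not part of the diff):

import { verifySuperAdmin } from "@app/server/plugins/auth/superAdmin";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";

server.route({
  method: "GET",
  url: "/admin-only-example",
  onRequest: (req, res, done) => {
    // verifyAuth([...]) returns a hook; run verifySuperAdmin as its continuation
    verifyAuth([AuthMode.JWT])(req, res, () => {
      verifySuperAdmin(req, res, done);
    });
  },
  handler: async () => ({ ok: true })
});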
"Requires elevated super admin privileges" }); done(); }; diff --git a/backend/src/server/plugins/auth/verify-auth.ts b/backend/src/server/plugins/auth/verify-auth.ts index 3b3a239f73..44ea069dc7 100644 --- a/backend/src/server/plugins/auth/verify-auth.ts +++ b/backend/src/server/plugins/auth/verify-auth.ts @@ -1,6 +1,6 @@ import { FastifyReply, FastifyRequest, HookHandlerDoneFunction } from "fastify"; -import { UnauthorizedError } from "@app/lib/errors"; +import { ForbiddenRequestError, UnauthorizedError } from "@app/lib/errors"; import { AuthMode } from "@app/services/auth/auth-type"; interface TAuthOptions { @@ -11,11 +11,11 @@ export const verifyAuth = (authStrategies: AuthMode[], options: TAuthOptions = { requireOrg: true }) => (req: T, _res: FastifyReply, done: HookHandlerDoneFunction) => { if (!Array.isArray(authStrategies)) throw new Error("Auth strategy must be array"); - if (!req.auth) throw new UnauthorizedError({ name: "Unauthorized access", message: "Token missing" }); + if (!req.auth) throw new UnauthorizedError({ message: "Token missing" }); const isAccessAllowed = authStrategies.some((strategy) => strategy === req.auth.authMode); if (!isAccessAllowed) { - throw new UnauthorizedError({ name: `${req.url} Unauthorized Access` }); + throw new ForbiddenRequestError({ name: `Forbidden access to ${req.url}` }); } // New optional option. There are some routes which do not require an organization ID to be present on the request. diff --git a/backend/src/server/plugins/error-handler.ts b/backend/src/server/plugins/error-handler.ts index c8da4077af..007902a176 100644 --- a/backend/src/server/plugins/error-handler.ts +++ b/backend/src/server/plugins/error-handler.ts @@ -1,31 +1,81 @@ import { ForbiddenError } from "@casl/ability"; import fastifyPlugin from "fastify-plugin"; +import jwt from "jsonwebtoken"; import { ZodError } from "zod"; import { BadRequestError, DatabaseError, + ForbiddenRequestError, + GatewayTimeoutError, InternalServerError, + NotFoundError, + RateLimitError, ScimRequestError, UnauthorizedError } from "@app/lib/errors"; +enum JWTErrors { + JwtExpired = "jwt expired", + JwtMalformed = "jwt malformed", + InvalidAlgorithm = "invalid algorithm" +} + +enum HttpStatusCodes { + BadRequest = 400, + NotFound = 404, + Unauthorized = 401, + Forbidden = 403, + // eslint-disable-next-line @typescript-eslint/no-shadow + InternalServerError = 500, + GatewayTimeout = 504, + TooManyRequests = 429 +} + export const fastifyErrHandler = fastifyPlugin(async (server: FastifyZodProvider) => { server.setErrorHandler((error, req, res) => { req.log.error(error); if (error instanceof BadRequestError) { - void res.status(400).send({ statusCode: 400, message: error.message, error: error.name }); + void res + .status(HttpStatusCodes.BadRequest) + .send({ statusCode: HttpStatusCodes.BadRequest, message: error.message, error: error.name }); + } else if (error instanceof NotFoundError) { + void res + .status(HttpStatusCodes.NotFound) + .send({ statusCode: HttpStatusCodes.NotFound, message: error.message, error: error.name }); } else if (error instanceof UnauthorizedError) { - void res.status(403).send({ statusCode: 403, message: error.message, error: error.name }); + void res + .status(HttpStatusCodes.Unauthorized) + .send({ statusCode: HttpStatusCodes.Unauthorized, message: error.message, error: error.name }); } else if (error instanceof DatabaseError || error instanceof InternalServerError) { - void res.status(500).send({ statusCode: 500, message: "Something went wrong", error: error.name }); + 
void res + .status(HttpStatusCodes.InternalServerError) + .send({ statusCode: HttpStatusCodes.InternalServerError, message: "Something went wrong", error: error.name }); + } else if (error instanceof GatewayTimeoutError) { + void res + .status(HttpStatusCodes.GatewayTimeout) + .send({ statusCode: HttpStatusCodes.GatewayTimeout, message: error.message, error: error.name }); } else if (error instanceof ZodError) { - void res.status(403).send({ statusCode: 403, error: "ValidationFailure", message: error.issues }); + void res + .status(HttpStatusCodes.Unauthorized) + .send({ statusCode: HttpStatusCodes.Unauthorized, error: "ValidationFailure", message: error.issues }); } else if (error instanceof ForbiddenError) { - void res.status(401).send({ - statusCode: 401, + void res.status(HttpStatusCodes.Forbidden).send({ + statusCode: HttpStatusCodes.Forbidden, error: "PermissionDenied", - message: `You are not allowed to ${error.action} on ${error.subjectType}` + message: `You are not allowed to ${error.action} on ${error.subjectType} - ${JSON.stringify(error.subject)}` + }); + } else if (error instanceof ForbiddenRequestError) { + void res.status(HttpStatusCodes.Forbidden).send({ + statusCode: HttpStatusCodes.Forbidden, + message: error.message, + error: error.name + }); + } else if (error instanceof RateLimitError) { + void res.status(HttpStatusCodes.TooManyRequests).send({ + statusCode: HttpStatusCodes.TooManyRequests, + message: error.message, + error: error.name }); } else if (error instanceof ScimRequestError) { void res.status(error.status).send({ @@ -33,8 +83,33 @@ export const fastifyErrHandler = fastifyPlugin(async (server: FastifyZodProvider status: error.status, detail: error.detail }); + // Handle JWT errors and make them more human-readable for the end-user. + } else if (error instanceof jwt.JsonWebTokenError) { + const message = (() => { + if (error.message === JWTErrors.JwtExpired) { + return "Your token has expired. Please re-authenticate."; + } + if (error.message === JWTErrors.JwtMalformed) { + return "The provided access token is malformed. Please use a valid token or generate a new one and try again."; + } + if (error.message === JWTErrors.InvalidAlgorithm) { + return "The access token is signed with an invalid algorithm. 
Please provide a valid token and try again."; + } + + return error.message; + })(); + + void res.status(HttpStatusCodes.Forbidden).send({ + statusCode: HttpStatusCodes.Forbidden, + error: "TokenError", + message + }); } else { - void res.send(error); + void res.status(HttpStatusCodes.InternalServerError).send({ + statusCode: HttpStatusCodes.InternalServerError, + error: "InternalServerError", + message: "Something went wrong" + }); } }); }); diff --git a/backend/src/server/plugins/external-nextjs.ts b/backend/src/server/plugins/external-nextjs.ts index cc2fe371d4..7548170350 100644 --- a/backend/src/server/plugins/external-nextjs.ts +++ b/backend/src/server/plugins/external-nextjs.ts @@ -1,9 +1,10 @@ // this plugin allows running infisical in standalone mode // standalone mode = infisical backend and nextjs frontend in one server // this way users don't need to deploy two things - import path from "node:path"; +import { IS_PACKAGED } from "@app/lib/config/env"; + // to enable this you need to set standalone mode to true export const registerExternalNextjs = async ( server: FastifyZodProvider, @@ -18,20 +19,33 @@ export const registerExternalNextjs = async ( } ) => { if (standaloneMode) { - const nextJsBuildPath = path.join(dir, "frontend-build"); + const frontendName = IS_PACKAGED ? "frontend" : "frontend-build"; + const nextJsBuildPath = path.join(dir, frontendName); const { default: conf } = (await import( - path.join(dir, "frontend-build/.next/required-server-files.json"), + path.join(dir, `${frontendName}/.next/required-server-files.json`), // @ts-expect-error type { assert: { type: "json" } } )) as { default: { config: string } }; - /* eslint-disable */ - const { default: NextServer } = ( - await import(path.join(dir, "frontend-build/node_modules/next/dist/server/next-server.js")) - ).default; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + let NextServer: any; + + if (!IS_PACKAGED) { + /* eslint-disable */ + const { default: nextServer } = ( + await import(path.join(dir, `${frontendName}/node_modules/next/dist/server/next-server.js`)) + ).default; + + NextServer = nextServer; + } else { + /* eslint-disable */ + const nextServer = await import(path.join(dir, `${frontendName}/node_modules/next/dist/server/next-server.js`)); + + NextServer = nextServer.default; + } const nextApp = new NextServer({ dev: false, diff --git a/backend/src/server/plugins/inject-rate-limits.ts b/backend/src/server/plugins/inject-rate-limits.ts new file mode 100644 index 0000000000..1674ea5424 --- /dev/null +++ b/backend/src/server/plugins/inject-rate-limits.ts @@ -0,0 +1,38 @@ +import fp from "fastify-plugin"; + +import { getRateLimiterConfig } from "@app/ee/services/rate-limit/rate-limit-service"; +import { getConfig } from "@app/lib/config/env"; + +export const injectRateLimits = fp(async (server) => { + server.decorateRequest("rateLimits", null); + server.addHook("onRequest", async (req) => { + const appCfg = getConfig(); + + const instanceRateLimiterConfig = getRateLimiterConfig(); + if (!req.auth?.orgId) { + // for public endpoints, we always use the instance-wide default rate limits + req.rateLimits = instanceRateLimiterConfig; + return; + } + + const { rateLimits, customRateLimits } = await server.services.license.getPlan(req.auth.orgId); + + if (customRateLimits && !appCfg.isCloud) { + // we do this because for self-hosted/dedicated instances, we want custom rate limits to be based on admin configuration + // note that the syncing of custom rate limit happens on the 
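The branches in the error handler above assume each custom error class exposes a stable name next to its message. The actual definitions live in @app/lib/errors and are not shown in this diff; the shape is presumably along these lines (a minimal sketch, with an assumed constructor signature):

// sketch only; not the actual @app/lib/errors implementation
export class RateLimitError extends Error {
  name: string;

  error: unknown;

  constructor({ name, error, message }: { name?: string; error?: unknown; message?: string } = {}) {
    super(message || "Rate limit exceeded");
    this.name = name || "RateLimitExceeded";
    this.error = error;
  }
}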
instanceRateLimiterConfig object + req.rateLimits = instanceRateLimiterConfig; + return; + } + + // we're using the null coalescing operator in order to handle outdated licenses + req.rateLimits = { + readLimit: rateLimits?.readLimit ?? instanceRateLimiterConfig.readLimit, + writeLimit: rateLimits?.writeLimit ?? instanceRateLimiterConfig.writeLimit, + secretsLimit: rateLimits?.secretsLimit ?? instanceRateLimiterConfig.secretsLimit, + publicEndpointLimit: instanceRateLimiterConfig.publicEndpointLimit, + authRateLimit: instanceRateLimiterConfig.authRateLimit, + inviteUserRateLimit: instanceRateLimiterConfig.inviteUserRateLimit, + mfaRateLimit: instanceRateLimiterConfig.mfaRateLimit + }; + }); +}); diff --git a/backend/src/server/plugins/swagger.ts b/backend/src/server/plugins/swagger.ts index 99032bb6f7..c95672104b 100644 --- a/backend/src/server/plugins/swagger.ts +++ b/backend/src/server/plugins/swagger.ts @@ -15,8 +15,12 @@ export const fastifySwagger = fp(async (fastify) => { }, servers: [ { - url: "https://app.infisical.com", - description: "Production server" + url: "https://us.infisical.com", + description: "Production server (US)" + }, + { + url: "https://eu.infisical.com", + description: "Production server (EU)" }, { url: "http://localhost:8080", diff --git a/backend/src/server/routes/index.ts b/backend/src/server/routes/index.ts index 00590386a1..b9f46627bb 100644 --- a/backend/src/server/routes/index.ts +++ b/backend/src/server/routes/index.ts @@ -1,7 +1,10 @@ +import { CronJob } from "cron"; import { Knex } from "knex"; import { z } from "zod"; +import { registerCertificateEstRouter } from "@app/ee/routes/est/certificate-est-router"; import { registerV1EERoutes } from "@app/ee/routes/v1"; +import { registerV2EERoutes } from "@app/ee/routes/v2"; import { accessApprovalPolicyApproverDALFactory } from "@app/ee/services/access-approval-policy/access-approval-policy-approver-dal"; import { accessApprovalPolicyDALFactory } from "@app/ee/services/access-approval-policy/access-approval-policy-dal"; import { accessApprovalPolicyServiceFactory } from "@app/ee/services/access-approval-policy/access-approval-policy-service"; @@ -13,26 +16,40 @@ import { auditLogQueueServiceFactory } from "@app/ee/services/audit-log/audit-lo import { auditLogServiceFactory } from "@app/ee/services/audit-log/audit-log-service"; import { auditLogStreamDALFactory } from "@app/ee/services/audit-log-stream/audit-log-stream-dal"; import { auditLogStreamServiceFactory } from "@app/ee/services/audit-log-stream/audit-log-stream-service"; +import { certificateAuthorityCrlDALFactory } from "@app/ee/services/certificate-authority-crl/certificate-authority-crl-dal"; +import { certificateAuthorityCrlServiceFactory } from "@app/ee/services/certificate-authority-crl/certificate-authority-crl-service"; +import { certificateEstServiceFactory } from "@app/ee/services/certificate-est/certificate-est-service"; import { dynamicSecretDALFactory } from "@app/ee/services/dynamic-secret/dynamic-secret-dal"; import { dynamicSecretServiceFactory } from "@app/ee/services/dynamic-secret/dynamic-secret-service"; import { buildDynamicSecretProviders } from "@app/ee/services/dynamic-secret/providers"; import { dynamicSecretLeaseDALFactory } from "@app/ee/services/dynamic-secret-lease/dynamic-secret-lease-dal"; import { dynamicSecretLeaseQueueServiceFactory } from "@app/ee/services/dynamic-secret-lease/dynamic-secret-lease-queue"; import { dynamicSecretLeaseServiceFactory } from 
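Since injectRateLimits assigns to req.rateLimits and the limiter options read it back at preValidation time, the Fastify request type is presumably augmented roughly as follows (field list inferred from the plugin above; the actual declaration lives elsewhere in the codebase):

declare module "fastify" {
  interface FastifyRequest {
    rateLimits: {
      readLimit: number;
      writeLimit: number;
      secretsLimit: number;
      authRateLimit: number;
      inviteUserRateLimit: number;
      mfaRateLimit: number;
      publicEndpointLimit: number;
    };
  }
}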
"@app/ee/services/dynamic-secret-lease/dynamic-secret-lease-service"; +import { externalKmsDALFactory } from "@app/ee/services/external-kms/external-kms-dal"; +import { externalKmsServiceFactory } from "@app/ee/services/external-kms/external-kms-service"; import { groupDALFactory } from "@app/ee/services/group/group-dal"; import { groupServiceFactory } from "@app/ee/services/group/group-service"; import { userGroupMembershipDALFactory } from "@app/ee/services/group/user-group-membership-dal"; +import { hsmServiceFactory } from "@app/ee/services/hsm/hsm-service"; +import { HsmModule } from "@app/ee/services/hsm/hsm-types"; import { identityProjectAdditionalPrivilegeDALFactory } from "@app/ee/services/identity-project-additional-privilege/identity-project-additional-privilege-dal"; import { identityProjectAdditionalPrivilegeServiceFactory } from "@app/ee/services/identity-project-additional-privilege/identity-project-additional-privilege-service"; +import { identityProjectAdditionalPrivilegeV2ServiceFactory } from "@app/ee/services/identity-project-additional-privilege-v2/identity-project-additional-privilege-v2-service"; import { ldapConfigDALFactory } from "@app/ee/services/ldap-config/ldap-config-dal"; import { ldapConfigServiceFactory } from "@app/ee/services/ldap-config/ldap-config-service"; import { ldapGroupMapDALFactory } from "@app/ee/services/ldap-config/ldap-group-map-dal"; import { licenseDALFactory } from "@app/ee/services/license/license-dal"; import { licenseServiceFactory } from "@app/ee/services/license/license-service"; +import { oidcConfigDALFactory } from "@app/ee/services/oidc/oidc-config-dal"; +import { oidcConfigServiceFactory } from "@app/ee/services/oidc/oidc-config-service"; import { permissionDALFactory } from "@app/ee/services/permission/permission-dal"; import { permissionServiceFactory } from "@app/ee/services/permission/permission-service"; +import { projectTemplateDALFactory } from "@app/ee/services/project-template/project-template-dal"; +import { projectTemplateServiceFactory } from "@app/ee/services/project-template/project-template-service"; import { projectUserAdditionalPrivilegeDALFactory } from "@app/ee/services/project-user-additional-privilege/project-user-additional-privilege-dal"; import { projectUserAdditionalPrivilegeServiceFactory } from "@app/ee/services/project-user-additional-privilege/project-user-additional-privilege-service"; +import { rateLimitDALFactory } from "@app/ee/services/rate-limit/rate-limit-dal"; +import { rateLimitServiceFactory } from "@app/ee/services/rate-limit/rate-limit-service"; import { samlConfigDALFactory } from "@app/ee/services/saml-config/saml-config-dal"; import { samlConfigServiceFactory } from "@app/ee/services/saml-config/saml-config-service"; import { scimDALFactory } from "@app/ee/services/scim/scim-dal"; @@ -57,12 +74,14 @@ import { secretSnapshotServiceFactory } from "@app/ee/services/secret-snapshot/s import { snapshotDALFactory } from "@app/ee/services/secret-snapshot/snapshot-dal"; import { snapshotFolderDALFactory } from "@app/ee/services/secret-snapshot/snapshot-folder-dal"; import { snapshotSecretDALFactory } from "@app/ee/services/secret-snapshot/snapshot-secret-dal"; +import { snapshotSecretV2DALFactory } from "@app/ee/services/secret-snapshot/snapshot-secret-v2-dal"; import { trustedIpDALFactory } from "@app/ee/services/trusted-ip/trusted-ip-dal"; import { trustedIpServiceFactory } from "@app/ee/services/trusted-ip/trusted-ip-service"; import { TKeyStoreFactory } from "@app/keystore/keystore"; import 
{ getConfig } from "@app/lib/config/env"; import { TQueueServiceFactory } from "@app/queue"; import { readLimit } from "@app/server/config/rateLimiter"; +import { accessTokenQueueServiceFactory } from "@app/services/access-token-queue/access-token-queue"; import { apiKeyDALFactory } from "@app/services/api-key/api-key-dal"; import { apiKeyServiceFactory } from "@app/services/api-key/api-key-service"; import { authDALFactory } from "@app/services/auth/auth-dal"; @@ -71,10 +90,27 @@ import { authPaswordServiceFactory } from "@app/services/auth/auth-password-serv import { authSignupServiceFactory } from "@app/services/auth/auth-signup-service"; import { tokenDALFactory } from "@app/services/auth-token/auth-token-dal"; import { tokenServiceFactory } from "@app/services/auth-token/auth-token-service"; +import { certificateBodyDALFactory } from "@app/services/certificate/certificate-body-dal"; +import { certificateDALFactory } from "@app/services/certificate/certificate-dal"; +import { certificateServiceFactory } from "@app/services/certificate/certificate-service"; +import { certificateAuthorityCertDALFactory } from "@app/services/certificate-authority/certificate-authority-cert-dal"; +import { certificateAuthorityDALFactory } from "@app/services/certificate-authority/certificate-authority-dal"; +import { certificateAuthorityQueueFactory } from "@app/services/certificate-authority/certificate-authority-queue"; +import { certificateAuthoritySecretDALFactory } from "@app/services/certificate-authority/certificate-authority-secret-dal"; +import { certificateAuthorityServiceFactory } from "@app/services/certificate-authority/certificate-authority-service"; +import { certificateTemplateDALFactory } from "@app/services/certificate-template/certificate-template-dal"; +import { certificateTemplateEstConfigDALFactory } from "@app/services/certificate-template/certificate-template-est-config-dal"; +import { certificateTemplateServiceFactory } from "@app/services/certificate-template/certificate-template-service"; +import { cmekServiceFactory } from "@app/services/cmek/cmek-service"; +import { externalGroupOrgRoleMappingDALFactory } from "@app/services/external-group-org-role-mapping/external-group-org-role-mapping-dal"; +import { externalGroupOrgRoleMappingServiceFactory } from "@app/services/external-group-org-role-mapping/external-group-org-role-mapping-service"; +import { externalMigrationQueueFactory } from "@app/services/external-migration/external-migration-queue"; +import { externalMigrationServiceFactory } from "@app/services/external-migration/external-migration-service"; import { groupProjectDALFactory } from "@app/services/group-project/group-project-dal"; import { groupProjectMembershipRoleDALFactory } from "@app/services/group-project/group-project-membership-role-dal"; import { groupProjectServiceFactory } from "@app/services/group-project/group-project-service"; import { identityDALFactory } from "@app/services/identity/identity-dal"; +import { identityMetadataDALFactory } from "@app/services/identity/identity-metadata-dal"; import { identityOrgDALFactory } from "@app/services/identity/identity-org-dal"; import { identityServiceFactory } from "@app/services/identity/identity-service"; import { identityAccessTokenDALFactory } from "@app/services/identity-access-token/identity-access-token-dal"; @@ -87,9 +123,13 @@ import { identityGcpAuthDALFactory } from "@app/services/identity-gcp-auth/ident import { identityGcpAuthServiceFactory } from 
"@app/services/identity-gcp-auth/identity-gcp-auth-service"; import { identityKubernetesAuthDALFactory } from "@app/services/identity-kubernetes-auth/identity-kubernetes-auth-dal"; import { identityKubernetesAuthServiceFactory } from "@app/services/identity-kubernetes-auth/identity-kubernetes-auth-service"; +import { identityOidcAuthDALFactory } from "@app/services/identity-oidc-auth/identity-oidc-auth-dal"; +import { identityOidcAuthServiceFactory } from "@app/services/identity-oidc-auth/identity-oidc-auth-service"; import { identityProjectDALFactory } from "@app/services/identity-project/identity-project-dal"; import { identityProjectMembershipRoleDALFactory } from "@app/services/identity-project/identity-project-membership-role-dal"; import { identityProjectServiceFactory } from "@app/services/identity-project/identity-project-service"; +import { identityTokenAuthDALFactory } from "@app/services/identity-token-auth/identity-token-auth-dal"; +import { identityTokenAuthServiceFactory } from "@app/services/identity-token-auth/identity-token-auth-service"; import { identityUaClientSecretDALFactory } from "@app/services/identity-ua/identity-ua-client-secret-dal"; import { identityUaDALFactory } from "@app/services/identity-ua/identity-ua-dal"; import { identityUaServiceFactory } from "@app/services/identity-ua/identity-ua-service"; @@ -97,7 +137,8 @@ import { integrationDALFactory } from "@app/services/integration/integration-dal import { integrationServiceFactory } from "@app/services/integration/integration-service"; import { integrationAuthDALFactory } from "@app/services/integration-auth/integration-auth-dal"; import { integrationAuthServiceFactory } from "@app/services/integration-auth/integration-auth-service"; -import { kmsDALFactory } from "@app/services/kms/kms-dal"; +import { internalKmsDALFactory } from "@app/services/kms/internal-kms-dal"; +import { kmskeyDALFactory } from "@app/services/kms/kms-key-dal"; import { kmsRootConfigDALFactory } from "@app/services/kms/kms-root-config-dal"; import { kmsServiceFactory } from "@app/services/kms/kms-service"; import { incidentContactDALFactory } from "@app/services/org/incident-contacts-dal"; @@ -106,7 +147,14 @@ import { orgDALFactory } from "@app/services/org/org-dal"; import { orgRoleDALFactory } from "@app/services/org/org-role-dal"; import { orgRoleServiceFactory } from "@app/services/org/org-role-service"; import { orgServiceFactory } from "@app/services/org/org-service"; +import { orgAdminServiceFactory } from "@app/services/org-admin/org-admin-service"; import { orgMembershipDALFactory } from "@app/services/org-membership/org-membership-dal"; +import { dailyExpiringPkiItemAlertQueueServiceFactory } from "@app/services/pki-alert/expiring-pki-item-alert-queue"; +import { pkiAlertDALFactory } from "@app/services/pki-alert/pki-alert-dal"; +import { pkiAlertServiceFactory } from "@app/services/pki-alert/pki-alert-service"; +import { pkiCollectionDALFactory } from "@app/services/pki-collection/pki-collection-dal"; +import { pkiCollectionItemDALFactory } from "@app/services/pki-collection/pki-collection-item-dal"; +import { pkiCollectionServiceFactory } from "@app/services/pki-collection/pki-collection-service"; import { projectDALFactory } from "@app/services/project/project-dal"; import { projectQueueFactory } from "@app/services/project/project-queue"; import { projectServiceFactory } from "@app/services/project/project-service"; @@ -138,23 +186,36 @@ import { secretSharingDALFactory } from "@app/services/secret-sharing/secret-sha 
import { secretSharingServiceFactory } from "@app/services/secret-sharing/secret-sharing-service"; import { secretTagDALFactory } from "@app/services/secret-tag/secret-tag-dal"; import { secretTagServiceFactory } from "@app/services/secret-tag/secret-tag-service"; +import { secretV2BridgeDALFactory } from "@app/services/secret-v2-bridge/secret-v2-bridge-dal"; +import { secretV2BridgeServiceFactory } from "@app/services/secret-v2-bridge/secret-v2-bridge-service"; +import { secretVersionV2BridgeDALFactory } from "@app/services/secret-v2-bridge/secret-version-dal"; +import { secretVersionV2TagBridgeDALFactory } from "@app/services/secret-v2-bridge/secret-version-tag-dal"; import { serviceTokenDALFactory } from "@app/services/service-token/service-token-dal"; import { serviceTokenServiceFactory } from "@app/services/service-token/service-token-service"; +import { projectSlackConfigDALFactory } from "@app/services/slack/project-slack-config-dal"; +import { slackIntegrationDALFactory } from "@app/services/slack/slack-integration-dal"; +import { slackServiceFactory } from "@app/services/slack/slack-service"; import { TSmtpService } from "@app/services/smtp/smtp-service"; import { superAdminDALFactory } from "@app/services/super-admin/super-admin-dal"; import { getServerCfg, superAdminServiceFactory } from "@app/services/super-admin/super-admin-service"; import { telemetryDALFactory } from "@app/services/telemetry/telemetry-dal"; import { telemetryQueueServiceFactory } from "@app/services/telemetry/telemetry-queue"; import { telemetryServiceFactory } from "@app/services/telemetry/telemetry-service"; +import { totpConfigDALFactory } from "@app/services/totp/totp-config-dal"; +import { totpServiceFactory } from "@app/services/totp/totp-service"; import { userDALFactory } from "@app/services/user/user-dal"; import { userServiceFactory } from "@app/services/user/user-service"; import { userAliasDALFactory } from "@app/services/user-alias/user-alias-dal"; +import { userEngagementServiceFactory } from "@app/services/user-engagement/user-engagement-service"; import { webhookDALFactory } from "@app/services/webhook/webhook-dal"; import { webhookServiceFactory } from "@app/services/webhook/webhook-service"; +import { workflowIntegrationDALFactory } from "@app/services/workflow-integration/workflow-integration-dal"; +import { workflowIntegrationServiceFactory } from "@app/services/workflow-integration/workflow-integration-service"; import { injectAuditLogInfo } from "../plugins/audit-log"; import { injectIdentity } from "../plugins/auth/inject-identity"; import { injectPermission } from "../plugins/auth/inject-permission"; +import { injectRateLimits } from "../plugins/inject-rate-limits"; import { registerSecretScannerGhApp } from "../plugins/secret-scanner"; import { registerV1Routes } from "./v1"; import { registerV2Routes } from "./v2"; @@ -163,16 +224,23 @@ import { registerV3Routes } from "./v3"; export const registerRoutes = async ( server: FastifyZodProvider, { + auditLogDb, db, + hsmModule, smtp: smtpService, queue: queueService, keyStore - }: { db: Knex; smtp: TSmtpService; queue: TQueueServiceFactory; keyStore: TKeyStoreFactory } + }: { + auditLogDb?: Knex; + db: Knex; + hsmModule: HsmModule; + smtp: TSmtpService; + queue: TQueueServiceFactory; + keyStore: TKeyStoreFactory; + } ) => { const appCfg = getConfig(); - if (!appCfg.DISABLE_SECRET_SCANNING) { - await server.register(registerSecretScannerGhApp, { prefix: "/ss-webhook" }); - } + await server.register(registerSecretScannerGhApp, { prefix: 
"/ss-webhook" }); // db layers const userDAL = userDALFactory(db); @@ -185,6 +253,7 @@ export const registerRoutes = async ( const incidentContactDAL = incidentContactDALFactory(db); const orgRoleDAL = orgRoleDALFactory(db); const superAdminDAL = superAdminDALFactory(db); + const rateLimitDAL = rateLimitDALFactory(db); const apiKeyDAL = apiKeyDALFactory(db); const projectDAL = projectDALFactory(db); @@ -205,26 +274,33 @@ export const registerRoutes = async ( const secretVersionTagDAL = secretVersionTagDALFactory(db); const secretBlindIndexDAL = secretBlindIndexDALFactory(db); + const secretV2BridgeDAL = secretV2BridgeDALFactory(db); + const secretVersionV2BridgeDAL = secretVersionV2BridgeDALFactory(db); + const secretVersionTagV2BridgeDAL = secretVersionV2TagBridgeDALFactory(db); + const integrationDAL = integrationDALFactory(db); const integrationAuthDAL = integrationAuthDALFactory(db); const webhookDAL = webhookDALFactory(db); const serviceTokenDAL = serviceTokenDALFactory(db); const identityDAL = identityDALFactory(db); + const identityMetadataDAL = identityMetadataDALFactory(db); const identityAccessTokenDAL = identityAccessTokenDALFactory(db); const identityOrgMembershipDAL = identityOrgDALFactory(db); const identityProjectDAL = identityProjectDALFactory(db); const identityProjectMembershipRoleDAL = identityProjectMembershipRoleDALFactory(db); const identityProjectAdditionalPrivilegeDAL = identityProjectAdditionalPrivilegeDALFactory(db); + const identityTokenAuthDAL = identityTokenAuthDALFactory(db); const identityUaDAL = identityUaDALFactory(db); const identityKubernetesAuthDAL = identityKubernetesAuthDALFactory(db); const identityUaClientSecretDAL = identityUaClientSecretDALFactory(db); const identityAwsAuthDAL = identityAwsAuthDALFactory(db); const identityGcpAuthDAL = identityGcpAuthDALFactory(db); + const identityOidcAuthDAL = identityOidcAuthDALFactory(db); const identityAzureAuthDAL = identityAzureAuthDALFactory(db); - const auditLogDAL = auditLogDALFactory(db); + const auditLogDAL = auditLogDALFactory(auditLogDb ?? 
db); const auditLogStreamDAL = auditLogStreamDALFactory(db); const trustedIpDAL = trustedIpDALFactory(db); const telemetryDAL = telemetryDALFactory(db); @@ -236,6 +312,7 @@ export const registerRoutes = async ( const ldapConfigDAL = ldapConfigDALFactory(db); const ldapGroupMapDAL = ldapGroupMapDALFactory(db); + const oidcConfigDAL = oidcConfigDALFactory(db); const accessApprovalPolicyDAL = accessApprovalPolicyDALFactory(db); const accessApprovalRequestDAL = accessApprovalRequestDALFactory(db); const accessApprovalPolicyApproverDAL = accessApprovalPolicyApproverDALFactory(db); @@ -250,6 +327,7 @@ export const registerRoutes = async ( const secretRotationDAL = secretRotationDALFactory(db); const snapshotDAL = snapshotDALFactory(db); const snapshotSecretDAL = snapshotSecretDALFactory(db); + const snapshotSecretV2BridgeDAL = snapshotSecretV2DALFactory(db); const snapshotFolderDAL = snapshotFolderDALFactory(db); const gitAppInstallSessionDAL = gitAppInstallSessionDALFactory(db); @@ -264,9 +342,20 @@ export const registerRoutes = async ( const dynamicSecretDAL = dynamicSecretDALFactory(db); const dynamicSecretLeaseDAL = dynamicSecretLeaseDALFactory(db); - const kmsDAL = kmsDALFactory(db); + const kmsDAL = kmskeyDALFactory(db); + const internalKmsDAL = internalKmsDALFactory(db); + const externalKmsDAL = externalKmsDALFactory(db); const kmsRootConfigDAL = kmsRootConfigDALFactory(db); + const slackIntegrationDAL = slackIntegrationDALFactory(db); + const projectSlackConfigDAL = projectSlackConfigDALFactory(db); + const workflowIntegrationDAL = workflowIntegrationDALFactory(db); + const totpConfigDAL = totpConfigDALFactory(db); + + const externalGroupOrgRoleMappingDAL = externalGroupOrgRoleMappingDALFactory(db); + + const projectTemplateDAL = projectTemplateDALFactory(db); + const permissionService = permissionServiceFactory({ permissionDAL, orgRoleDAL, @@ -275,10 +364,27 @@ export const registerRoutes = async ( projectDAL }); const licenseService = licenseServiceFactory({ permissionService, orgDAL, licenseDAL, keyStore }); + + const hsmService = hsmServiceFactory({ + hsmModule + }); + const kmsService = kmsServiceFactory({ kmsRootConfigDAL, keyStore, - kmsDAL + kmsDAL, + internalKmsDAL, + orgDAL, + projectDAL, + hsmService + }); + + const externalKmsService = externalKmsServiceFactory({ + kmsDAL, + kmsService, + permissionService, + externalKmsDAL, + licenseService }); const trustedIpService = trustedIpServiceFactory({ @@ -302,15 +408,17 @@ export const registerRoutes = async ( auditLogStreamDAL }); const secretApprovalPolicyService = secretApprovalPolicyServiceFactory({ - projectMembershipDAL, projectEnvDAL, secretApprovalPolicyApproverDAL: sapApproverDAL, permissionService, - secretApprovalPolicyDAL + secretApprovalPolicyDAL, + licenseService, + userDAL }); - const tokenService = tokenServiceFactory({ tokenDAL: authTokenDAL, userDAL }); + const tokenService = tokenServiceFactory({ tokenDAL: authTokenDAL, userDAL, orgMembershipDAL }); const samlService = samlConfigServiceFactory({ + identityMetadataDAL, permissionService, orgBotDAL, orgDAL, @@ -353,6 +461,7 @@ export const registerRoutes = async ( orgDAL, orgMembershipDAL, projectDAL, + projectUserAdditionalPrivilegeDAL, projectMembershipDAL, groupDAL, groupProjectDAL, @@ -360,7 +469,8 @@ export const registerRoutes = async ( projectKeyDAL, projectBotDAL, permissionService, - smtpService + smtpService, + externalGroupOrgRoleMappingDAL }); const ldapService = ldapConfigServiceFactory({ @@ -378,7 +488,9 @@ export const registerRoutes = async ( 
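The wiring in registerRoutes follows one convention throughout: a *DALFactory closes over the Knex connection, a *ServiceFactory receives the DALs (and sibling services) it depends on, and the ReturnType of each factory doubles as its dependency type. A minimal sketch of the pattern with hypothetical names (the table and methods are illustrative):

import { Knex } from "knex";

import { NotFoundError } from "@app/lib/errors";

export const exampleDALFactory = (db: Knex) => ({
  findById: (id: string) => db("example_table").where({ id }).first()
});

export type TExampleDALFactory = ReturnType<typeof exampleDALFactory>;

export const exampleServiceFactory = ({ exampleDAL }: { exampleDAL: TExampleDALFactory }) => ({
  getById: async (id: string) => {
    const row = await exampleDAL.findById(id);
    if (!row) throw new NotFoundError({ message: "Example not found" });
    return row;
  }
});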
userDAL, userAliasDAL, permissionService, - licenseService + licenseService, + tokenService, + smtpService }); const telemetryService = telemetryServiceFactory({ @@ -396,31 +508,53 @@ export const registerRoutes = async ( userAliasDAL, orgMembershipDAL, tokenService, - smtpService + permissionService, + groupProjectDAL, + smtpService, + projectMembershipDAL }); - const loginService = authLoginServiceFactory({ userDAL, smtpService, tokenService, orgDAL, tokenDAL: authTokenDAL }); + + const totpService = totpServiceFactory({ + totpConfigDAL, + userDAL, + kmsService + }); + + const loginService = authLoginServiceFactory({ userDAL, smtpService, tokenService, orgDAL, totpService }); const passwordService = authPaswordServiceFactory({ tokenService, smtpService, authDAL, - userDAL + userDAL, + totpConfigDAL }); + + const projectBotService = projectBotServiceFactory({ permissionService, projectBotDAL, projectDAL }); + const orgService = orgServiceFactory({ userAliasDAL, + identityMetadataDAL, licenseService, samlConfigDAL, orgRoleDAL, permissionService, orgDAL, + projectBotDAL, incidentContactDAL, tokenService, + projectUserAdditionalPrivilegeDAL, + projectUserMembershipRoleDAL, + projectRoleDAL, projectDAL, projectMembershipDAL, + orgMembershipDAL, projectKeyDAL, smtpService, userDAL, groupDAL, - orgBotDAL + orgBotDAL, + oidcConfigDAL, + projectBotService }); const signupService = authSignupServiceFactory({ tokenService, @@ -432,17 +566,42 @@ export const registerRoutes = async ( projectDAL, projectBotDAL, groupProjectDAL, + projectMembershipDAL, + projectUserMembershipRoleDAL, orgDAL, orgService, licenseService }); - const orgRoleService = orgRoleServiceFactory({ permissionService, orgRoleDAL }); + const orgRoleService = orgRoleServiceFactory({ + permissionService, + orgRoleDAL, + orgDAL, + externalGroupOrgRoleMappingDAL + }); const superAdminService = superAdminServiceFactory({ userDAL, authService: loginService, serverCfgDAL: superAdminDAL, + kmsRootConfigDAL, orgService, - keyStore + keyStore, + licenseService, + kmsService + }); + + const orgAdminService = orgAdminServiceFactory({ + projectDAL, + permissionService, + projectUserMembershipRoleDAL, + userDAL, + projectBotDAL, + projectKeyDAL, + projectMembershipDAL + }); + + const rateLimitService = rateLimitServiceFactory({ + rateLimitDAL, + licenseService }); const apiKeyService = apiKeyServiceFactory({ apiKeyDAL, userDAL }); @@ -460,7 +619,6 @@ export const registerRoutes = async ( secretScanningDAL, secretScanningQueue }); - const projectBotService = projectBotServiceFactory({ permissionService, projectBotDAL, projectDAL }); const projectMembershipService = projectMembershipServiceFactory({ projectMembershipDAL, @@ -470,10 +628,12 @@ export const registerRoutes = async ( projectBotDAL, orgDAL, userDAL, + projectUserAdditionalPrivilegeDAL, userGroupMembershipDAL, smtpService, projectKeyDAL, projectRoleDAL, + groupProjectDAL, licenseService }); const projectUserAdditionalPrivilegeService = projectUserAdditionalPrivilegeServiceFactory({ @@ -506,30 +666,143 @@ export const registerRoutes = async ( projectUserMembershipRoleDAL }); + const certificateAuthorityDAL = certificateAuthorityDALFactory(db); + const certificateAuthorityCertDAL = certificateAuthorityCertDALFactory(db); + const certificateAuthoritySecretDAL = certificateAuthoritySecretDALFactory(db); + const certificateAuthorityCrlDAL = certificateAuthorityCrlDALFactory(db); + const certificateTemplateDAL = certificateTemplateDALFactory(db); + const certificateTemplateEstConfigDAL = 
certificateTemplateEstConfigDALFactory(db); + + const certificateDAL = certificateDALFactory(db); + const certificateBodyDAL = certificateBodyDALFactory(db); + + const pkiAlertDAL = pkiAlertDALFactory(db); + const pkiCollectionDAL = pkiCollectionDALFactory(db); + const pkiCollectionItemDAL = pkiCollectionItemDALFactory(db); + + const certificateService = certificateServiceFactory({ + certificateDAL, + certificateBodyDAL, + certificateAuthorityDAL, + certificateAuthorityCertDAL, + certificateAuthorityCrlDAL, + certificateAuthoritySecretDAL, + projectDAL, + kmsService, + permissionService + }); + + const certificateAuthorityQueue = certificateAuthorityQueueFactory({ + certificateAuthorityCrlDAL, + certificateAuthorityDAL, + certificateAuthoritySecretDAL, + certificateDAL, + projectDAL, + kmsService, + queueService + }); + + const certificateAuthorityService = certificateAuthorityServiceFactory({ + certificateAuthorityDAL, + certificateAuthorityCertDAL, + certificateAuthoritySecretDAL, + certificateAuthorityCrlDAL, + certificateTemplateDAL, + certificateAuthorityQueue, + certificateDAL, + certificateBodyDAL, + pkiCollectionDAL, + pkiCollectionItemDAL, + projectDAL, + kmsService, + permissionService + }); + + const certificateAuthorityCrlService = certificateAuthorityCrlServiceFactory({ + certificateAuthorityDAL, + certificateAuthorityCrlDAL, + projectDAL, + kmsService, + permissionService + // licenseService + }); + + const certificateTemplateService = certificateTemplateServiceFactory({ + certificateTemplateDAL, + certificateTemplateEstConfigDAL, + certificateAuthorityDAL, + permissionService, + kmsService, + projectDAL, + licenseService + }); + + const certificateEstService = certificateEstServiceFactory({ + certificateAuthorityService, + certificateTemplateService, + certificateTemplateDAL, + certificateAuthorityCertDAL, + certificateAuthorityDAL, + projectDAL, + kmsService, + licenseService + }); + + const pkiAlertService = pkiAlertServiceFactory({ + pkiAlertDAL, + pkiCollectionDAL, + permissionService, + smtpService + }); + + const pkiCollectionService = pkiCollectionServiceFactory({ + pkiCollectionDAL, + pkiCollectionItemDAL, + certificateAuthorityDAL, + certificateDAL, + permissionService + }); + + const projectTemplateService = projectTemplateServiceFactory({ + licenseService, + permissionService, + projectTemplateDAL + }); + const projectService = projectServiceFactory({ permissionService, projectDAL, projectQueue: projectQueueService, - secretBlindIndexDAL, identityProjectDAL, identityOrgMembershipDAL, - projectBotDAL, projectKeyDAL, userDAL, projectEnvDAL, orgDAL, orgService, projectMembershipDAL, + projectRoleDAL, folderDAL, licenseService, + certificateAuthorityDAL, + certificateDAL, + pkiAlertDAL, + pkiCollectionDAL, projectUserMembershipRoleDAL, identityProjectMembershipRoleDAL, - keyStore + keyStore, + kmsService, + projectBotDAL, + certificateTemplateDAL, + projectSlackConfigDAL, + slackIntegrationDAL, + projectTemplateService }); const projectEnvService = projectEnvServiceFactory({ permissionService, projectEnvDAL, + keyStore, licenseService, projectDAL, folderDAL @@ -554,12 +827,19 @@ export const registerRoutes = async ( secretVersionDAL, folderVersionDAL, secretTagDAL, - secretVersionTagDAL + secretVersionTagDAL, + projectBotService, + kmsService, + secretV2BridgeDAL, + secretVersionV2BridgeDAL, + snapshotSecretV2BridgeDAL, + secretVersionV2TagBridgeDAL: secretVersionTagV2BridgeDAL }); const webhookService = webhookServiceFactory({ permissionService, webhookDAL, - 
projectEnvDAL + projectEnvDAL, + projectDAL }); const secretTagService = secretTagServiceFactory({ secretTagDAL, permissionService }); @@ -576,10 +856,11 @@ export const registerRoutes = async ( integrationAuthDAL, integrationDAL, permissionService, - projectBotDAL, - projectBotService + projectBotService, + kmsService }); const secretQueueService = secretQueueFactory({ + keyStore, queueService, secretDAL, folderDAL, @@ -590,6 +871,8 @@ export const registerRoutes = async ( projectEnvDAL, webhookDAL, orgDAL, + auditLogService, + userDAL, projectMembershipDAL, smtpService, projectDAL, @@ -597,42 +880,54 @@ export const registerRoutes = async ( secretVersionDAL, secretBlindIndexDAL, secretTagDAL, - secretVersionTagDAL + secretVersionTagDAL, + kmsService, + secretVersionV2BridgeDAL, + secretV2BridgeDAL, + secretVersionTagV2BridgeDAL, + secretRotationDAL, + integrationAuthDAL, + snapshotDAL, + snapshotSecretV2BridgeDAL, + secretApprovalRequestDAL, + projectKeyDAL, + projectUserMembershipRoleDAL, + orgService }); const secretImportService = secretImportServiceFactory({ licenseService, + projectBotService, projectEnvDAL, folderDAL, permissionService, secretImportDAL, projectDAL, secretDAL, - secretQueueService + secretQueueService, + secretV2BridgeDAL, + kmsService }); const secretBlindIndexService = secretBlindIndexServiceFactory({ permissionService, secretDAL, secretBlindIndexDAL }); - const secretService = secretServiceFactory({ - folderDAL, - secretVersionDAL, - secretVersionTagDAL, - secretBlindIndexDAL, - permissionService, - projectDAL, - secretDAL, - secretTagDAL, - snapshotService, - secretQueueService, - secretImportDAL, - projectEnvDAL, - projectBotService - }); - const secretSharingService = secretSharingServiceFactory({ + const secretV2BridgeService = secretV2BridgeServiceFactory({ + folderDAL, + secretVersionDAL: secretVersionV2BridgeDAL, + secretQueueService, + secretDAL: secretV2BridgeDAL, permissionService, - secretSharingDAL + secretVersionTagDAL: secretVersionTagV2BridgeDAL, + secretTagDAL, + projectEnvDAL, + secretImportDAL, + secretApprovalRequestDAL, + secretApprovalPolicyService, + secretApprovalRequestSecretDAL, + kmsService, + snapshotService }); const secretApprovalRequestService = secretApprovalRequestServiceFactory({ @@ -649,16 +944,56 @@ export const registerRoutes = async ( secretApprovalRequestDAL, snapshotService, secretVersionTagDAL, - secretQueueService + secretQueueService, + kmsService, + secretV2BridgeDAL, + secretApprovalPolicyDAL, + secretVersionV2BridgeDAL, + secretVersionTagV2BridgeDAL, + smtpService, + projectEnvDAL, + userDAL, + licenseService, + projectSlackConfigDAL + }); + + const secretService = secretServiceFactory({ + folderDAL, + secretVersionDAL, + secretVersionTagDAL, + secretBlindIndexDAL, + permissionService, + projectDAL, + secretDAL, + secretTagDAL, + snapshotService, + secretQueueService, + secretImportDAL, + projectEnvDAL, + projectBotService, + secretApprovalPolicyService, + secretApprovalRequestDAL, + secretApprovalRequestSecretDAL, + secretV2BridgeService, + secretApprovalRequestService + }); + + const secretSharingService = secretSharingServiceFactory({ + permissionService, + secretSharingDAL, + orgDAL, + kmsService }); const accessApprovalPolicyService = accessApprovalPolicyServiceFactory({ accessApprovalPolicyDAL, accessApprovalPolicyApproverDAL, + groupDAL, permissionService, projectEnvDAL, projectMembershipDAL, - projectDAL + projectDAL, + userDAL }); const accessApprovalRequestService = accessApprovalRequestServiceFactory({ @@ 
-672,7 +1007,10 @@ export const registerRoutes = async ( projectEnvDAL, userDAL, smtpService, - accessApprovalPolicyApproverDAL + accessApprovalPolicyApproverDAL, + projectSlackConfigDAL, + kmsService, + groupDAL }); const secretReplicationService = secretReplicationServiceFactory({ @@ -685,12 +1023,14 @@ export const registerRoutes = async ( queueService, folderDAL, secretApprovalPolicyService, - secretBlindIndexDAL, secretApprovalRequestDAL, secretApprovalRequestSecretDAL, secretQueueService, - projectMembershipDAL, - projectBotService + projectBotService, + kmsService, + secretV2BridgeDAL, + secretVersionV2TagBridgeDAL: secretVersionTagV2BridgeDAL, + secretVersionV2BridgeDAL }); const secretRotationQueue = secretRotationQueueFactory({ telemetryService, @@ -698,7 +1038,10 @@ export const registerRoutes = async ( queue: queueService, secretDAL, secretVersionDAL, - projectBotService + projectBotService, + secretVersionV2BridgeDAL, + secretV2BridgeDAL, + kmsService }); const secretRotationService = secretRotationServiceFactory({ @@ -708,7 +1051,9 @@ export const registerRoutes = async ( projectDAL, licenseService, secretDAL, - folderDAL + folderDAL, + projectBotService, + secretV2BridgeDAL }); const integrationService = integrationServiceFactory({ @@ -716,25 +1061,46 @@ export const registerRoutes = async ( folderDAL, integrationDAL, integrationAuthDAL, - secretQueueService + secretQueueService, + integrationAuthService, + projectBotService, + secretV2BridgeDAL, + secretImportDAL, + secretDAL, + kmsService }); + + const accessTokenQueue = accessTokenQueueServiceFactory({ + keyStore, + identityAccessTokenDAL, + queueService, + serviceTokenDAL + }); + const serviceTokenService = serviceTokenServiceFactory({ projectEnvDAL, serviceTokenDAL, userDAL, permissionService, - projectDAL + projectDAL, + accessTokenQueue }); const identityService = identityServiceFactory({ permissionService, identityDAL, - identityOrgMembershipDAL + identityOrgMembershipDAL, + identityProjectDAL, + licenseService, + identityMetadataDAL }); + const identityAccessTokenService = identityAccessTokenServiceFactory({ identityAccessTokenDAL, - identityOrgMembershipDAL + identityOrgMembershipDAL, + accessTokenQueue }); + const identityProjectService = identityProjectServiceFactory({ permissionService, projectDAL, @@ -749,10 +1115,24 @@ export const registerRoutes = async ( permissionService, identityProjectDAL }); + + const identityProjectAdditionalPrivilegeV2Service = identityProjectAdditionalPrivilegeV2ServiceFactory({ + projectDAL, + identityProjectAdditionalPrivilegeDAL, + permissionService, + identityProjectDAL + }); + + const identityTokenAuthService = identityTokenAuthServiceFactory({ + identityTokenAuthDAL, + identityOrgMembershipDAL, + identityAccessTokenDAL, + permissionService, + licenseService + }); const identityUaService = identityUaServiceFactory({ identityOrgMembershipDAL, permissionService, - identityDAL, identityAccessTokenDAL, identityUaClientSecretDAL, identityUaDAL, @@ -762,7 +1142,6 @@ export const registerRoutes = async ( identityKubernetesAuthDAL, identityOrgMembershipDAL, identityAccessTokenDAL, - identityDAL, orgBotDAL, permissionService, licenseService @@ -771,7 +1150,6 @@ export const registerRoutes = async ( identityGcpAuthDAL, identityOrgMembershipDAL, identityAccessTokenDAL, - identityDAL, permissionService, licenseService }); @@ -780,7 +1158,6 @@ export const registerRoutes = async ( identityAccessTokenDAL, identityAwsAuthDAL, identityOrgMembershipDAL, - identityDAL, licenseService, 
permissionService }); @@ -789,11 +1166,19 @@ export const registerRoutes = async ( identityAzureAuthDAL, identityOrgMembershipDAL, identityAccessTokenDAL, - identityDAL, permissionService, licenseService }); + const identityOidcAuthService = identityOidcAuthServiceFactory({ + identityOidcAuthDAL, + identityOrgMembershipDAL, + identityAccessTokenDAL, + permissionService, + licenseService, + orgBotDAL + }); + const dynamicSecretProviders = buildDynamicSecretProviders(); const dynamicSecretQueueService = dynamicSecretLeaseQueueServiceFactory({ queueService, @@ -824,17 +1209,95 @@ export const registerRoutes = async ( const dailyResourceCleanUp = dailyResourceCleanUpQueueServiceFactory({ auditLogDAL, queueService, + secretVersionDAL, + secretFolderVersionDAL: folderVersionDAL, + snapshotDAL, identityAccessTokenDAL, - secretSharingDAL + secretSharingDAL, + secretVersionV2DAL: secretVersionV2BridgeDAL, + identityUniversalAuthClientSecretDAL: identityUaClientSecretDAL + }); + + const dailyExpiringPkiItemAlert = dailyExpiringPkiItemAlertQueueServiceFactory({ + queueService, + pkiAlertService + }); + + const oidcService = oidcConfigServiceFactory({ + orgDAL, + orgMembershipDAL, + userDAL, + userAliasDAL, + licenseService, + tokenService, + smtpService, + orgBotDAL, + permissionService, + oidcConfigDAL + }); + + const userEngagementService = userEngagementServiceFactory({ + userDAL + }); + + const slackService = slackServiceFactory({ + permissionService, + kmsService, + slackIntegrationDAL, + workflowIntegrationDAL + }); + + const workflowIntegrationService = workflowIntegrationServiceFactory({ + permissionService, + workflowIntegrationDAL + }); + + const cmekService = cmekServiceFactory({ + kmsDAL, + kmsService, + permissionService + }); + + const externalMigrationQueue = externalMigrationQueueFactory({ + projectEnvService, + projectDAL, + projectService, + smtpService, + kmsService, + projectEnvDAL, + secretVersionDAL: secretVersionV2BridgeDAL, + secretTagDAL, + secretVersionTagDAL: secretVersionTagV2BridgeDAL, + folderDAL, + secretDAL: secretV2BridgeDAL, + queueService, + secretV2BridgeService + }); + + const migrationService = externalMigrationServiceFactory({ + externalMigrationQueue, + userDAL, + permissionService + }); + + const externalGroupOrgRoleMappingService = externalGroupOrgRoleMappingServiceFactory({ + permissionService, + licenseService, + orgRoleDAL, + externalGroupOrgRoleMappingDAL }); await superAdminService.initServerCfg(); - // + // setup the communication with license key server await licenseService.init(); + // Start HSM service if it's configured/enabled. 
+ await hsmService.startService(); + await telemetryQueue.startTelemetryCheck(); await dailyResourceCleanUp.startCleanUp(); + await dailyExpiringPkiItemAlert.startSendingAlerts(); await kmsService.startService(); // inject all services @@ -848,6 +1311,7 @@ export const registerRoutes = async ( permission: permissionService, org: orgService, orgRole: orgRoleService, + oidc: oidcService, apiKey: apiKeyService, authToken: tokenService, superAdmin: superAdminService, @@ -859,6 +1323,7 @@ export const registerRoutes = async ( secret: secretService, secretReplication: secretReplicationService, secretTag: secretTagService, + rateLimit: rateLimitService, folder: folderService, secretImport: secretImportService, projectBot: projectBotService, @@ -869,11 +1334,13 @@ export const registerRoutes = async ( identity: identityService, identityAccessToken: identityAccessTokenService, identityProject: identityProjectService, + identityTokenAuth: identityTokenAuthService, identityUa: identityUaService, identityKubernetesAuth: identityKubernetesAuthService, identityGcpAuth: identityGcpAuthService, identityAwsAuth: identityAwsAuthService, identityAzureAuth: identityAzureAuthService, + identityOidcAuth: identityOidcAuthService, accessApprovalPolicy: accessApprovalPolicyService, accessApprovalRequest: accessApprovalRequestService, secretApprovalPolicy: secretApprovalPolicyService, @@ -886,6 +1353,13 @@ export const registerRoutes = async ( ldap: ldapService, auditLog: auditLogService, auditLogStream: auditLogStreamService, + certificate: certificateService, + certificateAuthority: certificateAuthorityService, + certificateTemplate: certificateTemplateService, + certificateAuthorityCrl: certificateAuthorityCrlService, + certificateEst: certificateEstService, + pkiAlert: pkiAlertService, + pkiCollection: pkiCollectionService, secretScanning: secretScanningService, license: licenseService, trustedIp: trustedIpService, @@ -894,15 +1368,36 @@ export const registerRoutes = async ( telemetry: telemetryService, projectUserAdditionalPrivilege: projectUserAdditionalPrivilegeService, identityProjectAdditionalPrivilege: identityProjectAdditionalPrivilegeService, - secretSharing: secretSharingService + identityProjectAdditionalPrivilegeV2: identityProjectAdditionalPrivilegeV2Service, + secretSharing: secretSharingService, + userEngagement: userEngagementService, + externalKms: externalKmsService, + hsm: hsmService, + cmek: cmekService, + orgAdmin: orgAdminService, + slack: slackService, + workflowIntegration: workflowIntegrationService, + migration: migrationService, + externalGroupOrgRoleMapping: externalGroupOrgRoleMappingService, + projectTemplate: projectTemplateService, + totp: totpService }); + const cronJobs: CronJob[] = []; + if (appCfg.isProductionMode) { + const rateLimitSyncJob = await rateLimitService.initializeBackgroundSync(); + if (rateLimitSyncJob) { + cronJobs.push(rateLimitSyncJob); + } + } + server.decorate("store", { user: userDAL }); await server.register(injectIdentity, { userDAL, serviceTokenDAL }); await server.register(injectPermission); + await server.register(injectRateLimits); await server.register(injectAuditLogInfo); server.route({ @@ -915,7 +1410,7 @@ export const registerRoutes = async ( response: { 200: z.object({ date: z.date(), - message: z.literal("Ok"), + message: z.string().optional(), emailConfigured: z.boolean().optional(), inviteOnlySignup: z.boolean().optional(), redisConfigured: z.boolean().optional(), @@ -927,9 +1422,34 @@ export const registerRoutes = async ( handler: async () => 
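One lifecycle detail in the block above: initializeBackgroundSync presumably returns an already-started CronJob, or undefined when the sync does not apply, and the onClose hook later stops every collected job so shutdown stays clean. A sketch of that assumed contract (schedule and body are illustrative, not taken from the service):

import { CronJob } from "cron";

const initializeBackgroundSync = async (): Promise<CronJob | undefined> => {
  const job = new CronJob("*/10 * * * * *", () => {
    // periodically refresh rate limit configuration
  });
  job.start();
  return job;
};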
{ const cfg = getConfig(); const serverCfg = await getServerCfg(); + + // try { + // await db.raw("SELECT NOW()"); + // } catch (err) { + // logger.error("Health check: database connection failed", err); + // return reply.code(503).send({ + // date: new Date(), + // message: "Service unavailable" + // }); + // } + + // if (cfg.isRedisConfigured) { + // const redis = new Redis(cfg.REDIS_URL); + // try { + // await redis.ping(); + // redis.disconnect(); + // } catch (err) { + // logger.error("Health check: redis connection failed", err); + // return reply.code(503).send({ + // date: new Date(), + // message: "Service unavailable" + // }); + // } + // } + return { date: new Date(), - message: "Ok" as const, + message: "Ok", emailConfigured: cfg.isSmtpConfigured, inviteOnlySignup: Boolean(serverCfg.allowSignUp), redisConfigured: cfg.isRedisConfigured, @@ -939,6 +1459,9 @@ } }); + // register special routes + await server.register(registerCertificateEstRouter, { prefix: "/.well-known/est" }); + // register routes for v1 await server.register( async (v1Server) => { @@ -947,10 +1470,17 @@ }, { prefix: "/api/v1" } ); - await server.register(registerV2Routes, { prefix: "/api/v2" }); + await server.register( + async (v2Server) => { + await v2Server.register(registerV2EERoutes); + await v2Server.register(registerV2Routes); + }, + { prefix: "/api/v2" } + ); await server.register(registerV3Routes, { prefix: "/api/v3" }); server.addHook("onClose", async () => { + cronJobs.forEach((job) => job.stop()); await telemetryService.flushAll(); }); }; diff --git a/backend/src/server/routes/sanitizedSchemas.ts b/backend/src/server/routes/sanitizedSchemas.ts index 5b0b754f30..87fa2b1200 100644 --- a/backend/src/server/routes/sanitizedSchemas.ts +++ b/backend/src/server/routes/sanitizedSchemas.ts @@ -5,12 +5,14 @@ import { IdentityProjectAdditionalPrivilegeSchema, IntegrationAuthsSchema, ProjectRolesSchema, + ProjectsSchema, SecretApprovalPoliciesSchema, UsersSchema } from "@app/db/schemas"; -import { UnpackedPermissionSchema } from "@app/ee/services/identity-project-additional-privilege/identity-project-additional-privilege-service"; import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission"; +import { UnpackedPermissionSchema } from "./santizedSchemas/permission"; + // sometimes the return data must be sanitized to avoid leaking important values // always prefer pick over omit in zod export const integrationAuthPubSchema = IntegrationAuthsSchema.pick({ @@ -26,6 +28,34 @@ export const integrationAuthPubSchema = IntegrationAuthsSchema.pick({ updatedAt: true }); +export const DefaultResponseErrorsSchema = { + 400: z.object({ + statusCode: z.literal(400), + message: z.string(), + error: z.string() + }), + 404: z.object({ + statusCode: z.literal(404), + message: z.string(), + error: z.string() + }), + 401: z.object({ + statusCode: z.literal(401), + message: z.any(), + error: z.string() + }), + 403: z.object({ + statusCode: z.literal(403), + message: z.string(), + error: z.string() + }), + 500: z.object({ + statusCode: z.literal(500), + message: z.string(), + error: z.string() + }) +}; + export const sapPubSchema = SecretApprovalPoliciesSchema.merge( z.object({ environment: z.object({ @@ -63,7 +93,13 @@ export const secretRawSchema = z.object({ type: z.string(), secretKey: z.string(), secretValue: z.string(), - secretComment: z.string().optional() + secretComment: z.string(), + 
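Note how DefaultResponseErrorsSchema interacts with the onRoute hook registered earlier in add-errors-to-response-schemas.ts: the defaults are spread first, so any status code a route declares itself wins the key collision, and every other default gains the shared error shape. Roughly, for a hypothetical route schema:

// before the hook: response: { 200: z.object({ ok: z.boolean() }) }
// after the hook, effectively:
const response = {
  ...DefaultResponseErrorsSchema,
  200: z.object({ ok: z.boolean() }) // route-declared keys override the defaults
};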
secretReminderNote: z.string().nullable().optional(), + secretReminderRepeatDays: z.number().nullable().optional(), + skipMultilineEncoding: z.boolean().default(false).nullable().optional(), + metadata: z.unknown().nullable().optional(), + createdAt: z.date(), + updatedAt: z.date() }); export const ProjectPermissionSchema = z.object({ @@ -114,13 +150,44 @@ export const ProjectSpecificPrivilegePermissionSchema = z.object({ }); export const SanitizedIdentityPrivilegeSchema = IdentityProjectAdditionalPrivilegeSchema.extend({ - permissions: UnpackedPermissionSchema.array() + permissions: UnpackedPermissionSchema.array().transform((permissions) => + permissions.filter( + (caslRule) => + ![ + ProjectPermissionSub.DynamicSecrets, + ProjectPermissionSub.SecretImports, + ProjectPermissionSub.SecretFolders + ].includes((caslRule?.subject as ProjectPermissionSub) || "") + ) + ) }); export const SanitizedRoleSchema = ProjectRolesSchema.extend({ permissions: UnpackedPermissionSchema.array() }); +export const SanitizedRoleSchemaV1 = ProjectRolesSchema.extend({ + permissions: UnpackedPermissionSchema.array().transform((caslPermission) => + // first map and remove other actions of folder permission + caslPermission + .map((caslRule) => + caslRule.subject === ProjectPermissionSub.SecretFolders + ? { + ...caslRule, + action: caslRule.action.filter((caslAction) => caslAction === ProjectPermissionActions.Read) + } + : caslRule + ) + // now filter out dynamic secret, secret import permission + .filter( + (caslRule) => + ![ProjectPermissionSub.DynamicSecrets, ProjectPermissionSub.SecretImports].includes( + (caslRule?.subject as ProjectPermissionSub) || "" + ) && caslRule.action.length > 0 + ) + ) +}); + export const SanitizedDynamicSecretSchema = DynamicSecretsSchema.omit({ inputIV: true, inputTag: true, @@ -135,3 +202,18 @@ export const SanitizedAuditLogStreamSchema = z.object({ createdAt: z.date(), updatedAt: z.date() }); + +export const SanitizedProjectSchema = ProjectsSchema.pick({ + id: true, + name: true, + slug: true, + autoCapitalization: true, + orgId: true, + createdAt: true, + updatedAt: true, + version: true, + upgradeStatus: true, + pitVersionLimit: true, + kmsCertificateKeyId: true, + auditLogsRetentionDays: true +}); diff --git a/backend/src/server/routes/santizedSchemas/identitiy-additional-privilege.ts b/backend/src/server/routes/santizedSchemas/identitiy-additional-privilege.ts new file mode 100644 index 0000000000..e44b9af4ec --- /dev/null +++ b/backend/src/server/routes/santizedSchemas/identitiy-additional-privilege.ts @@ -0,0 +1,7 @@ +import { IdentityProjectAdditionalPrivilegeSchema } from "@app/db/schemas"; + +import { UnpackedPermissionSchema } from "./permission"; + +export const SanitizedIdentityPrivilegeSchema = IdentityProjectAdditionalPrivilegeSchema.extend({ + permissions: UnpackedPermissionSchema.array() +}); diff --git a/backend/src/server/routes/santizedSchemas/permission.ts b/backend/src/server/routes/santizedSchemas/permission.ts new file mode 100644 index 0000000000..5b5fdccacc --- /dev/null +++ b/backend/src/server/routes/santizedSchemas/permission.ts @@ -0,0 +1,16 @@ +import { MongoAbility, RawRuleOf } from "@casl/ability"; +import { PackRule, unpackRules } from "@casl/ability/extra"; +import { z } from "zod"; + +export const UnpackedPermissionSchema = z.object({ + subject: z + .union([z.string().min(1), z.string().array()]) + .transform((el) => (typeof el !== "string" ? 
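// The ternary here normalizes CASL's packed `subject`, which unpackRules may yield as a
// string or a string array; arrays collapse to their first element while `action` is
// normalized the opposite way, to an array. A behavior sketch of this schema:
//
//   UnpackedPermissionSchema.parse({ subject: ["secrets"], action: "read" });
//   // -> { subject: "secrets", action: ["read"] }
//   UnpackedPermissionSchema.parse({ subject: "secrets", action: ["read", "edit"] });
//   // -> { subject: "secrets", action: ["read", "edit"] }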
el[0] : el)) + .optional(), + action: z.union([z.string().min(1), z.string().array()]).transform((el) => (typeof el === "string" ? [el] : el)), + conditions: z.unknown().optional(), + inverted: z.boolean().optional() +}); + +export const unpackPermissions = (permissions: unknown) => + UnpackedPermissionSchema.array().parse(unpackRules((permissions || []) as PackRule<RawRuleOf<MongoAbility>>[])); diff --git a/backend/src/server/routes/santizedSchemas/user-additional-privilege.ts b/backend/src/server/routes/santizedSchemas/user-additional-privilege.ts new file mode 100644 index 0000000000..502ab0f7c6 --- /dev/null +++ b/backend/src/server/routes/santizedSchemas/user-additional-privilege.ts @@ -0,0 +1,7 @@ +import { ProjectUserAdditionalPrivilegeSchema } from "@app/db/schemas"; + +import { UnpackedPermissionSchema } from "./permission"; + +export const SanitizedUserProjectAdditionalPrivilegeSchema = ProjectUserAdditionalPrivilegeSchema.extend({ + permissions: UnpackedPermissionSchema.array() +}); diff --git a/backend/src/server/routes/v1/admin-router.ts b/backend/src/server/routes/v1/admin-router.ts index 572409d9b0..6ecebb274c 100644 --- a/backend/src/server/routes/v1/admin-router.ts +++ b/backend/src/server/routes/v1/admin-router.ts @@ -2,12 +2,14 @@ import { z } from "zod"; import { OrganizationsSchema, SuperAdminSchema, UsersSchema } from "@app/db/schemas"; import { getConfig } from "@app/lib/config/env"; -import { UnauthorizedError } from "@app/lib/errors"; +import { BadRequestError } from "@app/lib/errors"; import { readLimit, writeLimit } from "@app/server/config/rateLimiter"; import { verifySuperAdmin } from "@app/server/plugins/auth/superAdmin"; import { verifyAuth } from "@app/server/plugins/auth/verify-auth"; import { AuthMode } from "@app/services/auth/auth-type"; +import { RootKeyEncryptionStrategy } from "@app/services/kms/kms-types"; import { getServerCfg } from "@app/services/super-admin/super-admin-service"; +import { LoginMethod } from "@app/services/super-admin/super-admin-types"; import { PostHogEventTypes } from "@app/services/telemetry/telemetry-types"; export const registerAdminRouter = async (server: FastifyZodProvider) => { @@ -20,8 +22,16 @@ export const registerAdminRouter = async (server: FastifyZodProvider) => { schema: { response: { 200: z.object({ - config: SuperAdminSchema.omit({ createdAt: true, updatedAt: true }).extend({ + config: SuperAdminSchema.omit({ + createdAt: true, + updatedAt: true, + encryptedSlackClientId: true, + encryptedSlackClientSecret: true + }).extend({ isMigrationModeOn: z.boolean(), + defaultAuthOrgSlug: z.string().nullable(), + defaultAuthOrgAuthEnforced: z.boolean().nullish(), + defaultAuthOrgAuthMethod: z.string().nullish(), isSecretScanningDisabled: z.boolean() }) }) @@ -51,11 +61,24 @@ export const registerAdminRouter = async (server: FastifyZodProvider) => { allowSignUp: z.boolean().optional(), allowedSignUpDomain: z.string().optional().nullable(), trustSamlEmails: z.boolean().optional(), - trustLdapEmails: z.boolean().optional() + trustLdapEmails: z.boolean().optional(), + trustOidcEmails: z.boolean().optional(), + defaultAuthOrgId: z.string().optional().nullable(), + enabledLoginMethods: z + .nativeEnum(LoginMethod) + .array() + .optional() + .refine((methods) => !methods || methods.length > 0, { + message: "At least one login method should be enabled."
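// Note on the refine directly above: because enabledLoginMethods is .optional(),
// undefined passes (the setting is simply left unchanged), while an explicit empty
// array is rejected. The same pattern in isolation (local enum for illustration only):
//
//   import { z } from "zod";
//   enum LoginMethod { EMAIL = "email", GOOGLE = "google" }
//   const methods = z
//     .nativeEnum(LoginMethod)
//     .array()
//     .optional()
//     .refine((m) => !m || m.length > 0, { message: "At least one login method should be enabled." });
//   methods.parse(undefined);           // ok -> undefined
//   methods.parse([LoginMethod.EMAIL]); // ok
//   methods.parse([]);                  // throws ZodError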
+ }), + slackClientId: z.string().optional(), + slackClientSecret: z.string().optional() }), response: { 200: z.object({ - config: SuperAdminSchema + config: SuperAdminSchema.extend({ + defaultAuthOrgSlug: z.string().nullable() + }) }) } }, @@ -65,11 +88,165 @@ export const registerAdminRouter = async (server: FastifyZodProvider) => { }); }, handler: async (req) => { - const config = await server.services.superAdmin.updateServerCfg(req.body); + const config = await server.services.superAdmin.updateServerCfg(req.body, req.permission.id); return { config }; } }); + server.route({ + method: "GET", + url: "/user-management/users", + config: { + rateLimit: readLimit + }, + schema: { + querystring: z.object({ + searchTerm: z.string().default(""), + offset: z.coerce.number().default(0), + limit: z.coerce.number().max(100).default(20) + }), + response: { + 200: z.object({ + users: UsersSchema.pick({ + username: true, + firstName: true, + lastName: true, + email: true, + id: true, + superAdmin: true + }).array() + }) + } + }, + onRequest: (req, res, done) => { + verifyAuth([AuthMode.JWT])(req, res, () => { + verifySuperAdmin(req, res, done); + }); + }, + handler: async (req) => { + const users = await server.services.superAdmin.getUsers({ + ...req.query + }); + + return { + users + }; + } + }); + + server.route({ + method: "GET", + url: "/integrations/slack/config", + config: { + rateLimit: readLimit + }, + schema: { + response: { + 200: z.object({ + clientId: z.string(), + clientSecret: z.string() + }) + } + }, + onRequest: (req, res, done) => { + verifyAuth([AuthMode.JWT])(req, res, () => { + verifySuperAdmin(req, res, done); + }); + }, + handler: async () => { + const adminSlackConfig = await server.services.superAdmin.getAdminSlackConfig(); + + return adminSlackConfig; + } + }); + + server.route({ + method: "DELETE", + url: "/user-management/users/:userId", + config: { + rateLimit: writeLimit + }, + schema: { + params: z.object({ + userId: z.string() + }), + response: { + 200: z.object({ + users: UsersSchema.pick({ + username: true, + firstName: true, + lastName: true, + email: true, + id: true + }) + }) + } + }, + onRequest: (req, res, done) => { + verifyAuth([AuthMode.JWT])(req, res, () => { + verifySuperAdmin(req, res, done); + }); + }, + handler: async (req) => { + const users = await server.services.superAdmin.deleteUser(req.params.userId); + + return { + users + }; + } + }); + + server.route({ + method: "GET", + url: "/encryption-strategies", + config: { + rateLimit: readLimit + }, + schema: { + response: { + 200: z.object({ + strategies: z + .object({ + strategy: z.nativeEnum(RootKeyEncryptionStrategy), + enabled: z.boolean() + }) + .array() + }) + } + }, + onRequest: (req, res, done) => { + verifyAuth([AuthMode.JWT])(req, res, () => { + verifySuperAdmin(req, res, done); + }); + }, + + handler: async () => { + const encryptionDetails = await server.services.superAdmin.getConfiguredEncryptionStrategies(); + return encryptionDetails; + } + }); + + server.route({ + method: "PATCH", + url: "/encryption-strategies", + config: { + rateLimit: writeLimit + }, + schema: { + body: z.object({ + strategy: z.nativeEnum(RootKeyEncryptionStrategy) + }) + }, + onRequest: (req, res, done) => { + verifyAuth([AuthMode.JWT])(req, res, () => { + verifySuperAdmin(req, res, done); + }); + }, + handler: async (req) => { + await server.services.superAdmin.updateRootEncryptionStrategy(req.body.strategy); + } + }); + server.route({ method: "POST", url: "/signup", @@ -79,6 +256,7 @@ export const registerAdminRouter 
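// The two /encryption-strategies routes above pair naturally: GET reports which root-key
// encryption strategies exist and whether each is enabled, PATCH switches the active one.
// A hedged client sketch; the host, admin route prefix, `adminJwt` variable, and the
// exact RootKeyEncryptionStrategy literal are all assumptions:
//
//   await fetch("http://localhost:8080/api/v1/admin/encryption-strategies", {
//     method: "PATCH",
//     headers: { Authorization: `Bearer ${adminJwt}`, "Content-Type": "application/json" },
//     body: JSON.stringify({ strategy: "HSM" })
//   });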
= async (server: FastifyZodProvider) => { schema: { body: z.object({ email: z.string().email().trim(), + password: z.string().trim(), firstName: z.string().trim(), lastName: z.string().trim().optional(), protectedKey: z.string().trim(), @@ -104,8 +282,7 @@ export const registerAdminRouter = async (server: FastifyZodProvider) => { handler: async (req, res) => { const appCfg = getConfig(); const serverCfg = await getServerCfg(); - if (serverCfg.initialized) - throw new UnauthorizedError({ name: "Admin sign up", message: "Admin has been created" }); + if (serverCfg.initialized) throw new BadRequestError({ message: "Admin account has already been set up" }); const { user, token, organization } = await server.services.superAdmin.adminSignUp({ ...req.body, ip: req.realIp, diff --git a/backend/src/server/routes/v1/auth-router.ts b/backend/src/server/routes/v1/auth-router.ts index 7f09a904b6..6e39d0451d 100644 --- a/backend/src/server/routes/v1/auth-router.ts +++ b/backend/src/server/routes/v1/auth-router.ts @@ -2,7 +2,7 @@ import jwt from "jsonwebtoken"; import { z } from "zod"; import { getConfig } from "@app/lib/config/env"; -import { BadRequestError, UnauthorizedError } from "@app/lib/errors"; +import { NotFoundError, UnauthorizedError } from "@app/lib/errors"; import { authRateLimit, writeLimit } from "@app/server/config/rateLimiter"; import { verifyAuth } from "@app/server/plugins/auth/verify-auth"; import { AuthMode, AuthModeRefreshJwtTokenPayload, AuthTokenType } from "@app/services/auth/auth-type"; @@ -71,23 +71,34 @@ export const registerAuthRoutes = async (server: FastifyZodProvider) => { const refreshToken = req.cookies.jid; const appCfg = getConfig(); if (!refreshToken) - throw new BadRequestError({ - name: "Auth token route", - message: "Failed to find refresh token" + throw new NotFoundError({ + name: "AuthTokenNotFound", + message: "Failed to find refresh token" }); const decodedToken = jwt.verify(refreshToken, appCfg.AUTH_SECRET) as AuthModeRefreshJwtTokenPayload; if (decodedToken.authTokenType !== AuthTokenType.REFRESH_TOKEN) - throw new UnauthorizedError({ message: "Invalid token", name: "Auth token route" }); + throw new UnauthorizedError({ + message: "The token provided is not a refresh token", + name: "InvalidToken" + }); const tokenVersion = await server.services.authToken.getUserTokenSessionById( decodedToken.tokenVersionId, decodedToken.userId ); - if (!tokenVersion) throw new UnauthorizedError({ message: "Invalid token", name: "Auth token route" }); + if (!tokenVersion) + throw new UnauthorizedError({ + message: "Valid token version not found", + name: "InvalidToken" + }); - if (decodedToken.refreshVersion !== tokenVersion.refreshVersion) - throw new UnauthorizedError({ message: "Invalid token", name: "Auth token route" }); + if (decodedToken.refreshVersion !== tokenVersion.refreshVersion) { + throw new UnauthorizedError({ + message: "Token version mismatch", + name: "InvalidToken" + }); + } const token = jwt.sign( { @@ -96,7 +107,9 @@ export const registerAuthRoutes = async (server: FastifyZodProvider) => { userId: decodedToken.userId, tokenVersionId: tokenVersion.id, accessVersion: tokenVersion.accessVersion, - organizationId: decodedToken.organizationId + organizationId: decodedToken.organizationId, + isMfaVerified: decodedToken.isMfaVerified, + mfaMethod: decodedToken.mfaMethod }, appCfg.AUTH_SECRET, { expiresIn: appCfg.JWT_AUTH_LIFETIME } diff --git a/backend/src/server/routes/v1/certificate-authority-router.ts 
b/backend/src/server/routes/v1/certificate-authority-router.ts new file mode 100644 index 0000000000..88ec8500ea --- /dev/null +++ b/backend/src/server/routes/v1/certificate-authority-router.ts @@ -0,0 +1,866 @@ +/* eslint-disable @typescript-eslint/no-floating-promises */ +import ms from "ms"; +import { z } from "zod"; + +import { CertificateAuthoritiesSchema, CertificateTemplatesSchema } from "@app/db/schemas"; +import { EventType } from "@app/ee/services/audit-log/audit-log-types"; +import { CERTIFICATE_AUTHORITIES } from "@app/lib/api-docs"; +import { readLimit, writeLimit } from "@app/server/config/rateLimiter"; +import { verifyAuth } from "@app/server/plugins/auth/verify-auth"; +import { AuthMode } from "@app/services/auth/auth-type"; +import { CertExtendedKeyUsage, CertKeyAlgorithm, CertKeyUsage } from "@app/services/certificate/certificate-types"; +import { CaRenewalType, CaStatus, CaType } from "@app/services/certificate-authority/certificate-authority-types"; +import { + validateAltNamesField, + validateCaDateField +} from "@app/services/certificate-authority/certificate-authority-validators"; + +export const registerCaRouter = async (server: FastifyZodProvider) => { + server.route({ + method: "POST", + url: "/", + config: { + rateLimit: writeLimit + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + schema: { + description: "Create CA", + body: z + .object({ + projectSlug: z.string().trim().describe(CERTIFICATE_AUTHORITIES.CREATE.projectSlug), + type: z.nativeEnum(CaType).describe(CERTIFICATE_AUTHORITIES.CREATE.type), + friendlyName: z.string().optional().describe(CERTIFICATE_AUTHORITIES.CREATE.friendlyName), + commonName: z.string().trim().describe(CERTIFICATE_AUTHORITIES.CREATE.commonName), + organization: z.string().trim().describe(CERTIFICATE_AUTHORITIES.CREATE.organization), + ou: z.string().trim().describe(CERTIFICATE_AUTHORITIES.CREATE.ou), + country: z.string().trim().describe(CERTIFICATE_AUTHORITIES.CREATE.country), + province: z.string().trim().describe(CERTIFICATE_AUTHORITIES.CREATE.province), + locality: z.string().trim().describe(CERTIFICATE_AUTHORITIES.CREATE.locality), + // format: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date#date_time_string_format + notBefore: validateCaDateField.optional().describe(CERTIFICATE_AUTHORITIES.CREATE.notBefore), + notAfter: validateCaDateField.optional().describe(CERTIFICATE_AUTHORITIES.CREATE.notAfter), + maxPathLength: z.number().min(-1).default(-1).describe(CERTIFICATE_AUTHORITIES.CREATE.maxPathLength), + keyAlgorithm: z + .nativeEnum(CertKeyAlgorithm) + .default(CertKeyAlgorithm.RSA_2048) + .describe(CERTIFICATE_AUTHORITIES.CREATE.keyAlgorithm), + requireTemplateForIssuance: z + .boolean() + .default(false) + .describe(CERTIFICATE_AUTHORITIES.CREATE.requireTemplateForIssuance) + }) + .refine( + (data) => { + // Check that at least one of the specified fields is non-empty + return [data.commonName, data.organization, data.ou, data.country, data.province, data.locality].some( + (field) => field !== "" + ); + }, + { + message: + "At least one of the fields commonName, organization, ou, country, province, or locality must be non-empty", + path: [] + } + ), + response: { + 200: z.object({ + ca: CertificateAuthoritiesSchema + }) + } + }, + handler: async (req) => { + const ca = await server.services.certificateAuthority.createCa({ + actor: req.permission.type, + actorId: req.permission.id, + actorAuthMethod: req.permission.authMethod, + actorOrgId: 
req.permission.orgId, + ...req.body + }); + + await server.services.auditLog.createAuditLog({ + ...req.auditLogInfo, + projectId: ca.projectId, + event: { + type: EventType.CREATE_CA, + metadata: { + caId: ca.id, + dn: ca.dn + } + } + }); + + return { + ca + }; + } + }); + + server.route({ + method: "GET", + url: "/:caId", + config: { + rateLimit: readLimit + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + schema: { + description: "Get CA", + params: z.object({ + caId: z.string().trim().describe(CERTIFICATE_AUTHORITIES.GET.caId) + }), + response: { + 200: z.object({ + ca: CertificateAuthoritiesSchema + }) + } + }, + handler: async (req) => { + const ca = await server.services.certificateAuthority.getCaById({ + caId: req.params.caId, + actor: req.permission.type, + actorId: req.permission.id, + actorAuthMethod: req.permission.authMethod, + actorOrgId: req.permission.orgId + }); + + await server.services.auditLog.createAuditLog({ + ...req.auditLogInfo, + projectId: ca.projectId, + event: { + type: EventType.GET_CA, + metadata: { + caId: ca.id, + dn: ca.dn + } + } + }); + + return { + ca + }; + } + }); + + // this endpoint will be used to serve the CA certificate when a client makes a request + // against the Authority Information Access CA Issuer URL + server.route({ + method: "GET", + url: "/:caId/certificates/:caCertId/der", + config: { + rateLimit: readLimit + }, + schema: { + description: "Get DER-encoded certificate of CA", + params: z.object({ + caId: z.string().trim().describe(CERTIFICATE_AUTHORITIES.GET_CERT_BY_ID.caId), + caCertId: z.string().trim().describe(CERTIFICATE_AUTHORITIES.GET_CERT_BY_ID.caCertId) + }), + response: { + 200: z.instanceof(Buffer) + } + }, + handler: async (req, res) => { + const caCert = await server.services.certificateAuthority.getCaCertById(req.params); + + res.header("Content-Type", "application/pkix-cert"); + + return Buffer.from(caCert.rawData); + } + }); + + server.route({ + method: "PATCH", + url: "/:caId", + config: { + rateLimit: readLimit + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + schema: { + description: "Update CA", + params: z.object({ + caId: z.string().trim().describe(CERTIFICATE_AUTHORITIES.UPDATE.caId) + }), + body: z.object({ + status: z.enum([CaStatus.ACTIVE, CaStatus.DISABLED]).optional().describe(CERTIFICATE_AUTHORITIES.UPDATE.status), + requireTemplateForIssuance: z + .boolean() + .optional() + .describe(CERTIFICATE_AUTHORITIES.CREATE.requireTemplateForIssuance) + }), + response: { + 200: z.object({ + ca: CertificateAuthoritiesSchema + }) + } + }, + handler: async (req) => { + const ca = await server.services.certificateAuthority.updateCaById({ + caId: req.params.caId, + actor: req.permission.type, + actorId: req.permission.id, + actorAuthMethod: req.permission.authMethod, + actorOrgId: req.permission.orgId, + ...req.body + }); + + await server.services.auditLog.createAuditLog({ + ...req.auditLogInfo, + projectId: ca.projectId, + event: { + type: EventType.UPDATE_CA, + metadata: { + caId: ca.id, + dn: ca.dn, + status: ca.status as CaStatus + } + } + }); + + return { + ca + }; + } + }); + + server.route({ + method: "DELETE", + url: "/:caId", + config: { + rateLimit: writeLimit + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + schema: { + description: "Delete CA", + params: z.object({ + caId: z.string().trim().describe(CERTIFICATE_AUTHORITIES.DELETE.caId) + }), + response: { + 200: z.object({ + ca: CertificateAuthoritiesSchema + }) + } + }, + 
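// Context for the DER endpoint defined above: a TLS client that finds a "CA Issuers" URI
// in a leaf certificate's Authority Information Access extension fetches it and expects
// raw DER bytes back. A consumer sketch; the base URL and route prefix are assumptions
// and the IDs are placeholders:
//
//   const res = await fetch(`${baseUrl}/api/v1/pki/ca/${caId}/certificates/${caCertId}/der`);
//   const der = Buffer.from(await res.arrayBuffer());
//   // res.headers.get("content-type") === "application/pkix-cert"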
handler: async (req) => { + const ca = await server.services.certificateAuthority.deleteCaById({ + caId: req.params.caId, + actor: req.permission.type, + actorId: req.permission.id, + actorAuthMethod: req.permission.authMethod, + actorOrgId: req.permission.orgId + }); + + await server.services.auditLog.createAuditLog({ + ...req.auditLogInfo, + projectId: ca.projectId, + event: { + type: EventType.DELETE_CA, + metadata: { + caId: ca.id, + dn: ca.dn + } + } + }); + + return { + ca + }; + } + }); + + server.route({ + method: "GET", + url: "/:caId/csr", + config: { + rateLimit: readLimit + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + schema: { + description: "Get CA CSR", + params: z.object({ + caId: z.string().trim().describe(CERTIFICATE_AUTHORITIES.GET_CSR.caId) + }), + response: { + 200: z.object({ + csr: z.string().describe(CERTIFICATE_AUTHORITIES.GET_CSR.csr) + }) + } + }, + handler: async (req) => { + const { ca, csr } = await server.services.certificateAuthority.getCaCsr({ + caId: req.params.caId, + actor: req.permission.type, + actorId: req.permission.id, + actorAuthMethod: req.permission.authMethod, + actorOrgId: req.permission.orgId + }); + + await server.services.auditLog.createAuditLog({ + ...req.auditLogInfo, + projectId: ca.projectId, + event: { + type: EventType.GET_CA_CSR, + metadata: { + caId: ca.id, + dn: ca.dn + } + } + }); + + return { + csr + }; + } + }); + + server.route({ + method: "POST", + url: "/:caId/renew", + config: { + rateLimit: writeLimit + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + schema: { + description: "Perform CA certificate renewal", + params: z.object({ + caId: z.string().trim().describe(CERTIFICATE_AUTHORITIES.RENEW_CA_CERT.caId) + }), + body: z.object({ + type: z.nativeEnum(CaRenewalType).describe(CERTIFICATE_AUTHORITIES.RENEW_CA_CERT.type), + notAfter: validateCaDateField.describe(CERTIFICATE_AUTHORITIES.RENEW_CA_CERT.notAfter) + }), + response: { + 200: z.object({ + certificate: z.string().trim().describe(CERTIFICATE_AUTHORITIES.RENEW_CA_CERT.certificate), + certificateChain: z.string().trim().describe(CERTIFICATE_AUTHORITIES.RENEW_CA_CERT.certificateChain), + serialNumber: z.string().trim().describe(CERTIFICATE_AUTHORITIES.RENEW_CA_CERT.serialNumber) + }) + } + }, + handler: async (req) => { + const { certificate, certificateChain, serialNumber, ca } = + await server.services.certificateAuthority.renewCaCert({ + caId: req.params.caId, + actor: req.permission.type, + actorId: req.permission.id, + actorAuthMethod: req.permission.authMethod, + actorOrgId: req.permission.orgId, + ...req.body + }); + + await server.services.auditLog.createAuditLog({ + ...req.auditLogInfo, + projectId: ca.projectId, + event: { + type: EventType.RENEW_CA, + metadata: { + caId: ca.id, + dn: ca.dn + } + } + }); + + return { + certificate, + certificateChain, + serialNumber + }; + } + }); + + server.route({ + method: "GET", + url: "/:caId/ca-certificates", + config: { + rateLimit: readLimit + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + schema: { + description: "Get list of past and current CA certificates for a CA", + params: z.object({ + caId: z.string().trim().describe(CERTIFICATE_AUTHORITIES.GET_CA_CERTS.caId) + }), + response: { + 200: z.array( + z.object({ + certificate: z.string().describe(CERTIFICATE_AUTHORITIES.GET_CA_CERTS.certificate), + certificateChain: z.string().describe(CERTIFICATE_AUTHORITIES.GET_CA_CERTS.certificateChain), + serialNumber: 
z.string().describe(CERTIFICATE_AUTHORITIES.GET_CA_CERTS.serialNumber), + version: z.number().describe(CERTIFICATE_AUTHORITIES.GET_CA_CERTS.version) + }) + ) + } + }, + handler: async (req) => { + const { caCerts, ca } = await server.services.certificateAuthority.getCaCerts({ + caId: req.params.caId, + actor: req.permission.type, + actorId: req.permission.id, + actorAuthMethod: req.permission.authMethod, + actorOrgId: req.permission.orgId + }); + + await server.services.auditLog.createAuditLog({ + ...req.auditLogInfo, + projectId: ca.projectId, + event: { + type: EventType.GET_CA_CERTS, + metadata: { + caId: ca.id, + dn: ca.dn + } + } + }); + + return caCerts; + } + }); + + server.route({ + method: "GET", + url: "/:caId/certificate", // TODO: consider updating endpoint structure considering CA certificates + config: { + rateLimit: readLimit + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + schema: { + description: "Get current CA cert and cert chain of a CA", + params: z.object({ + caId: z.string().trim().describe(CERTIFICATE_AUTHORITIES.GET_CERT.caId) + }), + response: { + 200: z.object({ + certificate: z.string().describe(CERTIFICATE_AUTHORITIES.GET_CERT.certificate), + certificateChain: z.string().describe(CERTIFICATE_AUTHORITIES.GET_CERT.certificateChain), + serialNumber: z.string().describe(CERTIFICATE_AUTHORITIES.GET_CERT.serialNumber) + }) + } + }, + handler: async (req) => { + const { certificate, certificateChain, serialNumber, ca } = await server.services.certificateAuthority.getCaCert({ + caId: req.params.caId, + actor: req.permission.type, + actorId: req.permission.id, + actorAuthMethod: req.permission.authMethod, + actorOrgId: req.permission.orgId + }); + + await server.services.auditLog.createAuditLog({ + ...req.auditLogInfo, + projectId: ca.projectId, + event: { + type: EventType.GET_CA_CERT, + metadata: { + caId: ca.id, + dn: ca.dn + } + } + }); + + return { + certificate, + certificateChain, + serialNumber + }; + } + }); + + server.route({ + method: "POST", + url: "/:caId/sign-intermediate", + config: { + rateLimit: writeLimit + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + schema: { + description: "Create intermediate CA certificate from parent CA", + params: z.object({ + caId: z.string().trim().describe(CERTIFICATE_AUTHORITIES.SIGN_INTERMEDIATE.caId) + }), + body: z.object({ + csr: z.string().trim().min(1).describe(CERTIFICATE_AUTHORITIES.SIGN_INTERMEDIATE.csr), + notBefore: validateCaDateField.optional().describe(CERTIFICATE_AUTHORITIES.SIGN_INTERMEDIATE.notBefore), + notAfter: validateCaDateField.describe(CERTIFICATE_AUTHORITIES.SIGN_INTERMEDIATE.notAfter), + maxPathLength: z.number().min(-1).default(-1).describe(CERTIFICATE_AUTHORITIES.SIGN_INTERMEDIATE.maxPathLength) + }), + response: { + 200: z.object({ + certificate: z.string().trim().describe(CERTIFICATE_AUTHORITIES.SIGN_INTERMEDIATE.certificate), + certificateChain: z.string().trim().describe(CERTIFICATE_AUTHORITIES.SIGN_INTERMEDIATE.certificateChain), + issuingCaCertificate: z + .string() + .trim() + .describe(CERTIFICATE_AUTHORITIES.SIGN_INTERMEDIATE.issuingCaCertificate), + serialNumber: z.string().trim().describe(CERTIFICATE_AUTHORITIES.SIGN_INTERMEDIATE.serialNumber) + }) + } + }, + handler: async (req) => { + const { certificate, certificateChain, issuingCaCertificate, serialNumber, ca } = + await server.services.certificateAuthority.signIntermediate({ + caId: req.params.caId, + actor: req.permission.type, + actorId: req.permission.id, + 
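// The intermediate-CA flow this handler completes, sketched end to end (the `api` helper,
// route prefix, and dates are illustrative assumptions): the child CA exposes a CSR, the
// parent signs it, and the signed chain is imported back into the child:
//
//   const { csr } = await api.get(`/ca/${childCaId}/csr`);
//   const signed = await api.post(`/ca/${parentCaId}/sign-intermediate`, {
//     csr,
//     notAfter: "2030-01-01T00:00:00Z",
//     maxPathLength: 0 // child may issue leaf certs but no further intermediates
//   });
//   await api.post(`/ca/${childCaId}/import-certificate`, {
//     certificate: signed.certificate,
//     certificateChain: signed.certificateChain
//   });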
actorAuthMethod: req.permission.authMethod, + actorOrgId: req.permission.orgId, + ...req.body + }); + + await server.services.auditLog.createAuditLog({ + ...req.auditLogInfo, + projectId: ca.projectId, + event: { + type: EventType.SIGN_INTERMEDIATE, + metadata: { + caId: ca.id, + dn: ca.dn, + serialNumber + } + } + }); + + return { + certificate, + certificateChain, + issuingCaCertificate, + serialNumber + }; + } + }); + + server.route({ + method: "POST", + url: "/:caId/import-certificate", + config: { + rateLimit: writeLimit + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + schema: { + description: "Import certificate and chain to CA", + params: z.object({ + caId: z.string().trim().describe(CERTIFICATE_AUTHORITIES.IMPORT_CERT.caId) + }), + body: z.object({ + certificate: z.string().trim().describe(CERTIFICATE_AUTHORITIES.IMPORT_CERT.certificate), + certificateChain: z.string().trim().describe(CERTIFICATE_AUTHORITIES.IMPORT_CERT.certificateChain) + }), + response: { + 200: z.object({ + message: z.string().trim(), + caId: z.string().trim() + }) + } + }, + handler: async (req) => { + const { ca } = await server.services.certificateAuthority.importCertToCa({ + caId: req.params.caId, + actor: req.permission.type, + actorId: req.permission.id, + actorAuthMethod: req.permission.authMethod, + actorOrgId: req.permission.orgId, + ...req.body + }); + + await server.services.auditLog.createAuditLog({ + ...req.auditLogInfo, + projectId: ca.projectId, + event: { + type: EventType.IMPORT_CA_CERT, + metadata: { + caId: ca.id, + dn: ca.dn + } + } + }); + + return { + message: "Successfully imported certificate to CA", + caId: req.params.caId + }; + } + }); + + server.route({ + method: "POST", + url: "/:caId/issue-certificate", + config: { + rateLimit: writeLimit + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + schema: { + description: "Issue certificate from CA", + params: z.object({ + caId: z.string().trim().describe(CERTIFICATE_AUTHORITIES.ISSUE_CERT.caId) + }), + body: z + .object({ + pkiCollectionId: z.string().trim().optional().describe(CERTIFICATE_AUTHORITIES.ISSUE_CERT.pkiCollectionId), + friendlyName: z.string().trim().optional().describe(CERTIFICATE_AUTHORITIES.ISSUE_CERT.friendlyName), + commonName: z.string().trim().min(1).describe(CERTIFICATE_AUTHORITIES.ISSUE_CERT.commonName), + altNames: validateAltNamesField.describe(CERTIFICATE_AUTHORITIES.ISSUE_CERT.altNames), + ttl: z + .string() + .refine((val) => ms(val) > 0, "TTL must be a positive number") + .describe(CERTIFICATE_AUTHORITIES.ISSUE_CERT.ttl), + notBefore: validateCaDateField.optional().describe(CERTIFICATE_AUTHORITIES.ISSUE_CERT.notBefore), + notAfter: validateCaDateField.optional().describe(CERTIFICATE_AUTHORITIES.ISSUE_CERT.notAfter), + keyUsages: z.nativeEnum(CertKeyUsage).array().optional(), + extendedKeyUsages: z.nativeEnum(CertExtendedKeyUsage).array().optional() + }) + .refine( + (data) => { + const { ttl, notAfter } = data; + return (ttl !== undefined && notAfter === undefined) || (ttl === undefined && notAfter !== undefined); + }, + { + message: "Either ttl or notAfter must be present, but not both", + path: ["ttl", "notAfter"] + } + ), + response: { + 200: z.object({ + certificate: z.string().trim().describe(CERTIFICATE_AUTHORITIES.ISSUE_CERT.certificate), + issuingCaCertificate: z.string().trim().describe(CERTIFICATE_AUTHORITIES.ISSUE_CERT.issuingCaCertificate), + certificateChain: z.string().trim().describe(CERTIFICATE_AUTHORITIES.ISSUE_CERT.certificateChain), + 
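// On the ttl/notAfter refine above: the predicate accepts exactly one of the two. Note
// that ttl is declared as a required string in this body, so the notAfter-only branch is
// unreachable in practice; omitting ttl fails the string check before the refine runs.
// Predicate sketch:
//
//   { ttl: "30d" }                  -> passes (ms("30d") > 0, notAfter omitted)
//   { ttl: "30d", notAfter: "..." } -> fails: "Either ttl or notAfter must be present, but not both"
//   { ttl: "0s" }                   -> fails the ms(val) > 0 check ("TTL must be a positive number")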
privateKey: z.string().trim().describe(CERTIFICATE_AUTHORITIES.ISSUE_CERT.privateKey), + serialNumber: z.string().trim().describe(CERTIFICATE_AUTHORITIES.ISSUE_CERT.serialNumber) + }) + } + }, + handler: async (req) => { + const { certificate, certificateChain, issuingCaCertificate, privateKey, serialNumber, ca } = + await server.services.certificateAuthority.issueCertFromCa({ + caId: req.params.caId, + actor: req.permission.type, + actorId: req.permission.id, + actorAuthMethod: req.permission.authMethod, + actorOrgId: req.permission.orgId, + ...req.body + }); + + await server.services.auditLog.createAuditLog({ + ...req.auditLogInfo, + projectId: ca.projectId, + event: { + type: EventType.ISSUE_CERT, + metadata: { + caId: ca.id, + dn: ca.dn, + serialNumber + } + } + }); + + return { + certificate, + certificateChain, + issuingCaCertificate, + privateKey, + serialNumber + }; + } + }); + + server.route({ + method: "POST", + url: "/:caId/sign-certificate", + config: { + rateLimit: writeLimit + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + schema: { + description: "Sign certificate from CA", + params: z.object({ + caId: z.string().trim().describe(CERTIFICATE_AUTHORITIES.SIGN_CERT.caId) + }), + body: z + .object({ + csr: z.string().trim().min(1).describe(CERTIFICATE_AUTHORITIES.SIGN_CERT.csr), + pkiCollectionId: z.string().trim().optional().describe(CERTIFICATE_AUTHORITIES.SIGN_CERT.pkiCollectionId), + friendlyName: z.string().trim().optional().describe(CERTIFICATE_AUTHORITIES.SIGN_CERT.friendlyName), + commonName: z.string().trim().min(1).optional().describe(CERTIFICATE_AUTHORITIES.SIGN_CERT.commonName), + altNames: validateAltNamesField.describe(CERTIFICATE_AUTHORITIES.SIGN_CERT.altNames), + ttl: z + .string() + .refine((val) => ms(val) > 0, "TTL must be a positive number") + .describe(CERTIFICATE_AUTHORITIES.SIGN_CERT.ttl), + notBefore: validateCaDateField.optional().describe(CERTIFICATE_AUTHORITIES.SIGN_CERT.notBefore), + notAfter: validateCaDateField.optional().describe(CERTIFICATE_AUTHORITIES.SIGN_CERT.notAfter), + keyUsages: z.nativeEnum(CertKeyUsage).array().optional(), + extendedKeyUsages: z.nativeEnum(CertExtendedKeyUsage).array().optional() + }) + .refine( + (data) => { + const { ttl, notAfter } = data; + return (ttl !== undefined && notAfter === undefined) || (ttl === undefined && notAfter !== undefined); + }, + { + message: "Either ttl or notAfter must be present, but not both", + path: ["ttl", "notAfter"] + } + ), + response: { + 200: z.object({ + certificate: z.string().trim().describe(CERTIFICATE_AUTHORITIES.SIGN_CERT.certificate), + issuingCaCertificate: z.string().trim().describe(CERTIFICATE_AUTHORITIES.ISSUE_CERT.issuingCaCertificate), + certificateChain: z.string().trim().describe(CERTIFICATE_AUTHORITIES.ISSUE_CERT.certificateChain), + serialNumber: z.string().trim().describe(CERTIFICATE_AUTHORITIES.ISSUE_CERT.serialNumber) + }) + } + }, + handler: async (req) => { + const { certificate, certificateChain, issuingCaCertificate, serialNumber, ca } = + await server.services.certificateAuthority.signCertFromCa({ + isInternal: false, + caId: req.params.caId, + actor: req.permission.type, + actorId: req.permission.id, + actorAuthMethod: req.permission.authMethod, + actorOrgId: req.permission.orgId, + ...req.body + }); + + await server.services.auditLog.createAuditLog({ + ...req.auditLogInfo, + projectId: ca.projectId, + event: { + type: EventType.SIGN_CERT, + metadata: { + caId: ca.id, + dn: ca.dn, + serialNumber + } + } + }); + + return { + 
certificate: certificate.toString("pem"), + certificateChain, + issuingCaCertificate, + serialNumber + }; + } + }); + + server.route({ + method: "GET", + url: "/:caId/certificate-templates", + config: { + rateLimit: readLimit + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + schema: { + description: "Get list of certificate templates for the CA", + params: z.object({ + caId: z.string().trim().describe(CERTIFICATE_AUTHORITIES.SIGN_CERT.caId) + }), + response: { + 200: z.object({ + certificateTemplates: CertificateTemplatesSchema.array() + }) + } + }, + handler: async (req) => { + const { certificateTemplates, ca } = await server.services.certificateAuthority.getCaCertificateTemplates({ + caId: req.params.caId, + actor: req.permission.type, + actorId: req.permission.id, + actorAuthMethod: req.permission.authMethod, + actorOrgId: req.permission.orgId + }); + + await server.services.auditLog.createAuditLog({ + ...req.auditLogInfo, + projectId: ca.projectId, + event: { + type: EventType.GET_CA_CERTIFICATE_TEMPLATES, + metadata: { + caId: ca.id, + dn: ca.dn + } + } + }); + + return { + certificateTemplates + }; + } + }); + + server.route({ + method: "GET", + url: "/:caId/crls", + config: { + rateLimit: readLimit + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + schema: { + description: "Get list of CRLs of the CA", + params: z.object({ + caId: z.string().trim().describe(CERTIFICATE_AUTHORITIES.GET_CRLS.caId) + }), + response: { + 200: z.array( + z.object({ + id: z.string().describe(CERTIFICATE_AUTHORITIES.GET_CRLS.id), + crl: z.string().describe(CERTIFICATE_AUTHORITIES.GET_CRLS.crl) + }) + ) + } + }, + handler: async (req) => { + const { ca, crls } = await server.services.certificateAuthorityCrl.getCaCrls({ + caId: req.params.caId, + actor: req.permission.type, + actorId: req.permission.id, + actorAuthMethod: req.permission.authMethod, + actorOrgId: req.permission.orgId + }); + + await server.services.auditLog.createAuditLog({ + ...req.auditLogInfo, + projectId: ca.projectId, + event: { + type: EventType.GET_CA_CRLS, + metadata: { + caId: ca.id, + dn: ca.dn + } + } + }); + + return crls; + } + }); + + // TODO: implement this endpoint in the future + // server.route({ + // method: "GET", + // url: "/:caId/crl/rotate", + // config: { + // rateLimit: writeLimit + // }, + // onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + // schema: { + // description: "Rotate CRLs of the CA", + // params: z.object({ + // caId: z.string().trim() + // }), + // response: { + // 200: z.object({ + // message: z.string() + // }) + // } + // }, + // handler: async (req) => { + // await server.services.certificateAuthority.rotateCaCrl({ + // caId: req.params.caId, + // actor: req.permission.type, + // actorId: req.permission.id, + // actorAuthMethod: req.permission.authMethod, + // actorOrgId: req.permission.orgId + // }); + // return { + // message: "Successfully rotated CA CRL" + // }; + // } + // }); +}; diff --git a/backend/src/server/routes/v1/certificate-router.ts b/backend/src/server/routes/v1/certificate-router.ts new file mode 100644 index 0000000000..99d57e802e --- /dev/null +++ b/backend/src/server/routes/v1/certificate-router.ts @@ -0,0 +1,412 @@ +import ms from "ms"; +import { z } from "zod"; + +import { CertificatesSchema } from "@app/db/schemas"; +import { EventType } from "@app/ee/services/audit-log/audit-log-types"; +import { CERTIFICATE_AUTHORITIES, CERTIFICATES } from "@app/lib/api-docs"; +import { readLimit, writeLimit 
} from "@app/server/config/rateLimiter"; +import { verifyAuth } from "@app/server/plugins/auth/verify-auth"; +import { AuthMode } from "@app/services/auth/auth-type"; +import { CertExtendedKeyUsage, CertKeyUsage, CrlReason } from "@app/services/certificate/certificate-types"; +import { + validateAltNamesField, + validateCaDateField +} from "@app/services/certificate-authority/certificate-authority-validators"; + +export const registerCertRouter = async (server: FastifyZodProvider) => { + server.route({ + method: "GET", + url: "/:serialNumber", + config: { + rateLimit: readLimit + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + schema: { + description: "Get certificate", + params: z.object({ + serialNumber: z.string().trim().describe(CERTIFICATES.GET.serialNumber) + }), + response: { + 200: z.object({ + certificate: CertificatesSchema + }) + } + }, + handler: async (req) => { + const { cert, ca } = await server.services.certificate.getCert({ + serialNumber: req.params.serialNumber, + actor: req.permission.type, + actorId: req.permission.id, + actorAuthMethod: req.permission.authMethod, + actorOrgId: req.permission.orgId + }); + + await server.services.auditLog.createAuditLog({ + ...req.auditLogInfo, + projectId: ca.projectId, + event: { + type: EventType.GET_CERT, + metadata: { + certId: cert.id, + cn: cert.commonName, + serialNumber: cert.serialNumber + } + } + }); + + return { + certificate: cert + }; + } + }); + + server.route({ + method: "POST", + url: "/issue-certificate", + config: { + rateLimit: writeLimit + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + schema: { + description: "Issue certificate", + body: z + .object({ + caId: z.string().trim().optional().describe(CERTIFICATE_AUTHORITIES.ISSUE_CERT.caId), + certificateTemplateId: z + .string() + .trim() + .optional() + .describe(CERTIFICATE_AUTHORITIES.ISSUE_CERT.certificateTemplateId), + pkiCollectionId: z.string().trim().optional().describe(CERTIFICATE_AUTHORITIES.SIGN_CERT.pkiCollectionId), + friendlyName: z.string().trim().optional().describe(CERTIFICATE_AUTHORITIES.ISSUE_CERT.friendlyName), + commonName: z.string().trim().min(1).describe(CERTIFICATE_AUTHORITIES.ISSUE_CERT.commonName), + altNames: validateAltNamesField.describe(CERTIFICATE_AUTHORITIES.ISSUE_CERT.altNames), + ttl: z + .string() + .refine((val) => ms(val) > 0, "TTL must be a positive number") + .describe(CERTIFICATE_AUTHORITIES.ISSUE_CERT.ttl), + notBefore: validateCaDateField.optional().describe(CERTIFICATE_AUTHORITIES.ISSUE_CERT.notBefore), + notAfter: validateCaDateField.optional().describe(CERTIFICATE_AUTHORITIES.ISSUE_CERT.notAfter), + keyUsages: z + .nativeEnum(CertKeyUsage) + .array() + .optional() + .describe(CERTIFICATE_AUTHORITIES.ISSUE_CERT.keyUsages), + extendedKeyUsages: z + .nativeEnum(CertExtendedKeyUsage) + .array() + .optional() + .describe(CERTIFICATE_AUTHORITIES.ISSUE_CERT.extendedKeyUsages) + }) + .refine( + (data) => { + const { ttl, notAfter } = data; + return (ttl !== undefined && notAfter === undefined) || (ttl === undefined && notAfter !== undefined); + }, + { + message: "Either ttl or notAfter must be present, but not both", + path: ["ttl", "notAfter"] + } + ) + .refine( + (data) => + (data.caId !== undefined && data.certificateTemplateId === undefined) || + (data.caId === undefined && data.certificateTemplateId !== undefined), + { + message: "Either CA ID or Certificate Template ID must be present, but not both", + path: ["caId", "certificateTemplateId"] + } + ), + response: { 
+ 200: z.object({ + certificate: z.string().trim().describe(CERTIFICATE_AUTHORITIES.ISSUE_CERT.certificate), + issuingCaCertificate: z.string().trim().describe(CERTIFICATE_AUTHORITIES.ISSUE_CERT.issuingCaCertificate), + certificateChain: z.string().trim().describe(CERTIFICATE_AUTHORITIES.ISSUE_CERT.certificateChain), + privateKey: z.string().trim().describe(CERTIFICATE_AUTHORITIES.ISSUE_CERT.privateKey), + serialNumber: z.string().trim().describe(CERTIFICATE_AUTHORITIES.ISSUE_CERT.serialNumber) + }) + } + }, + handler: async (req) => { + const { certificate, certificateChain, issuingCaCertificate, privateKey, serialNumber, ca } = + await server.services.certificateAuthority.issueCertFromCa({ + actor: req.permission.type, + actorId: req.permission.id, + actorAuthMethod: req.permission.authMethod, + actorOrgId: req.permission.orgId, + ...req.body + }); + + await server.services.auditLog.createAuditLog({ + ...req.auditLogInfo, + projectId: ca.projectId, + event: { + type: EventType.ISSUE_CERT, + metadata: { + caId: ca.id, + dn: ca.dn, + serialNumber + } + } + }); + + return { + certificate, + certificateChain, + issuingCaCertificate, + privateKey, + serialNumber + }; + } + }); + + server.route({ + method: "POST", + url: "/sign-certificate", + config: { + rateLimit: writeLimit + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + schema: { + description: "Sign certificate", + body: z + .object({ + caId: z.string().trim().optional().describe(CERTIFICATE_AUTHORITIES.SIGN_CERT.caId), + certificateTemplateId: z + .string() + .trim() + .optional() + .describe(CERTIFICATE_AUTHORITIES.ISSUE_CERT.certificateTemplateId), + pkiCollectionId: z.string().trim().optional().describe(CERTIFICATE_AUTHORITIES.SIGN_CERT.pkiCollectionId), + csr: z.string().trim().min(1).describe(CERTIFICATE_AUTHORITIES.SIGN_CERT.csr), + friendlyName: z.string().trim().optional().describe(CERTIFICATE_AUTHORITIES.SIGN_CERT.friendlyName), + commonName: z.string().trim().min(1).optional().describe(CERTIFICATE_AUTHORITIES.SIGN_CERT.commonName), + altNames: validateAltNamesField.describe(CERTIFICATE_AUTHORITIES.SIGN_CERT.altNames), + ttl: z + .string() + .refine((val) => ms(val) > 0, "TTL must be a positive number") + .describe(CERTIFICATE_AUTHORITIES.SIGN_CERT.ttl), + notBefore: validateCaDateField.optional().describe(CERTIFICATE_AUTHORITIES.SIGN_CERT.notBefore), + notAfter: validateCaDateField.optional().describe(CERTIFICATE_AUTHORITIES.SIGN_CERT.notAfter), + keyUsages: z + .nativeEnum(CertKeyUsage) + .array() + .optional() + .describe(CERTIFICATE_AUTHORITIES.SIGN_CERT.keyUsages), + extendedKeyUsages: z + .nativeEnum(CertExtendedKeyUsage) + .array() + .optional() + .describe(CERTIFICATE_AUTHORITIES.SIGN_CERT.extendedKeyUsages) + }) + .refine( + (data) => { + const { ttl, notAfter } = data; + return (ttl !== undefined && notAfter === undefined) || (ttl === undefined && notAfter !== undefined); + }, + { + message: "Either ttl or notAfter must be present, but not both", + path: ["ttl", "notAfter"] + } + ) + .refine( + (data) => + (data.caId !== undefined && data.certificateTemplateId === undefined) || + (data.caId === undefined && data.certificateTemplateId !== undefined), + { + message: "Either CA ID or Certificate Template ID must be present, but not both", + path: ["caId", "certificateTemplateId"] + } + ), + response: { + 200: z.object({ + certificate: z.string().trim().describe(CERTIFICATE_AUTHORITIES.SIGN_CERT.certificate), + issuingCaCertificate: 
z.string().trim().describe(CERTIFICATE_AUTHORITIES.ISSUE_CERT.issuingCaCertificate), + certificateChain: z.string().trim().describe(CERTIFICATE_AUTHORITIES.ISSUE_CERT.certificateChain), + serialNumber: z.string().trim().describe(CERTIFICATE_AUTHORITIES.ISSUE_CERT.serialNumber) + }) + } + }, + handler: async (req) => { + const { certificate, certificateChain, issuingCaCertificate, serialNumber, ca } = + await server.services.certificateAuthority.signCertFromCa({ + isInternal: false, + actor: req.permission.type, + actorId: req.permission.id, + actorAuthMethod: req.permission.authMethod, + actorOrgId: req.permission.orgId, + ...req.body + }); + + await server.services.auditLog.createAuditLog({ + ...req.auditLogInfo, + projectId: ca.projectId, + event: { + type: EventType.SIGN_CERT, + metadata: { + caId: ca.id, + dn: ca.dn, + serialNumber + } + } + }); + + return { + certificate: certificate.toString("pem"), + certificateChain, + issuingCaCertificate, + serialNumber + }; + } + }); + + server.route({ + method: "POST", + url: "/:serialNumber/revoke", + config: { + rateLimit: writeLimit + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + schema: { + description: "Revoke", + params: z.object({ + serialNumber: z.string().trim().describe(CERTIFICATES.REVOKE.serialNumber) + }), + body: z.object({ + revocationReason: z.nativeEnum(CrlReason).describe(CERTIFICATES.REVOKE.revocationReason) + }), + response: { + 200: z.object({ + message: z.string().trim(), + serialNumber: z.string().trim().describe(CERTIFICATES.REVOKE.serialNumberRes), + revokedAt: z.date().describe(CERTIFICATES.REVOKE.revokedAt) + }) + } + }, + handler: async (req) => { + const { revokedAt, cert, ca } = await server.services.certificate.revokeCert({ + serialNumber: req.params.serialNumber, + actor: req.permission.type, + actorId: req.permission.id, + actorAuthMethod: req.permission.authMethod, + actorOrgId: req.permission.orgId, + ...req.body + }); + + await server.services.auditLog.createAuditLog({ + ...req.auditLogInfo, + projectId: ca.projectId, + event: { + type: EventType.REVOKE_CERT, + metadata: { + certId: cert.id, + cn: cert.commonName, + serialNumber: cert.serialNumber + } + } + }); + + return { + message: "Successfully revoked certificate", + serialNumber: req.params.serialNumber, + revokedAt + }; + } + }); + + server.route({ + method: "DELETE", + url: "/:serialNumber", + config: { + rateLimit: writeLimit + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + schema: { + description: "Delete certificate", + params: z.object({ + serialNumber: z.string().trim().describe(CERTIFICATES.DELETE.serialNumber) + }), + response: { + 200: z.object({ + certificate: CertificatesSchema + }) + } + }, + handler: async (req) => { + const { deletedCert, ca } = await server.services.certificate.deleteCert({ + serialNumber: req.params.serialNumber, + actor: req.permission.type, + actorId: req.permission.id, + actorAuthMethod: req.permission.authMethod, + actorOrgId: req.permission.orgId + }); + + await server.services.auditLog.createAuditLog({ + ...req.auditLogInfo, + projectId: ca.projectId, + event: { + type: EventType.DELETE_CERT, + metadata: { + certId: deletedCert.id, + cn: deletedCert.commonName, + serialNumber: deletedCert.serialNumber + } + } + }); + + return { + certificate: deletedCert + }; + } + }); + + server.route({ + method: "GET", + url: "/:serialNumber/certificate", + config: { + rateLimit: readLimit + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), 
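// On the revoke route above: revocationReason takes a CrlReason value mirroring the
// RFC 5280 reason codes (key compromise, CA compromise, superseded, and so on). A
// request sketch; the `api` helper and the exact enum literal are assumptions:
//
//   const { revokedAt } = await api.post(`/certificates/${serialNumber}/revoke`, {
//     revocationReason: "KEY_COMPROMISE"
//   });
//   // the certificate subsequently appears in the issuing CA's CRL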
+ schema: { + description: "Get certificate body of certificate", + params: z.object({ + serialNumber: z.string().trim().describe(CERTIFICATES.GET_CERT.serialNumber) + }), + response: { + 200: z.object({ + certificate: z.string().trim().describe(CERTIFICATES.GET_CERT.certificate), + certificateChain: z.string().trim().describe(CERTIFICATES.GET_CERT.certificateChain), + serialNumber: z.string().trim().describe(CERTIFICATES.GET_CERT.serialNumberRes) + }) + } + }, + handler: async (req) => { + const { certificate, certificateChain, serialNumber, cert, ca } = await server.services.certificate.getCertBody({ + serialNumber: req.params.serialNumber, + actor: req.permission.type, + actorId: req.permission.id, + actorAuthMethod: req.permission.authMethod, + actorOrgId: req.permission.orgId + }); + + await server.services.auditLog.createAuditLog({ + ...req.auditLogInfo, + projectId: ca.projectId, + event: { + type: EventType.GET_CERT, + metadata: { + certId: cert.id, + cn: cert.commonName, + serialNumber: cert.serialNumber + } + } + }); + + return { + certificate, + certificateChain, + serialNumber + }; + } + }); +}; diff --git a/backend/src/server/routes/v1/certificate-template-router.ts b/backend/src/server/routes/v1/certificate-template-router.ts new file mode 100644 index 0000000000..681e40e0d5 --- /dev/null +++ b/backend/src/server/routes/v1/certificate-template-router.ts @@ -0,0 +1,377 @@ +import ms from "ms"; +import { z } from "zod"; + +import { CertificateTemplateEstConfigsSchema } from "@app/db/schemas"; +import { EventType } from "@app/ee/services/audit-log/audit-log-types"; +import { CERTIFICATE_TEMPLATES } from "@app/lib/api-docs"; +import { readLimit, writeLimit } from "@app/server/config/rateLimiter"; +import { verifyAuth } from "@app/server/plugins/auth/verify-auth"; +import { AuthMode } from "@app/services/auth/auth-type"; +import { CertExtendedKeyUsage, CertKeyUsage } from "@app/services/certificate/certificate-types"; +import { sanitizedCertificateTemplate } from "@app/services/certificate-template/certificate-template-schema"; +import { validateTemplateRegexField } from "@app/services/certificate-template/certificate-template-validators"; + +const sanitizedEstConfig = CertificateTemplateEstConfigsSchema.pick({ + id: true, + certificateTemplateId: true, + isEnabled: true, + disableBootstrapCertValidation: true +}); + +export const registerCertificateTemplateRouter = async (server: FastifyZodProvider) => { + server.route({ + method: "GET", + url: "/:certificateTemplateId", + config: { + rateLimit: readLimit + }, + schema: { + params: z.object({ + certificateTemplateId: z.string().describe(CERTIFICATE_TEMPLATES.GET.certificateTemplateId) + }), + response: { + 200: sanitizedCertificateTemplate + } + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + handler: async (req) => { + const certificateTemplate = await server.services.certificateTemplate.getCertTemplate({ + id: req.params.certificateTemplateId, + actor: req.permission.type, + actorId: req.permission.id, + actorAuthMethod: req.permission.authMethod, + actorOrgId: req.permission.orgId + }); + + await server.services.auditLog.createAuditLog({ + ...req.auditLogInfo, + projectId: certificateTemplate.projectId, + event: { + type: EventType.GET_CERTIFICATE_TEMPLATE, + metadata: { + certificateTemplateId: certificateTemplate.id + } + } + }); + + return certificateTemplate; + } + }); + + server.route({ + method: "POST", + url: "/", + config: { + rateLimit: writeLimit + }, + schema: { + body: z.object({ +
caId: z.string().describe(CERTIFICATE_TEMPLATES.CREATE.caId), + pkiCollectionId: z.string().optional().describe(CERTIFICATE_TEMPLATES.CREATE.pkiCollectionId), + name: z.string().min(1).describe(CERTIFICATE_TEMPLATES.CREATE.name), + commonName: validateTemplateRegexField.describe(CERTIFICATE_TEMPLATES.CREATE.commonName), + subjectAlternativeName: validateTemplateRegexField.describe( + CERTIFICATE_TEMPLATES.CREATE.subjectAlternativeName + ), + ttl: z + .string() + .refine((val) => ms(val) > 0, "TTL must be a positive number") + .describe(CERTIFICATE_TEMPLATES.CREATE.ttl), + keyUsages: z + .nativeEnum(CertKeyUsage) + .array() + .optional() + .default([CertKeyUsage.DIGITAL_SIGNATURE, CertKeyUsage.KEY_ENCIPHERMENT]) + .describe(CERTIFICATE_TEMPLATES.CREATE.keyUsages), + extendedKeyUsages: z + .nativeEnum(CertExtendedKeyUsage) + .array() + .optional() + .default([]) + .describe(CERTIFICATE_TEMPLATES.CREATE.extendedKeyUsages) + }), + response: { + 200: sanitizedCertificateTemplate + } + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + handler: async (req) => { + const certificateTemplate = await server.services.certificateTemplate.createCertTemplate({ + actor: req.permission.type, + actorId: req.permission.id, + actorAuthMethod: req.permission.authMethod, + actorOrgId: req.permission.orgId, + ...req.body + }); + + await server.services.auditLog.createAuditLog({ + ...req.auditLogInfo, + projectId: certificateTemplate.projectId, + event: { + type: EventType.CREATE_CERTIFICATE_TEMPLATE, + metadata: { + certificateTemplateId: certificateTemplate.id, + caId: certificateTemplate.caId, + pkiCollectionId: certificateTemplate.pkiCollectionId as string, + name: certificateTemplate.name, + commonName: certificateTemplate.commonName, + subjectAlternativeName: certificateTemplate.subjectAlternativeName, + ttl: certificateTemplate.ttl + } + } + }); + + return certificateTemplate; + } + }); + + server.route({ + method: "PATCH", + url: "/:certificateTemplateId", + config: { + rateLimit: writeLimit + }, + schema: { + body: z.object({ + caId: z.string().optional().describe(CERTIFICATE_TEMPLATES.UPDATE.caId), + pkiCollectionId: z.string().optional().describe(CERTIFICATE_TEMPLATES.UPDATE.pkiCollectionId), + name: z.string().min(1).optional().describe(CERTIFICATE_TEMPLATES.UPDATE.name), + commonName: validateTemplateRegexField.optional().describe(CERTIFICATE_TEMPLATES.UPDATE.commonName), + subjectAlternativeName: validateTemplateRegexField + .optional() + .describe(CERTIFICATE_TEMPLATES.UPDATE.subjectAlternativeName), + ttl: z + .string() + .refine((val) => ms(val) > 0, "TTL must be a positive number") + .optional() + .describe(CERTIFICATE_TEMPLATES.UPDATE.ttl), + keyUsages: z.nativeEnum(CertKeyUsage).array().optional().describe(CERTIFICATE_TEMPLATES.UPDATE.keyUsages), + extendedKeyUsages: z + .nativeEnum(CertExtendedKeyUsage) + .array() + .optional() + .describe(CERTIFICATE_TEMPLATES.UPDATE.extendedKeyUsages) + }), + params: z.object({ + certificateTemplateId: z.string().describe(CERTIFICATE_TEMPLATES.UPDATE.certificateTemplateId) + }), + response: { + 200: sanitizedCertificateTemplate + } + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + handler: async (req) => { + const certificateTemplate = await server.services.certificateTemplate.updateCertTemplate({ + ...req.body, + id: req.params.certificateTemplateId, + actor: req.permission.type, + actorId: req.permission.id, + actorAuthMethod: req.permission.authMethod, + actorOrgId: req.permission.orgId + }); + + 
await server.services.auditLog.createAuditLog({ + ...req.auditLogInfo, + projectId: certificateTemplate.projectId, + event: { + type: EventType.UPDATE_CERTIFICATE_TEMPLATE, + metadata: { + certificateTemplateId: certificateTemplate.id, + caId: certificateTemplate.caId, + pkiCollectionId: certificateTemplate.pkiCollectionId as string, + name: certificateTemplate.name, + commonName: certificateTemplate.commonName, + subjectAlternativeName: certificateTemplate.subjectAlternativeName, + ttl: certificateTemplate.ttl + } + } + }); + + return certificateTemplate; + } + }); + + server.route({ + method: "DELETE", + url: "/:certificateTemplateId", + config: { + rateLimit: writeLimit + }, + schema: { + params: z.object({ + certificateTemplateId: z.string().describe(CERTIFICATE_TEMPLATES.DELETE.certificateTemplateId) + }), + response: { + 200: sanitizedCertificateTemplate + } + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + handler: async (req) => { + const certificateTemplate = await server.services.certificateTemplate.deleteCertTemplate({ + id: req.params.certificateTemplateId, + actor: req.permission.type, + actorId: req.permission.id, + actorAuthMethod: req.permission.authMethod, + actorOrgId: req.permission.orgId + }); + + await server.services.auditLog.createAuditLog({ + ...req.auditLogInfo, + projectId: certificateTemplate.projectId, + event: { + type: EventType.DELETE_CERTIFICATE_TEMPLATE, + metadata: { + certificateTemplateId: certificateTemplate.id + } + } + }); + + return certificateTemplate; + } + }); + + server.route({ + method: "POST", + url: "/:certificateTemplateId/est-config", + config: { + rateLimit: writeLimit + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + schema: { + description: "Create Certificate Template EST configuration", + params: z.object({ + certificateTemplateId: z.string().trim() + }), + body: z + .object({ + caChain: z.string().trim().optional(), + passphrase: z.string().min(1), + isEnabled: z.boolean().default(true), + disableBootstrapCertValidation: z.boolean().default(false) + }) + .refine( + ({ caChain, disableBootstrapCertValidation }) => + disableBootstrapCertValidation || (!disableBootstrapCertValidation && caChain), + "CA chain is required" + ), + response: { + 200: sanitizedEstConfig + } + }, + handler: async (req) => { + const estConfig = await server.services.certificateTemplate.createEstConfiguration({ + certificateTemplateId: req.params.certificateTemplateId, + actor: req.permission.type, + actorId: req.permission.id, + actorAuthMethod: req.permission.authMethod, + actorOrgId: req.permission.orgId, + ...req.body + }); + + await server.services.auditLog.createAuditLog({ + ...req.auditLogInfo, + projectId: estConfig.projectId, + event: { + type: EventType.CREATE_CERTIFICATE_TEMPLATE_EST_CONFIG, + metadata: { + certificateTemplateId: estConfig.certificateTemplateId, + isEnabled: estConfig.isEnabled as boolean + } + } + }); + + return estConfig; + } + }); + + server.route({ + method: "PATCH", + url: "/:certificateTemplateId/est-config", + config: { + rateLimit: writeLimit + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + schema: { + description: "Update Certificate Template EST configuration", + params: z.object({ + certificateTemplateId: z.string().trim() + }), + body: z.object({ + caChain: z.string().trim().optional(), + passphrase: z.string().min(1).optional(), + disableBootstrapCertValidation: z.boolean().optional(), + isEnabled: z.boolean().optional() + }), + 
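// The POST variant above refines its body so a caChain is required unless
// disableBootstrapCertValidation is true, i.e. EST bootstrap clients must be verifiable
// against an explicit chain by default. The predicate in isolation:
//
//   disableBootstrapCertValidation || (!disableBootstrapCertValidation && caChain)
//   // logically equivalent to: disableBootstrapCertValidation || caChain
//   // (the inner !disableBootstrapCertValidation is redundant)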
response: { + 200: sanitizedEstConfig + } + }, + handler: async (req) => { + const estConfig = await server.services.certificateTemplate.updateEstConfiguration({ + certificateTemplateId: req.params.certificateTemplateId, + actor: req.permission.type, + actorId: req.permission.id, + actorAuthMethod: req.permission.authMethod, + actorOrgId: req.permission.orgId, + ...req.body + }); + + await server.services.auditLog.createAuditLog({ + ...req.auditLogInfo, + projectId: estConfig.projectId, + event: { + type: EventType.UPDATE_CERTIFICATE_TEMPLATE_EST_CONFIG, + metadata: { + certificateTemplateId: estConfig.certificateTemplateId, + isEnabled: estConfig.isEnabled as boolean + } + } + }); + + return estConfig; + } + }); + + server.route({ + method: "GET", + url: "/:certificateTemplateId/est-config", + config: { + rateLimit: readLimit + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + schema: { + description: "Get Certificate Template EST configuration", + params: z.object({ + certificateTemplateId: z.string().trim() + }), + response: { + 200: sanitizedEstConfig.extend({ + caChain: z.string() + }) + } + }, + handler: async (req) => { + const estConfig = await server.services.certificateTemplate.getEstConfiguration({ + isInternal: false, + certificateTemplateId: req.params.certificateTemplateId, + actor: req.permission.type, + actorId: req.permission.id, + actorAuthMethod: req.permission.authMethod, + actorOrgId: req.permission.orgId + }); + + await server.services.auditLog.createAuditLog({ + ...req.auditLogInfo, + projectId: estConfig.projectId, + event: { + type: EventType.GET_CERTIFICATE_TEMPLATE_EST_CONFIG, + metadata: { + certificateTemplateId: estConfig.certificateTemplateId + } + } + }); + + return estConfig; + } + }); +}; diff --git a/backend/src/server/routes/v1/cmek-router.ts b/backend/src/server/routes/v1/cmek-router.ts new file mode 100644 index 0000000000..18d13e67fa --- /dev/null +++ b/backend/src/server/routes/v1/cmek-router.ts @@ -0,0 +1,331 @@ +import slugify from "@sindresorhus/slugify"; +import { z } from "zod"; + +import { InternalKmsSchema, KmsKeysSchema } from "@app/db/schemas"; +import { EventType } from "@app/ee/services/audit-log/audit-log-types"; +import { KMS } from "@app/lib/api-docs"; +import { getBase64SizeInBytes, isBase64 } from "@app/lib/base64"; +import { SymmetricEncryption } from "@app/lib/crypto/cipher"; +import { OrderByDirection } from "@app/lib/types"; +import { readLimit, writeLimit } from "@app/server/config/rateLimiter"; +import { verifyAuth } from "@app/server/plugins/auth/verify-auth"; +import { AuthMode } from "@app/services/auth/auth-type"; +import { CmekOrderBy } from "@app/services/cmek/cmek-types"; + +const keyNameSchema = z + .string() + .trim() + .min(1) + .max(32) + .toLowerCase() + .refine((v) => slugify(v) === v, { + message: "Name must be slug friendly" + }); +const keyDescriptionSchema = z.string().trim().max(500).optional(); + +const base64Schema = z.string().superRefine((val, ctx) => { + if (!isBase64(val)) { + ctx.addIssue({ + code: z.ZodIssueCode.custom, + message: "plaintext must be base64 encoded" + }); + } + + if (getBase64SizeInBytes(val) > 4096) { + ctx.addIssue({ + code: z.ZodIssueCode.custom, + message: "data cannot exceed 4096 bytes" + }); + } +}); + +export const registerCmekRouter = async (server: FastifyZodProvider) => { + // create encryption key + server.route({ + method: "POST", + url: "/keys", + config: { + rateLimit: writeLimit + }, + schema: { + description: "Create KMS key", + body: 
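+ // Key-creation payload; encryptionAlgorithm falls back to AES-256-GCM when omitted (see the inline note on future algorithm support)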
z.object({ + projectId: z.string().describe(KMS.CREATE_KEY.projectId), + name: keyNameSchema.describe(KMS.CREATE_KEY.name), + description: keyDescriptionSchema.describe(KMS.CREATE_KEY.description), + encryptionAlgorithm: z + .nativeEnum(SymmetricEncryption) + .optional() + .default(SymmetricEncryption.AES_GCM_256) + .describe(KMS.CREATE_KEY.encryptionAlgorithm) // eventually will support others + }), + response: { + 200: z.object({ + key: KmsKeysSchema + }) + } + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + handler: async (req) => { + const { + body: { projectId, name, description, encryptionAlgorithm }, + permission + } = req; + + const cmek = await server.services.cmek.createCmek( + { orgId: permission.orgId, projectId, name, description, encryptionAlgorithm }, + permission + ); + + await server.services.auditLog.createAuditLog({ + ...req.auditLogInfo, + projectId, + event: { + type: EventType.CREATE_CMEK, + metadata: { + keyId: cmek.id, + name, + description, + encryptionAlgorithm + } + } + }); + + return { key: cmek }; + } + }); + + // update KMS key + server.route({ + method: "PATCH", + url: "/keys/:keyId", + config: { + rateLimit: writeLimit + }, + schema: { + description: "Update KMS key", + params: z.object({ + keyId: z.string().uuid().describe(KMS.UPDATE_KEY.keyId) + }), + body: z.object({ + name: keyNameSchema.optional().describe(KMS.UPDATE_KEY.name), + isDisabled: z.boolean().optional().describe(KMS.UPDATE_KEY.isDisabled), + description: keyDescriptionSchema.describe(KMS.UPDATE_KEY.description) + }), + response: { + 200: z.object({ + key: KmsKeysSchema + }) + } + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + handler: async (req) => { + const { + params: { keyId }, + body, + permission + } = req; + + const cmek = await server.services.cmek.updateCmekById({ keyId, ...body }, permission); + + await server.services.auditLog.createAuditLog({ + ...req.auditLogInfo, + orgId: permission.orgId, + event: { + type: EventType.UPDATE_CMEK, + metadata: { + keyId, + ...body + } + } + }); + + return { key: cmek }; + } + }); + + // delete KMS key + server.route({ + method: "DELETE", + url: "/keys/:keyId", + config: { + rateLimit: writeLimit + }, + schema: { + description: "Delete KMS key", + params: z.object({ + keyId: z.string().uuid().describe(KMS.DELETE_KEY.keyId) + }), + response: { + 200: z.object({ + key: KmsKeysSchema + }) + } + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + handler: async (req) => { + const { + params: { keyId }, + permission + } = req; + + const cmek = await server.services.cmek.deleteCmekById(keyId, permission); + + await server.services.auditLog.createAuditLog({ + ...req.auditLogInfo, + orgId: permission.orgId, + event: { + type: EventType.DELETE_CMEK, + metadata: { + keyId + } + } + }); + + return { key: cmek }; + } + }); + + // list KMS keys + server.route({ + method: "GET", + url: "/keys", + config: { + rateLimit: readLimit + }, + schema: { + description: "List KMS keys", + querystring: z.object({ + projectId: z.string().describe(KMS.LIST_KEYS.projectId), + offset: z.coerce.number().min(0).optional().default(0).describe(KMS.LIST_KEYS.offset), + limit: z.coerce.number().min(1).max(100).optional().default(100).describe(KMS.LIST_KEYS.limit), + orderBy: z.nativeEnum(CmekOrderBy).optional().default(CmekOrderBy.Name).describe(KMS.LIST_KEYS.orderBy), + orderDirection: z + .nativeEnum(OrderByDirection) + .optional() + .default(OrderByDirection.ASC) + 
.describe(KMS.LIST_KEYS.orderDirection), + search: z.string().trim().optional().describe(KMS.LIST_KEYS.search) + }), + response: { + 200: z.object({ + keys: KmsKeysSchema.merge(InternalKmsSchema.pick({ version: true, encryptionAlgorithm: true })).array(), + totalCount: z.number() + }) + } + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + handler: async (req) => { + const { + query: { projectId, ...dto }, + permission + } = req; + + const { cmeks, totalCount } = await server.services.cmek.listCmeksByProjectId({ projectId, ...dto }, permission); + + await server.services.auditLog.createAuditLog({ + ...req.auditLogInfo, + projectId, + event: { + type: EventType.GET_CMEKS, + metadata: { + keyIds: cmeks.map((key) => key.id) + } + } + }); + + return { keys: cmeks, totalCount }; + } + }); + + // encrypt data + server.route({ + method: "POST", + url: "/keys/:keyId/encrypt", + config: { + rateLimit: writeLimit + }, + schema: { + description: "Encrypt data with KMS key", + params: z.object({ + keyId: z.string().uuid().describe(KMS.ENCRYPT.keyId) + }), + body: z.object({ + plaintext: base64Schema.describe(KMS.ENCRYPT.plaintext) + }), + response: { + 200: z.object({ + ciphertext: z.string() + }) + } + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + handler: async (req) => { + const { + params: { keyId }, + body: { plaintext }, + permission + } = req; + + const ciphertext = await server.services.cmek.cmekEncrypt({ keyId, plaintext }, permission); + + await server.services.auditLog.createAuditLog({ + ...req.auditLogInfo, + orgId: permission.orgId, + event: { + type: EventType.CMEK_ENCRYPT, + metadata: { + keyId + } + } + }); + + return { ciphertext }; + } + }); + + server.route({ + method: "POST", + url: "/keys/:keyId/decrypt", + config: { + rateLimit: writeLimit + }, + schema: { + description: "Decrypt data with KMS key", + params: z.object({ + keyId: z.string().uuid().describe(KMS.DECRYPT.keyId) + }), + body: z.object({ + ciphertext: base64Schema.describe(KMS.DECRYPT.ciphertext) + }), + response: { + 200: z.object({ + plaintext: z.string() + }) + } + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + handler: async (req) => { + const { + params: { keyId }, + body: { ciphertext }, + permission + } = req; + + const plaintext = await server.services.cmek.cmekDecrypt({ keyId, ciphertext }, permission); + + await server.services.auditLog.createAuditLog({ + ...req.auditLogInfo, + orgId: permission.orgId, + event: { + type: EventType.CMEK_DECRYPT, + metadata: { + keyId + } + } + }); + + return { plaintext }; + } + }); +}; diff --git a/backend/src/server/routes/v1/dashboard-router.ts b/backend/src/server/routes/v1/dashboard-router.ts new file mode 100644 index 0000000000..d3975bbc2d --- /dev/null +++ b/backend/src/server/routes/v1/dashboard-router.ts @@ -0,0 +1,930 @@ +import { ForbiddenError, subject } from "@casl/ability"; +import { z } from "zod"; + +import { SecretFoldersSchema, SecretImportsSchema, SecretTagsSchema } from "@app/db/schemas"; +import { EventType, UserAgentType } from "@app/ee/services/audit-log/audit-log-types"; +import { + ProjectPermissionDynamicSecretActions, + ProjectPermissionSub +} from "@app/ee/services/permission/project-permission"; +import { DASHBOARD } from "@app/lib/api-docs"; +import { BadRequestError } from "@app/lib/errors"; +import { removeTrailingSlash } from "@app/lib/fn"; +import { OrderByDirection } from "@app/lib/types"; +import { secretsLimit } from "@app/server/config/rateLimiter"; 
+import { getTelemetryDistinctId } from "@app/server/lib/telemetry"; +import { getUserAgentType } from "@app/server/plugins/audit-log"; +import { verifyAuth } from "@app/server/plugins/auth/verify-auth"; +import { SanitizedDynamicSecretSchema, secretRawSchema } from "@app/server/routes/sanitizedSchemas"; +import { AuthMode } from "@app/services/auth/auth-type"; +import { SecretsOrderBy } from "@app/services/secret/secret-types"; +import { PostHogEventTypes } from "@app/services/telemetry/telemetry-types"; + +const MAX_DEEP_SEARCH_LIMIT = 500; // arbitrary limit to prevent excessive results + +// handle querystring boolean values +const booleanSchema = z + .union([z.boolean(), z.string().trim()]) + .transform((value) => { + if (typeof value === "string") { + // ie if not empty, 0 or false, return true + return Boolean(value) && Number(value) !== 0 && value.toLowerCase() !== "false"; + } + + return value; + }) + .optional() + .default(true); + +const parseSecretPathSearch = (search?: string) => { + if (!search) + return { + searchName: "", + searchPath: "" + }; + + if (!search.includes("/")) + return { + searchName: search, + searchPath: "" + }; + + if (search === "/") + return { + searchName: "", + searchPath: "/" + }; + + const [searchName, ...searchPathSegments] = search.split("/").reverse(); + let searchPath = removeTrailingSlash(searchPathSegments.reverse().join("/").toLowerCase()); + if (!searchPath.startsWith("/")) searchPath = `/${searchPath}`; + + return { + searchName, + searchPath + }; +}; + +export const registerDashboardRouter = async (server: FastifyZodProvider) => { + server.route({ + method: "GET", + url: "/secrets-overview", + config: { + rateLimit: secretsLimit + }, + schema: { + description: "List project secrets overview", + security: [ + { + bearerAuth: [] + } + ], + querystring: z.object({ + projectId: z.string().trim().describe(DASHBOARD.SECRET_OVERVIEW_LIST.projectId), + environments: z + .string() + .trim() + .transform(decodeURIComponent) + .describe(DASHBOARD.SECRET_OVERVIEW_LIST.environments), + secretPath: z + .string() + .trim() + .default("/") + .transform(removeTrailingSlash) + .describe(DASHBOARD.SECRET_OVERVIEW_LIST.secretPath), + offset: z.coerce.number().min(0).optional().default(0).describe(DASHBOARD.SECRET_OVERVIEW_LIST.offset), + limit: z.coerce.number().min(1).max(100).optional().default(100).describe(DASHBOARD.SECRET_OVERVIEW_LIST.limit), + orderBy: z + .nativeEnum(SecretsOrderBy) + .default(SecretsOrderBy.Name) + .describe(DASHBOARD.SECRET_OVERVIEW_LIST.orderBy) + .optional(), + orderDirection: z + .nativeEnum(OrderByDirection) + .default(OrderByDirection.ASC) + .describe(DASHBOARD.SECRET_OVERVIEW_LIST.orderDirection) + .optional(), + search: z.string().trim().describe(DASHBOARD.SECRET_OVERVIEW_LIST.search).optional(), + includeSecrets: booleanSchema.describe(DASHBOARD.SECRET_OVERVIEW_LIST.includeSecrets), + includeFolders: booleanSchema.describe(DASHBOARD.SECRET_OVERVIEW_LIST.includeFolders), + includeDynamicSecrets: booleanSchema.describe(DASHBOARD.SECRET_OVERVIEW_LIST.includeDynamicSecrets) + }), + response: { + 200: z.object({ + folders: SecretFoldersSchema.extend({ environment: z.string() }).array().optional(), + dynamicSecrets: SanitizedDynamicSecretSchema.extend({ environment: z.string() }).array().optional(), + secrets: secretRawSchema + .extend({ + secretPath: z.string().optional(), + tags: SecretTagsSchema.pick({ + id: true, + slug: true, + color: true + }) + .extend({ name: z.string() }) + .array() + .optional() + }) + .array() + 
.optional(), + totalFolderCount: z.number().optional(), + totalDynamicSecretCount: z.number().optional(), + totalSecretCount: z.number().optional(), + totalCount: z.number() + }) + } + }, + onRequest: verifyAuth([AuthMode.JWT]), + handler: async (req) => { + const { + secretPath, + projectId, + limit, + offset, + search, + orderBy, + orderDirection, + includeFolders, + includeSecrets, + includeDynamicSecrets + } = req.query; + + const environments = req.query.environments.split(","); + + if (!projectId || environments.length === 0) + throw new BadRequestError({ message: "Missing workspace id or environment(s)" }); + + const { shouldUseSecretV2Bridge } = await server.services.projectBot.getBotKey(projectId); + + // prevent older projects from accessing endpoint + if (!shouldUseSecretV2Bridge) throw new BadRequestError({ message: "Project version not supported" }); + + let remainingLimit = limit; + let adjustedOffset = offset; + + let folders: Awaited<ReturnType<typeof server.services.folder.getFoldersMultiEnv>> | undefined; + let secrets: Awaited<ReturnType<typeof server.services.secret.getSecretsRawMultiEnv>> | undefined; + let dynamicSecrets: + | Awaited<ReturnType<typeof server.services.dynamicSecret.listDynamicSecretsByEnvs>> + | undefined; + + let totalFolderCount: number | undefined; + let totalDynamicSecretCount: number | undefined; + let totalSecretCount: number | undefined; + + if (includeFolders) { + // this is the unique count, ie duplicate folders across envs only count as 1 + totalFolderCount = await server.services.folder.getProjectFolderCount({ + actorId: req.permission.id, + actor: req.permission.type, + actorAuthMethod: req.permission.authMethod, + actorOrgId: req.permission.orgId, + projectId: req.query.projectId, + path: secretPath, + environments, + search + }); + + if (remainingLimit > 0 && totalFolderCount > adjustedOffset) { + folders = await server.services.folder.getFoldersMultiEnv({ + actorId: req.permission.id, + actor: req.permission.type, + actorAuthMethod: req.permission.authMethod, + actorOrgId: req.permission.orgId, + projectId, + environments, + path: secretPath, + orderBy, + orderDirection, + search, + limit: remainingLimit, + offset: adjustedOffset + }); + + // get the count of unique folder names to properly adjust remaining limit + const uniqueFolderCount = new Set(folders.map((folder) => folder.name)).size; + + remainingLimit -= uniqueFolderCount; + adjustedOffset = 0; + } else { + adjustedOffset = Math.max(0, adjustedOffset - totalFolderCount); + } + } + + if (!includeDynamicSecrets && !includeSecrets) + return { + folders, + totalFolderCount, + totalCount: totalFolderCount ??
0 + }; + + const { permission } = await server.services.permission.getProjectPermission( + req.permission.type, + req.permission.id, + projectId, + req.permission.authMethod, + req.permission.orgId + ); + + const allowedDynamicSecretEnvironments = // filter envs user has access to + environments.filter((environment) => + permission.can( + ProjectPermissionDynamicSecretActions.Lease, + subject(ProjectPermissionSub.DynamicSecrets, { environment, secretPath }) + ) + ); + + if (includeDynamicSecrets && allowedDynamicSecretEnvironments.length) { + // this is the unique count, ie duplicate secrets across envs only count as 1 + totalDynamicSecretCount = await server.services.dynamicSecret.getCountMultiEnv({ + actor: req.permission.type, + actorId: req.permission.id, + actorAuthMethod: req.permission.authMethod, + actorOrgId: req.permission.orgId, + projectId, + search, + environmentSlugs: allowedDynamicSecretEnvironments, + path: secretPath, + isInternal: true + }); + + if (remainingLimit > 0 && totalDynamicSecretCount > adjustedOffset) { + dynamicSecrets = await server.services.dynamicSecret.listDynamicSecretsByEnvs({ + actor: req.permission.type, + actorId: req.permission.id, + actorAuthMethod: req.permission.authMethod, + actorOrgId: req.permission.orgId, + projectId, + search, + orderBy, + orderDirection, + environmentSlugs: allowedDynamicSecretEnvironments, + path: secretPath, + limit: remainingLimit, + offset: adjustedOffset, + isInternal: true + }); + + // get the count of unique dynamic secret names to properly adjust remaining limit + const uniqueDynamicSecretsCount = new Set(dynamicSecrets.map((dynamicSecret) => dynamicSecret.name)).size; + + remainingLimit -= uniqueDynamicSecretsCount; + adjustedOffset = 0; + } else { + adjustedOffset = Math.max(0, adjustedOffset - totalDynamicSecretCount); + } + } + + if (includeSecrets) { + // this is the unique count, ie duplicate secrets across envs only count as 1 + totalSecretCount = await server.services.secret.getSecretsCountMultiEnv({ + actorId: req.permission.id, + actor: req.permission.type, + actorOrgId: req.permission.orgId, + environments, + actorAuthMethod: req.permission.authMethod, + projectId, + path: secretPath, + search, + isInternal: true + }); + + if (remainingLimit > 0 && totalSecretCount > adjustedOffset) { + secrets = await server.services.secret.getSecretsRawMultiEnv({ + actorId: req.permission.id, + actor: req.permission.type, + actorOrgId: req.permission.orgId, + environments, + actorAuthMethod: req.permission.authMethod, + projectId, + path: secretPath, + orderBy, + orderDirection, + search, + limit: remainingLimit, + offset: adjustedOffset, + isInternal: true + }); + + for await (const environment of environments) { + const secretCountFromEnv = secrets.filter((secret) => secret.environment === environment).length; + + if (secretCountFromEnv) { + await server.services.auditLog.createAuditLog({ + projectId, + ...req.auditLogInfo, + event: { + type: EventType.GET_SECRETS, + metadata: { + environment, + secretPath, + numberOfSecrets: secretCountFromEnv + } + } + }); + + if (getUserAgentType(req.headers["user-agent"]) !== UserAgentType.K8_OPERATOR) { + await server.services.telemetry.sendPostHogEvents({ + event: PostHogEventTypes.SecretPulled, + distinctId: getTelemetryDistinctId(req), + properties: { + numberOfSecrets: secretCountFromEnv, + workspaceId: projectId, + environment, + secretPath, + channel: getUserAgentType(req.headers["user-agent"]), + ...req.auditLogInfo + } + }); + } + } + } + } + } + + return { + folders, + 
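+ // kinds that were not requested keep their totals undefined; totalCount below sums whatever was computed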
dynamicSecrets, + secrets, + totalFolderCount, + totalDynamicSecretCount, + totalSecretCount, + totalCount: (totalFolderCount ?? 0) + (totalDynamicSecretCount ?? 0) + (totalSecretCount ?? 0) + }; + } + }); + + server.route({ + method: "GET", + url: "/secrets-details", + config: { + rateLimit: secretsLimit + }, + schema: { + description: "List project secrets details", + security: [ + { + bearerAuth: [] + } + ], + querystring: z.object({ + projectId: z.string().trim().describe(DASHBOARD.SECRET_DETAILS_LIST.projectId), + environment: z.string().trim().describe(DASHBOARD.SECRET_DETAILS_LIST.environment), + secretPath: z + .string() + .trim() + .default("/") + .transform(removeTrailingSlash) + .describe(DASHBOARD.SECRET_DETAILS_LIST.secretPath), + offset: z.coerce.number().min(0).optional().default(0).describe(DASHBOARD.SECRET_DETAILS_LIST.offset), + limit: z.coerce.number().min(1).max(100).optional().default(100).describe(DASHBOARD.SECRET_DETAILS_LIST.limit), + orderBy: z + .nativeEnum(SecretsOrderBy) + .default(SecretsOrderBy.Name) + .describe(DASHBOARD.SECRET_DETAILS_LIST.orderBy) + .optional(), + orderDirection: z + .nativeEnum(OrderByDirection) + .default(OrderByDirection.ASC) + .describe(DASHBOARD.SECRET_DETAILS_LIST.orderDirection) + .optional(), + search: z.string().trim().describe(DASHBOARD.SECRET_DETAILS_LIST.search).optional(), + tags: z.string().trim().transform(decodeURIComponent).describe(DASHBOARD.SECRET_DETAILS_LIST.tags).optional(), + includeSecrets: booleanSchema.describe(DASHBOARD.SECRET_DETAILS_LIST.includeSecrets), + includeFolders: booleanSchema.describe(DASHBOARD.SECRET_DETAILS_LIST.includeFolders), + includeDynamicSecrets: booleanSchema.describe(DASHBOARD.SECRET_DETAILS_LIST.includeDynamicSecrets), + includeImports: booleanSchema.describe(DASHBOARD.SECRET_DETAILS_LIST.includeImports) + }), + response: { + 200: z.object({ + imports: SecretImportsSchema.omit({ importEnv: true }) + .extend({ + importEnv: z.object({ name: z.string(), slug: z.string(), id: z.string() }) + }) + .array() + .optional(), + folders: SecretFoldersSchema.array().optional(), + dynamicSecrets: SanitizedDynamicSecretSchema.array().optional(), + secrets: secretRawSchema + .extend({ + secretPath: z.string().optional(), + tags: SecretTagsSchema.pick({ + id: true, + slug: true, + color: true + }) + .extend({ name: z.string() }) + .array() + .optional() + }) + .array() + .optional(), + totalImportCount: z.number().optional(), + totalFolderCount: z.number().optional(), + totalDynamicSecretCount: z.number().optional(), + totalSecretCount: z.number().optional(), + totalCount: z.number() + }) + } + }, + onRequest: verifyAuth([AuthMode.JWT]), + handler: async (req) => { + const { + secretPath, + environment, + projectId, + limit, + offset, + search, + orderBy, + orderDirection, + includeFolders, + includeSecrets, + includeDynamicSecrets, + includeImports + } = req.query; + + if (!projectId || !environment) throw new BadRequestError({ message: "Missing workspace id or environment" }); + + const { shouldUseSecretV2Bridge } = await server.services.projectBot.getBotKey(projectId); + + // prevent older projects from accessing endpoint + if (!shouldUseSecretV2Bridge) throw new BadRequestError({ message: "Project version not supported" }); + + const tags = req.query.tags?.split(",") ?? 
[]; + + let remainingLimit = limit; + let adjustedOffset = offset; + + let imports: Awaited<ReturnType<typeof server.services.secretImport.getImports>> | undefined; + let folders: Awaited<ReturnType<typeof server.services.folder.getFolders>> | undefined; + let secrets: Awaited<ReturnType<typeof server.services.secret.getSecretsRaw>>["secrets"] | undefined; + let dynamicSecrets: Awaited<ReturnType<typeof server.services.dynamicSecret.listDynamicSecretsByEnv>> | undefined; + + let totalImportCount: number | undefined; + let totalFolderCount: number | undefined; + let totalDynamicSecretCount: number | undefined; + let totalSecretCount: number | undefined; + + if (includeImports) { + totalImportCount = await server.services.secretImport.getProjectImportCount({ + actorId: req.permission.id, + actor: req.permission.type, + actorAuthMethod: req.permission.authMethod, + actorOrgId: req.permission.orgId, + projectId, + environment, + path: secretPath, + search + }); + + if (remainingLimit > 0 && totalImportCount > adjustedOffset) { + imports = await server.services.secretImport.getImports({ + actorId: req.permission.id, + actor: req.permission.type, + actorAuthMethod: req.permission.authMethod, + actorOrgId: req.permission.orgId, + projectId, + environment, + path: secretPath, + search, + limit: remainingLimit, + offset: adjustedOffset + }); + + await server.services.auditLog.createAuditLog({ + ...req.auditLogInfo, + projectId: req.query.projectId, + event: { + type: EventType.GET_SECRET_IMPORTS, + metadata: { + environment, + folderId: imports?.[0]?.folderId, + numberOfImports: imports.length + } + } + }); + + remainingLimit -= imports.length; + adjustedOffset = 0; + } else { + adjustedOffset = Math.max(0, adjustedOffset - totalImportCount); + } + } + + if (includeFolders) { + totalFolderCount = await server.services.folder.getProjectFolderCount({ + actorId: req.permission.id, + actor: req.permission.type, + actorAuthMethod: req.permission.authMethod, + actorOrgId: req.permission.orgId, + projectId, + path: secretPath, + environments: [environment], + search + }); + + if (remainingLimit > 0 && totalFolderCount > adjustedOffset) { + folders = await server.services.folder.getFolders({ + actorId: req.permission.id, + actor: req.permission.type, + actorAuthMethod: req.permission.authMethod, + actorOrgId: req.permission.orgId, + projectId, + environment, + path: secretPath, + orderBy, + orderDirection, + search, + limit: remainingLimit, + offset: adjustedOffset + }); + + remainingLimit -= folders.length; + adjustedOffset = 0; + } else { + adjustedOffset = Math.max(0, adjustedOffset - totalFolderCount); + } + } + + try { + if (includeDynamicSecrets) { + totalDynamicSecretCount = await server.services.dynamicSecret.getDynamicSecretCount({ + actor: req.permission.type, + actorId: req.permission.id, + actorAuthMethod: req.permission.authMethod, + actorOrgId: req.permission.orgId, + projectId, + search, + environmentSlug: environment, + path: secretPath + }); + + if (remainingLimit > 0 && totalDynamicSecretCount > adjustedOffset) { + dynamicSecrets = await server.services.dynamicSecret.listDynamicSecretsByEnv({ + actor: req.permission.type, + actorId: req.permission.id, + actorAuthMethod: req.permission.authMethod, + actorOrgId: req.permission.orgId, + projectId, + search, + orderBy, + orderDirection, + environmentSlug: environment, + path: secretPath, + limit: remainingLimit, + offset: adjustedOffset + }); + + remainingLimit -= dynamicSecrets.length; + adjustedOffset = 0; + } else { + adjustedOffset = Math.max(0, adjustedOffset - totalDynamicSecretCount); + } + } + + if (includeSecrets) { + totalSecretCount = await server.services.secret.getSecretsCount({ + actorId: req.permission.id, + actor: req.permission.type, + actorOrgId:
req.permission.orgId, + environment, + actorAuthMethod: req.permission.authMethod, + projectId, + path: secretPath, + search, + tagSlugs: tags + }); + + if (remainingLimit > 0 && totalSecretCount > adjustedOffset) { + const secretsRaw = await server.services.secret.getSecretsRaw({ + actorId: req.permission.id, + actor: req.permission.type, + actorOrgId: req.permission.orgId, + environment, + actorAuthMethod: req.permission.authMethod, + projectId, + path: secretPath, + orderBy, + orderDirection, + search, + limit: remainingLimit, + offset: adjustedOffset, + tagSlugs: tags + }); + + secrets = secretsRaw.secrets; + + await server.services.auditLog.createAuditLog({ + projectId, + ...req.auditLogInfo, + event: { + type: EventType.GET_SECRETS, + metadata: { + environment, + secretPath, + numberOfSecrets: secrets.length + } + } + }); + + if (getUserAgentType(req.headers["user-agent"]) !== UserAgentType.K8_OPERATOR) { + await server.services.telemetry.sendPostHogEvents({ + event: PostHogEventTypes.SecretPulled, + distinctId: getTelemetryDistinctId(req), + properties: { + numberOfSecrets: secrets.length, + workspaceId: projectId, + environment, + secretPath, + channel: getUserAgentType(req.headers["user-agent"]), + ...req.auditLogInfo + } + }); + } + } + } + } catch (error) { + if (!(error instanceof ForbiddenError)) { + throw error; + } + } + + return { + imports, + folders, + dynamicSecrets, + secrets, + totalImportCount, + totalFolderCount, + totalDynamicSecretCount, + totalSecretCount, + totalCount: + (totalImportCount ?? 0) + (totalFolderCount ?? 0) + (totalDynamicSecretCount ?? 0) + (totalSecretCount ?? 0) + }; + } + }); + + server.route({ + method: "GET", + url: "/secrets-deep-search", + config: { + rateLimit: secretsLimit + }, + schema: { + security: [ + { + bearerAuth: [] + } + ], + querystring: z.object({ + projectId: z.string().trim(), + environments: z.string().trim().transform(decodeURIComponent), + secretPath: z.string().trim().default("/").transform(removeTrailingSlash), + search: z.string().trim().optional(), + tags: z.string().trim().transform(decodeURIComponent).optional() + }), + response: { + 200: z.object({ + folders: SecretFoldersSchema.extend({ path: z.string() }).array().optional(), + dynamicSecrets: SanitizedDynamicSecretSchema.extend({ path: z.string(), environment: z.string() }) + .array() + .optional(), + secrets: secretRawSchema + .extend({ + secretPath: z.string().optional(), + tags: SecretTagsSchema.pick({ + id: true, + slug: true, + color: true + }) + .extend({ name: z.string() }) + .array() + .optional() + }) + .array() + .optional() + }) + } + }, + onRequest: verifyAuth([AuthMode.JWT]), + handler: async (req) => { + const { secretPath, projectId, search } = req.query; + + const environments = req.query.environments.split(",").filter((env) => Boolean(env.trim())); + if (!environments.length) throw new BadRequestError({ message: "One or more environments required" }); + + const tags = req.query.tags?.split(",").filter((tag) => Boolean(tag.trim())) ?? 
[]; + if (!search && !tags.length) throw new BadRequestError({ message: "Search or tags required" }); + + const searchHasTags = Boolean(tags.length); + + const allFolders = await server.services.folder.getFoldersDeepByEnvs( + { + projectId, + environments, + secretPath + }, + req.permission + ); + + const { searchName, searchPath } = parseSecretPathSearch(search); + + const folderMappings = allFolders.map((folder) => ({ + folderId: folder.id, + path: folder.path, + environment: folder.environment + })); + + const sharedFilters = { + search: searchName, + limit: MAX_DEEP_SEARCH_LIMIT, + orderBy: SecretsOrderBy.Name + }; + + const secrets = await server.services.secret.getSecretsRawByFolderMappings( + { + projectId, + folderMappings, + filters: { + ...sharedFilters, + tagSlugs: tags, + includeTagsInSearch: true + } + }, + req.permission + ); + + const dynamicSecrets = searchHasTags + ? [] + : await server.services.dynamicSecret.listDynamicSecretsByFolderIds( + { + projectId, + folderMappings, + filters: sharedFilters + }, + req.permission + ); + + for await (const environment of environments) { + const secretCountForEnv = secrets.filter((secret) => secret.environment === environment).length; + + if (secretCountForEnv) { + await server.services.auditLog.createAuditLog({ + projectId, + ...req.auditLogInfo, + event: { + type: EventType.GET_SECRETS, + metadata: { + environment, + secretPath, + numberOfSecrets: secretCountForEnv + } + } + }); + + if (getUserAgentType(req.headers["user-agent"]) !== UserAgentType.K8_OPERATOR) { + await server.services.telemetry.sendPostHogEvents({ + event: PostHogEventTypes.SecretPulled, + distinctId: getTelemetryDistinctId(req), + properties: { + numberOfSecrets: secretCountForEnv, + workspaceId: projectId, + environment, + secretPath, + channel: getUserAgentType(req.headers["user-agent"]), + ...req.auditLogInfo + } + }); + } + } + } + + const sliceQuickSearch = <T>(array: T[]) => array.slice(0, 25); + + return { + secrets: sliceQuickSearch( + searchPath ? secrets.filter((secret) => secret.secretPath.endsWith(searchPath)) : secrets + ), + dynamicSecrets: sliceQuickSearch( + searchPath + ? dynamicSecrets.filter((dynamicSecret) => dynamicSecret.path.endsWith(searchPath)) + : dynamicSecrets + ), + folders: searchHasTags + ?
[] + : sliceQuickSearch( + allFolders.filter((folder) => { + const [folderName, ...folderPathSegments] = folder.path.split("/").reverse(); + const folderPath = folderPathSegments.reverse().join("/").toLowerCase() || "/"; + + if (searchPath) { + if (searchPath === "/") { + // only show root folders if no folder name search + if (!searchName) return folderPath === searchPath; + + // start partial match on root folders + return folderName.toLowerCase().startsWith(searchName.toLowerCase()); + } + + // support ending partial path match + return ( + folderPath.endsWith(searchPath) && folderName.toLowerCase().startsWith(searchName.toLowerCase()) + ); + } + + // no search path, "fuzzy" match all folders + return folderName.toLowerCase().includes(searchName.toLowerCase()); + }) + ) + }; + } + }); + + server.route({ + method: "GET", + url: "/secrets-by-keys", + config: { + rateLimit: secretsLimit + }, + schema: { + security: [ + { + bearerAuth: [] + } + ], + querystring: z.object({ + projectId: z.string().trim(), + environment: z.string().trim(), + secretPath: z.string().trim().default("/").transform(removeTrailingSlash), + keys: z.string().trim().transform(decodeURIComponent) + }), + response: { + 200: z.object({ + secrets: secretRawSchema + .extend({ + secretPath: z.string().optional(), + tags: SecretTagsSchema.pick({ + id: true, + slug: true, + color: true + }) + .extend({ name: z.string() }) + .array() + .optional() + }) + .array() + .optional() + }) + } + }, + onRequest: verifyAuth([AuthMode.JWT]), + handler: async (req) => { + const { secretPath, projectId, environment } = req.query; + + const keys = req.query.keys?.split(",").filter((key) => Boolean(key.trim())) ?? []; + if (!keys.length) throw new BadRequestError({ message: "One or more keys required" }); + + const { secrets } = await server.services.secret.getSecretsRaw({ + actorId: req.permission.id, + actor: req.permission.type, + actorOrgId: req.permission.orgId, + environment, + actorAuthMethod: req.permission.authMethod, + projectId, + path: secretPath, + keys + }); + + await server.services.auditLog.createAuditLog({ + projectId, + ...req.auditLogInfo, + event: { + type: EventType.GET_SECRETS, + metadata: { + environment, + secretPath, + numberOfSecrets: secrets.length + } + } + }); + + if (getUserAgentType(req.headers["user-agent"]) !== UserAgentType.K8_OPERATOR) { + await server.services.telemetry.sendPostHogEvents({ + event: PostHogEventTypes.SecretPulled, + distinctId: getTelemetryDistinctId(req), + properties: { + numberOfSecrets: secrets.length, + workspaceId: projectId, + environment, + secretPath, + channel: getUserAgentType(req.headers["user-agent"]), + ...req.auditLogInfo + } + }); + } + + return { secrets }; + } + }); +}; diff --git a/backend/src/server/routes/v1/external-group-org-role-mapping-router.ts b/backend/src/server/routes/v1/external-group-org-role-mapping-router.ts new file mode 100644 index 0000000000..032deda7d3 --- /dev/null +++ b/backend/src/server/routes/v1/external-group-org-role-mapping-router.ts @@ -0,0 +1,83 @@ +import slugify from "@sindresorhus/slugify"; +import { z } from "zod"; + +import { ExternalGroupOrgRoleMappingsSchema } from "@app/db/schemas/external-group-org-role-mappings"; +import { EventType } from "@app/ee/services/audit-log/audit-log-types"; +import { readLimit, writeLimit } from "@app/server/config/rateLimiter"; +import { verifyAuth } from "@app/server/plugins/auth/verify-auth"; +import { AuthMode } from "@app/services/auth/auth-type"; + +export const 
registerExternalGroupOrgRoleMappingRouter = async (server: FastifyZodProvider) => { + // get mappings for current org + server.route({ + method: "GET", + url: "/", + config: { + rateLimit: readLimit + }, + schema: { + response: { + 200: ExternalGroupOrgRoleMappingsSchema.array() + } + }, + onRequest: verifyAuth([AuthMode.JWT]), + handler: async (req) => { + const mappings = await server.services.externalGroupOrgRoleMapping.listExternalGroupOrgRoleMappings(req.permission); + + await server.services.auditLog.createAuditLog({ + orgId: req.permission.orgId, + ...req.auditLogInfo, + event: { + type: EventType.GET_EXTERNAL_GROUP_ORG_ROLE_MAPPINGS + } + }); + + return mappings; + } + }); + + // update mappings for current org + server.route({ + method: "PUT", // using put since this endpoint creates, updates and deletes mappings + url: "/", + config: { + rateLimit: writeLimit + }, + schema: { + body: z.object({ + mappings: z + .object({ + groupName: z.string().trim().min(1), + roleSlug: z + .string() + .min(1) + .toLowerCase() + .refine((v) => slugify(v) === v, { + message: "Role must be a valid slug" + }) + }) + .array() + }), + response: { + 200: ExternalGroupOrgRoleMappingsSchema.array() + } + }, + onRequest: verifyAuth([AuthMode.JWT]), + handler: async (req) => { + const { body, permission } = req; + + const mappings = await server.services.externalGroupOrgRoleMapping.updateExternalGroupOrgRoleMappings(body, permission); + + await server.services.auditLog.createAuditLog({ + orgId: permission.orgId, + ...req.auditLogInfo, + event: { + type: EventType.UPDATE_EXTERNAL_GROUP_ORG_ROLE_MAPPINGS, + metadata: body + } + }); + + return mappings; + } + }); +}; diff --git a/backend/src/server/routes/v1/identity-aws-iam-auth-router.ts b/backend/src/server/routes/v1/identity-aws-iam-auth-router.ts index f8c0451683..9199c21f14 100644 --- a/backend/src/server/routes/v1/identity-aws-iam-auth-router.ts +++ b/backend/src/server/routes/v1/identity-aws-iam-auth-router.ts @@ -22,7 +22,7 @@ export const registerIdentityAwsAuthRouter = async (server: FastifyZodProvider) schema: { description: "Login with AWS Auth", body: z.object({ - identityId: z.string().describe(AWS_AUTH.LOGIN.identityId), + identityId: z.string().trim().describe(AWS_AUTH.LOGIN.identityId), iamHttpRequestMethod: z.string().default("POST").describe(AWS_AUTH.LOGIN.iamHttpRequestMethod), iamRequestBody: z.string().describe(AWS_AUTH.LOGIN.iamRequestBody), iamRequestHeaders: z.string().describe(AWS_AUTH.LOGIN.iamRequestHeaders) @@ -77,35 +77,45 @@ export const registerIdentityAwsAuthRouter = async (server: FastifyZodProvider) } ], params: z.object({ - identityId: z.string().trim() + identityId: z.string().trim().describe(AWS_AUTH.ATTACH.identityId) }), body: z.object({ - stsEndpoint: z.string().trim().min(1).default("https://sts.amazonaws.com/"), - allowedPrincipalArns: validatePrincipalArns, - allowedAccountIds: validateAccountIds, + stsEndpoint: z + .string() + .trim() + .min(1) + .default("https://sts.amazonaws.com/") + .describe(AWS_AUTH.ATTACH.stsEndpoint), + allowedPrincipalArns: validatePrincipalArns.describe(AWS_AUTH.ATTACH.allowedPrincipalArns), + allowedAccountIds: validateAccountIds.describe(AWS_AUTH.ATTACH.allowedAccountIds), accessTokenTrustedIps: z .object({ ipAddress: z.string().trim() }) .array() .min(1) - .default([{ ipAddress: "0.0.0.0/0" }, { ipAddress: "::/0" }]), + .default([{ ipAddress: "0.0.0.0/0" }, { ipAddress: "::/0" }]) + .describe(AWS_AUTH.ATTACH.accessTokenTrustedIps), accessTokenTTL: z .number() .int() .min(1) + .max(315360000)
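+ // 315360000 seconds = ten 365-day years, presumably the hard ceiling for token lifetimes across these identity auth routers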
.refine((value) => value !== 0, { message: "accessTokenTTL must have a non zero number" }) - .default(2592000), + .default(2592000) + .describe(AWS_AUTH.ATTACH.accessTokenTTL), accessTokenMaxTTL: z .number() .int() + .max(315360000) .refine((value) => value !== 0, { message: "accessTokenMaxTTL must have a non zero number" }) - .default(2592000), - accessTokenNumUsesLimit: z.number().int().min(0).default(0) + .default(2592000) + .describe(AWS_AUTH.ATTACH.accessTokenMaxTTL), + accessTokenNumUsesLimit: z.number().int().min(0).default(0).describe(AWS_AUTH.ATTACH.accessTokenNumUsesLimit) }), response: { 200: z.object({ @@ -160,28 +170,31 @@ export const registerIdentityAwsAuthRouter = async (server: FastifyZodProvider) } ], params: z.object({ - identityId: z.string() + identityId: z.string().describe(AWS_AUTH.UPDATE.identityId) }), body: z.object({ - stsEndpoint: z.string().trim().min(1).optional(), - allowedPrincipalArns: validatePrincipalArns, - allowedAccountIds: validateAccountIds, + stsEndpoint: z.string().trim().min(1).optional().describe(AWS_AUTH.UPDATE.stsEndpoint), + allowedPrincipalArns: validatePrincipalArns.describe(AWS_AUTH.UPDATE.allowedPrincipalArns), + allowedAccountIds: validateAccountIds.describe(AWS_AUTH.UPDATE.allowedAccountIds), accessTokenTrustedIps: z .object({ ipAddress: z.string().trim() }) .array() .min(1) - .optional(), - accessTokenTTL: z.number().int().min(0).optional(), - accessTokenNumUsesLimit: z.number().int().min(0).optional(), + .optional() + .describe(AWS_AUTH.UPDATE.accessTokenTrustedIps), + accessTokenTTL: z.number().int().min(0).max(315360000).optional().describe(AWS_AUTH.UPDATE.accessTokenTTL), + accessTokenNumUsesLimit: z.number().int().min(0).optional().describe(AWS_AUTH.UPDATE.accessTokenNumUsesLimit), accessTokenMaxTTL: z .number() .int() + .max(315360000) .refine((value) => value !== 0, { message: "accessTokenMaxTTL must have a non zero number" }) .optional() + .describe(AWS_AUTH.UPDATE.accessTokenMaxTTL) }), response: { 200: z.object({ @@ -236,7 +249,7 @@ export const registerIdentityAwsAuthRouter = async (server: FastifyZodProvider) } ], params: z.object({ - identityId: z.string() + identityId: z.string().describe(AWS_AUTH.RETRIEVE.identityId) }), response: { 200: z.object({ @@ -266,4 +279,51 @@ export const registerIdentityAwsAuthRouter = async (server: FastifyZodProvider) return { identityAwsAuth }; } }); + + server.route({ + method: "DELETE", + url: "/aws-auth/identities/:identityId", + config: { + rateLimit: writeLimit + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + schema: { + description: "Delete AWS Auth configuration on identity", + security: [ + { + bearerAuth: [] + } + ], + params: z.object({ + identityId: z.string().describe(AWS_AUTH.REVOKE.identityId) + }), + response: { + 200: z.object({ + identityAwsAuth: IdentityAwsAuthsSchema + }) + } + }, + handler: async (req) => { + const identityAwsAuth = await server.services.identityAwsAuth.revokeIdentityAwsAuth({ + actor: req.permission.type, + actorId: req.permission.id, + actorAuthMethod: req.permission.authMethod, + actorOrgId: req.permission.orgId, + identityId: req.params.identityId + }); + + await server.services.auditLog.createAuditLog({ + ...req.auditLogInfo, + orgId: identityAwsAuth.orgId, + event: { + type: EventType.REVOKE_IDENTITY_AWS_AUTH, + metadata: { + identityId: identityAwsAuth.identityId + } + } + }); + + return { identityAwsAuth }; + } + }); }; diff --git a/backend/src/server/routes/v1/identity-azure-auth-router.ts 
b/backend/src/server/routes/v1/identity-azure-auth-router.ts index d10cd131b2..6aee4504f5 100644 --- a/backend/src/server/routes/v1/identity-azure-auth-router.ts +++ b/backend/src/server/routes/v1/identity-azure-auth-router.ts @@ -2,12 +2,13 @@ import { z } from "zod"; import { IdentityAzureAuthsSchema } from "@app/db/schemas"; import { EventType } from "@app/ee/services/audit-log/audit-log-types"; +import { AZURE_AUTH } from "@app/lib/api-docs"; import { readLimit, writeLimit } from "@app/server/config/rateLimiter"; import { verifyAuth } from "@app/server/plugins/auth/verify-auth"; import { AuthMode } from "@app/services/auth/auth-type"; import { TIdentityTrustedIp } from "@app/services/identity/identity-types"; import { validateAzureAuthField } from "@app/services/identity-azure-auth/identity-azure-auth-validators"; + export const registerIdentityAzureAuthRouter = async (server: FastifyZodProvider) => { server.route({ method: "POST", @@ -18,7 +21,7 @@ export const registerIdentityAzureAuthRouter = async (server: FastifyZodProvider schema: { description: "Login with Azure Auth", body: z.object({ - identityId: z.string(), + identityId: z.string().trim().describe(AZURE_AUTH.LOGIN.identityId), jwt: z.string() }), response: { @@ -71,35 +74,40 @@ export const registerIdentityAzureAuthRouter = async (server: FastifyZodProvider } ], params: z.object({ - identityId: z.string().trim() + identityId: z.string().trim().describe(AZURE_AUTH.ATTACH.identityId) }), body: z.object({ - tenantId: z.string().trim(), - resource: z.string().trim(), - allowedServicePrincipalIds: validateAzureAuthField, + tenantId: z.string().trim().describe(AZURE_AUTH.ATTACH.tenantId), + resource: z.string().trim().describe(AZURE_AUTH.ATTACH.resource), + allowedServicePrincipalIds: validateAzureAuthField.describe(AZURE_AUTH.ATTACH.allowedServicePrincipalIds), accessTokenTrustedIps: z .object({ ipAddress: z.string().trim() }) .array() .min(1) - .default([{ ipAddress: "0.0.0.0/0" }, { ipAddress: "::/0" }]), + .default([{ ipAddress: "0.0.0.0/0" }, { ipAddress: "::/0" }]) + .describe(AZURE_AUTH.ATTACH.accessTokenTrustedIps), accessTokenTTL: z .number() .int() .min(1) + .max(315360000) .refine((value) => value !== 0, { message: "accessTokenTTL must have a non zero number" }) - .default(2592000), + .default(2592000) + .describe(AZURE_AUTH.ATTACH.accessTokenTTL), accessTokenMaxTTL: z .number() .int() + .max(315360000) .refine((value) => value !== 0, { message: "accessTokenMaxTTL must have a non zero number" }) - .default(2592000), - accessTokenNumUsesLimit: z.number().int().min(0).default(0) + .default(2592000) + .describe(AZURE_AUTH.ATTACH.accessTokenMaxTTL), + accessTokenNumUsesLimit: z.number().int().min(0).default(0).describe(AZURE_AUTH.ATTACH.accessTokenNumUsesLimit) }), response: { 200: z.object({ @@ -153,28 +161,33 @@ export const registerIdentityAzureAuthRouter = async (server: FastifyZodProvider } ], params: z.object({ - identityId: z.string().trim() + identityId: z.string().trim().describe(AZURE_AUTH.UPDATE.identityId) }), body: z.object({ - tenantId: z.string().trim().optional(), - resource: z.string().trim().optional(), - allowedServicePrincipalIds: validateAzureAuthField.optional(), + tenantId: z.string().trim().optional().describe(AZURE_AUTH.UPDATE.tenantId), + resource: z.string().trim().optional().describe(AZURE_AUTH.UPDATE.resource), + allowedServicePrincipalIds: validateAzureAuthField + .optional() + .describe(AZURE_AUTH.UPDATE.allowedServicePrincipalIds), accessTokenTrustedIps:
z .object({ ipAddress: z.string().trim() }) .array() .min(1) - .optional(), - accessTokenTTL: z.number().int().min(0).optional(), - accessTokenNumUsesLimit: z.number().int().min(0).optional(), + .optional() + .describe(AZURE_AUTH.UPDATE.accessTokenTrustedIps), + accessTokenTTL: z.number().int().min(0).max(315360000).optional().describe(AZURE_AUTH.UPDATE.accessTokenTTL), + accessTokenNumUsesLimit: z.number().int().min(0).optional().describe(AZURE_AUTH.UPDATE.accessTokenNumUsesLimit), accessTokenMaxTTL: z .number() .int() + .max(315360000) .refine((value) => value !== 0, { message: "accessTokenMaxTTL must have a non zero number" }) .optional() + .describe(AZURE_AUTH.UPDATE.accessTokenMaxTTL) }), response: { 200: z.object({ @@ -228,7 +241,7 @@ export const registerIdentityAzureAuthRouter = async (server: FastifyZodProvider } ], params: z.object({ - identityId: z.string() + identityId: z.string().describe(AZURE_AUTH.RETRIEVE.identityId) }), response: { 200: z.object({ @@ -259,4 +272,51 @@ export const registerIdentityAzureAuthRouter = async (server: FastifyZodProvider return { identityAzureAuth }; } }); + + server.route({ + method: "DELETE", + url: "/azure-auth/identities/:identityId", + config: { + rateLimit: writeLimit + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + schema: { + description: "Delete Azure Auth configuration on identity", + security: [ + { + bearerAuth: [] + } + ], + params: z.object({ + identityId: z.string().describe(AZURE_AUTH.REVOKE.identityId) + }), + response: { + 200: z.object({ + identityAzureAuth: IdentityAzureAuthsSchema + }) + } + }, + handler: async (req) => { + const identityAzureAuth = await server.services.identityAzureAuth.revokeIdentityAzureAuth({ + actor: req.permission.type, + actorId: req.permission.id, + actorAuthMethod: req.permission.authMethod, + actorOrgId: req.permission.orgId, + identityId: req.params.identityId + }); + + await server.services.auditLog.createAuditLog({ + ...req.auditLogInfo, + orgId: identityAzureAuth.orgId, + event: { + type: EventType.REVOKE_IDENTITY_AZURE_AUTH, + metadata: { + identityId: identityAzureAuth.identityId + } + } + }); + + return { identityAzureAuth }; + } + }); }; diff --git a/backend/src/server/routes/v1/identity-gcp-auth-router.ts b/backend/src/server/routes/v1/identity-gcp-auth-router.ts index 34940eb13e..88c5af45fa 100644 --- a/backend/src/server/routes/v1/identity-gcp-auth-router.ts +++ b/backend/src/server/routes/v1/identity-gcp-auth-router.ts @@ -2,6 +2,7 @@ import { z } from "zod"; import { IdentityGcpAuthsSchema } from "@app/db/schemas"; import { EventType } from "@app/ee/services/audit-log/audit-log-types"; +import { GCP_AUTH } from "@app/lib/api-docs"; import { readLimit, writeLimit } from "@app/server/config/rateLimiter"; import { verifyAuth } from "@app/server/plugins/auth/verify-auth"; import { AuthMode } from "@app/services/auth/auth-type"; @@ -18,7 +19,7 @@ export const registerIdentityGcpAuthRouter = async (server: FastifyZodProvider) schema: { description: "Login with GCP Auth", body: z.object({ - identityId: z.string(), + identityId: z.string().trim().describe(GCP_AUTH.LOGIN.identityId), + jwt: z.string() }), response: { @@ -71,36 +72,41 @@ export const registerIdentityGcpAuthRouter = async (server: FastifyZodProvider) } ], params: z.object({ - identityId: z.string().trim() + identityId: z.string().trim().describe(GCP_AUTH.ATTACH.identityId) }), body: z.object({ type: z.enum(["iam", "gce"]), - allowedServiceAccounts: validateGcpAuthField, - allowedProjects:
validateGcpAuthField, - allowedZones: validateGcpAuthField, + allowedServiceAccounts: validateGcpAuthField.describe(GCP_AUTH.ATTACH.allowedServiceAccounts), + allowedProjects: validateGcpAuthField.describe(GCP_AUTH.ATTACH.allowedProjects), + allowedZones: validateGcpAuthField.describe(GCP_AUTH.ATTACH.allowedZones), accessTokenTrustedIps: z .object({ ipAddress: z.string().trim() }) .array() .min(1) - .default([{ ipAddress: "0.0.0.0/0" }, { ipAddress: "::/0" }]), + .default([{ ipAddress: "0.0.0.0/0" }, { ipAddress: "::/0" }]) + .describe(GCP_AUTH.ATTACH.accessTokenTrustedIps), accessTokenTTL: z .number() .int() .min(1) + .max(315360000) .refine((value) => value !== 0, { message: "accessTokenTTL must have a non zero number" }) - .default(2592000), + .default(2592000) + .describe(GCP_AUTH.ATTACH.accessTokenTTL), accessTokenMaxTTL: z .number() .int() + .max(315360000) .refine((value) => value !== 0, { message: "accessTokenMaxTTL must have a non zero number" }) - .default(2592000), - accessTokenNumUsesLimit: z.number().int().min(0).default(0) + .default(2592000) + .describe(GCP_AUTH.ATTACH.accessTokenMaxTTL), + accessTokenNumUsesLimit: z.number().int().min(0).default(0).describe(GCP_AUTH.ATTACH.accessTokenNumUsesLimit) }), response: { 200: z.object({ @@ -156,29 +162,32 @@ export const registerIdentityGcpAuthRouter = async (server: FastifyZodProvider) } ], params: z.object({ - identityId: z.string().trim() + identityId: z.string().trim().describe(GCP_AUTH.UPDATE.identityId) }), body: z.object({ type: z.enum(["iam", "gce"]).optional(), - allowedServiceAccounts: validateGcpAuthField.optional(), - allowedProjects: validateGcpAuthField.optional(), - allowedZones: validateGcpAuthField.optional(), + allowedServiceAccounts: validateGcpAuthField.optional().describe(GCP_AUTH.UPDATE.allowedServiceAccounts), + allowedProjects: validateGcpAuthField.optional().describe(GCP_AUTH.UPDATE.allowedProjects), + allowedZones: validateGcpAuthField.optional().describe(GCP_AUTH.UPDATE.allowedZones), accessTokenTrustedIps: z .object({ ipAddress: z.string().trim() }) .array() .min(1) - .optional(), - accessTokenTTL: z.number().int().min(0).optional(), - accessTokenNumUsesLimit: z.number().int().min(0).optional(), + .optional() + .describe(GCP_AUTH.UPDATE.accessTokenTrustedIps), + accessTokenTTL: z.number().int().min(0).max(315360000).optional().describe(GCP_AUTH.UPDATE.accessTokenTTL), + accessTokenNumUsesLimit: z.number().int().min(0).optional().describe(GCP_AUTH.UPDATE.accessTokenNumUsesLimit), accessTokenMaxTTL: z .number() .int() + .max(315360000) .refine((value) => value !== 0, { message: "accessTokenMaxTTL must have a non zero number" }) .optional() + .describe(GCP_AUTH.UPDATE.accessTokenMaxTTL) }), response: { 200: z.object({ @@ -234,7 +243,7 @@ export const registerIdentityGcpAuthRouter = async (server: FastifyZodProvider) } ], params: z.object({ - identityId: z.string() + identityId: z.string().describe(GCP_AUTH.RETRIEVE.identityId) }), response: { 200: z.object({ @@ -265,4 +274,51 @@ export const registerIdentityGcpAuthRouter = async (server: FastifyZodProvider) return { identityGcpAuth }; } }); + + server.route({ + method: "DELETE", + url: "/gcp-auth/identities/:identityId", + config: { + rateLimit: writeLimit + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + schema: { + description: "Delete GCP Auth configuration on identity", + security: [ + { + bearerAuth: [] + } + ], + params: z.object({ + identityId: z.string().describe(GCP_AUTH.REVOKE.identityId) + }), + response: { + 200: 
z.object({ + identityGcpAuth: IdentityGcpAuthsSchema + }) + } + }, + handler: async (req) => { + const identityGcpAuth = await server.services.identityGcpAuth.revokeIdentityGcpAuth({ + actor: req.permission.type, + actorId: req.permission.id, + actorAuthMethod: req.permission.authMethod, + actorOrgId: req.permission.orgId, + identityId: req.params.identityId + }); + + await server.services.auditLog.createAuditLog({ + ...req.auditLogInfo, + orgId: identityGcpAuth.orgId, + event: { + type: EventType.REVOKE_IDENTITY_GCP_AUTH, + metadata: { + identityId: identityGcpAuth.identityId + } + } + }); + + return { identityGcpAuth }; + } + }); }; diff --git a/backend/src/server/routes/v1/identity-kubernetes-auth-router.ts b/backend/src/server/routes/v1/identity-kubernetes-auth-router.ts index d20ea0edcb..3a71ba7a24 100644 --- a/backend/src/server/routes/v1/identity-kubernetes-auth-router.ts +++ b/backend/src/server/routes/v1/identity-kubernetes-auth-router.ts @@ -2,6 +2,7 @@ import { z } from "zod"; import { IdentityKubernetesAuthsSchema } from "@app/db/schemas"; import { EventType } from "@app/ee/services/audit-log/audit-log-types"; +import { KUBERNETES_AUTH } from "@app/lib/api-docs"; import { readLimit, writeLimit } from "@app/server/config/rateLimiter"; import { verifyAuth } from "@app/server/plugins/auth/verify-auth"; import { AuthMode } from "@app/services/auth/auth-type"; @@ -29,7 +30,7 @@ export const registerIdentityKubernetesRouter = async (server: FastifyZodProvide schema: { description: "Login with Kubernetes Auth", body: z.object({ - identityId: z.string().trim(), + identityId: z.string().trim().describe(KUBERNETES_AUTH.LOGIN.identityId), jwt: z.string().trim() }), response: { @@ -84,38 +85,48 @@ export const registerIdentityKubernetesRouter = async (server: FastifyZodProvide } ], params: z.object({ - identityId: z.string().trim() + identityId: z.string().trim().describe(KUBERNETES_AUTH.ATTACH.identityId) }), body: z.object({ - kubernetesHost: z.string().trim().min(1), - caCert: z.string().trim().default(""), - tokenReviewerJwt: z.string().trim().min(1), - allowedNamespaces: z.string(), // TODO: validation - allowedNames: z.string(), - allowedAudience: z.string(), + kubernetesHost: z.string().trim().min(1).describe(KUBERNETES_AUTH.ATTACH.kubernetesHost), + caCert: z.string().trim().default("").describe(KUBERNETES_AUTH.ATTACH.caCert), + tokenReviewerJwt: z.string().trim().min(1).describe(KUBERNETES_AUTH.ATTACH.tokenReviewerJwt), + allowedNamespaces: z.string().describe(KUBERNETES_AUTH.ATTACH.allowedNamespaces), // TODO: validation + allowedNames: z.string().describe(KUBERNETES_AUTH.ATTACH.allowedNames), + allowedAudience: z.string().describe(KUBERNETES_AUTH.ATTACH.allowedAudience), accessTokenTrustedIps: z .object({ ipAddress: z.string().trim() }) .array() .min(1) - .default([{ ipAddress: "0.0.0.0/0" }, { ipAddress: "::/0" }]), + .default([{ ipAddress: "0.0.0.0/0" }, { ipAddress: "::/0" }]) + .describe(KUBERNETES_AUTH.ATTACH.accessTokenTrustedIps), accessTokenTTL: z .number() .int() .min(1) + .max(315360000) .refine((value) => value !== 0, { message: "accessTokenTTL must have a non zero number" }) - .default(2592000), + .default(2592000) + .describe(KUBERNETES_AUTH.ATTACH.accessTokenTTL), accessTokenMaxTTL: z .number() .int() + .max(315360000) .refine((value) => value !== 0, { message: "accessTokenMaxTTL must have a non zero number" }) - .default(2592000), - accessTokenNumUsesLimit: z.number().int().min(0).default(0) + .default(2592000) + 
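+ // 2592000 seconds = 30 days, matching the default used by the AWS, Azure and GCP auth routers above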
.describe(KUBERNETES_AUTH.ATTACH.accessTokenMaxTTL), + accessTokenNumUsesLimit: z + .number() + .int() + .min(0) + .default(0) + .describe(KUBERNETES_AUTH.ATTACH.accessTokenNumUsesLimit) }), response: { 200: z.object({ @@ -170,35 +181,49 @@ export const registerIdentityKubernetesRouter = async (server: FastifyZodProvide } ], params: z.object({ - identityId: z.string() + identityId: z.string().describe(KUBERNETES_AUTH.UPDATE.identityId) }), body: z.object({ - kubernetesHost: z.string().trim().min(1).optional(), - caCert: z.string().trim().optional(), - tokenReviewerJwt: z.string().trim().min(1).optional(), - allowedNamespaces: z.string().optional(), // TODO: validation - allowedNames: z.string().optional(), - allowedAudience: z.string().optional(), + kubernetesHost: z.string().trim().min(1).optional().describe(KUBERNETES_AUTH.UPDATE.kubernetesHost), + caCert: z.string().trim().optional().describe(KUBERNETES_AUTH.UPDATE.caCert), + tokenReviewerJwt: z.string().trim().min(1).optional().describe(KUBERNETES_AUTH.UPDATE.tokenReviewerJwt), + allowedNamespaces: z.string().optional().describe(KUBERNETES_AUTH.UPDATE.allowedNamespaces), // TODO: validation + allowedNames: z.string().optional().describe(KUBERNETES_AUTH.UPDATE.allowedNames), + allowedAudience: z.string().optional().describe(KUBERNETES_AUTH.UPDATE.allowedAudience), accessTokenTrustedIps: z .object({ ipAddress: z.string().trim() }) .array() .min(1) - .optional(), - accessTokenTTL: z.number().int().min(0).optional(), - accessTokenNumUsesLimit: z.number().int().min(0).optional(), + .optional() + .describe(KUBERNETES_AUTH.UPDATE.accessTokenTrustedIps), + accessTokenTTL: z + .number() + .int() + .min(0) + .max(315360000) + .optional() + .describe(KUBERNETES_AUTH.UPDATE.accessTokenTTL), + accessTokenNumUsesLimit: z + .number() + .int() + .min(0) + .optional() + .describe(KUBERNETES_AUTH.UPDATE.accessTokenNumUsesLimit), accessTokenMaxTTL: z .number() .int() + .max(315360000) .refine((value) => value !== 0, { message: "accessTokenMaxTTL must have a non zero number" }) .optional() + .describe(KUBERNETES_AUTH.UPDATE.accessTokenMaxTTL) }), response: { 200: z.object({ - identityKubernetesAuth: IdentityKubernetesAuthsSchema + identityKubernetesAuth: IdentityKubernetesAuthResponseSchema }) } }, @@ -249,7 +274,7 @@ export const registerIdentityKubernetesRouter = async (server: FastifyZodProvide } ], params: z.object({ - identityId: z.string() + identityId: z.string().describe(KUBERNETES_AUTH.RETRIEVE.identityId) }), response: { 200: z.object({ @@ -280,4 +305,54 @@ export const registerIdentityKubernetesRouter = async (server: FastifyZodProvide return { identityKubernetesAuth: IdentityKubernetesAuthResponseSchema.parse(identityKubernetesAuth) }; } }); + + server.route({ + method: "DELETE", + url: "/kubernetes-auth/identities/:identityId", + config: { + rateLimit: writeLimit + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + schema: { + description: "Delete Kubernetes Auth configuration on identity", + security: [ + { + bearerAuth: [] + } + ], + params: z.object({ + identityId: z.string().describe(KUBERNETES_AUTH.REVOKE.identityId) + }), + response: { + 200: z.object({ + identityKubernetesAuth: IdentityKubernetesAuthResponseSchema.omit({ + caCert: true, + tokenReviewerJwt: true + }) + }) + } + }, + handler: async (req) => { + const identityKubernetesAuth = await server.services.identityKubernetesAuth.revokeIdentityKubernetesAuth({ + actor: req.permission.type, + actorId: req.permission.id, + actorAuthMethod: 
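// Sketch of updating a Kubernetes Auth config; every body field is optional in the
// PATCH schema above. The path mirrors the GET/DELETE routes shown in this diff, the
// base URL is assumed, and the host value is a placeholder. allowedNamespaces is
// conventionally a comma-separated list, though the schema still carries a
// "TODO: validation" note:
const updateKubernetesAuth = async (identityId: string, bearerToken: string) =>
  fetch(`https://app.infisical.com/api/v1/auth/kubernetes-auth/identities/${identityId}`, {
    method: "PATCH",
    headers: { Authorization: `Bearer ${bearerToken}`, "Content-Type": "application/json" },
    body: JSON.stringify({
      kubernetesHost: "https://10.0.0.1:6443", // placeholder cluster endpoint
      allowedNamespaces: "default,infisical",
      accessTokenTTL: 3600 // seconds; must stay within the new 315360000 cap
    })
  });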
req.permission.authMethod, + actorOrgId: req.permission.orgId, + identityId: req.params.identityId + }); + + await server.services.auditLog.createAuditLog({ + ...req.auditLogInfo, + orgId: identityKubernetesAuth.orgId, + event: { + type: EventType.REVOKE_IDENTITY_KUBERNETES_AUTH, + metadata: { + identityId: identityKubernetesAuth.identityId + } + } + }); + + return { identityKubernetesAuth }; + } + }); }; diff --git a/backend/src/server/routes/v1/identity-oidc-auth-router.ts b/backend/src/server/routes/v1/identity-oidc-auth-router.ts new file mode 100644 index 0000000000..280dbc5d5d --- /dev/null +++ b/backend/src/server/routes/v1/identity-oidc-auth-router.ts @@ -0,0 +1,361 @@ +import { z } from "zod"; + +import { IdentityOidcAuthsSchema } from "@app/db/schemas"; +import { EventType } from "@app/ee/services/audit-log/audit-log-types"; +import { OIDC_AUTH } from "@app/lib/api-docs"; +import { readLimit, writeLimit } from "@app/server/config/rateLimiter"; +import { verifyAuth } from "@app/server/plugins/auth/verify-auth"; +import { AuthMode } from "@app/services/auth/auth-type"; +import { TIdentityTrustedIp } from "@app/services/identity/identity-types"; +import { + validateOidcAuthAudiencesField, + validateOidcBoundClaimsField +} from "@app/services/identity-oidc-auth/identity-oidc-auth-validators"; + +const IdentityOidcAuthResponseSchema = IdentityOidcAuthsSchema.omit({ + encryptedCaCert: true, + caCertIV: true, + caCertTag: true +}).extend({ + caCert: z.string() +}); + +export const registerIdentityOidcAuthRouter = async (server: FastifyZodProvider) => { + server.route({ + method: "POST", + url: "/oidc-auth/login", + config: { + rateLimit: writeLimit + }, + schema: { + description: "Login with OIDC Auth", + body: z.object({ + identityId: z.string().trim().describe(OIDC_AUTH.LOGIN.identityId), + jwt: z.string().trim() + }), + response: { + 200: z.object({ + accessToken: z.string(), + expiresIn: z.coerce.number(), + accessTokenMaxTTL: z.coerce.number(), + tokenType: z.literal("Bearer") + }) + } + }, + handler: async (req) => { + const { identityOidcAuth, accessToken, identityAccessToken, identityMembershipOrg } = + await server.services.identityOidcAuth.login({ + identityId: req.body.identityId, + jwt: req.body.jwt + }); + + await server.services.auditLog.createAuditLog({ + ...req.auditLogInfo, + orgId: identityMembershipOrg?.orgId, + event: { + type: EventType.LOGIN_IDENTITY_OIDC_AUTH, + metadata: { + identityId: identityOidcAuth.identityId, + identityAccessTokenId: identityAccessToken.id, + identityOidcAuthId: identityOidcAuth.id + } + } + }); + return { + accessToken, + tokenType: "Bearer" as const, + expiresIn: identityOidcAuth.accessTokenTTL, + accessTokenMaxTTL: identityOidcAuth.accessTokenMaxTTL + }; + } + }); + + server.route({ + method: "POST", + url: "/oidc-auth/identities/:identityId", + config: { + rateLimit: writeLimit + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + schema: { + description: "Attach OIDC Auth configuration onto identity", + security: [ + { + bearerAuth: [] + } + ], + params: z.object({ + identityId: z.string().trim().describe(OIDC_AUTH.ATTACH.identityId) + }), + body: z.object({ + oidcDiscoveryUrl: z.string().url().min(1).describe(OIDC_AUTH.ATTACH.oidcDiscoveryUrl), + caCert: z.string().trim().default("").describe(OIDC_AUTH.ATTACH.caCert), + boundIssuer: z.string().min(1).describe(OIDC_AUTH.ATTACH.boundIssuer), + boundAudiences: validateOidcAuthAudiencesField.describe(OIDC_AUTH.ATTACH.boundAudiences), + boundClaims: 
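// The new OIDC login route above exchanges an identity id and an OIDC provider's JWT
// for an Infisical access token; note it carries no verifyAuth hook, matching the
// route definition. A hedged sketch with an assumed base URL:
const loginWithOidc = async (identityId: string, oidcJwt: string) => {
  const res = await fetch("https://app.infisical.com/api/v1/auth/oidc-auth/login", {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({ identityId, jwt: oidcJwt })
  });
  // Response shape per the schema: { accessToken, expiresIn, accessTokenMaxTTL, tokenType: "Bearer" }
  return (await res.json()) as { accessToken: string; expiresIn: number; accessTokenMaxTTL: number; tokenType: "Bearer" };
};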
validateOidcBoundClaimsField.describe(OIDC_AUTH.ATTACH.boundClaims), + boundSubject: z.string().optional().default("").describe(OIDC_AUTH.ATTACH.boundSubject), + accessTokenTrustedIps: z + .object({ + ipAddress: z.string().trim() + }) + .array() + .min(1) + .default([{ ipAddress: "0.0.0.0/0" }, { ipAddress: "::/0" }]) + .describe(OIDC_AUTH.ATTACH.accessTokenTrustedIps), + accessTokenTTL: z + .number() + .int() + .min(1) + .max(315360000) + .refine((value) => value !== 0, { + message: "accessTokenTTL must have a non zero number" + }) + .default(2592000) + .describe(OIDC_AUTH.ATTACH.accessTokenTTL), + accessTokenMaxTTL: z + .number() + .int() + .max(315360000) + .refine((value) => value !== 0, { + message: "accessTokenMaxTTL must have a non zero number" + }) + .default(2592000) + .describe(OIDC_AUTH.ATTACH.accessTokenMaxTTL), + accessTokenNumUsesLimit: z.number().int().min(0).default(0).describe(OIDC_AUTH.ATTACH.accessTokenNumUsesLimit) + }), + response: { + 200: z.object({ + identityOidcAuth: IdentityOidcAuthResponseSchema + }) + } + }, + handler: async (req) => { + const identityOidcAuth = await server.services.identityOidcAuth.attachOidcAuth({ + actor: req.permission.type, + actorId: req.permission.id, + actorAuthMethod: req.permission.authMethod, + actorOrgId: req.permission.orgId, + ...req.body, + identityId: req.params.identityId + }); + + await server.services.auditLog.createAuditLog({ + ...req.auditLogInfo, + orgId: identityOidcAuth.orgId, + event: { + type: EventType.ADD_IDENTITY_OIDC_AUTH, + metadata: { + identityId: identityOidcAuth.identityId, + oidcDiscoveryUrl: identityOidcAuth.oidcDiscoveryUrl, + caCert: identityOidcAuth.caCert, + boundIssuer: identityOidcAuth.boundIssuer, + boundAudiences: identityOidcAuth.boundAudiences, + boundClaims: identityOidcAuth.boundClaims as Record<string, string>, + boundSubject: identityOidcAuth.boundSubject as string, + accessTokenTTL: identityOidcAuth.accessTokenTTL, + accessTokenMaxTTL: identityOidcAuth.accessTokenMaxTTL, + accessTokenTrustedIps: identityOidcAuth.accessTokenTrustedIps as TIdentityTrustedIp[], + accessTokenNumUsesLimit: identityOidcAuth.accessTokenNumUsesLimit + } + } + }); + + return { + identityOidcAuth + }; + } + }); + + server.route({ + method: "PATCH", + url: "/oidc-auth/identities/:identityId", + config: { + rateLimit: writeLimit + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + schema: { + description: "Update OIDC Auth configuration on identity", + security: [ + { + bearerAuth: [] + } + ], + params: z.object({ + identityId: z.string().trim().describe(OIDC_AUTH.UPDATE.identityId) + }), + body: z + .object({ + oidcDiscoveryUrl: z.string().url().min(1).describe(OIDC_AUTH.UPDATE.oidcDiscoveryUrl), + caCert: z.string().trim().default("").describe(OIDC_AUTH.UPDATE.caCert), + boundIssuer: z.string().min(1).describe(OIDC_AUTH.UPDATE.boundIssuer), + boundAudiences: validateOidcAuthAudiencesField.describe(OIDC_AUTH.UPDATE.boundAudiences), + boundClaims: validateOidcBoundClaimsField.describe(OIDC_AUTH.UPDATE.boundClaims), + boundSubject: z.string().optional().default("").describe(OIDC_AUTH.UPDATE.boundSubject), + accessTokenTrustedIps: z + .object({ + ipAddress: z.string().trim() + }) + .array() + .min(1) + .default([{ ipAddress: "0.0.0.0/0" }, { ipAddress: "::/0" }]) + .describe(OIDC_AUTH.UPDATE.accessTokenTrustedIps), + accessTokenTTL: z + .number() + .int() + .min(1) + .max(315360000) + .refine((value) => value !== 0, { + message: "accessTokenTTL must have a non zero number" + }) + .default(2592000) +
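// A hedged sketch of an attach body for the POST /oidc-auth/identities/:identityId
// route above; the issuer, audience, claim, and subject values are placeholders
// (GitHub Actions-style), not part of this diff. Omitted fields take the schema
// defaults: trusted IPs of 0.0.0.0/0 and ::/0 plus 30-day TTLs.
const attachOidcBody = {
  oidcDiscoveryUrl: "https://token.actions.githubusercontent.com",
  boundIssuer: "https://token.actions.githubusercontent.com",
  boundAudiences: "infisical", // checked by validateOidcAuthAudiencesField
  boundClaims: { repository: "my-org/my-repo" }, // checked by validateOidcBoundClaimsField
  boundSubject: "repo:my-org/my-repo:ref:refs/heads/main"
};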
.describe(OIDC_AUTH.UPDATE.accessTokenTTL), + accessTokenMaxTTL: z + .number() + .int() + .max(315360000) + .refine((value) => value !== 0, { + message: "accessTokenMaxTTL must have a non zero number" + }) + .default(2592000) + .describe(OIDC_AUTH.UPDATE.accessTokenMaxTTL), + + accessTokenNumUsesLimit: z.number().int().min(0).default(0).describe(OIDC_AUTH.UPDATE.accessTokenNumUsesLimit) + }) + .partial(), + response: { + 200: z.object({ + identityOidcAuth: IdentityOidcAuthResponseSchema + }) + } + }, + handler: async (req) => { + const identityOidcAuth = await server.services.identityOidcAuth.updateOidcAuth({ + actor: req.permission.type, + actorId: req.permission.id, + actorOrgId: req.permission.orgId, + actorAuthMethod: req.permission.authMethod, + ...req.body, + identityId: req.params.identityId + }); + + await server.services.auditLog.createAuditLog({ + ...req.auditLogInfo, + orgId: identityOidcAuth.orgId, + event: { + type: EventType.UPDATE_IDENTITY_OIDC_AUTH, + metadata: { + identityId: identityOidcAuth.identityId, + oidcDiscoveryUrl: identityOidcAuth.oidcDiscoveryUrl, + caCert: identityOidcAuth.caCert, + boundIssuer: identityOidcAuth.boundIssuer, + boundAudiences: identityOidcAuth.boundAudiences, + boundClaims: identityOidcAuth.boundClaims as Record<string, string>, + boundSubject: identityOidcAuth.boundSubject as string, + accessTokenTTL: identityOidcAuth.accessTokenTTL, + accessTokenMaxTTL: identityOidcAuth.accessTokenMaxTTL, + accessTokenTrustedIps: identityOidcAuth.accessTokenTrustedIps as TIdentityTrustedIp[], + accessTokenNumUsesLimit: identityOidcAuth.accessTokenNumUsesLimit + } + } + }); + + return { identityOidcAuth }; + } + }); + + server.route({ + method: "GET", + url: "/oidc-auth/identities/:identityId", + config: { + rateLimit: readLimit + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + schema: { + description: "Retrieve OIDC Auth configuration on identity", + security: [ + { + bearerAuth: [] + } + ], + params: z.object({ + identityId: z.string().describe(OIDC_AUTH.RETRIEVE.identityId) + }), + response: { + 200: z.object({ + identityOidcAuth: IdentityOidcAuthResponseSchema + }) + } + }, + handler: async (req) => { + const identityOidcAuth = await server.services.identityOidcAuth.getOidcAuth({ + identityId: req.params.identityId, + actor: req.permission.type, + actorId: req.permission.id, + actorOrgId: req.permission.orgId, + actorAuthMethod: req.permission.authMethod + }); + + await server.services.auditLog.createAuditLog({ + ...req.auditLogInfo, + orgId: identityOidcAuth.orgId, + event: { + type: EventType.GET_IDENTITY_OIDC_AUTH, + metadata: { + identityId: identityOidcAuth.identityId + } + } + }); + + return { identityOidcAuth }; + } + }); + + server.route({ + method: "DELETE", + url: "/oidc-auth/identities/:identityId", + config: { + rateLimit: writeLimit + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + schema: { + description: "Delete OIDC Auth configuration on identity", + security: [ + { + bearerAuth: [] + } + ], + params: z.object({ + identityId: z.string().describe(OIDC_AUTH.REVOKE.identityId) + }), + response: { + 200: z.object({ + identityOidcAuth: IdentityOidcAuthResponseSchema.omit({ + caCert: true + }) + }) + } + }, + handler: async (req) => { + const identityOidcAuth = await server.services.identityOidcAuth.revokeOidcAuth({ + actor: req.permission.type, + actorId: req.permission.id, + actorAuthMethod: req.permission.authMethod, + actorOrgId: req.permission.orgId, + identityId: req.params.identityId + }); + +
await server.services.auditLog.createAuditLog({ + ...req.auditLogInfo, + orgId: identityOidcAuth.orgId, + event: { + type: EventType.REVOKE_IDENTITY_OIDC_AUTH, + metadata: { + identityId: identityOidcAuth.identityId + } + } + }); + + return { identityOidcAuth }; + } + }); +}; diff --git a/backend/src/server/routes/v1/identity-router.ts b/backend/src/server/routes/v1/identity-router.ts index e174cf974a..15e6eabef7 100644 --- a/backend/src/server/routes/v1/identity-router.ts +++ b/backend/src/server/routes/v1/identity-router.ts @@ -1,20 +1,22 @@ import { z } from "zod"; -import { IdentitiesSchema, OrgMembershipRole } from "@app/db/schemas"; +import { IdentitiesSchema, IdentityOrgMembershipsSchema, OrgMembershipRole, OrgRolesSchema } from "@app/db/schemas"; import { EventType } from "@app/ee/services/audit-log/audit-log-types"; import { IDENTITIES } from "@app/lib/api-docs"; -import { creationLimit, writeLimit } from "@app/server/config/rateLimiter"; +import { readLimit, writeLimit } from "@app/server/config/rateLimiter"; import { getTelemetryDistinctId } from "@app/server/lib/telemetry"; import { verifyAuth } from "@app/server/plugins/auth/verify-auth"; import { AuthMode } from "@app/services/auth/auth-type"; import { PostHogEventTypes } from "@app/services/telemetry/telemetry-types"; +import { SanitizedProjectSchema } from "../sanitizedSchemas"; + export const registerIdentityRouter = async (server: FastifyZodProvider) => { server.route({ method: "POST", url: "/", config: { - rateLimit: creationLimit + rateLimit: writeLimit }, onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), schema: { @@ -27,11 +29,17 @@ export const registerIdentityRouter = async (server: FastifyZodProvider) => { body: z.object({ name: z.string().trim().describe(IDENTITIES.CREATE.name), organizationId: z.string().trim().describe(IDENTITIES.CREATE.organizationId), - role: z.string().trim().min(1).default(OrgMembershipRole.NoAccess).describe(IDENTITIES.CREATE.role) + role: z.string().trim().min(1).default(OrgMembershipRole.NoAccess).describe(IDENTITIES.CREATE.role), + metadata: z + .object({ key: z.string().trim().min(1), value: z.string().trim().min(1) }) + .array() + .optional() }), response: { 200: z.object({ - identity: IdentitiesSchema + identity: IdentitiesSchema.extend({ + authMethods: z.array(z.string()) + }) }) } }, @@ -91,7 +99,11 @@ export const registerIdentityRouter = async (server: FastifyZodProvider) => { }), body: z.object({ name: z.string().trim().optional().describe(IDENTITIES.UPDATE.name), - role: z.string().trim().min(1).optional().describe(IDENTITIES.UPDATE.role) + role: z.string().trim().min(1).optional().describe(IDENTITIES.UPDATE.role), + metadata: z + .object({ key: z.string().trim().min(1), value: z.string().trim().min(1) }) + .array() + .optional() }), response: { 200: z.object({ @@ -170,4 +182,168 @@ export const registerIdentityRouter = async (server: FastifyZodProvider) => { return { identity }; } }); + + server.route({ + method: "GET", + url: "/:identityId", + config: { + rateLimit: readLimit + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + schema: { + description: "Get an identity by id", + security: [ + { + bearerAuth: [] + } + ], + params: z.object({ + identityId: z.string().describe(IDENTITIES.GET_BY_ID.identityId) + }), + response: { + 200: z.object({ + identity: IdentityOrgMembershipsSchema.extend({ + metadata: z + .object({ + key: z.string().trim().min(1), + id: z.string().trim().min(1), + value: z.string().trim().min(1) + }) + 
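// The identity router now accepts optional metadata key/value pairs on create and
// update, and the create response gains an authMethods: string[] field. A hedged
// payload sketch; the name, id, and metadata values are placeholders:
const createIdentityBody = {
  name: "ci-runner",
  organizationId: "<org-id>",
  role: "member", // omitting this falls back to OrgMembershipRole.NoAccess per the schema
  metadata: [{ key: "team", value: "platform" }] // both key and value must be non-empty strings
};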
.array() + .optional(), + customRole: OrgRolesSchema.pick({ + id: true, + name: true, + slug: true, + permissions: true, + description: true + }).optional(), + identity: IdentitiesSchema.pick({ name: true, id: true }).extend({ + authMethods: z.array(z.string()) + }) + }) + }) + } + }, + handler: async (req) => { + const identity = await server.services.identity.getIdentityById({ + actor: req.permission.type, + actorId: req.permission.id, + actorAuthMethod: req.permission.authMethod, + actorOrgId: req.permission.orgId, + id: req.params.identityId + }); + + return { identity }; + } + }); + + server.route({ + method: "GET", + url: "/", + config: { + rateLimit: writeLimit + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + schema: { + description: "List identities", + security: [ + { + bearerAuth: [] + } + ], + querystring: z.object({ + orgId: z.string().describe(IDENTITIES.LIST.orgId) + }), + response: { + 200: z.object({ + identities: IdentityOrgMembershipsSchema.extend({ + customRole: OrgRolesSchema.pick({ + id: true, + name: true, + slug: true, + permissions: true, + description: true + }).optional(), + identity: IdentitiesSchema.pick({ name: true, id: true }).extend({ + authMethods: z.array(z.string()) + }) + }).array(), + totalCount: z.number() + }) + } + }, + handler: async (req) => { + const { identityMemberships, totalCount } = await server.services.identity.listOrgIdentities({ + actor: req.permission.type, + actorId: req.permission.id, + actorAuthMethod: req.permission.authMethod, + actorOrgId: req.permission.orgId, + orgId: req.query.orgId + }); + + return { identities: identityMemberships, totalCount }; + } + }); + + server.route({ + method: "GET", + url: "/:identityId/identity-memberships", + config: { + rateLimit: readLimit + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + schema: { + description: "List project memberships that identity with id is part of", + security: [ + { + bearerAuth: [] + } + ], + params: z.object({ + identityId: z.string().describe(IDENTITIES.GET_BY_ID.identityId) + }), + response: { + 200: z.object({ + identityMemberships: z.array( + z.object({ + id: z.string(), + identityId: z.string(), + createdAt: z.date(), + updatedAt: z.date(), + roles: z.array( + z.object({ + id: z.string(), + role: z.string(), + customRoleId: z.string().optional().nullable(), + customRoleName: z.string().optional().nullable(), + customRoleSlug: z.string().optional().nullable(), + isTemporary: z.boolean(), + temporaryMode: z.string().optional().nullable(), + temporaryRange: z.string().nullable().optional(), + temporaryAccessStartTime: z.date().nullable().optional(), + temporaryAccessEndTime: z.date().nullable().optional() + }) + ), + identity: IdentitiesSchema.pick({ name: true, id: true }).extend({ + authMethods: z.array(z.string()) + }), + project: SanitizedProjectSchema.pick({ name: true, id: true }) + }) + ) + }) + } + }, + handler: async (req) => { + const identityMemberships = await server.services.identity.listProjectIdentitiesByIdentityId({ + actor: req.permission.type, + actorId: req.permission.id, + actorAuthMethod: req.permission.authMethod, + actorOrgId: req.permission.orgId, + identityId: req.params.identityId + }); + + return { identityMemberships }; + } + }); }; diff --git a/backend/src/server/routes/v1/identity-token-auth-router.ts b/backend/src/server/routes/v1/identity-token-auth-router.ts new file mode 100644 index 0000000000..f367e60334 --- /dev/null +++ 
b/backend/src/server/routes/v1/identity-token-auth-router.ts @@ -0,0 +1,468 @@ +import { z } from "zod"; + +import { IdentityAccessTokensSchema, IdentityTokenAuthsSchema } from "@app/db/schemas"; +import { EventType } from "@app/ee/services/audit-log/audit-log-types"; +import { TOKEN_AUTH } from "@app/lib/api-docs"; +import { readLimit, writeLimit } from "@app/server/config/rateLimiter"; +import { verifyAuth } from "@app/server/plugins/auth/verify-auth"; +import { AuthMode } from "@app/services/auth/auth-type"; +import { TIdentityTrustedIp } from "@app/services/identity/identity-types"; + +export const registerIdentityTokenAuthRouter = async (server: FastifyZodProvider) => { + server.route({ + method: "POST", + url: "/token-auth/identities/:identityId", + config: { + rateLimit: writeLimit + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + schema: { + description: "Attach Token Auth configuration onto identity", + security: [ + { + bearerAuth: [] + } + ], + params: z.object({ + identityId: z.string().trim().describe(TOKEN_AUTH.ATTACH.identityId) + }), + body: z.object({ + accessTokenTrustedIps: z + .object({ + ipAddress: z.string().trim() + }) + .array() + .min(1) + .default([{ ipAddress: "0.0.0.0/0" }, { ipAddress: "::/0" }]) + .describe(TOKEN_AUTH.ATTACH.accessTokenTrustedIps), + accessTokenTTL: z + .number() + .int() + .min(1) + .max(315360000) + .refine((value) => value !== 0, { + message: "accessTokenTTL must have a non zero number" + }) + .default(2592000) + .describe(TOKEN_AUTH.ATTACH.accessTokenTTL), + accessTokenMaxTTL: z + .number() + .int() + .max(315360000) + .refine((value) => value !== 0, { + message: "accessTokenMaxTTL must have a non zero number" + }) + .default(2592000) + .describe(TOKEN_AUTH.ATTACH.accessTokenMaxTTL), + accessTokenNumUsesLimit: z.number().int().min(0).default(0).describe(TOKEN_AUTH.ATTACH.accessTokenNumUsesLimit) + }), + response: { + 200: z.object({ + identityTokenAuth: IdentityTokenAuthsSchema + }) + } + }, + handler: async (req) => { + const identityTokenAuth = await server.services.identityTokenAuth.attachTokenAuth({ + actor: req.permission.type, + actorId: req.permission.id, + actorAuthMethod: req.permission.authMethod, + actorOrgId: req.permission.orgId, + ...req.body, + identityId: req.params.identityId + }); + + await server.services.auditLog.createAuditLog({ + ...req.auditLogInfo, + orgId: identityTokenAuth.orgId, + event: { + type: EventType.ADD_IDENTITY_TOKEN_AUTH, + metadata: { + identityId: identityTokenAuth.identityId, + accessTokenTTL: identityTokenAuth.accessTokenTTL, + accessTokenMaxTTL: identityTokenAuth.accessTokenMaxTTL, + accessTokenTrustedIps: identityTokenAuth.accessTokenTrustedIps as TIdentityTrustedIp[], + accessTokenNumUsesLimit: identityTokenAuth.accessTokenNumUsesLimit + } + } + }); + + return { + identityTokenAuth + }; + } + }); + + server.route({ + method: "PATCH", + url: "/token-auth/identities/:identityId", + config: { + rateLimit: writeLimit + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + schema: { + description: "Update Token Auth configuration on identity", + security: [ + { + bearerAuth: [] + } + ], + params: z.object({ + identityId: z.string().trim().describe(TOKEN_AUTH.UPDATE.identityId) + }), + body: z.object({ + accessTokenTrustedIps: z + .object({ + ipAddress: z.string().trim() + }) + .array() + .min(1) + .optional() + .describe(TOKEN_AUTH.UPDATE.accessTokenTrustedIps), + accessTokenTTL: 
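// Token Auth (the new router above) has no login route: an already-authenticated actor
// attaches a config and then mints tokens directly. A hedged attach sketch with an
// assumed base URL; an empty body is valid because every field has a default (trusted
// IPs 0.0.0.0/0 and ::/0, 30-day TTLs, accessTokenNumUsesLimit 0):
const attachTokenAuth = (identityId: string, bearerToken: string) =>
  fetch(`https://app.infisical.com/api/v1/auth/token-auth/identities/${identityId}`, {
    method: "POST",
    headers: { Authorization: `Bearer ${bearerToken}`, "Content-Type": "application/json" },
    body: JSON.stringify({})
  });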
z.number().int().min(0).max(315360000).optional().describe(TOKEN_AUTH.UPDATE.accessTokenTTL), + accessTokenNumUsesLimit: z.number().int().min(0).optional().describe(TOKEN_AUTH.UPDATE.accessTokenNumUsesLimit), + accessTokenMaxTTL: z + .number() + .int() + .max(315360000) + .refine((value) => value !== 0, { + message: "accessTokenMaxTTL must have a non zero number" + }) + .optional() + .describe(TOKEN_AUTH.UPDATE.accessTokenMaxTTL) + }), + response: { + 200: z.object({ + identityTokenAuth: IdentityTokenAuthsSchema + }) + } + }, + handler: async (req) => { + const identityTokenAuth = await server.services.identityTokenAuth.updateTokenAuth({ + actor: req.permission.type, + actorId: req.permission.id, + actorOrgId: req.permission.orgId, + actorAuthMethod: req.permission.authMethod, + ...req.body, + identityId: req.params.identityId + }); + + await server.services.auditLog.createAuditLog({ + ...req.auditLogInfo, + orgId: identityTokenAuth.orgId, + event: { + type: EventType.UPDATE_IDENTITY_TOKEN_AUTH, + metadata: { + identityId: identityTokenAuth.identityId, + accessTokenTTL: identityTokenAuth.accessTokenTTL, + accessTokenMaxTTL: identityTokenAuth.accessTokenMaxTTL, + accessTokenTrustedIps: identityTokenAuth.accessTokenTrustedIps as TIdentityTrustedIp[], + accessTokenNumUsesLimit: identityTokenAuth.accessTokenNumUsesLimit + } + } + }); + + return { + identityTokenAuth + }; + } + }); + + server.route({ + method: "GET", + url: "/token-auth/identities/:identityId", + config: { + rateLimit: readLimit + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + schema: { + description: "Retrieve Token Auth configuration on identity", + security: [ + { + bearerAuth: [] + } + ], + params: z.object({ + identityId: z.string().describe(TOKEN_AUTH.RETRIEVE.identityId) + }), + response: { + 200: z.object({ + identityTokenAuth: IdentityTokenAuthsSchema + }) + } + }, + handler: async (req) => { + const identityTokenAuth = await server.services.identityTokenAuth.getTokenAuth({ + identityId: req.params.identityId, + actor: req.permission.type, + actorId: req.permission.id, + actorOrgId: req.permission.orgId, + actorAuthMethod: req.permission.authMethod + }); + + await server.services.auditLog.createAuditLog({ + ...req.auditLogInfo, + orgId: identityTokenAuth.orgId, + event: { + type: EventType.GET_IDENTITY_TOKEN_AUTH, + metadata: { + identityId: identityTokenAuth.identityId + } + } + }); + + return { identityTokenAuth }; + } + }); + + server.route({ + method: "DELETE", + url: "/token-auth/identities/:identityId", + config: { + rateLimit: writeLimit + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + schema: { + description: "Delete Token Auth configuration on identity", + security: [ + { + bearerAuth: [] + } + ], + params: z.object({ + identityId: z.string().describe(TOKEN_AUTH.REVOKE.identityId) + }), + response: { + 200: z.object({ + identityTokenAuth: IdentityTokenAuthsSchema + }) + } + }, + handler: async (req) => { + const identityTokenAuth = await server.services.identityTokenAuth.revokeIdentityTokenAuth({ + actor: req.permission.type, + actorId: req.permission.id, + actorAuthMethod: req.permission.authMethod, + actorOrgId: req.permission.orgId, + identityId: req.params.identityId + }); + + await server.services.auditLog.createAuditLog({ + ...req.auditLogInfo, + orgId: identityTokenAuth.orgId, + event: { + type: EventType.REVOKE_IDENTITY_TOKEN_AUTH, + metadata: { + identityId: identityTokenAuth.identityId + } + } + }); + + return { identityTokenAuth }; + } + 
}); + + server.route({ + method: "POST", + url: "/token-auth/identities/:identityId/tokens", + config: { + rateLimit: writeLimit + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + schema: { + description: "Create token for identity with Token Auth", + security: [ + { + bearerAuth: [] + } + ], + params: z.object({ + identityId: z.string().describe(TOKEN_AUTH.CREATE_TOKEN.identityId) + }), + body: z.object({ + name: z.string().optional().describe(TOKEN_AUTH.CREATE_TOKEN.name) + }), + response: { + 200: z.object({ + accessToken: z.string(), + expiresIn: z.coerce.number(), + accessTokenMaxTTL: z.coerce.number(), + tokenType: z.literal("Bearer") + }) + } + }, + handler: async (req) => { + const { identityTokenAuth, accessToken, identityAccessToken, identityMembershipOrg } = + await server.services.identityTokenAuth.createTokenAuthToken({ + actor: req.permission.type, + actorId: req.permission.id, + actorAuthMethod: req.permission.authMethod, + actorOrgId: req.permission.orgId, + identityId: req.params.identityId, + ...req.body + }); + + await server.services.auditLog.createAuditLog({ + ...req.auditLogInfo, + orgId: identityMembershipOrg.orgId, + event: { + type: EventType.CREATE_TOKEN_IDENTITY_TOKEN_AUTH, + metadata: { + identityId: identityTokenAuth.identityId, + identityAccessTokenId: identityAccessToken.id + } + } + }); + + return { + accessToken, + tokenType: "Bearer" as const, + expiresIn: identityTokenAuth.accessTokenTTL, + accessTokenMaxTTL: identityTokenAuth.accessTokenMaxTTL + }; + } + }); + + server.route({ + method: "GET", + url: "/token-auth/identities/:identityId/tokens", + config: { + rateLimit: readLimit + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + schema: { + description: "Get tokens for identity with Token Auth", + security: [ + { + bearerAuth: [] + } + ], + params: z.object({ + identityId: z.string().describe(TOKEN_AUTH.GET_TOKENS.identityId) + }), + querystring: z.object({ + offset: z.coerce.number().min(0).max(100).default(0).describe(TOKEN_AUTH.GET_TOKENS.offset), + limit: z.coerce.number().min(1).max(100).default(20).describe(TOKEN_AUTH.GET_TOKENS.limit) + }), + response: { + 200: z.object({ + tokens: IdentityAccessTokensSchema.array() + }) + } + }, + handler: async (req) => { + const { tokens, identityMembershipOrg } = await server.services.identityTokenAuth.getTokenAuthTokens({ + actor: req.permission.type, + actorId: req.permission.id, + actorAuthMethod: req.permission.authMethod, + actorOrgId: req.permission.orgId, + identityId: req.params.identityId, + ...req.query + }); + + await server.services.auditLog.createAuditLog({ + ...req.auditLogInfo, + orgId: identityMembershipOrg.orgId, + event: { + type: EventType.GET_TOKENS_IDENTITY_TOKEN_AUTH, + metadata: { + identityId: req.params.identityId + } + } + }); + + return { tokens }; + } + }); + + server.route({ + method: "PATCH", + url: "/token-auth/tokens/:tokenId", + config: { + rateLimit: writeLimit + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + schema: { + description: "Update token for identity with Token Auth", + security: [ + { + bearerAuth: [] + } + ], + params: z.object({ + tokenId: z.string().describe(TOKEN_AUTH.UPDATE_TOKEN.tokenId) + }), + body: z.object({ + name: z.string().optional().describe(TOKEN_AUTH.UPDATE_TOKEN.name) + }), + response: { + 200: z.object({ + token: IdentityAccessTokensSchema + }) + } + }, + handler: async (req) => { + const { token, identityMembershipOrg } = await 
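// Minting a named token through the POST .../tokens route above; the returned
// accessToken is then usable as a Bearer credential. Base URL assumed; the name is a
// placeholder and optional per the schema:
const createIdentityToken = async (identityId: string, bearerToken: string) => {
  const res = await fetch(`https://app.infisical.com/api/v1/auth/token-auth/identities/${identityId}/tokens`, {
    method: "POST",
    headers: { Authorization: `Bearer ${bearerToken}`, "Content-Type": "application/json" },
    body: JSON.stringify({ name: "deploy-bot-token" })
  });
  const { accessToken, expiresIn, tokenType } = await res.json();
  return { accessToken, expiresIn, tokenType }; // tokenType is always "Bearer" per the schema
};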
server.services.identityTokenAuth.updateTokenAuthToken({ + actor: req.permission.type, + actorId: req.permission.id, + actorAuthMethod: req.permission.authMethod, + actorOrgId: req.permission.orgId, + tokenId: req.params.tokenId, + ...req.body + }); + + await server.services.auditLog.createAuditLog({ + ...req.auditLogInfo, + orgId: identityMembershipOrg.orgId, + event: { + type: EventType.UPDATE_TOKEN_IDENTITY_TOKEN_AUTH, + metadata: { + identityId: token.identityId, + tokenId: token.id, + name: req.body.name + } + } + }); + + return { token }; + } + }); + + server.route({ + method: "POST", + url: "/token-auth/tokens/:tokenId/revoke", + config: { + rateLimit: writeLimit + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + schema: { + description: "Revoke token for identity with Token Auth", + security: [ + { + bearerAuth: [] + } + ], + params: z.object({ + tokenId: z.string().describe(TOKEN_AUTH.REVOKE_TOKEN.tokenId) + }), + response: { + 200: z.object({ + message: z.string() + }) + } + }, + handler: async (req) => { + await server.services.identityTokenAuth.revokeTokenAuthToken({ + actor: req.permission.type, + actorId: req.permission.id, + actorAuthMethod: req.permission.authMethod, + actorOrgId: req.permission.orgId, + tokenId: req.params.tokenId + }); + + return { + message: "Successfully revoked access token" + }; + } + }); +}; diff --git a/backend/src/server/routes/v1/identity-ua.ts b/backend/src/server/routes/v1/identity-universal-auth-router.ts similarity index 79% rename from backend/src/server/routes/v1/identity-ua.ts rename to backend/src/server/routes/v1/identity-universal-auth-router.ts index 670f52416d..f103a39e0c 100644 --- a/backend/src/server/routes/v1/identity-ua.ts +++ b/backend/src/server/routes/v1/identity-universal-auth-router.ts @@ -107,6 +107,7 @@ export const registerIdentityUaRouter = async (server: FastifyZodProvider) => { .number() .int() .min(1) + .max(315360000) .refine((value) => value !== 0, { message: "accessTokenTTL must have a non zero number" }) @@ -115,6 +116,7 @@ export const registerIdentityUaRouter = async (server: FastifyZodProvider) => { accessTokenMaxTTL: z .number() .int() + .max(315360000) .refine((value) => value !== 0, { message: "accessTokenMaxTTL must have a non zero number" }) @@ -134,7 +136,7 @@ export const registerIdentityUaRouter = async (server: FastifyZodProvider) => { } }, handler: async (req) => { - const identityUniversalAuth = await server.services.identityUa.attachUa({ + const identityUniversalAuth = await server.services.identityUa.attachUniversalAuth({ actor: req.permission.type, actorId: req.permission.id, actorOrgId: req.permission.orgId, @@ -196,7 +198,13 @@ export const registerIdentityUaRouter = async (server: FastifyZodProvider) => { .min(1) .optional() .describe(UNIVERSAL_AUTH.UPDATE.accessTokenTrustedIps), - accessTokenTTL: z.number().int().min(0).optional().describe(UNIVERSAL_AUTH.UPDATE.accessTokenTTL), + accessTokenTTL: z + .number() + .int() + .min(0) + .max(315360000) + .optional() + .describe(UNIVERSAL_AUTH.UPDATE.accessTokenTTL), accessTokenNumUsesLimit: z .number() .int() @@ -206,6 +214,7 @@ export const registerIdentityUaRouter = async (server: FastifyZodProvider) => { accessTokenMaxTTL: z .number() .int() + .max(315360000) .refine((value) => value !== 0, { message: "accessTokenMaxTTL must have a non zero number" }) @@ -219,7 +228,7 @@ export const registerIdentityUaRouter = async (server: FastifyZodProvider) => { } }, handler: async (req) => { - const identityUniversalAuth = await 
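// Revoking an individual token uses POST .../tokens/:tokenId/revoke (route above)
// rather than DELETE, and returns only a confirmation message. A hedged one-liner,
// base URL assumed:
const revokeIdentityToken = (tokenId: string, bearerToken: string) =>
  fetch(`https://app.infisical.com/api/v1/auth/token-auth/tokens/${tokenId}/revoke`, {
    method: "POST",
    headers: { Authorization: `Bearer ${bearerToken}` }
  });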
server.services.identityUa.updateUa({ + const identityUniversalAuth = await server.services.identityUa.updateUniversalAuth({ actor: req.permission.type, actorId: req.permission.id, actorOrgId: req.permission.orgId, @@ -272,7 +281,7 @@ export const registerIdentityUaRouter = async (server: FastifyZodProvider) => { } }, handler: async (req) => { - const identityUniversalAuth = await server.services.identityUa.getIdentityUa({ + const identityUniversalAuth = await server.services.identityUa.getIdentityUniversalAuth({ actor: req.permission.type, actorId: req.permission.id, actorAuthMethod: req.permission.authMethod, @@ -295,6 +304,53 @@ export const registerIdentityUaRouter = async (server: FastifyZodProvider) => { } }); + server.route({ + method: "DELETE", + url: "/universal-auth/identities/:identityId", + config: { + rateLimit: writeLimit + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + schema: { + description: "Delete Universal Auth configuration on identity", + security: [ + { + bearerAuth: [] + } + ], + params: z.object({ + identityId: z.string().describe(UNIVERSAL_AUTH.REVOKE.identityId) + }), + response: { + 200: z.object({ + identityUniversalAuth: IdentityUniversalAuthsSchema + }) + } + }, + handler: async (req) => { + const identityUniversalAuth = await server.services.identityUa.revokeIdentityUniversalAuth({ + actor: req.permission.type, + actorId: req.permission.id, + actorAuthMethod: req.permission.authMethod, + actorOrgId: req.permission.orgId, + identityId: req.params.identityId + }); + + await server.services.auditLog.createAuditLog({ + ...req.auditLogInfo, + orgId: identityUniversalAuth.orgId, + event: { + type: EventType.REVOKE_IDENTITY_UNIVERSAL_AUTH, + metadata: { + identityId: identityUniversalAuth.identityId + } + } + }); + + return { identityUniversalAuth }; + } + }); + server.route({ method: "POST", url: "/universal-auth/identities/:identityId/client-secrets", @@ -315,7 +371,7 @@ export const registerIdentityUaRouter = async (server: FastifyZodProvider) => { body: z.object({ description: z.string().trim().default("").describe(UNIVERSAL_AUTH.CREATE_CLIENT_SECRET.description), numUsesLimit: z.number().min(0).default(0).describe(UNIVERSAL_AUTH.CREATE_CLIENT_SECRET.numUsesLimit), - ttl: z.number().min(0).default(0).describe(UNIVERSAL_AUTH.CREATE_CLIENT_SECRET.ttl) + ttl: z.number().min(0).max(315360000).default(0).describe(UNIVERSAL_AUTH.CREATE_CLIENT_SECRET.ttl) }), response: { 200: z.object({ @@ -325,14 +381,15 @@ export const registerIdentityUaRouter = async (server: FastifyZodProvider) => { } }, handler: async (req) => { - const { clientSecret, clientSecretData, orgId } = await server.services.identityUa.createUaClientSecret({ - actor: req.permission.type, - actorId: req.permission.id, - actorAuthMethod: req.permission.authMethod, - actorOrgId: req.permission.orgId, - identityId: req.params.identityId, - ...req.body - }); + const { clientSecret, clientSecretData, orgId } = + await server.services.identityUa.createUniversalAuthClientSecret({ + actor: req.permission.type, + actorId: req.permission.id, + actorAuthMethod: req.permission.authMethod, + actorOrgId: req.permission.orgId, + identityId: req.params.identityId, + ...req.body + }); await server.services.auditLog.createAuditLog({ ...req.auditLogInfo, @@ -374,13 +431,15 @@ export const registerIdentityUaRouter = async (server: FastifyZodProvider) => { } }, handler: async (req) => { - const { clientSecrets: clientSecretData, orgId } = await server.services.identityUa.getUaClientSecrets({ - 
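// Universal Auth client secret creation (route above) now caps ttl at the same
// 315360000-second (ten-year) bound. A hedged body sketch; the description is a
// placeholder, and 0 is simply the schema default for numUsesLimit and ttl:
const createClientSecretBody = {
  description: "ci pipeline secret",
  numUsesLimit: 0,
  ttl: 60 * 60 * 24 * 90 // 90 days in seconds, well under the 315360000 cap
};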
actor: req.permission.type, - actorId: req.permission.id, - actorAuthMethod: req.permission.authMethod, - actorOrgId: req.permission.orgId, - identityId: req.params.identityId - }); + const { clientSecrets: clientSecretData, orgId } = await server.services.identityUa.getUniversalAuthClientSecrets( + { + actor: req.permission.type, + actorId: req.permission.id, + actorAuthMethod: req.permission.authMethod, + actorOrgId: req.permission.orgId, + identityId: req.params.identityId + } + ); await server.services.auditLog.createAuditLog({ ...req.auditLogInfo, @@ -396,6 +455,56 @@ export const registerIdentityUaRouter = async (server: FastifyZodProvider) => { } }); + server.route({ + method: "GET", + url: "/universal-auth/identities/:identityId/client-secrets/:clientSecretId", + config: { + rateLimit: readLimit + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + schema: { + description: "Get Universal Auth Client Secret for identity", + security: [ + { + bearerAuth: [] + } + ], + params: z.object({ + identityId: z.string().describe(UNIVERSAL_AUTH.GET_CLIENT_SECRET.identityId), + clientSecretId: z.string().describe(UNIVERSAL_AUTH.GET_CLIENT_SECRET.clientSecretId) + }), + response: { + 200: z.object({ + clientSecretData: sanitizedClientSecretSchema + }) + } + }, + handler: async (req) => { + const clientSecretData = await server.services.identityUa.getUniversalAuthClientSecretById({ + actor: req.permission.type, + actorId: req.permission.id, + actorAuthMethod: req.permission.authMethod, + actorOrgId: req.permission.orgId, + identityId: req.params.identityId, + clientSecretId: req.params.clientSecretId + }); + + await server.services.auditLog.createAuditLog({ + ...req.auditLogInfo, + orgId: clientSecretData.orgId, + event: { + type: EventType.REVOKE_IDENTITY_UNIVERSAL_AUTH_CLIENT_SECRET, + metadata: { + identityId: clientSecretData.identityId, + clientSecretId: clientSecretData.id + } + } + }); + + return { clientSecretData }; + } + }); + server.route({ method: "POST", url: "/universal-auth/identities/:identityId/client-secrets/:clientSecretId/revoke", @@ -421,7 +530,7 @@ export const registerIdentityUaRouter = async (server: FastifyZodProvider) => { } }, handler: async (req) => { - const clientSecretData = await server.services.identityUa.revokeUaClientSecret({ + const clientSecretData = await server.services.identityUa.revokeUniversalAuthClientSecret({ actor: req.permission.type, actorId: req.permission.id, actorAuthMethod: req.permission.authMethod, diff --git a/backend/src/server/routes/v1/index.ts b/backend/src/server/routes/v1/index.ts index cbf67ce790..f9edfc18c0 100644 --- a/backend/src/server/routes/v1/index.ts +++ b/backend/src/server/routes/v1/index.ts @@ -1,18 +1,30 @@ +import { registerCmekRouter } from "@app/server/routes/v1/cmek-router"; +import { registerDashboardRouter } from "@app/server/routes/v1/dashboard-router"; + import { registerAdminRouter } from "./admin-router"; import { registerAuthRoutes } from "./auth-router"; import { registerProjectBotRouter } from "./bot-router"; +import { registerCaRouter } from "./certificate-authority-router"; +import { registerCertRouter } from "./certificate-router"; +import { registerCertificateTemplateRouter } from "./certificate-template-router"; +import { registerExternalGroupOrgRoleMappingRouter } from "./external-group-org-role-mapping-router"; import { registerIdentityAccessTokenRouter } from "./identity-access-token-router"; import { registerIdentityAwsAuthRouter } from "./identity-aws-iam-auth-router"; import 
{ registerIdentityAzureAuthRouter } from "./identity-azure-auth-router"; import { registerIdentityGcpAuthRouter } from "./identity-gcp-auth-router"; import { registerIdentityKubernetesRouter } from "./identity-kubernetes-auth-router"; +import { registerIdentityOidcAuthRouter } from "./identity-oidc-auth-router"; import { registerIdentityRouter } from "./identity-router"; -import { registerIdentityUaRouter } from "./identity-ua"; +import { registerIdentityTokenAuthRouter } from "./identity-token-auth-router"; +import { registerIdentityUaRouter } from "./identity-universal-auth-router"; import { registerIntegrationAuthRouter } from "./integration-auth-router"; import { registerIntegrationRouter } from "./integration-router"; import { registerInviteOrgRouter } from "./invite-org-router"; +import { registerOrgAdminRouter } from "./org-admin-router"; import { registerOrgRouter } from "./organization-router"; import { registerPasswordRouter } from "./password-router"; +import { registerPkiAlertRouter } from "./pki-alert-router"; +import { registerPkiCollectionRouter } from "./pki-collection-router"; import { registerProjectEnvRouter } from "./project-env-router"; import { registerProjectKeyRouter } from "./project-key-router"; import { registerProjectMembershipRouter } from "./project-membership-router"; @@ -21,34 +33,48 @@ import { registerSecretFolderRouter } from "./secret-folder-router"; import { registerSecretImportRouter } from "./secret-import-router"; import { registerSecretSharingRouter } from "./secret-sharing-router"; import { registerSecretTagRouter } from "./secret-tag-router"; +import { registerSlackRouter } from "./slack-router"; import { registerSsoRouter } from "./sso-router"; import { registerUserActionRouter } from "./user-action-router"; +import { registerUserEngagementRouter } from "./user-engagement-router"; import { registerUserRouter } from "./user-router"; import { registerWebhookRouter } from "./webhook-router"; +import { registerWorkflowIntegrationRouter } from "./workflow-integration-router"; export const registerV1Routes = async (server: FastifyZodProvider) => { await server.register(registerSsoRouter, { prefix: "/sso" }); await server.register( async (authRouter) => { await authRouter.register(registerAuthRoutes); + await authRouter.register(registerIdentityTokenAuthRouter); await authRouter.register(registerIdentityUaRouter); await authRouter.register(registerIdentityKubernetesRouter); await authRouter.register(registerIdentityGcpAuthRouter); await authRouter.register(registerIdentityAccessTokenRouter); await authRouter.register(registerIdentityAwsAuthRouter); await authRouter.register(registerIdentityAzureAuthRouter); + await authRouter.register(registerIdentityOidcAuthRouter); }, { prefix: "/auth" } ); await server.register(registerPasswordRouter, { prefix: "/password" }); await server.register(registerOrgRouter, { prefix: "/organization" }); await server.register(registerAdminRouter, { prefix: "/admin" }); + await server.register(registerOrgAdminRouter, { prefix: "/organization-admin" }); await server.register(registerUserRouter, { prefix: "/user" }); await server.register(registerInviteOrgRouter, { prefix: "/invite-org" }); await server.register(registerUserActionRouter, { prefix: "/user-action" }); await server.register(registerSecretImportRouter, { prefix: "/secret-imports" }); await server.register(registerSecretFolderRouter, { prefix: "/folders" }); + await server.register( + async (workflowIntegrationRouter) => { + await 
workflowIntegrationRouter.register(registerWorkflowIntegrationRouter); + await workflowIntegrationRouter.register(registerSlackRouter, { prefix: "/slack" }); + }, + { prefix: "/workflow-integrations" } + ); + await server.register( async (projectRouter) => { await projectRouter.register(registerProjectRouter); @@ -61,10 +87,25 @@ export const registerV1Routes = async (server: FastifyZodProvider) => { { prefix: "/workspace" } ); + await server.register( + async (pkiRouter) => { + await pkiRouter.register(registerCaRouter, { prefix: "/ca" }); + await pkiRouter.register(registerCertRouter, { prefix: "/certificates" }); + await pkiRouter.register(registerCertificateTemplateRouter, { prefix: "/certificate-templates" }); + await pkiRouter.register(registerPkiAlertRouter, { prefix: "/alerts" }); + await pkiRouter.register(registerPkiCollectionRouter, { prefix: "/collections" }); + }, + { prefix: "/pki" } + ); + await server.register(registerProjectBotRouter, { prefix: "/bot" }); await server.register(registerIntegrationRouter, { prefix: "/integration" }); await server.register(registerIntegrationAuthRouter, { prefix: "/integration-auth" }); await server.register(registerWebhookRouter, { prefix: "/webhooks" }); await server.register(registerIdentityRouter, { prefix: "/identities" }); await server.register(registerSecretSharingRouter, { prefix: "/secret-sharing" }); + await server.register(registerUserEngagementRouter, { prefix: "/user-engagement" }); + await server.register(registerDashboardRouter, { prefix: "/dashboard" }); + await server.register(registerCmekRouter, { prefix: "/kms" }); + await server.register(registerExternalGroupOrgRoleMappingRouter, { prefix: "/external-group-mappings" }); }; diff --git a/backend/src/server/routes/v1/integration-auth-router.ts b/backend/src/server/routes/v1/integration-auth-router.ts index 899c1cac83..dae37ddb3d 100644 --- a/backend/src/server/routes/v1/integration-auth-router.ts +++ b/backend/src/server/routes/v1/integration-auth-router.ts @@ -189,6 +189,7 @@ export const registerIntegrationAuthRouter = async (server: FastifyZodProvider) workspaceId: z.string().trim(), code: z.string().trim(), integration: z.string().trim(), + installationId: z.string().trim().optional(), url: z.string().trim().url().optional() }), response: { @@ -240,6 +241,12 @@ export const registerIntegrationAuthRouter = async (server: FastifyZodProvider) integration: z.string().trim().describe(INTEGRATION_AUTH.CREATE_ACCESS_TOKEN.integration), accessId: z.string().trim().optional().describe(INTEGRATION_AUTH.CREATE_ACCESS_TOKEN.accessId), accessToken: z.string().trim().optional().describe(INTEGRATION_AUTH.CREATE_ACCESS_TOKEN.accessToken), + awsAssumeIamRoleArn: z + .string() + .url() + .trim() + .optional() + .describe(INTEGRATION_AUTH.CREATE_ACCESS_TOKEN.awsAssumeIamRoleArn), url: z.string().url().trim().optional().describe(INTEGRATION_AUTH.CREATE_ACCESS_TOKEN.url), namespace: z.string().trim().optional().describe(INTEGRATION_AUTH.CREATE_ACCESS_TOKEN.namespace), refreshToken: z.string().trim().optional().describe(INTEGRATION_AUTH.CREATE_ACCESS_TOKEN.refreshToken) @@ -287,6 +294,7 @@ export const registerIntegrationAuthRouter = async (server: FastifyZodProvider) }), querystring: z.object({ teamId: z.string().trim().optional(), + azureDevOpsOrgName: z.string().trim().optional(), workspaceSlug: z.string().trim().optional() }), response: { @@ -445,6 +453,40 @@ export const registerIntegrationAuthRouter = async (server: FastifyZodProvider) } }); + server.route({ + method: "POST", + url: 
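// For orientation, the registrations above compose these v1 paths (the /api/v1 mount
// point is assumed; the prefixes are from this diff): /auth/token-auth/*,
// /auth/oidc-auth/*, /organization-admin/*, /workflow-integrations/slack/*, /pki/ca,
// /pki/certificates, /pki/certificate-templates, /pki/alerts, /pki/collections,
// /identities, /dashboard, /kms, and /external-group-mappings.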
"/:integrationAuthId/duplicate", + config: { + rateLimit: writeLimit + }, + onRequest: verifyAuth([AuthMode.JWT]), + schema: { + params: z.object({ + integrationAuthId: z.string().trim() + }), + body: z.object({ + projectId: z.string().trim() + }), + response: { + 200: z.object({ + integrationAuth: integrationAuthPubSchema + }) + } + }, + handler: async (req) => { + const integrationAuth = await server.services.integrationAuth.duplicateIntegrationAuth({ + actorId: req.permission.id, + actor: req.permission.type, + actorOrgId: req.permission.orgId, + actorAuthMethod: req.permission.authMethod, + id: req.params.integrationAuthId, + projectId: req.body.projectId + }); + + return { integrationAuth }; + } + }); + server.route({ method: "GET", url: "/:integrationAuthId/github/envs", @@ -849,6 +891,48 @@ export const registerIntegrationAuthRouter = async (server: FastifyZodProvider) } }); + server.route({ + method: "GET", + url: "/:integrationAuthId/bitbucket/environments", + config: { + rateLimit: readLimit + }, + onRequest: verifyAuth([AuthMode.JWT]), + schema: { + params: z.object({ + integrationAuthId: z.string().trim() + }), + querystring: z.object({ + workspaceSlug: z.string().trim().min(1, { message: "Workspace slug required" }), + repoSlug: z.string().trim().min(1, { message: "Repo slug required" }) + }), + response: { + 200: z.object({ + environments: z + .object({ + name: z.string(), + slug: z.string(), + uuid: z.string(), + type: z.string() + }) + .array() + }) + } + }, + handler: async (req) => { + const environments = await server.services.integrationAuth.getBitbucketEnvironments({ + actorId: req.permission.id, + actor: req.permission.type, + actorAuthMethod: req.permission.authMethod, + actorOrgId: req.permission.orgId, + id: req.params.integrationAuthId, + workspaceSlug: req.query.workspaceSlug, + repoSlug: req.query.repoSlug + }); + return { environments }; + } + }); + server.route({ method: "GET", url: "/:integrationAuthId/northflank/secret-groups", diff --git a/backend/src/server/routes/v1/integration-router.ts b/backend/src/server/routes/v1/integration-router.ts index 97a7f4d7a2..86d3218524 100644 --- a/backend/src/server/routes/v1/integration-router.ts +++ b/backend/src/server/routes/v1/integration-router.ts @@ -4,13 +4,15 @@ import { IntegrationsSchema } from "@app/db/schemas"; import { EventType } from "@app/ee/services/audit-log/audit-log-types"; import { INTEGRATION } from "@app/lib/api-docs"; import { removeTrailingSlash, shake } from "@app/lib/fn"; -import { writeLimit } from "@app/server/config/rateLimiter"; +import { readLimit, writeLimit } from "@app/server/config/rateLimiter"; import { getTelemetryDistinctId } from "@app/server/lib/telemetry"; import { verifyAuth } from "@app/server/plugins/auth/verify-auth"; import { AuthMode } from "@app/services/auth/auth-type"; import { IntegrationMetadataSchema } from "@app/services/integration/integration-schema"; import { PostHogEventTypes, TIntegrationCreatedEvent } from "@app/services/telemetry/telemetry-types"; +import {} from "../sanitizedSchemas"; + export const registerIntegrationRouter = async (server: FastifyZodProvider) => { server.route({ method: "POST", @@ -50,7 +52,13 @@ export const registerIntegrationRouter = async (server: FastifyZodProvider) => { }), response: { 200: z.object({ - integration: IntegrationsSchema + integration: IntegrationsSchema.extend({ + environment: z.object({ + slug: z.string().trim(), + name: z.string().trim(), + id: z.string().trim() + }) + }) }) } }, @@ -129,14 +137,20 @@ export const 
registerIntegrationRouter = async (server: FastifyZodProvider) => { .default("/") .transform(removeTrailingSlash) .describe(INTEGRATION.UPDATE.secretPath), - targetEnvironment: z.string().trim().describe(INTEGRATION.UPDATE.targetEnvironment), - owner: z.string().trim().describe(INTEGRATION.UPDATE.owner), - environment: z.string().trim().describe(INTEGRATION.UPDATE.environment), + targetEnvironment: z.string().trim().optional().describe(INTEGRATION.UPDATE.targetEnvironment), + owner: z.string().trim().optional().describe(INTEGRATION.UPDATE.owner), + environment: z.string().trim().optional().describe(INTEGRATION.UPDATE.environment), metadata: IntegrationMetadataSchema.optional() }), response: { 200: z.object({ - integration: IntegrationsSchema + integration: IntegrationsSchema.extend({ + environment: z.object({ + slug: z.string().trim(), + name: z.string().trim(), + id: z.string().trim() + }) + }) }) } }, @@ -154,6 +168,48 @@ export const registerIntegrationRouter = async (server: FastifyZodProvider) => { } }); + server.route({ + method: "GET", + url: "/:integrationId", + config: { + rateLimit: readLimit + }, + schema: { + description: "Get an integration by integration id", + security: [ + { + bearerAuth: [] + } + ], + params: z.object({ + integrationId: z.string().trim().describe(INTEGRATION.UPDATE.integrationId) + }), + response: { + 200: z.object({ + integration: IntegrationsSchema.extend({ + environment: z.object({ + slug: z.string().trim(), + name: z.string().trim(), + id: z.string().trim() + }) + }) + }) + } + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + handler: async (req) => { + const integration = await server.services.integration.getIntegration({ + actorId: req.permission.id, + actor: req.permission.type, + actorAuthMethod: req.permission.authMethod, + actorOrgId: req.permission.orgId, + id: req.params.integrationId + }); + + return { integration }; + } + }); + server.route({ method: "DELETE", url: "/:integrationId", @@ -170,6 +226,12 @@ export const registerIntegrationRouter = async (server: FastifyZodProvider) => { params: z.object({ integrationId: z.string().trim().describe(INTEGRATION.DELETE.integrationId) }), + querystring: z.object({ + shouldDeleteIntegrationSecrets: z + .enum(["true", "false"]) + .optional() + .transform((val) => val === "true") + }), response: { 200: z.object({ integration: IntegrationsSchema @@ -183,7 +245,8 @@ export const registerIntegrationRouter = async (server: FastifyZodProvider) => { actorAuthMethod: req.permission.authMethod, actor: req.permission.type, actorOrgId: req.permission.orgId, - id: req.params.integrationId + id: req.params.integrationId, + shouldDeleteIntegrationSecrets: req.query.shouldDeleteIntegrationSecrets }); await server.services.auditLog.createAuditLog({ @@ -205,7 +268,8 @@ export const registerIntegrationRouter = async (server: FastifyZodProvider) => { targetService: integration.targetService, targetServiceId: integration.targetServiceId, path: integration.path, - region: integration.region + region: integration.region, + shouldDeleteIntegrationSecrets: req.query.shouldDeleteIntegrationSecrets // eslint-disable-next-line }) as any } diff --git a/backend/src/server/routes/v1/invite-org-router.ts b/backend/src/server/routes/v1/invite-org-router.ts index 873710f106..9991f60322 100644 --- a/backend/src/server/routes/v1/invite-org-router.ts +++ b/backend/src/server/routes/v1/invite-org-router.ts @@ -1,6 +1,6 @@ import { z } from "zod"; -import { UsersSchema } from "@app/db/schemas"; +import { 
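// The DELETE /integration/:integrationId route above gains a
// shouldDeleteIntegrationSecrets flag; because it travels in the querystring it is
// modeled as the string enum "true" | "false" and transformed into a boolean
// server-side. A hedged call sketch, base URL assumed:
const deleteIntegration = async (integrationId: string, bearerToken: string) => {
  const res = await fetch(
    `https://app.infisical.com/api/v1/integration/${integrationId}?shouldDeleteIntegrationSecrets=true`,
    { method: "DELETE", headers: { Authorization: `Bearer ${bearerToken}` } }
  );
  return (await res.json()).integration;
};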
OrgMembershipRole, ProjectMembershipRole, UsersSchema } from "@app/db/schemas"; import { inviteUserRateLimit } from "@app/server/config/rateLimiter"; import { getTelemetryDistinctId } from "@app/server/lib/telemetry"; import { verifyAuth } from "@app/server/plugins/auth/verify-auth"; @@ -16,23 +16,42 @@ export const registerInviteOrgRouter = async (server: FastifyZodProvider) => { method: "POST", schema: { body: z.object({ - inviteeEmail: z.string().trim().email(), - organizationId: z.string().trim() + inviteeEmails: z.array(z.string().trim().email()), + organizationId: z.string().trim(), + projects: z + .object({ + id: z.string(), + projectRoleSlug: z.string().array().default([ProjectMembershipRole.Member]) + }) + .array() + .optional(), + organizationRoleSlug: z.string().default(OrgMembershipRole.Member) }), response: { 200: z.object({ message: z.string(), - completeInviteLink: z.string().optional() + completeInviteLinks: z + .array( + z.object({ + email: z.string(), + link: z.string() + }) + ) + .optional() }) } }, onRequest: verifyAuth([AuthMode.JWT]), handler: async (req) => { if (req.auth.actor !== ActorType.USER) return; - const completeInviteLink = await server.services.org.inviteUserToOrganization({ + + const { signupTokens: completeInviteLinks } = await server.services.org.inviteUserToOrganization({ orgId: req.body.organizationId, - userId: req.permission.id, - inviteeEmail: req.body.inviteeEmail, + actor: req.permission.type, + actorId: req.permission.id, + inviteeEmails: req.body.inviteeEmails, + projects: req.body.projects, + organizationRoleSlug: req.body.organizationRoleSlug, actorAuthMethod: req.permission.authMethod, actorOrgId: req.permission.orgId }); @@ -41,14 +60,15 @@ export const registerInviteOrgRouter = async (server: FastifyZodProvider) => { event: PostHogEventTypes.UserOrgInvitation, distinctId: getTelemetryDistinctId(req), properties: { - inviteeEmail: req.body.inviteeEmail, + inviteeEmails: req.body.inviteeEmails, + organizationRoleSlug: req.body.organizationRoleSlug, ...req.auditLogInfo } }); return { - completeInviteLink, - message: `Send an invite link to ${req.body.inviteeEmail}` + completeInviteLinks, + message: `Send an invite link to ${req.body.inviteeEmails.join(", ")}` }; } }); diff --git a/backend/src/server/routes/v1/org-admin-router.ts b/backend/src/server/routes/v1/org-admin-router.ts new file mode 100644 index 0000000000..2d28b09bdd --- /dev/null +++ b/backend/src/server/routes/v1/org-admin-router.ts @@ -0,0 +1,90 @@ +import { z } from "zod"; + +import { ProjectMembershipsSchema } from "@app/db/schemas"; +import { EventType } from "@app/ee/services/audit-log/audit-log-types"; +import { readLimit } from "@app/server/config/rateLimiter"; +import { verifyAuth } from "@app/server/plugins/auth/verify-auth"; +import { AuthMode } from "@app/services/auth/auth-type"; + +import { SanitizedProjectSchema } from "../sanitizedSchemas"; + +export const registerOrgAdminRouter = async (server: FastifyZodProvider) => { + server.route({ + method: "GET", + url: "/projects", + config: { + rateLimit: readLimit + }, + schema: { + querystring: z.object({ + search: z.string().optional(), + offset: z.coerce.number().default(0), + limit: z.coerce.number().max(100).default(50) + }), + response: { + 200: z.object({ + projects: SanitizedProjectSchema.array(), + count: z.coerce.number() + }) + } + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + handler: async (req) => { + const { projects, count } = await server.services.orgAdmin.listOrgProjects({ 
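// The invite endpoint above moves from a single inviteeEmail to an inviteeEmails array
// and can now seed project memberships in the same call. A hedged payload sketch; the
// emails and ids are placeholders:
const inviteBody = {
  inviteeEmails: ["dev1@example.com", "dev2@example.com"],
  organizationId: "<org-id>",
  organizationRoleSlug: "member", // schema default: OrgMembershipRole.Member
  projects: [{ id: "<project-id>", projectRoleSlug: ["member"] }] // role slugs default to [ProjectMembershipRole.Member]
};
// The response correspondingly returns completeInviteLinks as [{ email, link }] rather than one link.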
+ limit: req.query.limit, + offset: req.query.offset, + search: req.query.search, + actorOrgId: req.permission.orgId, + actorAuthMethod: req.permission.authMethod, + actorId: req.permission.id, + actor: req.permission.type + }); + return { projects, count }; + } + }); + + server.route({ + method: "POST", + url: "/projects/:projectId/grant-admin-access", + config: { + rateLimit: readLimit + }, + schema: { + params: z.object({ + projectId: z.string() + }), + response: { + 200: z.object({ + membership: ProjectMembershipsSchema + }) + } + }, + onRequest: verifyAuth([AuthMode.JWT]), + handler: async (req) => { + const { membership } = await server.services.orgAdmin.grantProjectAdminAccess({ + actorOrgId: req.permission.orgId, + actorAuthMethod: req.permission.authMethod, + actorId: req.permission.id, + actor: req.permission.type, + projectId: req.params.projectId + }); + if (req.auth.authMode === AuthMode.JWT) { + await server.services.auditLog.createAuditLog({ + ...req.auditLogInfo, + projectId: req.params.projectId, + event: { + type: EventType.ORG_ADMIN_ACCESS_PROJECT, + metadata: { + projectId: req.params.projectId, + username: req.auth.user.username, + email: req.auth.user.email || "", + userId: req.auth.userId + } + } + }); + } + + return { membership }; + } + }); +}; diff --git a/backend/src/server/routes/v1/organization-router.ts b/backend/src/server/routes/v1/organization-router.ts index 808f125bb2..07f795779f 100644 --- a/backend/src/server/routes/v1/organization-router.ts +++ b/backend/src/server/routes/v1/organization-router.ts @@ -1,6 +1,8 @@ +import slugify from "@sindresorhus/slugify"; import { z } from "zod"; import { + AuditLogsSchema, GroupsSchema, IncidentContactsSchema, OrganizationsSchema, @@ -8,10 +10,14 @@ import { OrgRolesSchema, UsersSchema } from "@app/db/schemas"; -import { ORGANIZATIONS } from "@app/lib/api-docs"; +import { EventType, UserAgentType } from "@app/ee/services/audit-log/audit-log-types"; +import { AUDIT_LOGS, ORGANIZATIONS } from "@app/lib/api-docs"; +import { getLastMidnightDateISO } from "@app/lib/fn"; import { readLimit, writeLimit } from "@app/server/config/rateLimiter"; import { verifyAuth } from "@app/server/plugins/auth/verify-auth"; -import { AuthMode } from "@app/services/auth/auth-type"; +import { ActorType, AuthMode, MfaMethod } from "@app/services/auth/auth-type"; + +import { integrationAuthPubSchema } from "../sanitizedSchemas"; export const registerOrgRouter = async (server: FastifyZodProvider) => { server.route({ @@ -23,7 +29,9 @@ export const registerOrgRouter = async (server: FastifyZodProvider) => { schema: { response: { 200: z.object({ - organizations: OrganizationsSchema.array() + organizations: OrganizationsSchema.extend({ + orgAuthMethod: z.string() + }).array() }) } }, @@ -62,6 +70,128 @@ export const registerOrgRouter = async (server: FastifyZodProvider) => { } }); + server.route({ + method: "GET", + url: "/:organizationId/integration-authorizations", + config: { + rateLimit: readLimit + }, + schema: { + params: z.object({ + organizationId: z.string().trim() + }), + response: { + 200: z.object({ + authorizations: integrationAuthPubSchema.array() + }) + } + }, + onRequest: verifyAuth([AuthMode.JWT]), + handler: async (req) => { + const authorizations = await server.services.integrationAuth.listOrgIntegrationAuth({ + actorId: req.permission.id, + actorAuthMethod: req.permission.authMethod, + actor: req.permission.type, + actorOrgId: req.permission.orgId + }); + + return { authorizations }; + } + }); + + server.route({ + method: 
"GET", + url: "/audit-logs", + config: { + rateLimit: readLimit + }, + schema: { + description: "Get all audit logs for an organization", + querystring: z.object({ + projectId: z.string().optional().describe(AUDIT_LOGS.EXPORT.projectId), + actorType: z.nativeEnum(ActorType).optional(), + // eventType is split with , for multiple values, we need to transform it to array + eventType: z + .string() + .optional() + .transform((val) => (val ? val.split(",") : undefined)), + userAgentType: z.nativeEnum(UserAgentType).optional().describe(AUDIT_LOGS.EXPORT.userAgentType), + eventMetadata: z + .string() + .optional() + .transform((val) => { + if (!val) { + return undefined; + } + + const pairs = val.split(","); + + return pairs.reduce( + (acc, pair) => { + const [key, value] = pair.split("="); + if (key && value) { + acc[key] = value; + } + return acc; + }, + {} as Record + ); + }) + .describe(AUDIT_LOGS.EXPORT.eventMetadata), + startDate: z.string().datetime().optional().describe(AUDIT_LOGS.EXPORT.startDate), + endDate: z.string().datetime().optional().describe(AUDIT_LOGS.EXPORT.endDate), + offset: z.coerce.number().default(0).describe(AUDIT_LOGS.EXPORT.offset), + limit: z.coerce.number().default(20).describe(AUDIT_LOGS.EXPORT.limit), + actor: z.string().optional().describe(AUDIT_LOGS.EXPORT.actor) + }), + + response: { + 200: z.object({ + auditLogs: AuditLogsSchema.omit({ + eventMetadata: true, + eventType: true, + actor: true, + actorMetadata: true + }) + .merge( + z.object({ + event: z.object({ + type: z.string(), + metadata: z.any() + }), + actor: z.object({ + type: z.string(), + metadata: z.any() + }) + }) + ) + .array() + }) + } + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + handler: async (req) => { + const auditLogs = await server.services.auditLog.listAuditLogs({ + filter: { + ...req.query, + endDate: req.query.endDate, + projectId: req.query.projectId, + startDate: req.query.startDate || getLastMidnightDateISO(), + auditLogActorId: req.query.actor, + actorType: req.query.actorType, + eventType: req.query.eventType as EventType[] | undefined + }, + + actorId: req.permission.id, + actorOrgId: req.permission.orgId, + actorAuthMethod: req.permission.authMethod, + actor: req.permission.type + }); + + return { auditLogs }; + } + }); + server.route({ method: "GET", url: "/:organizationId/users", @@ -81,7 +211,8 @@ export const registerOrgRouter = async (server: FastifyZodProvider) => { email: true, firstName: true, lastName: true, - id: true + id: true, + superAdmin: true }).merge(z.object({ publicKey: z.string().nullable() })) }) ) @@ -119,7 +250,17 @@ export const registerOrgRouter = async (server: FastifyZodProvider) => { .regex(/^[a-zA-Z0-9-]+$/, "Slug must only contain alphanumeric characters or hyphens") .optional(), authEnforced: z.boolean().optional(), - scimEnabled: z.boolean().optional() + scimEnabled: z.boolean().optional(), + defaultMembershipRoleSlug: z + .string() + .min(1) + .trim() + .refine((v) => slugify(v) === v, { + message: "Membership role must be a valid slug" + }) + .optional(), + enforceMfa: z.boolean().optional(), + selectedMfaMethod: z.nativeEnum(MfaMethod).optional() }), response: { 200: z.object({ diff --git a/backend/src/server/routes/v1/password-router.ts b/backend/src/server/routes/v1/password-router.ts index a8ef3fb775..316ddcb538 100644 --- a/backend/src/server/routes/v1/password-router.ts +++ b/backend/src/server/routes/v1/password-router.ts @@ -51,7 +51,8 @@ export const registerPasswordRouter = async (server: 
FastifyZodProvider) => { encryptedPrivateKeyIV: z.string().trim(), encryptedPrivateKeyTag: z.string().trim(), salt: z.string().trim(), - verifier: z.string().trim() + verifier: z.string().trim(), + password: z.string().trim() }), response: { 200: z.object({ @@ -64,7 +65,7 @@ export const registerPasswordRouter = async (server: FastifyZodProvider) => { const appCfg = getConfig(); await server.services.password.changePassword({ ...req.body, userId: req.permission.id }); - void res.cookie("jid", appCfg.COOKIE_SECRET_SIGN_KEY, { + void res.cookie("jid", "", { httpOnly: true, path: "/", sameSite: "strict", diff --git a/backend/src/server/routes/v1/pki-alert-router.ts b/backend/src/server/routes/v1/pki-alert-router.ts new file mode 100644 index 0000000000..f64ec9e47e --- /dev/null +++ b/backend/src/server/routes/v1/pki-alert-router.ts @@ -0,0 +1,198 @@ +import { z } from "zod"; + +import { PkiAlertsSchema } from "@app/db/schemas"; +import { EventType } from "@app/ee/services/audit-log/audit-log-types"; +import { ALERTS } from "@app/lib/api-docs"; +import { readLimit, writeLimit } from "@app/server/config/rateLimiter"; +import { verifyAuth } from "@app/server/plugins/auth/verify-auth"; +import { AuthMode } from "@app/services/auth/auth-type"; + +export const registerPkiAlertRouter = async (server: FastifyZodProvider) => { + server.route({ + method: "POST", + url: "/", + config: { + rateLimit: writeLimit + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + schema: { + description: "Create PKI alert", + body: z.object({ + projectId: z.string().trim().describe(ALERTS.CREATE.projectId), + pkiCollectionId: z.string().trim().describe(ALERTS.CREATE.pkiCollectionId), + name: z.string().trim().describe(ALERTS.CREATE.name), + alertBeforeDays: z.number().describe(ALERTS.CREATE.alertBeforeDays), + emails: z + .array(z.string().trim().email({ message: "Invalid email address" })) + .min(1, { message: "You must specify at least 1 email" }) + .max(5, { message: "You can specify a maximum of 5 emails" }) + .describe(ALERTS.CREATE.emails) + }), + response: { + 200: PkiAlertsSchema + } + }, + handler: async (req) => { + const alert = await server.services.pkiAlert.createPkiAlert({ + actor: req.permission.type, + actorId: req.permission.id, + actorAuthMethod: req.permission.authMethod, + actorOrgId: req.permission.orgId, + ...req.body + }); + + await server.services.auditLog.createAuditLog({ + ...req.auditLogInfo, + projectId: alert.projectId, + event: { + type: EventType.CREATE_PKI_ALERT, + metadata: { + pkiAlertId: alert.id, + pkiCollectionId: alert.pkiCollectionId, + name: alert.name, + alertBeforeDays: alert.alertBeforeDays, + recipientEmails: alert.recipientEmails + } + } + }); + + return alert; + } + }); + + server.route({ + method: "GET", + url: "/:alertId", + config: { + rateLimit: readLimit + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + schema: { + description: "Get PKI alert", + params: z.object({ + alertId: z.string().trim().describe(ALERTS.GET.alertId) + }), + response: { + 200: PkiAlertsSchema + } + }, + handler: async (req) => { + const alert = await server.services.pkiAlert.getPkiAlertById({ + alertId: req.params.alertId, + actor: req.permission.type, + actorId: req.permission.id, + actorAuthMethod: req.permission.authMethod, + actorOrgId: req.permission.orgId + }); + + await server.services.auditLog.createAuditLog({ + ...req.auditLogInfo, + projectId: alert.projectId, + event: { + type: EventType.GET_PKI_ALERT, + metadata: { + pkiAlertId: 
alert.id + } + } + }); + + return alert; + } + }); + + server.route({ + method: "PATCH", + url: "/:alertId", + config: { + rateLimit: readLimit + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + schema: { + description: "Update PKI alert", + params: z.object({ + alertId: z.string().trim().describe(ALERTS.UPDATE.alertId) + }), + body: z.object({ + name: z.string().trim().optional().describe(ALERTS.UPDATE.name), + alertBeforeDays: z.number().optional().describe(ALERTS.UPDATE.alertBeforeDays), + pkiCollectionId: z.string().trim().optional().describe(ALERTS.UPDATE.pkiCollectionId), + emails: z + .array(z.string().trim().email({ message: "Invalid email address" })) + .min(1, { message: "You must specify at least 1 email" }) + .max(5, { message: "You can specify a maximum of 5 emails" }) + .optional() + .describe(ALERTS.UPDATE.emails) + }), + response: { + 200: PkiAlertsSchema + } + }, + handler: async (req) => { + const alert = await server.services.pkiAlert.updatePkiAlert({ + alertId: req.params.alertId, + actor: req.permission.type, + actorId: req.permission.id, + actorAuthMethod: req.permission.authMethod, + actorOrgId: req.permission.orgId, + ...req.body + }); + + await server.services.auditLog.createAuditLog({ + ...req.auditLogInfo, + projectId: alert.projectId, + event: { + type: EventType.UPDATE_PKI_ALERT, + metadata: { + pkiAlertId: alert.id, + pkiCollectionId: alert.pkiCollectionId, + name: alert.name, + alertBeforeDays: alert.alertBeforeDays, + recipientEmails: alert.recipientEmails + } + } + }); + + return alert; + } + }); + + server.route({ + method: "DELETE", + url: "/:alertId", + config: { + rateLimit: writeLimit + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + schema: { + description: "Delete PKI alert", + params: z.object({ + alertId: z.string().trim().describe(ALERTS.DELETE.alertId) + }), + response: { + 200: PkiAlertsSchema + } + }, + handler: async (req) => { + const alert = await server.services.pkiAlert.deletePkiAlert({ + alertId: req.params.alertId, + actor: req.permission.type, + actorId: req.permission.id, + actorAuthMethod: req.permission.authMethod, + actorOrgId: req.permission.orgId + }); + + await server.services.auditLog.createAuditLog({ + ...req.auditLogInfo, + projectId: alert.projectId, + event: { + type: EventType.DELETE_PKI_ALERT, + metadata: { + pkiAlertId: alert.id + } + } + }); + + return alert; + } + }); +}; diff --git a/backend/src/server/routes/v1/pki-collection-router.ts b/backend/src/server/routes/v1/pki-collection-router.ts new file mode 100644 index 0000000000..2f2add5c1c --- /dev/null +++ b/backend/src/server/routes/v1/pki-collection-router.ts @@ -0,0 +1,338 @@ +import { z } from "zod"; + +import { PkiCollectionItemsSchema, PkiCollectionsSchema } from "@app/db/schemas"; +import { EventType } from "@app/ee/services/audit-log/audit-log-types"; +import { PKI_COLLECTIONS } from "@app/lib/api-docs"; +import { readLimit, writeLimit } from "@app/server/config/rateLimiter"; +import { verifyAuth } from "@app/server/plugins/auth/verify-auth"; +import { AuthMode } from "@app/services/auth/auth-type"; +import { PkiItemType } from "@app/services/pki-collection/pki-collection-types"; + +export const registerPkiCollectionRouter = async (server: FastifyZodProvider) => { + server.route({ + method: "POST", + url: "/", + config: { + rateLimit: writeLimit + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + schema: { + description: "Create PKI collection", + body: z.object({ + 
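// Example body (an illustrative sketch; values are hypothetical): { "projectId": "<projectId>", "name": "my-collection", "description": "" } +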
projectId: z.string().trim().describe(PKI_COLLECTIONS.CREATE.projectId), + name: z.string().trim().describe(PKI_COLLECTIONS.CREATE.name), + description: z.string().trim().default("").describe(PKI_COLLECTIONS.CREATE.description) + }), + response: { + 200: PkiCollectionsSchema + } + }, + handler: async (req) => { + const pkiCollection = await server.services.pkiCollection.createPkiCollection({ + actor: req.permission.type, + actorId: req.permission.id, + actorAuthMethod: req.permission.authMethod, + actorOrgId: req.permission.orgId, + ...req.body + }); + + await server.services.auditLog.createAuditLog({ + ...req.auditLogInfo, + projectId: pkiCollection.projectId, + event: { + type: EventType.CREATE_PKI_COLLECTION, + metadata: { + pkiCollectionId: pkiCollection.id, + name: pkiCollection.name + } + } + }); + + return pkiCollection; + } + }); + + server.route({ + method: "GET", + url: "/:collectionId", + config: { + rateLimit: readLimit + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + schema: { + description: "Get PKI collection", + params: z.object({ + collectionId: z.string().trim().describe(PKI_COLLECTIONS.GET.collectionId) + }), + response: { + 200: PkiCollectionsSchema + } + }, + handler: async (req) => { + const pkiCollection = await server.services.pkiCollection.getPkiCollectionById({ + collectionId: req.params.collectionId, + actor: req.permission.type, + actorId: req.permission.id, + actorAuthMethod: req.permission.authMethod, + actorOrgId: req.permission.orgId + }); + + await server.services.auditLog.createAuditLog({ + ...req.auditLogInfo, + projectId: pkiCollection.projectId, + event: { + type: EventType.GET_PKI_COLLECTION, + metadata: { + pkiCollectionId: pkiCollection.id + } + } + }); + + return pkiCollection; + } + }); + + server.route({ + method: "PATCH", + url: "/:collectionId", + config: { + rateLimit: writeLimit + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + schema: { + description: "Update PKI collection", + params: z.object({ + collectionId: z.string().trim().describe(PKI_COLLECTIONS.UPDATE.collectionId) + }), + body: z.object({ + name: z.string().trim().optional().describe(PKI_COLLECTIONS.UPDATE.name), + description: z.string().trim().optional().describe(PKI_COLLECTIONS.UPDATE.description) + }), + response: { + 200: PkiCollectionsSchema + } + }, + handler: async (req) => { + const pkiCollection = await server.services.pkiCollection.updatePkiCollection({ + collectionId: req.params.collectionId, + actor: req.permission.type, + actorId: req.permission.id, + actorAuthMethod: req.permission.authMethod, + actorOrgId: req.permission.orgId, + ...req.body + }); + + await server.services.auditLog.createAuditLog({ + ...req.auditLogInfo, + projectId: pkiCollection.projectId, + event: { + type: EventType.UPDATE_PKI_COLLECTION, + metadata: { + pkiCollectionId: pkiCollection.id, + name: pkiCollection.name + } + } + }); + + return pkiCollection; + } + }); + + server.route({ + method: "DELETE", + url: "/:collectionId", + config: { + rateLimit: writeLimit + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + schema: { + description: "Delete PKI collection", + params: z.object({ + collectionId: z.string().trim().describe(PKI_COLLECTIONS.DELETE.collectionId) + }), + response: { + 200: PkiCollectionsSchema + } + }, + handler: async (req) => { + const pkiCollection = await server.services.pkiCollection.deletePkiCollection({ + collectionId: req.params.collectionId, + actor: req.permission.type, + actorId: 
req.permission.id, + actorAuthMethod: req.permission.authMethod, + actorOrgId: req.permission.orgId + }); + + await server.services.auditLog.createAuditLog({ + ...req.auditLogInfo, + projectId: pkiCollection.projectId, + event: { + type: EventType.DELETE_PKI_COLLECTION, + metadata: { + pkiCollectionId: pkiCollection.id + } + } + }); + + return pkiCollection; + } + }); + + server.route({ + method: "GET", + url: "/:collectionId/items", + config: { + rateLimit: writeLimit + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + schema: { + description: "Get items in PKI collection", + params: z.object({ + collectionId: z.string().trim().describe(PKI_COLLECTIONS.LIST_ITEMS.collectionId) + }), + querystring: z.object({ + type: z.nativeEnum(PkiItemType).optional().describe(PKI_COLLECTIONS.LIST_ITEMS.type), + offset: z.coerce.number().min(0).max(100).default(0).describe(PKI_COLLECTIONS.LIST_ITEMS.offset), + limit: z.coerce.number().min(1).max(100).default(25).describe(PKI_COLLECTIONS.LIST_ITEMS.limit) + }), + response: { + 200: z.object({ + collectionItems: z.array( + PkiCollectionItemsSchema.omit({ caId: true, certId: true }).extend({ + type: z.nativeEnum(PkiItemType), + itemId: z.string().trim(), + notBefore: z.date(), + notAfter: z.date(), + friendlyName: z.string().trim() + }) + ), + totalCount: z.number() + }) + } + }, + handler: async (req) => { + const { pkiCollection, pkiCollectionItems, totalCount } = + await server.services.pkiCollection.getPkiCollectionItems({ + collectionId: req.params.collectionId, + actor: req.permission.type, + actorId: req.permission.id, + actorAuthMethod: req.permission.authMethod, + actorOrgId: req.permission.orgId, + ...req.query + }); + + await server.services.auditLog.createAuditLog({ + ...req.auditLogInfo, + projectId: pkiCollection.projectId, + event: { + type: EventType.GET_PKI_COLLECTION_ITEMS, + metadata: { + pkiCollectionId: pkiCollection.id + } + } + }); + + return { + collectionItems: pkiCollectionItems, + totalCount + }; + } + }); + + server.route({ + method: "POST", + url: "/:collectionId/items", + config: { + rateLimit: writeLimit + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + schema: { + description: "Add item to PKI collection", + params: z.object({ + collectionId: z.string().trim().describe(PKI_COLLECTIONS.ADD_ITEM.collectionId) + }), + body: z.object({ + type: z.nativeEnum(PkiItemType).describe(PKI_COLLECTIONS.ADD_ITEM.type), + itemId: z.string().trim().describe(PKI_COLLECTIONS.ADD_ITEM.itemId) + }), + response: { + 200: PkiCollectionItemsSchema.omit({ caId: true, certId: true }).extend({ + type: z.nativeEnum(PkiItemType).describe(PKI_COLLECTIONS.ADD_ITEM.type), + itemId: z.string().trim().describe(PKI_COLLECTIONS.ADD_ITEM.itemId) + }) + } + }, + handler: async (req) => { + const { pkiCollection, pkiCollectionItem } = await server.services.pkiCollection.addItemToPkiCollection({ + collectionId: req.params.collectionId, + actor: req.permission.type, + actorId: req.permission.id, + actorAuthMethod: req.permission.authMethod, + actorOrgId: req.permission.orgId, + ...req.body + }); + + await server.services.auditLog.createAuditLog({ + ...req.auditLogInfo, + projectId: pkiCollection.projectId, + event: { + type: EventType.ADD_PKI_COLLECTION_ITEM, + metadata: { + pkiCollectionId: pkiCollection.id, + pkiCollectionItemId: pkiCollectionItem.id, + type: pkiCollectionItem.type, + itemId: pkiCollectionItem.itemId + } + } + }); + + return pkiCollectionItem; + } + }); + + server.route({ + method: 
"DELETE", + url: "/:collectionId/items/:collectionItemId", + config: { + rateLimit: writeLimit + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + schema: { + description: "Remove item from PKI collection", + params: z.object({ + collectionId: z.string().trim().describe(PKI_COLLECTIONS.DELETE_ITEM.collectionId), + collectionItemId: z.string().trim().describe(PKI_COLLECTIONS.DELETE_ITEM.collectionItemId) + }), + response: { + 200: PkiCollectionItemsSchema.omit({ caId: true, certId: true }).extend({ + type: z.nativeEnum(PkiItemType).describe(PKI_COLLECTIONS.DELETE_ITEM.type), + itemId: z.string().trim().describe(PKI_COLLECTIONS.DELETE_ITEM.itemId) + }) + } + }, + handler: async (req) => { + const { pkiCollection, pkiCollectionItem } = await server.services.pkiCollection.removeItemFromPkiCollection({ + collectionId: req.params.collectionId, + itemId: req.params.collectionItemId, + actor: req.permission.type, + actorId: req.permission.id, + actorAuthMethod: req.permission.authMethod, + actorOrgId: req.permission.orgId + }); + + await server.services.auditLog.createAuditLog({ + ...req.auditLogInfo, + projectId: pkiCollection.projectId, + event: { + type: EventType.DELETE_PKI_COLLECTION_ITEM, + metadata: { + pkiCollectionId: pkiCollection.id, + pkiCollectionItemId: pkiCollectionItem.id + } + } + }); + + return pkiCollectionItem; + } + }); +}; diff --git a/backend/src/server/routes/v1/project-env-router.ts b/backend/src/server/routes/v1/project-env-router.ts index 341b8a184e..c5ded83e46 100644 --- a/backend/src/server/routes/v1/project-env-router.ts +++ b/backend/src/server/routes/v1/project-env-router.ts @@ -4,11 +4,107 @@ import { z } from "zod"; import { ProjectEnvironmentsSchema } from "@app/db/schemas"; import { EventType } from "@app/ee/services/audit-log/audit-log-types"; import { ENVIRONMENTS } from "@app/lib/api-docs"; -import { writeLimit } from "@app/server/config/rateLimiter"; +import { readLimit, writeLimit } from "@app/server/config/rateLimiter"; import { verifyAuth } from "@app/server/plugins/auth/verify-auth"; import { AuthMode } from "@app/services/auth/auth-type"; export const registerProjectEnvRouter = async (server: FastifyZodProvider) => { + server.route({ + method: "GET", + url: "/:workspaceId/environments/:envId", + config: { + rateLimit: writeLimit + }, + schema: { + description: "Get Environment", + security: [ + { + bearerAuth: [] + } + ], + params: z.object({ + // NOTE(daniel): workspaceId isn't used, but we need to keep it for backwards compatibility. The endpoint defined below, uses no project ID, and is takes a pure environment ID. 
+ workspaceId: z.string().trim().describe(ENVIRONMENTS.GET.workspaceId), + envId: z.string().trim().describe(ENVIRONMENTS.GET.id) + }), + response: { + 200: z.object({ + environment: ProjectEnvironmentsSchema + }) + } + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + handler: async (req) => { + const environment = await server.services.projectEnv.getEnvironmentById({ + actorId: req.permission.id, + actor: req.permission.type, + actorOrgId: req.permission.orgId, + actorAuthMethod: req.permission.authMethod, + id: req.params.envId + }); + + await server.services.auditLog.createAuditLog({ + ...req.auditLogInfo, + projectId: environment.projectId, + event: { + type: EventType.GET_ENVIRONMENT, + metadata: { + id: environment.id + } + } + }); + + return { environment }; + } + }); + + server.route({ + method: "GET", + url: "/environments/:envId", + config: { + rateLimit: readLimit + }, + schema: { + description: "Get Environment by ID", + security: [ + { + bearerAuth: [] + } + ], + params: z.object({ + envId: z.string().trim().describe(ENVIRONMENTS.GET.id) + }), + response: { + 200: z.object({ + environment: ProjectEnvironmentsSchema + }) + } + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + handler: async (req) => { + const environment = await server.services.projectEnv.getEnvironmentById({ + actorId: req.permission.id, + actor: req.permission.type, + actorOrgId: req.permission.orgId, + actorAuthMethod: req.permission.authMethod, + id: req.params.envId + }); + + await server.services.auditLog.createAuditLog({ + ...req.auditLogInfo, + projectId: environment.projectId, + event: { + type: EventType.GET_ENVIRONMENT, + metadata: { + id: environment.id + } + } + }); + + return { environment }; + } + }); + server.route({ method: "POST", url: "/:workspaceId/environments", @@ -27,6 +123,7 @@ export const registerProjectEnvRouter = async (server: FastifyZodProvider) => { }), body: z.object({ name: z.string().trim().describe(ENVIRONMENTS.CREATE.name), + position: z.number().min(1).optional().describe(ENVIRONMENTS.CREATE.position), slug: z .string() .trim() diff --git a/backend/src/server/routes/v1/project-membership-router.ts b/backend/src/server/routes/v1/project-membership-router.ts index 6bbb8d7ef3..3c3a1ada4f 100644 --- a/backend/src/server/routes/v1/project-membership-router.ts +++ b/backend/src/server/routes/v1/project-membership-router.ts @@ -39,7 +39,8 @@ export const registerProjectMembershipRouter = async (server: FastifyZodProvider email: true, firstName: true, lastName: true, - id: true + id: true, + username: true }).merge(UserEncryptionKeysSchema.pick({ publicKey: true })), roles: z.array( z.object({ @@ -56,7 +57,7 @@ export const registerProjectMembershipRouter = async (server: FastifyZodProvider }) ) }) - .omit({ createdAt: true, updatedAt: true }) + .omit({ updatedAt: true }) .array() }) } @@ -74,6 +75,65 @@ export const registerProjectMembershipRouter = async (server: FastifyZodProvider } }); + server.route({ + method: "GET", + url: "/:workspaceId/memberships/:membershipId", + config: { + rateLimit: readLimit + }, + schema: { + description: "Return project user membership", + security: [ + { + bearerAuth: [] + } + ], + params: z.object({ + workspaceId: z.string().min(1).trim().describe(PROJECT_USERS.GET_USER_MEMBERSHIP.workspaceId), + membershipId: z.string().min(1).trim().describe(PROJECT_USERS.GET_USER_MEMBERSHIP.membershipId) + }), + response: { + 200: z.object({ + membership: ProjectMembershipsSchema.extend({ + user: 
UsersSchema.pick({ + email: true, + firstName: true, + lastName: true, + id: true, + username: true + }).merge(UserEncryptionKeysSchema.pick({ publicKey: true })), + roles: z.array( + z.object({ + id: z.string(), + role: z.string(), + customRoleId: z.string().optional().nullable(), + customRoleName: z.string().optional().nullable(), + customRoleSlug: z.string().optional().nullable(), + isTemporary: z.boolean(), + temporaryMode: z.string().optional().nullable(), + temporaryRange: z.string().nullable().optional(), + temporaryAccessStartTime: z.date().nullable().optional(), + temporaryAccessEndTime: z.date().nullable().optional() + }) + ) + }).omit({ updatedAt: true }) + }) + } + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + handler: async (req) => { + const membership = await server.services.projectMembership.getProjectMembershipById({ + actorId: req.permission.id, + actor: req.permission.type, + actorAuthMethod: req.permission.authMethod, + actorOrgId: req.permission.orgId, + projectId: req.params.workspaceId, + id: req.params.membershipId + }); + return { membership }; + } + }); + server.route({ method: "POST", url: "/:workspaceId/memberships/details", @@ -309,4 +369,32 @@ export const registerProjectMembershipRouter = async (server: FastifyZodProvider return { membership }; } }); + + server.route({ + method: "DELETE", + url: "/:workspaceId/leave", + config: { + rateLimit: writeLimit + }, + schema: { + params: z.object({ + workspaceId: z.string().trim() + }), + response: { + 200: z.object({ + membership: ProjectMembershipsSchema + }) + } + }, + + onRequest: verifyAuth([AuthMode.JWT]), + handler: async (req) => { + const membership = await server.services.projectMembership.leaveProject({ + actorId: req.permission.id, + actor: req.permission.type, + projectId: req.params.workspaceId + }); + return { membership }; + } + }); }; diff --git a/backend/src/server/routes/v1/project-router.ts b/backend/src/server/routes/v1/project-router.ts index 1cf655a973..e5e2f636cd 100644 --- a/backend/src/server/routes/v1/project-router.ts +++ b/backend/src/server/routes/v1/project-router.ts @@ -3,20 +3,23 @@ import { z } from "zod"; import { IntegrationsSchema, ProjectMembershipsSchema, - ProjectsSchema, + ProjectRolesSchema, + ProjectSlackConfigsSchema, UserEncryptionKeysSchema, UsersSchema } from "@app/db/schemas"; +import { EventType } from "@app/ee/services/audit-log/audit-log-types"; import { PROJECTS } from "@app/lib/api-docs"; import { readLimit, writeLimit } from "@app/server/config/rateLimiter"; import { verifyAuth } from "@app/server/plugins/auth/verify-auth"; import { AuthMode } from "@app/services/auth/auth-type"; import { ProjectFilterType } from "@app/services/project/project-types"; +import { validateSlackChannelsField } from "@app/services/slack/slack-auth-validators"; -import { integrationAuthPubSchema } from "../sanitizedSchemas"; +import { integrationAuthPubSchema, SanitizedProjectSchema } from "../sanitizedSchemas"; import { sanitizedServiceTokenSchema } from "../v2/service-token-router"; -const projectWithEnv = ProjectsSchema.merge( +const projectWithEnv = SanitizedProjectSchema.merge( z.object({ _id: z.string(), environments: z.object({ name: z.string(), slug: z.string(), id: z.string() }).array() @@ -65,12 +68,19 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => { rateLimit: readLimit }, schema: { + querystring: z.object({ + includeGroupMembers: z + .enum(["true", "false"]) + .default("false") + .transform((value) => value === 
"true") + }), params: z.object({ workspaceId: z.string().trim() }), response: { 200: z.object({ users: ProjectMembershipsSchema.extend({ + isGroupMember: z.boolean(), user: UsersSchema.pick({ email: true, username: true, @@ -78,6 +88,7 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => { lastName: true, id: true }).merge(UserEncryptionKeysSchema.pick({ publicKey: true })), + project: SanitizedProjectSchema.pick({ name: true, id: true }), roles: z.array( z.object({ id: z.string(), @@ -104,9 +115,11 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => { actorId: req.permission.id, actor: req.permission.type, actorAuthMethod: req.permission.authMethod, + includeGroupMembers: req.query.includeGroupMembers, projectId: req.params.workspaceId, actorOrgId: req.permission.orgId }); + return { users }; } }); @@ -118,15 +131,31 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => { rateLimit: readLimit }, schema: { + querystring: z.object({ + includeRoles: z + .enum(["true", "false"]) + .default("false") + .transform((value) => value === "true") + }), response: { 200: z.object({ - workspaces: projectWithEnv.array() + workspaces: projectWithEnv + .extend({ + roles: ProjectRolesSchema.array().optional() + }) + .array() }) } }, onRequest: verifyAuth([AuthMode.JWT, AuthMode.API_KEY]), handler: async (req) => { - const workspaces = await server.services.project.getProjects(req.permission.id); + const workspaces = await server.services.project.getProjects({ + includeRoles: req.query.includeRoles, + actorId: req.permission.id, + actorAuthMethod: req.permission.authMethod, + actor: req.permission.type, + actorOrgId: req.permission.orgId + }); return { workspaces }; } }); @@ -187,7 +216,7 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => { }), response: { 200: z.object({ - workspace: ProjectsSchema.optional() + workspace: SanitizedProjectSchema.optional() }) } }, @@ -223,7 +252,7 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => { response: { 200: z.object({ message: z.string(), - workspace: ProjectsSchema + workspace: SanitizedProjectSchema }) } }, @@ -271,7 +300,7 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => { }), response: { 200: z.object({ - workspace: ProjectsSchema + workspace: SanitizedProjectSchema }) } }, @@ -313,7 +342,7 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => { response: { 200: z.object({ message: z.string(), - workspace: ProjectsSchema + workspace: SanitizedProjectSchema }) } }, @@ -334,6 +363,82 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => { } }); + server.route({ + method: "PUT", + url: "/:workspaceSlug/version-limit", + config: { + rateLimit: writeLimit + }, + schema: { + params: z.object({ + workspaceSlug: z.string().trim() + }), + body: z.object({ + pitVersionLimit: z.number().min(1).max(100) + }), + response: { + 200: z.object({ + message: z.string(), + workspace: SanitizedProjectSchema + }) + } + }, + onRequest: verifyAuth([AuthMode.JWT]), + handler: async (req) => { + const workspace = await server.services.project.updateVersionLimit({ + actorId: req.permission.id, + actor: req.permission.type, + actorAuthMethod: req.permission.authMethod, + actorOrgId: req.permission.orgId, + pitVersionLimit: req.body.pitVersionLimit, + workspaceSlug: req.params.workspaceSlug + }); + + return { + message: "Successfully changed workspace version limit", + workspace + }; + } 
+ }); + + server.route({ + method: "PUT", + url: "/:workspaceSlug/audit-logs-retention", + config: { + rateLimit: writeLimit + }, + schema: { + params: z.object({ + workspaceSlug: z.string().trim() + }), + body: z.object({ + auditLogsRetentionDays: z.number().min(0) + }), + response: { + 200: z.object({ + message: z.string(), + workspace: SanitizedProjectSchema + }) + } + }, + onRequest: verifyAuth([AuthMode.JWT]), + handler: async (req) => { + const workspace = await server.services.project.updateAuditLogsRetention({ + actorId: req.permission.id, + actor: req.permission.type, + actorAuthMethod: req.permission.authMethod, + actorOrgId: req.permission.orgId, + workspaceSlug: req.params.workspaceSlug, + auditLogsRetentionDays: req.body.auditLogsRetentionDays + }); + + return { + message: "Successfully updated project's audit logs retention period", + workspace + }; + } + }); + server.route({ method: "GET", url: "/:workspaceId/integrations", @@ -440,4 +545,111 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => { return { serviceTokenData }; } }); + + server.route({ + method: "GET", + url: "/:workspaceId/slack-config", + config: { + rateLimit: readLimit + }, + schema: { + params: z.object({ + workspaceId: z.string().trim() + }), + response: { + 200: ProjectSlackConfigsSchema.pick({ + id: true, + slackIntegrationId: true, + isAccessRequestNotificationEnabled: true, + accessRequestChannels: true, + isSecretRequestNotificationEnabled: true, + secretRequestChannels: true + }) + } + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + handler: async (req) => { + const slackConfig = await server.services.project.getProjectSlackConfig({ + actorId: req.permission.id, + actorAuthMethod: req.permission.authMethod, + actor: req.permission.type, + actorOrgId: req.permission.orgId, + projectId: req.params.workspaceId + }); + + if (slackConfig) { + await server.services.auditLog.createAuditLog({ + ...req.auditLogInfo, + projectId: req.params.workspaceId, + event: { + type: EventType.GET_PROJECT_SLACK_CONFIG, + metadata: { + id: slackConfig.id + } + } + }); + } + + return slackConfig; + } + }); + + server.route({ + method: "PUT", + url: "/:workspaceId/slack-config", + config: { + rateLimit: readLimit + }, + schema: { + params: z.object({ + workspaceId: z.string().trim() + }), + body: z.object({ + slackIntegrationId: z.string(), + isAccessRequestNotificationEnabled: z.boolean(), + accessRequestChannels: validateSlackChannelsField, + isSecretRequestNotificationEnabled: z.boolean(), + secretRequestChannels: validateSlackChannelsField + }), + response: { + 200: ProjectSlackConfigsSchema.pick({ + id: true, + slackIntegrationId: true, + isAccessRequestNotificationEnabled: true, + accessRequestChannels: true, + isSecretRequestNotificationEnabled: true, + secretRequestChannels: true + }) + } + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + handler: async (req) => { + const slackConfig = await server.services.project.updateProjectSlackConfig({ + actorId: req.permission.id, + actorAuthMethod: req.permission.authMethod, + actor: req.permission.type, + actorOrgId: req.permission.orgId, + projectId: req.params.workspaceId, + ...req.body + }); + + await server.services.auditLog.createAuditLog({ + ...req.auditLogInfo, + projectId: req.params.workspaceId, + event: { + type: EventType.UPDATE_PROJECT_SLACK_CONFIG, + metadata: { + id: slackConfig.id, + slackIntegrationId: slackConfig.slackIntegrationId, + isAccessRequestNotificationEnabled: 
slackConfig.isAccessRequestNotificationEnabled, + accessRequestChannels: slackConfig.accessRequestChannels, + isSecretRequestNotificationEnabled: slackConfig.isSecretRequestNotificationEnabled, + secretRequestChannels: slackConfig.secretRequestChannels + } + } + }); + + return slackConfig; + } + }); }; diff --git a/backend/src/server/routes/v1/secret-folder-router.ts b/backend/src/server/routes/v1/secret-folder-router.ts index 1a1747f641..088afec47b 100644 --- a/backend/src/server/routes/v1/secret-folder-router.ts +++ b/backend/src/server/routes/v1/secret-folder-router.ts @@ -3,7 +3,8 @@ import { z } from "zod"; import { SecretFoldersSchema } from "@app/db/schemas"; import { EventType } from "@app/ee/services/audit-log/audit-log-types"; import { FOLDERS } from "@app/lib/api-docs"; -import { removeTrailingSlash } from "@app/lib/fn"; +import { prefixWithSlash, removeTrailingSlash } from "@app/lib/fn"; +import { isValidFolderName } from "@app/lib/validator"; import { readLimit, secretsLimit } from "@app/server/config/rateLimiter"; import { verifyAuth } from "@app/server/plugins/auth/verify-auth"; import { AuthMode } from "@app/services/auth/auth-type"; @@ -25,10 +26,28 @@ export const registerSecretFolderRouter = async (server: FastifyZodProvider) => body: z.object({ workspaceId: z.string().trim().describe(FOLDERS.CREATE.workspaceId), environment: z.string().trim().describe(FOLDERS.CREATE.environment), - name: z.string().trim().describe(FOLDERS.CREATE.name), - path: z.string().trim().default("/").transform(removeTrailingSlash).describe(FOLDERS.CREATE.path), + name: z + .string() + .trim() + .describe(FOLDERS.CREATE.name) + .refine((name) => isValidFolderName(name), { + message: "Invalid folder name. Only alphanumeric characters, dashes, and underscores are allowed." + }), + path: z + .string() + .trim() + .default("/") + .transform(prefixWithSlash) + .transform(removeTrailingSlash) + .describe(FOLDERS.CREATE.path), // backward compatibility with cli - directory: z.string().trim().default("/").transform(removeTrailingSlash).describe(FOLDERS.CREATE.directory) + directory: z + .string() + .trim() + .default("/") + .transform(prefixWithSlash) + .transform(removeTrailingSlash) + .describe(FOLDERS.CREATE.directory) }), response: { 200: z.object({ @@ -85,10 +104,28 @@ export const registerSecretFolderRouter = async (server: FastifyZodProvider) => body: z.object({ workspaceId: z.string().trim().describe(FOLDERS.UPDATE.workspaceId), environment: z.string().trim().describe(FOLDERS.UPDATE.environment), - name: z.string().trim().describe(FOLDERS.UPDATE.name), - path: z.string().trim().default("/").transform(removeTrailingSlash).describe(FOLDERS.UPDATE.path), + name: z + .string() + .trim() + .describe(FOLDERS.UPDATE.name) + .refine((name) => isValidFolderName(name), { + message: "Invalid folder name. Only alphanumeric characters, dashes, and underscores are allowed."
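+ // e.g. "api_v2-config" passes isValidFolderName, while "my folder!" is rejected (illustrative examples)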
+ }), + path: z + .string() + .trim() + .default("/") + .transform(prefixWithSlash) + .transform(removeTrailingSlash) + .describe(FOLDERS.UPDATE.path), // backward compatibility with cli - directory: z.string().trim().default("/").transform(removeTrailingSlash).describe(FOLDERS.UPDATE.directory) + directory: z + .string() + .trim() + .default("/") + .transform(prefixWithSlash) + .transform(removeTrailingSlash) + .describe(FOLDERS.UPDATE.directory) }), response: { 200: z.object({ @@ -146,8 +183,20 @@ export const registerSecretFolderRouter = async (server: FastifyZodProvider) => .object({ id: z.string().describe(FOLDERS.UPDATE.folderId), environment: z.string().trim().describe(FOLDERS.UPDATE.environment), - name: z.string().trim().describe(FOLDERS.UPDATE.name), - path: z.string().trim().default("/").transform(removeTrailingSlash).describe(FOLDERS.UPDATE.path) + name: z + .string() + .trim() + .describe(FOLDERS.UPDATE.name) + .refine((name) => isValidFolderName(name), { + message: "Invalid folder name. Only alphanumeric characters, dashes, and underscores are allowed." + }), + path: z + .string() + .trim() + .default("/") + .transform(prefixWithSlash) + .transform(removeTrailingSlash) + .describe(FOLDERS.UPDATE.path) }) .array() .min(1) @@ -211,9 +260,21 @@ export const registerSecretFolderRouter = async (server: FastifyZodProvider) => body: z.object({ workspaceId: z.string().trim().describe(FOLDERS.DELETE.workspaceId), environment: z.string().trim().describe(FOLDERS.DELETE.environment), - path: z.string().trim().default("/").transform(removeTrailingSlash).describe(FOLDERS.DELETE.path), + path: z + .string() + .trim() + .default("/") + .transform(prefixWithSlash) + .transform(removeTrailingSlash) + .describe(FOLDERS.DELETE.path), // keep this here as the CLI needs directory - directory: z.string().trim().default("/").transform(removeTrailingSlash).describe(FOLDERS.DELETE.directory) + directory: z + .string() + .trim() + .default("/") + .transform(prefixWithSlash) + .transform(removeTrailingSlash) + .describe(FOLDERS.DELETE.directory) }), response: { 200: z.object({ @@ -267,9 +328,21 @@ export const registerSecretFolderRouter = async (server: FastifyZodProvider) => querystring: z.object({ workspaceId: z.string().trim().describe(FOLDERS.LIST.workspaceId), environment: z.string().trim().describe(FOLDERS.LIST.environment), - path: z.string().trim().default("/").transform(removeTrailingSlash).describe(FOLDERS.LIST.path), + path: z + .string() + .trim() + .default("/") + .transform(prefixWithSlash) + .transform(removeTrailingSlash) + .describe(FOLDERS.LIST.path), // backward compatibility with cli - directory: z.string().trim().default("/").transform(removeTrailingSlash).describe(FOLDERS.LIST.directory) + directory: z + .string() + .trim() + .default("/") + .transform(prefixWithSlash) + .transform(removeTrailingSlash) + .describe(FOLDERS.LIST.directory) }), response: { 200: z.object({ @@ -292,4 +365,47 @@ export const registerSecretFolderRouter = async (server: FastifyZodProvider) => return { folders }; } }); + + server.route({ + method: "GET", + url: "/:id", + config: { + rateLimit: readLimit + }, + schema: { + description: "Get folder by id", + security: [ + { + bearerAuth: [] + } + ], + params: z.object({ + id: z.string().trim().describe(FOLDERS.GET_BY_ID.folderId) + }), + response: { + 200: z.object({ + folder: SecretFoldersSchema.extend({ + environment: z.object({ + envId: z.string(), + envName: z.string(), + envSlug: z.string() + }), + path: z.string(), + projectId: z.string() + }) + }) + } +
}, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.API_KEY, AuthMode.SERVICE_TOKEN, AuthMode.IDENTITY_ACCESS_TOKEN]), + handler: async (req) => { + const folder = await server.services.folder.getFolderById({ + actorId: req.permission.id, + actor: req.permission.type, + actorAuthMethod: req.permission.authMethod, + actorOrgId: req.permission.orgId, + id: req.params.id + }); + return { folder }; + } + }); }; diff --git a/backend/src/server/routes/v1/secret-import-router.ts b/backend/src/server/routes/v1/secret-import-router.ts index ca604e7382..aa6efdf369 100644 --- a/backend/src/server/routes/v1/secret-import-router.ts +++ b/backend/src/server/routes/v1/secret-import-router.ts @@ -8,6 +8,8 @@ import { readLimit, secretsLimit } from "@app/server/config/rateLimiter"; import { verifyAuth } from "@app/server/plugins/auth/verify-auth"; import { AuthMode } from "@app/services/auth/auth-type"; +import { secretRawSchema } from "../sanitizedSchemas"; + export const registerSecretImportRouter = async (server: FastifyZodProvider) => { server.route({ method: "POST", @@ -310,6 +312,64 @@ export const registerSecretImportRouter = async (server: FastifyZodProvider) => } }); + server.route({ + url: "/:secretImportId", + method: "GET", + config: { + rateLimit: readLimit + }, + schema: { + description: "Get single secret import", + security: [ + { + bearerAuth: [] + } + ], + params: z.object({ + secretImportId: z.string().trim().describe(SECRET_IMPORTS.GET.secretImportId) + }), + response: { + 200: z.object({ + secretImport: SecretImportsSchema.omit({ importEnv: true }).extend({ + environment: z.object({ + id: z.string(), + name: z.string(), + slug: z.string() + }), + projectId: z.string(), + importEnv: z.object({ name: z.string(), slug: z.string(), id: z.string() }), + secretPath: z.string() + }) + }) + } + }, + + onRequest: verifyAuth([AuthMode.JWT, AuthMode.API_KEY, AuthMode.SERVICE_TOKEN, AuthMode.IDENTITY_ACCESS_TOKEN]), + handler: async (req) => { + const secretImport = await server.services.secretImport.getImportById({ + actorId: req.permission.id, + actor: req.permission.type, + actorAuthMethod: req.permission.authMethod, + actorOrgId: req.permission.orgId, + id: req.params.secretImportId + }); + + await server.services.auditLog.createAuditLog({ + ...req.auditLogInfo, + projectId: secretImport.projectId, + event: { + type: EventType.GET_SECRET_IMPORT, + metadata: { + secretImportId: secretImport.id, + folderId: secretImport.folderId + } + } + }); + + return { secretImport }; + } + }); + server.route({ url: "/secrets", method: "GET", @@ -353,4 +413,48 @@ export const registerSecretImportRouter = async (server: FastifyZodProvider) => return { secrets: importedSecrets }; } }); + + server.route({ + url: "/secrets/raw", + method: "GET", + config: { + rateLimit: secretsLimit + }, + schema: { + querystring: z.object({ + workspaceId: z.string().trim(), + environment: z.string().trim(), + path: z.string().trim().default("/").transform(removeTrailingSlash) + }), + response: { + 200: z.object({ + secrets: z + .object({ + secretPath: z.string(), + environment: z.string(), + environmentInfo: z.object({ + id: z.string(), + name: z.string(), + slug: z.string() + }), + folderId: z.string().optional(), + secrets: secretRawSchema.array() + }) + .array() + }) + } + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.API_KEY, AuthMode.SERVICE_TOKEN, AuthMode.IDENTITY_ACCESS_TOKEN]), + handler: async (req) => { + const importedSecrets = await server.services.secretImport.getRawSecretsFromImports({ + actorId: 
req.permission.id, + actor: req.permission.type, + actorAuthMethod: req.permission.authMethod, + actorOrgId: req.permission.orgId, + ...req.query, + projectId: req.query.workspaceId + }); + return { secrets: importedSecrets }; + } + }); }; diff --git a/backend/src/server/routes/v1/secret-sharing-router.ts b/backend/src/server/routes/v1/secret-sharing-router.ts index 6cb5516986..3363cc6c0a 100644 --- a/backend/src/server/routes/v1/secret-sharing-router.ts +++ b/backend/src/server/routes/v1/secret-sharing-router.ts @@ -1,7 +1,13 @@ import { z } from "zod"; import { SecretSharingSchema } from "@app/db/schemas"; -import { publicEndpointLimit, readLimit, writeLimit } from "@app/server/config/rateLimiter"; +import { SecretSharingAccessType } from "@app/lib/types"; +import { + publicEndpointLimit, + publicSecretShareCreationLimit, + readLimit, + writeLimit +} from "@app/server/config/rateLimiter"; import { verifyAuth } from "@app/server/plugins/auth/verify-auth"; import { AuthMode } from "@app/services/auth/auth-type"; @@ -13,60 +19,104 @@ export const registerSecretSharingRouter = async (server: FastifyZodProvider) => rateLimit: readLimit }, schema: { + querystring: z.object({ + offset: z.coerce.number().min(0).max(100).default(0), + limit: z.coerce.number().min(1).max(100).default(25) + }), response: { - 200: z.array(SecretSharingSchema) + 200: z.object({ + secrets: z.array(SecretSharingSchema), + totalCount: z.number() + }) } }, onRequest: verifyAuth([AuthMode.JWT]), handler: async (req) => { - const sharedSecrets = await req.server.services.secretSharing.getSharedSecrets({ + const { secrets, totalCount } = await req.server.services.secretSharing.getSharedSecrets({ actor: req.permission.type, actorId: req.permission.id, - orgId: req.permission.orgId, actorAuthMethod: req.permission.authMethod, - actorOrgId: req.permission.orgId + actorOrgId: req.permission.orgId, + ...req.query }); - return sharedSecrets; + return { + secrets, + totalCount + }; } }); server.route({ - method: "GET", + method: "POST", url: "/public/:id", config: { rateLimit: publicEndpointLimit }, schema: { params: z.object({ - id: z.string().uuid() + id: z.string() }), - querystring: z.object({ - hashedHex: z.string() + body: z.object({ + hashedHex: z.string().min(1).optional(), + password: z.string().optional() }), response: { - 200: SecretSharingSchema.pick({ - encryptedValue: true, - iv: true, - tag: true, - expiresAt: true, - expiresAfterViews: true + 200: z.object({ + isPasswordProtected: z.boolean(), + secret: SecretSharingSchema.pick({ + encryptedValue: true, + iv: true, + tag: true, + expiresAt: true, + expiresAfterViews: true, + accessType: true + }) + .extend({ + orgName: z.string().optional(), + secretValue: z.string().optional() + }) + .optional() }) } }, handler: async (req) => { - const sharedSecret = await req.server.services.secretSharing.getActiveSharedSecretByIdAndHashedHex( - req.params.id, - req.query.hashedHex - ); - if (!sharedSecret) return undefined; - return { - encryptedValue: sharedSecret.encryptedValue, - iv: sharedSecret.iv, - tag: sharedSecret.tag, - expiresAt: sharedSecret.expiresAt, - expiresAfterViews: sharedSecret.expiresAfterViews - }; + const sharedSecret = await req.server.services.secretSharing.getSharedSecretById({ + sharedSecretId: req.params.id, + hashedHex: req.body.hashedHex, + password: req.body.password, + orgId: req.permission?.orgId + }); + + return sharedSecret; + } + }); + + server.route({ + method: "POST", + url: "/public", + config: { + rateLimit: writeLimit + }, + schema: { + 
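// Example body (an illustrative sketch; values are hypothetical): { "secretValue": "...", "expiresAt": "2030-01-01T00:00:00.000Z", "expiresAfterViews": 1 } + // No auth is required on this public route; the handler below pins accessType to SecretSharingAccessType.Anyone. +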
body: z.object({ + secretValue: z.string().max(10_000), + password: z.string().optional(), + expiresAt: z.string(), + expiresAfterViews: z.number().min(1).optional() + }), + response: { + 200: z.object({ + id: z.string() + }) + } + }, + handler: async (req) => { + const sharedSecret = await req.server.services.secretSharing.createPublicSharedSecret({ + ...req.body, + accessType: SecretSharingAccessType.Anyone + }); + return { id: sharedSecret.id }; } }); @@ -74,40 +124,32 @@ export const registerSecretSharingRouter = async (server: FastifyZodProvider) => method: "POST", url: "/", config: { - rateLimit: writeLimit + rateLimit: publicSecretShareCreationLimit }, schema: { body: z.object({ - encryptedValue: z.string(), - iv: z.string(), - tag: z.string(), - hashedHex: z.string(), - expiresAt: z - .string() - .refine((date) => date === undefined || new Date(date) > new Date(), "Expires at should be a future date"), - expiresAfterViews: z.number() + name: z.string().max(50).optional(), + password: z.string().optional(), + secretValue: z.string(), + expiresAt: z.string(), + expiresAfterViews: z.number().min(1).optional(), + accessType: z.nativeEnum(SecretSharingAccessType).default(SecretSharingAccessType.Organization) }), response: { 200: z.object({ - id: z.string().uuid() + id: z.string() }) } }, onRequest: verifyAuth([AuthMode.JWT]), handler: async (req) => { - const { encryptedValue, iv, tag, hashedHex, expiresAt, expiresAfterViews } = req.body; const sharedSecret = await req.server.services.secretSharing.createSharedSecret({ actor: req.permission.type, actorId: req.permission.id, orgId: req.permission.orgId, actorAuthMethod: req.permission.authMethod, actorOrgId: req.permission.orgId, - encryptedValue, - iv, - tag, - hashedHex, - expiresAt: new Date(expiresAt), - expiresAfterViews + ...req.body }); return { id: sharedSecret.id }; } @@ -121,7 +163,7 @@ export const registerSecretSharingRouter = async (server: FastifyZodProvider) => }, schema: { params: z.object({ - sharedSecretId: z.string().uuid() + sharedSecretId: z.string() }), response: { 200: SecretSharingSchema diff --git a/backend/src/server/routes/v1/secret-tag-router.ts b/backend/src/server/routes/v1/secret-tag-router.ts index 1715aa3c30..7d696999e8 100644 --- a/backend/src/server/routes/v1/secret-tag-router.ts +++ b/backend/src/server/routes/v1/secret-tag-router.ts @@ -1,3 +1,4 @@ +import slugify from "@sindresorhus/slugify"; import { z } from "zod"; import { SecretTagsSchema } from "@app/db/schemas"; @@ -23,7 +24,7 @@ export const registerSecretTagRouter = async (server: FastifyZodProvider) => { }) } }, - onRequest: verifyAuth([AuthMode.JWT]), + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), handler: async (req) => { const workspaceTags = await server.services.secretTag.getProjectTags({ actor: req.permission.type, @@ -36,6 +37,69 @@ export const registerSecretTagRouter = async (server: FastifyZodProvider) => { } }); + server.route({ + method: "GET", + url: "/:projectId/tags/:tagId", + config: { + rateLimit: readLimit + }, + schema: { + params: z.object({ + projectId: z.string().trim().describe(SECRET_TAGS.GET_TAG_BY_ID.projectId), + tagId: z.string().trim().describe(SECRET_TAGS.GET_TAG_BY_ID.tagId) + }), + response: { + 200: z.object({ + // akhilmhdh: for terraform backward compatibility + workspaceTag: SecretTagsSchema.extend({ name: z.string() }) + }) + } + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + handler: async (req) => { + const workspaceTag = await
server.services.secretTag.getTagById({ + actor: req.permission.type, + actorId: req.permission.id, + actorAuthMethod: req.permission.authMethod, + actorOrgId: req.permission.orgId, + id: req.params.tagId + }); + return { workspaceTag }; + } + }); + + server.route({ + method: "GET", + url: "/:projectId/tags/slug/:tagSlug", + config: { + rateLimit: readLimit + }, + schema: { + params: z.object({ + projectId: z.string().trim().describe(SECRET_TAGS.GET_TAG_BY_SLUG.projectId), + tagSlug: z.string().trim().describe(SECRET_TAGS.GET_TAG_BY_SLUG.tagSlug) + }), + response: { + 200: z.object({ + // akhilmhdh: for terraform backward compatibility + workspaceTag: SecretTagsSchema.extend({ name: z.string() }) + }) + } + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + handler: async (req) => { + const workspaceTag = await server.services.secretTag.getTagBySlug({ + actor: req.permission.type, + actorId: req.permission.id, + actorAuthMethod: req.permission.authMethod, + actorOrgId: req.permission.orgId, + slug: req.params.tagSlug, + projectId: req.params.projectId + }); + return { workspaceTag }; + } + }); + server.route({ method: "POST", url: "/:projectId/tags", @@ -47,8 +111,14 @@ export const registerSecretTagRouter = async (server: FastifyZodProvider) => { projectId: z.string().trim().describe(SECRET_TAGS.CREATE.projectId) }), body: z.object({ - name: z.string().trim().describe(SECRET_TAGS.CREATE.name), - slug: z.string().trim().describe(SECRET_TAGS.CREATE.slug), + slug: z + .string() + .toLowerCase() + .trim() + .describe(SECRET_TAGS.CREATE.slug) + .refine((v) => slugify(v) === v, { + message: "Invalid slug. Slug can only contain alphanumeric characters and hyphens." + }), color: z.string().trim().describe(SECRET_TAGS.CREATE.color) }), response: { @@ -57,7 +127,7 @@ export const registerSecretTagRouter = async (server: FastifyZodProvider) => { }) } }, - onRequest: verifyAuth([AuthMode.JWT]), + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), handler: async (req) => { const workspaceTag = await server.services.secretTag.createTag({ actor: req.permission.type, @@ -71,6 +141,48 @@ export const registerSecretTagRouter = async (server: FastifyZodProvider) => { } }); + server.route({ + method: "PATCH", + url: "/:projectId/tags/:tagId", + config: { + rateLimit: writeLimit + }, + schema: { + params: z.object({ + projectId: z.string().trim().describe(SECRET_TAGS.UPDATE.projectId), + tagId: z.string().trim().describe(SECRET_TAGS.UPDATE.tagId) + }), + body: z.object({ + slug: z + .string() + .toLowerCase() + .trim() + .describe(SECRET_TAGS.UPDATE.slug) + .refine((v) => slugify(v) === v, { + message: "Invalid slug. Slug can only contain alphanumeric characters and hyphens."
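+ // e.g. "my-tag" passes since slugify("my-tag") === "my-tag", while "My Tag" is rejected (illustrative examples)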
+ }), + color: z.string().trim().describe(SECRET_TAGS.UPDATE.color) + }), + response: { + 200: z.object({ + workspaceTag: SecretTagsSchema + }) + } + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + handler: async (req) => { + const workspaceTag = await server.services.secretTag.updateTag({ + actor: req.permission.type, + actorId: req.permission.id, + actorAuthMethod: req.permission.authMethod, + actorOrgId: req.permission.orgId, + ...req.body, + id: req.params.tagId + }); + return { workspaceTag }; + } + }); + server.route({ method: "DELETE", url: "/:projectId/tags/:tagId", @@ -88,7 +200,7 @@ export const registerSecretTagRouter = async (server: FastifyZodProvider) => { }) } }, - onRequest: verifyAuth([AuthMode.JWT]), + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), handler: async (req) => { const workspaceTag = await server.services.secretTag.deleteTag({ actor: req.permission.type, diff --git a/backend/src/server/routes/v1/slack-router.ts b/backend/src/server/routes/v1/slack-router.ts new file mode 100644 index 0000000000..0601e2d1f1 --- /dev/null +++ b/backend/src/server/routes/v1/slack-router.ts @@ -0,0 +1,355 @@ +import slugify from "@sindresorhus/slugify"; +import { z } from "zod"; + +import { SlackIntegrationsSchema, WorkflowIntegrationsSchema } from "@app/db/schemas"; +import { EventType } from "@app/ee/services/audit-log/audit-log-types"; +import { getConfig } from "@app/lib/config/env"; +import { readLimit, writeLimit } from "@app/server/config/rateLimiter"; +import { verifyAuth } from "@app/server/plugins/auth/verify-auth"; +import { AuthMode } from "@app/services/auth/auth-type"; + +const sanitizedSlackIntegrationSchema = WorkflowIntegrationsSchema.pick({ + id: true, + description: true, + slug: true, + integration: true +}).merge( + SlackIntegrationsSchema.pick({ + teamName: true + }) +); + +export const registerSlackRouter = async (server: FastifyZodProvider) => { + const appCfg = getConfig(); + + server.route({ + method: "GET", + url: "/install", + config: { + rateLimit: readLimit + }, + schema: { + security: [ + { + bearerAuth: [] + } + ], + querystring: z.object({ + slug: z + .string() + .trim() + .refine((v) => slugify(v) === v, { + message: "Slug must be a valid slug" + }), + description: z.string().optional() + }), + response: { + 200: z.string() + } + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + handler: async (req) => { + const url = await server.services.slack.getInstallUrl({ + actor: req.permission.type, + actorId: req.permission.id, + actorAuthMethod: req.permission.authMethod, + actorOrgId: req.permission.orgId, + ...req.query + }); + + await server.services.auditLog.createAuditLog({ + ...req.auditLogInfo, + orgId: req.permission.orgId, + event: { + type: EventType.ATTEMPT_CREATE_SLACK_INTEGRATION, + metadata: { + slug: req.query.slug, + description: req.query.description + } + } + }); + + return url; + } + }); + + server.route({ + method: "GET", + url: "/reinstall", + config: { + rateLimit: readLimit + }, + schema: { + security: [ + { + bearerAuth: [] + } + ], + querystring: z.object({ + id: z.string() + }), + response: { + 200: z.string() + } + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + handler: async (req) => { + const url = await server.services.slack.getReinstallUrl({ + actor: req.permission.type, + actorId: req.permission.id, + actorAuthMethod: req.permission.authMethod, + actorOrgId: req.permission.orgId, + id: req.query.id + }); + + 
await server.services.auditLog.createAuditLog({ + ...req.auditLogInfo, + orgId: req.permission.orgId, + event: { + type: EventType.ATTEMPT_REINSTALL_SLACK_INTEGRATION, + metadata: { + id: req.query.id + } + } + }); + + return url; + } + }); + + server.route({ + method: "GET", + url: "/", + config: { + rateLimit: readLimit + }, + schema: { + security: [ + { + bearerAuth: [] + } + ], + response: { + 200: sanitizedSlackIntegrationSchema.array() + } + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + handler: async (req) => { + const slackIntegrations = await server.services.slack.getSlackIntegrationsByOrg({ + actor: req.permission.type, + actorId: req.permission.id, + actorAuthMethod: req.permission.authMethod, + actorOrgId: req.permission.orgId + }); + + return slackIntegrations; + } + }); + + server.route({ + method: "DELETE", + url: "/:id", + config: { + rateLimit: writeLimit + }, + schema: { + security: [ + { + bearerAuth: [] + } + ], + params: z.object({ + id: z.string() + }), + response: { + 200: sanitizedSlackIntegrationSchema + } + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + handler: async (req) => { + const deletedSlackIntegration = await server.services.slack.deleteSlackIntegration({ + actor: req.permission.type, + actorId: req.permission.id, + actorAuthMethod: req.permission.authMethod, + actorOrgId: req.permission.orgId, + id: req.params.id + }); + + await server.services.auditLog.createAuditLog({ + ...req.auditLogInfo, + orgId: deletedSlackIntegration.orgId, + event: { + type: EventType.DELETE_SLACK_INTEGRATION, + metadata: { + id: deletedSlackIntegration.id + } + } + }); + + return deletedSlackIntegration; + } + }); + + server.route({ + method: "GET", + url: "/:id", + config: { + rateLimit: readLimit + }, + schema: { + security: [ + { + bearerAuth: [] + } + ], + params: z.object({ + id: z.string() + }), + response: { + 200: sanitizedSlackIntegrationSchema + } + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + handler: async (req) => { + const slackIntegration = await server.services.slack.getSlackIntegrationById({ + actor: req.permission.type, + actorId: req.permission.id, + actorAuthMethod: req.permission.authMethod, + actorOrgId: req.permission.orgId, + id: req.params.id + }); + + await server.services.auditLog.createAuditLog({ + ...req.auditLogInfo, + orgId: slackIntegration.orgId, + event: { + type: EventType.GET_SLACK_INTEGRATION, + metadata: { + id: slackIntegration.id + } + } + }); + + return slackIntegration; + } + }); + + server.route({ + method: "GET", + url: "/:id/channels", + config: { + rateLimit: readLimit + }, + schema: { + security: [ + { + bearerAuth: [] + } + ], + params: z.object({ + id: z.string() + }), + response: { + 200: z + .object({ + name: z.string(), + id: z.string() + }) + .array() + } + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + handler: async (req) => { + const slackChannels = await server.services.slack.getSlackIntegrationChannels({ + actor: req.permission.type, + actorId: req.permission.id, + actorAuthMethod: req.permission.authMethod, + actorOrgId: req.permission.orgId, + id: req.params.id + }); + + return slackChannels; + } + }); + + server.route({ + method: "PATCH", + url: "/:id", + config: { + rateLimit: readLimit + }, + schema: { + security: [ + { + bearerAuth: [] + } + ], + params: z.object({ + id: z.string() + }), + body: z.object({ + slug: z + .string() + .trim() + .refine((v) => slugify(v) === v, { + message: "Slug 
must be a valid slug" + }) + .optional(), + description: z.string().optional() + }), + response: { + 200: sanitizedSlackIntegrationSchema + } + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + handler: async (req) => { + const slackIntegration = await server.services.slack.updateSlackIntegration({ + actor: req.permission.type, + actorId: req.permission.id, + actorAuthMethod: req.permission.authMethod, + actorOrgId: req.permission.orgId, + id: req.params.id, + ...req.body + }); + + await server.services.auditLog.createAuditLog({ + ...req.auditLogInfo, + orgId: slackIntegration.orgId, + event: { + type: EventType.UPDATE_SLACK_INTEGRATION, + metadata: { + id: slackIntegration.id, + slug: slackIntegration.slug, + description: slackIntegration.description as string + } + } + }); + + return slackIntegration; + } + }); + + server.route({ + method: "GET", + url: "/oauth_redirect", + config: { + rateLimit: readLimit + }, + handler: async (req, res) => { + const installer = await server.services.slack.getSlackInstaller(); + + return installer.handleCallback(req.raw, res.raw, { + failureAsync: async () => { + return res.redirect(appCfg.SITE_URL as string); + }, + successAsync: async (installation) => { + const metadata = JSON.parse(installation.metadata || "") as { + orgId: string; + }; + + return res.redirect(`${appCfg.SITE_URL}/org/${metadata.orgId}/settings?selectedTab=workflow-integrations`); + } + }); + } + }); +}; diff --git a/backend/src/server/routes/v1/sso-router.ts b/backend/src/server/routes/v1/sso-router.ts index 60bbec7db9..9007ca828e 100644 --- a/backend/src/server/routes/v1/sso-router.ts +++ b/backend/src/server/routes/v1/sso-router.ts @@ -14,7 +14,7 @@ import { Strategy as GoogleStrategy } from "passport-google-oauth20"; import { z } from "zod"; import { getConfig } from "@app/lib/config/env"; -import { BadRequestError } from "@app/lib/errors"; +import { NotFoundError } from "@app/lib/errors"; import { logger } from "@app/lib/logger"; import { fetchGithubEmails } from "@app/lib/requests/github"; import { AuthMethod } from "@app/services/auth/auth-type"; @@ -42,9 +42,9 @@ export const registerSsoRouter = async (server: FastifyZodProvider) => { try { const email = profile?.emails?.[0]?.value; if (!email) - throw new BadRequestError({ + throw new NotFoundError({ message: "Email not found", - name: "Oauth Google Register" + name: "OauthGoogleRegister" }); const { isUserCompleted, providerAuthToken } = await server.services.login.oauth2Login({ @@ -57,7 +57,7 @@ export const registerSsoRouter = async (server: FastifyZodProvider) => { cb(null, { isUserCompleted, providerAuthToken }); } catch (error) { logger.error(error); - cb(null, false); + cb(error as Error, false); } } ) @@ -91,7 +91,7 @@ export const registerSsoRouter = async (server: FastifyZodProvider) => { return cb(null, { isUserCompleted, providerAuthToken }); } catch (error) { logger.error(error); - cb(null, false); + cb(error as Error, false); } } ) @@ -126,7 +126,7 @@ export const registerSsoRouter = async (server: FastifyZodProvider) => { return cb(null, { isUserCompleted, providerAuthToken }); } catch (error) { logger.error(error); - cb(null, false); + cb(error as Error, false); } } ) @@ -259,4 +259,45 @@ export const registerSsoRouter = async (server: FastifyZodProvider) => { ); } }); + + server.route({ + url: "/token-exchange", + method: "POST", + schema: { + body: z.object({ + providerAuthToken: z.string(), + email: z.string() + }) + }, + handler: async (req, res) => { + const userAgent = 
req.headers["user-agent"]; + if (!userAgent) throw new Error("user agent header is required"); + + const data = await server.services.login.oauth2TokenExchange({ + email: req.body.email, + ip: req.realIp, + userAgent, + providerAuthToken: req.body.providerAuthToken + }); + + void res.setCookie("jid", data.token.refresh, { + httpOnly: true, + path: "/", + sameSite: "strict", + secure: appCfg.HTTPS_ENABLED + }); + + return { + encryptionVersion: data.user.encryptionVersion, + token: data.token.access, + publicKey: data.user.publicKey, + encryptedPrivateKey: data.user.encryptedPrivateKey, + iv: data.user.iv, + tag: data.user.tag, + protectedKey: data.user.protectedKey || null, + protectedKeyIV: data.user.protectedKeyIV || null, + protectedKeyTag: data.user.protectedKeyTag || null + } as const; + } + }); }; diff --git a/backend/src/server/routes/v1/user-engagement-router.ts b/backend/src/server/routes/v1/user-engagement-router.ts new file mode 100644 index 0000000000..e3ce6532e1 --- /dev/null +++ b/backend/src/server/routes/v1/user-engagement-router.ts @@ -0,0 +1,27 @@ +import { z } from "zod"; + +import { userEngagementLimit } from "@app/server/config/rateLimiter"; +import { verifyAuth } from "@app/server/plugins/auth/verify-auth"; +import { AuthMode } from "@app/services/auth/auth-type"; + +export const registerUserEngagementRouter = async (server: FastifyZodProvider) => { + server.route({ + method: "POST", + url: "/me/wish", + config: { + rateLimit: userEngagementLimit + }, + schema: { + body: z.object({ + text: z.string().min(1) + }), + response: { + 200: z.object({}) + } + }, + onRequest: verifyAuth([AuthMode.JWT]), + handler: async (req) => { + return server.services.userEngagement.createUserWish(req.permission.id, req.body.text); + } + }); +}; diff --git a/backend/src/server/routes/v1/user-router.ts b/backend/src/server/routes/v1/user-router.ts index 3d9f531b9d..a97f11be49 100644 --- a/backend/src/server/routes/v1/user-router.ts +++ b/backend/src/server/routes/v1/user-router.ts @@ -3,7 +3,7 @@ import { z } from "zod"; import { UserEncryptionKeysSchema, UsersSchema } from "@app/db/schemas"; import { getConfig } from "@app/lib/config/env"; import { logger } from "@app/lib/logger"; -import { authRateLimit, readLimit } from "@app/server/config/rateLimiter"; +import { authRateLimit, readLimit, writeLimit } from "@app/server/config/rateLimiter"; import { verifyAuth } from "@app/server/plugins/auth/verify-auth"; import { AuthMode } from "@app/services/auth/auth-type"; @@ -19,7 +19,23 @@ export const registerUserRouter = async (server: FastifyZodProvider) => { schema: { response: { 200: z.object({ - user: UsersSchema.merge(UserEncryptionKeysSchema.omit({ verifier: true })) + user: UsersSchema.merge( + UserEncryptionKeysSchema.pick({ + clientPublicKey: true, + serverPrivateKey: true, + encryptionVersion: true, + protectedKey: true, + protectedKeyIV: true, + protectedKeyTag: true, + publicKey: true, + encryptedPrivateKey: true, + iv: true, + tag: true, + salt: true, + verifier: true, + userId: true + }) + ) }) } }, @@ -30,6 +46,26 @@ export const registerUserRouter = async (server: FastifyZodProvider) => { } }); + server.route({ + method: "GET", + url: "/private-key", + config: { + rateLimit: readLimit + }, + schema: { + response: { + 200: z.object({ + privateKey: z.string() + }) + } + }, + onRequest: verifyAuth([AuthMode.JWT], { requireOrg: false }), + handler: async (req) => { + const privateKey = await server.services.user.getUserPrivateKey(req.permission.id); + return { privateKey }; + } + 
}); + server.route({ method: "GET", url: "/:userId/unlock", @@ -54,4 +90,182 @@ export const registerUserRouter = async (server: FastifyZodProvider) => { return res.redirect(`${appCfg.SITE_URL}/login`); } }); + + server.route({ + method: "GET", + url: "/me/project-favorites", + config: { + rateLimit: readLimit + }, + schema: { + querystring: z.object({ + orgId: z.string().trim() + }), + response: { + 200: z.object({ + projectFavorites: z.string().array() + }) + } + }, + onRequest: verifyAuth([AuthMode.JWT]), + handler: async (req) => { + return server.services.user.getUserProjectFavorites(req.permission.id, req.query.orgId); + } + }); + + server.route({ + method: "PUT", + url: "/me/project-favorites", + config: { + rateLimit: writeLimit + }, + schema: { + body: z.object({ + orgId: z.string().trim(), + projectFavorites: z.string().array() + }) + }, + onRequest: verifyAuth([AuthMode.JWT]), + handler: async (req) => { + return server.services.user.updateUserProjectFavorites( + req.permission.id, + req.body.orgId, + req.body.projectFavorites + ); + } + }); + + server.route({ + method: "GET", + url: "/me/:username/groups", + config: { + rateLimit: readLimit + }, + schema: { + params: z.object({ + username: z.string().trim() + }), + response: { + 200: z + .object({ + id: z.string(), + name: z.string(), + slug: z.string(), + orgId: z.string() + }) + .array() + } + }, + onRequest: verifyAuth([AuthMode.JWT]), + handler: async (req) => { + const groupMemberships = await server.services.user.listUserGroups({ + username: req.params.username, + actorOrgId: req.permission.orgId, + actorId: req.permission.id, + actorAuthMethod: req.permission.authMethod, + actor: req.permission.type + }); + + return groupMemberships; + } + }); + + server.route({ + method: "GET", + url: "/me/totp", + config: { + rateLimit: readLimit + }, + schema: { + response: { + 200: z.object({ + isVerified: z.boolean(), + recoveryCodes: z.string().array() + }) + } + }, + onRequest: verifyAuth([AuthMode.JWT]), + handler: async (req) => { + return server.services.totp.getUserTotpConfig({ + userId: req.permission.id + }); + } + }); + + server.route({ + method: "DELETE", + url: "/me/totp", + config: { + rateLimit: writeLimit + }, + onRequest: verifyAuth([AuthMode.JWT]), + handler: async (req) => { + return server.services.totp.deleteUserTotpConfig({ + userId: req.permission.id + }); + } + }); + + server.route({ + method: "POST", + url: "/me/totp/register", + config: { + rateLimit: writeLimit + }, + schema: { + response: { + 200: z.object({ + otpUrl: z.string(), + recoveryCodes: z.string().array() + }) + } + }, + onRequest: verifyAuth([AuthMode.JWT], { + requireOrg: false + }), + handler: async (req) => { + return server.services.totp.registerUserTotp({ + userId: req.permission.id + }); + } + }); + + server.route({ + method: "POST", + url: "/me/totp/verify", + config: { + rateLimit: writeLimit + }, + schema: { + body: z.object({ + totp: z.string() + }), + response: { + 200: z.object({}) + } + }, + onRequest: verifyAuth([AuthMode.JWT], { + requireOrg: false + }), + handler: async (req) => { + return server.services.totp.verifyUserTotpConfig({ + userId: req.permission.id, + totp: req.body.totp + }); + } + }); + + server.route({ + method: "POST", + url: "/me/totp/recovery-codes", + config: { + rateLimit: writeLimit + }, + onRequest: verifyAuth([AuthMode.JWT]), + handler: async (req) => { + return server.services.totp.createUserTotpRecoveryCodes({ + userId: req.permission.id + }); + } + }); }; diff --git 
a/backend/src/server/routes/v1/webhook-router.ts b/backend/src/server/routes/v1/webhook-router.ts index 1698c0c4b5..377af135c8 100644 --- a/backend/src/server/routes/v1/webhook-router.ts +++ b/backend/src/server/routes/v1/webhook-router.ts @@ -6,23 +6,26 @@ import { removeTrailingSlash } from "@app/lib/fn"; import { readLimit, writeLimit } from "@app/server/config/rateLimiter"; import { verifyAuth } from "@app/server/plugins/auth/verify-auth"; import { AuthMode } from "@app/services/auth/auth-type"; +import { WebhookType } from "@app/services/webhook/webhook-types"; -export const sanitizedWebhookSchema = WebhooksSchema.omit({ - encryptedSecretKey: true, - iv: true, - tag: true, - algorithm: true, - keyEncoding: true -}).merge( - z.object({ - projectId: z.string(), - environment: z.object({ - id: z.string(), - name: z.string(), - slug: z.string() - }) +export const sanitizedWebhookSchema = WebhooksSchema.pick({ + id: true, + secretPath: true, + lastStatus: true, + lastRunErrorMessage: true, + isDisabled: true, + createdAt: true, + updatedAt: true, + envId: true, + type: true +}).extend({ + projectId: z.string(), + environment: z.object({ + id: z.string(), + name: z.string(), + slug: z.string() }) -); +}); export const registerWebhookRouter = async (server: FastifyZodProvider) => { server.route({ @@ -33,13 +36,24 @@ export const registerWebhookRouter = async (server: FastifyZodProvider) => { }, onRequest: verifyAuth([AuthMode.JWT]), schema: { - body: z.object({ - workspaceId: z.string().trim(), - environment: z.string().trim(), - webhookUrl: z.string().url().trim(), - webhookSecretKey: z.string().trim().optional(), - secretPath: z.string().trim().default("/").transform(removeTrailingSlash) - }), + body: z + .object({ + type: z.nativeEnum(WebhookType).default(WebhookType.GENERAL), + workspaceId: z.string().trim(), + environment: z.string().trim(), + webhookUrl: z.string().url().trim(), + webhookSecretKey: z.string().trim().optional(), + secretPath: z.string().trim().default("/").transform(removeTrailingSlash) + }) + .superRefine((data, ctx) => { + if (data.type === WebhookType.SLACK && !data.webhookUrl.includes("hooks.slack.com")) { + ctx.addIssue({ + code: z.ZodIssueCode.custom, + message: "Incoming Webhook URL is invalid.", + path: ["webhookUrl"] + }); + } + }), response: { 200: z.object({ message: z.string(), @@ -66,8 +80,7 @@ export const registerWebhookRouter = async (server: FastifyZodProvider) => { environment: webhook.environment.slug, webhookId: webhook.id, isDisabled: webhook.isDisabled, - secretPath: webhook.secretPath, - webhookUrl: webhook.url + secretPath: webhook.secretPath } } }); @@ -116,8 +129,7 @@ export const registerWebhookRouter = async (server: FastifyZodProvider) => { environment: webhook.environment.slug, webhookId: webhook.id, isDisabled: webhook.isDisabled, - secretPath: webhook.secretPath, - webhookUrl: webhook.url + secretPath: webhook.secretPath } } }); @@ -156,8 +168,7 @@ export const registerWebhookRouter = async (server: FastifyZodProvider) => { environment: webhook.environment.slug, webhookId: webhook.id, isDisabled: webhook.isDisabled, - secretPath: webhook.secretPath, - webhookUrl: webhook.url + secretPath: webhook.secretPath } } }); @@ -216,7 +227,7 @@ export const registerWebhookRouter = async (server: FastifyZodProvider) => { response: { 200: z.object({ message: z.string(), - webhooks: sanitizedWebhookSchema.array() + webhooks: sanitizedWebhookSchema.extend({ url: z.string() }).array() }) } }, diff --git 
a/backend/src/server/routes/v1/workflow-integration-router.ts b/backend/src/server/routes/v1/workflow-integration-router.ts new file mode 100644 index 0000000000..839d7b0563 --- /dev/null +++ b/backend/src/server/routes/v1/workflow-integration-router.ts @@ -0,0 +1,42 @@ +import { WorkflowIntegrationsSchema } from "@app/db/schemas"; +import { readLimit } from "@app/server/config/rateLimiter"; +import { verifyAuth } from "@app/server/plugins/auth/verify-auth"; +import { AuthMode } from "@app/services/auth/auth-type"; + +const sanitizedWorkflowIntegrationSchema = WorkflowIntegrationsSchema.pick({ + id: true, + description: true, + slug: true, + integration: true +}); + +export const registerWorkflowIntegrationRouter = async (server: FastifyZodProvider) => { + server.route({ + method: "GET", + url: "/", + config: { + rateLimit: readLimit + }, + schema: { + security: [ + { + bearerAuth: [] + } + ], + response: { + 200: sanitizedWorkflowIntegrationSchema.array() + } + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + handler: async (req) => { + const workflowIntegrations = await server.services.workflowIntegration.getIntegrationsByOrg({ + actor: req.permission.type, + actorId: req.permission.id, + actorAuthMethod: req.permission.authMethod, + actorOrgId: req.permission.orgId + }); + + return workflowIntegrations; + } + }); +}; diff --git a/backend/src/server/routes/v2/group-project-router.ts b/backend/src/server/routes/v2/group-project-router.ts index 6d438c1ff0..cbc54f5ace 100644 --- a/backend/src/server/routes/v2/group-project-router.ts +++ b/backend/src/server/routes/v2/group-project-router.ts @@ -8,6 +8,7 @@ import { ProjectUserMembershipRolesSchema } from "@app/db/schemas"; import { PROJECTS } from "@app/lib/api-docs"; +import { readLimit, writeLimit } from "@app/server/config/rateLimiter"; import { verifyAuth } from "@app/server/plugins/auth/verify-auth"; import { AuthMode } from "@app/services/auth/auth-type"; import { ProjectUserMembershipTemporaryMode } from "@app/services/project-membership/project-membership-types"; @@ -15,8 +16,11 @@ import { ProjectUserMembershipTemporaryMode } from "@app/services/project-member export const registerGroupProjectRouter = async (server: FastifyZodProvider) => { server.route({ method: "POST", - url: "/:projectSlug/groups/:groupSlug", + url: "/:projectId/groups/:groupId", onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + config: { + rateLimit: writeLimit + }, schema: { description: "Add group to project", security: [ @@ -25,17 +29,39 @@ export const registerGroupProjectRouter = async (server: FastifyZodProvider) => } ], params: z.object({ - projectSlug: z.string().trim().describe(PROJECTS.ADD_GROUP_TO_PROJECT.projectSlug), - groupSlug: z.string().trim().describe(PROJECTS.ADD_GROUP_TO_PROJECT.groupSlug) - }), - body: z.object({ - role: z - .string() - .trim() - .min(1) - .default(ProjectMembershipRole.NoAccess) - .describe(PROJECTS.ADD_GROUP_TO_PROJECT.role) + projectId: z.string().trim().describe(PROJECTS.ADD_GROUP_TO_PROJECT.projectId), + groupId: z.string().trim().describe(PROJECTS.ADD_GROUP_TO_PROJECT.groupId) }), + body: z + .object({ + role: z + .string() + .trim() + .min(1) + .default(ProjectMembershipRole.NoAccess) + .describe(PROJECTS.ADD_GROUP_TO_PROJECT.role), + roles: z + .array( + z.union([ + z.object({ + role: z.string(), + isTemporary: z.literal(false).default(false) + }), + z.object({ + role: z.string(), + isTemporary: z.literal(true), + temporaryMode: 
z.nativeEnum(ProjectUserMembershipTemporaryMode), + temporaryRange: z.string().refine((val) => ms(val) > 0, "Temporary range must be a positive number"), + temporaryAccessStartTime: z.string().datetime() + }) + ]) + ) + .optional() + }) + .refine((data) => data.role || data.roles, { + message: "Either role or roles must be present", + path: ["role", "roles"] + }), response: { 200: z.object({ groupMembership: GroupProjectMembershipsSchema @@ -48,17 +74,18 @@ export const registerGroupProjectRouter = async (server: FastifyZodProvider) => actorId: req.permission.id, actorAuthMethod: req.permission.authMethod, actorOrgId: req.permission.orgId, - groupSlug: req.params.groupSlug, - projectSlug: req.params.projectSlug, - role: req.body.role + roles: req.body.roles || [{ role: req.body.role }], + projectId: req.params.projectId, + groupId: req.params.groupId }); + return { groupMembership }; } }); server.route({ method: "PATCH", - url: "/:projectSlug/groups/:groupSlug", + url: "/:projectId/groups/:groupId", onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), schema: { description: "Update group in project", @@ -68,8 +95,8 @@ export const registerGroupProjectRouter = async (server: FastifyZodProvider) => } ], params: z.object({ - projectSlug: z.string().trim().describe(PROJECTS.UPDATE_GROUP_IN_PROJECT.projectSlug), - groupSlug: z.string().trim().describe(PROJECTS.UPDATE_GROUP_IN_PROJECT.groupSlug) + projectId: z.string().trim().describe(PROJECTS.UPDATE_GROUP_IN_PROJECT.projectId), + groupId: z.string().trim().describe(PROJECTS.UPDATE_GROUP_IN_PROJECT.groupId) }), body: z.object({ roles: z @@ -103,18 +130,22 @@ export const registerGroupProjectRouter = async (server: FastifyZodProvider) => actorId: req.permission.id, actorAuthMethod: req.permission.authMethod, actorOrgId: req.permission.orgId, - groupSlug: req.params.groupSlug, - projectSlug: req.params.projectSlug, + projectId: req.params.projectId, + groupId: req.params.groupId, roles: req.body.roles }); + return { roles }; } }); server.route({ method: "DELETE", - url: "/:projectSlug/groups/:groupSlug", + url: "/:projectId/groups/:groupId", onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + config: { + rateLimit: writeLimit + }, schema: { description: "Remove group from project", security: [ @@ -123,8 +154,8 @@ export const registerGroupProjectRouter = async (server: FastifyZodProvider) => } ], params: z.object({ - projectSlug: z.string().trim().describe(PROJECTS.REMOVE_GROUP_FROM_PROJECT.projectSlug), - groupSlug: z.string().trim().describe(PROJECTS.REMOVE_GROUP_FROM_PROJECT.groupSlug) + projectId: z.string().trim().describe(PROJECTS.REMOVE_GROUP_FROM_PROJECT.projectId), + groupId: z.string().trim().describe(PROJECTS.REMOVE_GROUP_FROM_PROJECT.groupId) }), response: { 200: z.object({ @@ -138,17 +169,21 @@ export const registerGroupProjectRouter = async (server: FastifyZodProvider) => actorId: req.permission.id, actorAuthMethod: req.permission.authMethod, actorOrgId: req.permission.orgId, - groupSlug: req.params.groupSlug, - projectSlug: req.params.projectSlug + groupId: req.params.groupId, + projectId: req.params.projectId }); + return { groupMembership }; } }); server.route({ method: "GET", - url: "/:projectSlug/groups", + url: "/:projectId/groups", onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + config: { + rateLimit: readLimit + }, schema: { description: "Return list of groups in project", security: [ @@ -157,7 +192,7 @@ export const registerGroupProjectRouter = async (server: 
FastifyZodProvider) => } ], params: z.object({ - projectSlug: z.string().trim().describe(PROJECTS.LIST_GROUPS_IN_PROJECT.projectSlug) + projectId: z.string().trim().describe(PROJECTS.LIST_GROUPS_IN_PROJECT.projectId) }), response: { 200: z.object({ @@ -193,9 +228,67 @@ export const registerGroupProjectRouter = async (server: FastifyZodProvider) => actorId: req.permission.id, actorAuthMethod: req.permission.authMethod, actorOrgId: req.permission.orgId, - projectSlug: req.params.projectSlug + projectId: req.params.projectId }); + return { groupMemberships }; } }); + + server.route({ + method: "GET", + url: "/:projectId/groups/:groupId", + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + config: { + rateLimit: readLimit + }, + schema: { + description: "Return project group", + security: [ + { + bearerAuth: [] + } + ], + params: z.object({ + projectId: z.string().trim(), + groupId: z.string().trim() + }), + response: { + 200: z.object({ + groupMembership: z.object({ + id: z.string(), + groupId: z.string(), + createdAt: z.date(), + updatedAt: z.date(), + roles: z.array( + z.object({ + id: z.string(), + role: z.string(), + customRoleId: z.string().optional().nullable(), + customRoleName: z.string().optional().nullable(), + customRoleSlug: z.string().optional().nullable(), + isTemporary: z.boolean(), + temporaryMode: z.string().optional().nullable(), + temporaryRange: z.string().nullable().optional(), + temporaryAccessStartTime: z.date().nullable().optional(), + temporaryAccessEndTime: z.date().nullable().optional() + }) + ), + group: GroupsSchema.pick({ name: true, id: true, slug: true }) + }) + }) + } + }, + handler: async (req) => { + const groupMembership = await server.services.groupProject.getGroupInProject({ + actor: req.permission.type, + actorId: req.permission.id, + actorAuthMethod: req.permission.authMethod, + actorOrgId: req.permission.orgId, + ...req.params + }); + + return { groupMembership }; + } + }); }; diff --git a/backend/src/server/routes/v2/identity-org-router.ts b/backend/src/server/routes/v2/identity-org-router.ts index aab84ef8df..52940eb44b 100644 --- a/backend/src/server/routes/v2/identity-org-router.ts +++ b/backend/src/server/routes/v2/identity-org-router.ts @@ -2,9 +2,11 @@ import { z } from "zod"; import { IdentitiesSchema, IdentityOrgMembershipsSchema, OrgRolesSchema } from "@app/db/schemas"; import { ORGANIZATIONS } from "@app/lib/api-docs"; +import { OrderByDirection } from "@app/lib/types"; import { readLimit } from "@app/server/config/rateLimiter"; import { verifyAuth } from "@app/server/plugins/auth/verify-auth"; import { AuthMode } from "@app/services/auth/auth-type"; +import { OrgIdentityOrderBy } from "@app/services/identity/identity-types"; export const registerIdentityOrgRouter = async (server: FastifyZodProvider) => { server.route({ @@ -24,6 +26,27 @@ export const registerIdentityOrgRouter = async (server: FastifyZodProvider) => { params: z.object({ orgId: z.string().trim().describe(ORGANIZATIONS.LIST_IDENTITY_MEMBERSHIPS.orgId) }), + querystring: z.object({ + offset: z.coerce.number().min(0).default(0).describe(ORGANIZATIONS.LIST_IDENTITY_MEMBERSHIPS.offset).optional(), + limit: z.coerce + .number() + .min(1) + .max(20000) // TODO: temp limit until combobox added to add identity to project modal, reduce once added + .default(100) + .describe(ORGANIZATIONS.LIST_IDENTITY_MEMBERSHIPS.limit) + .optional(), + orderBy: z + .nativeEnum(OrgIdentityOrderBy) + .default(OrgIdentityOrderBy.Name) + 
.describe(ORGANIZATIONS.LIST_IDENTITY_MEMBERSHIPS.orderBy) + .optional(), + orderDirection: z + .nativeEnum(OrderByDirection) + .default(OrderByDirection.ASC) + .describe(ORGANIZATIONS.LIST_IDENTITY_MEMBERSHIPS.orderDirection) + .optional(), + search: z.string().trim().describe(ORGANIZATIONS.LIST_IDENTITY_MEMBERSHIPS.search).optional() + }), response: { 200: z.object({ identityMemberships: IdentityOrgMembershipsSchema.merge( @@ -35,22 +58,30 @@ export const registerIdentityOrgRouter = async (server: FastifyZodProvider) => { permissions: true, description: true }).optional(), - identity: IdentitiesSchema.pick({ name: true, id: true, authMethod: true }) + identity: IdentitiesSchema.pick({ name: true, id: true }).extend({ + authMethods: z.array(z.string()) + }) }) - ).array() + ).array(), + totalCount: z.number() }) } }, handler: async (req) => { - const identityMemberships = await server.services.identity.listOrgIdentities({ + const { identityMemberships, totalCount } = await server.services.identity.listOrgIdentities({ actor: req.permission.type, actorId: req.permission.id, actorAuthMethod: req.permission.authMethod, actorOrgId: req.permission.orgId, - orgId: req.params.orgId + orgId: req.params.orgId, + limit: req.query.limit, + offset: req.query.offset, + orderBy: req.query.orderBy, + orderDirection: req.query.orderDirection, + search: req.query.search }); - return { identityMemberships }; + return { identityMemberships, totalCount }; } }); }; diff --git a/backend/src/server/routes/v2/identity-project-router.ts b/backend/src/server/routes/v2/identity-project-router.ts index d259a46fd4..b2cc3a8e9f 100644 --- a/backend/src/server/routes/v2/identity-project-router.ts +++ b/backend/src/server/routes/v2/identity-project-router.ts @@ -7,13 +7,17 @@ import { ProjectMembershipRole, ProjectUserMembershipRolesSchema } from "@app/db/schemas"; -import { PROJECT_IDENTITIES } from "@app/lib/api-docs"; +import { ORGANIZATIONS, PROJECT_IDENTITIES } from "@app/lib/api-docs"; import { BadRequestError } from "@app/lib/errors"; +import { OrderByDirection } from "@app/lib/types"; import { readLimit, writeLimit } from "@app/server/config/rateLimiter"; import { verifyAuth } from "@app/server/plugins/auth/verify-auth"; import { AuthMode } from "@app/services/auth/auth-type"; +import { ProjectIdentityOrderBy } from "@app/services/identity-project/identity-project-types"; import { ProjectUserMembershipTemporaryMode } from "@app/services/project-membership/project-membership-types"; +import { SanitizedProjectSchema } from "../sanitizedSchemas"; + export const registerIdentityProjectRouter = async (server: FastifyZodProvider) => { server.route({ method: "POST", @@ -212,6 +216,32 @@ export const registerIdentityProjectRouter = async (server: FastifyZodProvider) params: z.object({ projectId: z.string().trim().describe(PROJECT_IDENTITIES.LIST_IDENTITY_MEMBERSHIPS.projectId) }), + querystring: z.object({ + offset: z.coerce + .number() + .min(0) + .default(0) + .describe(PROJECT_IDENTITIES.LIST_IDENTITY_MEMBERSHIPS.offset) + .optional(), + limit: z.coerce + .number() + .min(1) + .max(20000) // TODO: temp limit until combobox added to add identity to project modal, reduce once added + .default(100) + .describe(PROJECT_IDENTITIES.LIST_IDENTITY_MEMBERSHIPS.limit) + .optional(), + orderBy: z + .nativeEnum(ProjectIdentityOrderBy) + .default(ProjectIdentityOrderBy.Name) + .describe(ORGANIZATIONS.LIST_IDENTITY_MEMBERSHIPS.orderBy) + .optional(), + orderDirection: z + .nativeEnum(OrderByDirection) + 
.default(OrderByDirection.ASC) + .describe(ORGANIZATIONS.LIST_IDENTITY_MEMBERSHIPS.orderDirection) + .optional(), + search: z.string().trim().describe(PROJECT_IDENTITIES.LIST_IDENTITY_MEMBERSHIPS.search).optional() + }), response: { 200: z.object({ identityMemberships: z @@ -234,21 +264,31 @@ export const registerIdentityProjectRouter = async (server: FastifyZodProvider) temporaryAccessEndTime: z.date().nullable().optional() }) ), - identity: IdentitiesSchema.pick({ name: true, id: true, authMethod: true }) + identity: IdentitiesSchema.pick({ name: true, id: true }).extend({ + authMethods: z.array(z.string()) + }), + project: SanitizedProjectSchema.pick({ name: true, id: true }) }) - .array() + .array(), + totalCount: z.number() }) } }, handler: async (req) => { - const identityMemberships = await server.services.identityProject.listProjectIdentities({ + const { identityMemberships, totalCount } = await server.services.identityProject.listProjectIdentities({ actor: req.permission.type, actorId: req.permission.id, actorAuthMethod: req.permission.authMethod, actorOrgId: req.permission.orgId, - projectId: req.params.projectId + projectId: req.params.projectId, + limit: req.query.limit, + offset: req.query.offset, + orderBy: req.query.orderBy, + orderDirection: req.query.orderDirection, + search: req.query.search }); - return { identityMemberships }; + + return { identityMemberships, totalCount }; } }); @@ -291,7 +331,10 @@ export const registerIdentityProjectRouter = async (server: FastifyZodProvider) temporaryAccessEndTime: z.date().nullable().optional() }) ), - identity: IdentitiesSchema.pick({ name: true, id: true, authMethod: true }) + identity: IdentitiesSchema.pick({ name: true, id: true }).extend({ + authMethods: z.array(z.string()) + }), + project: SanitizedProjectSchema.pick({ name: true, id: true }) }) }) } diff --git a/backend/src/server/routes/v2/mfa-router.ts b/backend/src/server/routes/v2/mfa-router.ts index 1c685866d4..6f28ec34c2 100644 --- a/backend/src/server/routes/v2/mfa-router.ts +++ b/backend/src/server/routes/v2/mfa-router.ts @@ -2,8 +2,9 @@ import jwt from "jsonwebtoken"; import { z } from "zod"; import { getConfig } from "@app/lib/config/env"; +import { BadRequestError, NotFoundError } from "@app/lib/errors"; import { mfaRateLimit } from "@app/server/config/rateLimiter"; -import { AuthModeMfaJwtTokenPayload, AuthTokenType } from "@app/services/auth/auth-type"; +import { AuthModeMfaJwtTokenPayload, AuthTokenType, MfaMethod } from "@app/services/auth/auth-type"; export const registerMfaRouter = async (server: FastifyZodProvider) => { const cfg = getConfig(); @@ -49,6 +50,38 @@ export const registerMfaRouter = async (server: FastifyZodProvider) => { } }); + server.route({ + method: "GET", + url: "/mfa/check/totp", + config: { + rateLimit: mfaRateLimit + }, + schema: { + response: { + 200: z.object({ + isVerified: z.boolean() + }) + } + }, + handler: async (req) => { + try { + const totpConfig = await server.services.totp.getUserTotpConfig({ + userId: req.mfa.userId + }); + + return { + isVerified: Boolean(totpConfig) + }; + } catch (error) { + if (error instanceof NotFoundError || error instanceof BadRequestError) { + return { isVerified: false }; + } + + throw error; + } + } + }); + server.route({ url: "/mfa/verify", method: "POST", @@ -57,7 +90,8 @@ export const registerMfaRouter = async (server: FastifyZodProvider) => { }, schema: { body: z.object({ - mfaToken: z.string().trim() + mfaToken: z.string().trim(), + mfaMethod: 
z.nativeEnum(MfaMethod).optional().default(MfaMethod.EMAIL) }), response: { 200: z.object({ @@ -86,7 +120,8 @@ export const registerMfaRouter = async (server: FastifyZodProvider) => { ip: req.realIp, userId: req.mfa.userId, orgId: req.mfa.orgId, - mfaToken: req.body.mfaToken + mfaToken: req.body.mfaToken, + mfaMethod: req.body.mfaMethod }); void res.setCookie("jid", token.refresh, { diff --git a/backend/src/server/routes/v2/organization-router.ts b/backend/src/server/routes/v2/organization-router.ts index 07074eba32..5d34bc7028 100644 --- a/backend/src/server/routes/v2/organization-router.ts +++ b/backend/src/server/routes/v2/organization-router.ts @@ -1,8 +1,15 @@ import { z } from "zod"; -import { OrganizationsSchema, OrgMembershipsSchema, UserEncryptionKeysSchema, UsersSchema } from "@app/db/schemas"; +import { + OrganizationsSchema, + OrgMembershipsSchema, + ProjectMembershipsSchema, + ProjectsSchema, + UserEncryptionKeysSchema, + UsersSchema +} from "@app/db/schemas"; import { ORGANIZATIONS } from "@app/lib/api-docs"; -import { creationLimit, readLimit, writeLimit } from "@app/server/config/rateLimiter"; +import { readLimit, writeLimit } from "@app/server/config/rateLimiter"; import { verifyAuth } from "@app/server/plugins/auth/verify-auth"; import { ActorType, AuthMode } from "@app/services/auth/auth-type"; @@ -30,6 +37,7 @@ export const registerOrgRouter = async (server: FastifyZodProvider) => { user: UsersSchema.pick({ username: true, email: true, + isEmailVerified: true, firstName: true, lastName: true, id: true @@ -61,7 +69,7 @@ export const registerOrgRouter = async (server: FastifyZodProvider) => { rateLimit: readLimit }, schema: { - description: "Return projects in organization that user is part of", + description: "Return projects in organization that user is a part of", security: [ { bearerAuth: [] } ], @@ -103,6 +111,60 @@ export const registerOrgRouter = async (server: FastifyZodProvider) => { } }); + server.route({ + method: "GET", + url: "/:organizationId/memberships/:membershipId", + config: { + rateLimit: writeLimit + }, + schema: { + description: "Get organization user membership", + security: [ + { + bearerAuth: [] + } + ], + params: z.object({ + organizationId: z.string().trim().describe(ORGANIZATIONS.GET_USER_MEMBERSHIP.organizationId), + membershipId: z.string().trim().describe(ORGANIZATIONS.GET_USER_MEMBERSHIP.membershipId) + }), + response: { + 200: z.object({ + membership: OrgMembershipsSchema.extend({ + metadata: z + .object({ + key: z.string().trim().min(1), + id: z.string().trim().min(1), + value: z.string().trim().min(1) + }) + .array() + .optional(), + user: UsersSchema.pick({ + username: true, + email: true, + isEmailVerified: true, + firstName: true, + lastName: true, + id: true + }).extend({ publicKey: z.string().nullable() }) + }).omit({ createdAt: true, updatedAt: true }) + }) + } + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.API_KEY, AuthMode.IDENTITY_ACCESS_TOKEN]), + handler: async (req) => { + const membership = await server.services.org.getOrgMembership({ + actor: req.permission.type, + actorId: req.permission.id, + actorAuthMethod: req.permission.authMethod, + actorOrgId: req.permission.orgId, + orgId: req.params.organizationId, + membershipId: req.params.membershipId + }); + return { membership }; + } + }); +
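A short consumer sketch for the membership lookup added above. The /api/v2/organizations prefix is an assumption based on this file's v2 location, and the response typing names only fields the schema guarantees (createdAt and updatedAt are omitted by the schema itself):

    // Sketch: read one organization membership with any of the accepted auth modes.
    async function getOrgMembership(orgId: string, membershipId: string, token: string) {
      const res = await fetch(
        `https://app.infisical.com/api/v2/organizations/${orgId}/memberships/${membershipId}`,
        { headers: { Authorization: `Bearer ${token}` } }
      );
      if (!res.ok) throw new Error(`Failed to fetch membership: HTTP ${res.status}`);
      const { membership } = (await res.json()) as {
        membership: { id: string; role: string; user: { id: string; isEmailVerified: boolean } };
      };
      return membership;
    }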
server.route({ method: "PATCH", url: "/:organizationId/memberships/:membershipId", @@ -121,7 +183,15 @@ export const registerOrgRouter = async (server: FastifyZodProvider) => { membershipId: z.string().trim().describe(ORGANIZATIONS.UPDATE_USER_MEMBERSHIP.membershipId) }), body: z.object({ - role: z.string().trim().describe(ORGANIZATIONS.UPDATE_USER_MEMBERSHIP.role) + role: z.string().trim().optional().describe(ORGANIZATIONS.UPDATE_USER_MEMBERSHIP.role), + isActive: z.boolean().optional().describe(ORGANIZATIONS.UPDATE_USER_MEMBERSHIP.isActive), + metadata: z + .object({ + key: z.string().trim().min(1).describe(ORGANIZATIONS.UPDATE_USER_MEMBERSHIP.metadata.key), + value: z.string().trim().min(1).describe(ORGANIZATIONS.UPDATE_USER_MEMBERSHIP.metadata.value) + }) + .array() + .optional() }), response: { 200: z.object({ @@ -129,17 +199,17 @@ export const registerOrgRouter = async (server: FastifyZodProvider) => { }) } }, - onRequest: verifyAuth([AuthMode.JWT, AuthMode.API_KEY, AuthMode.IDENTITY_ACCESS_TOKEN]), + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), handler: async (req) => { if (req.auth.actor !== ActorType.USER) return; const membership = await server.services.org.updateOrgMembership({ userId: req.permission.id, - role: req.body.role, actorAuthMethod: req.permission.authMethod, orgId: req.params.organizationId, membershipId: req.params.membershipId, - actorOrgId: req.permission.orgId + actorOrgId: req.permission.orgId, + ...req.body }); return { membership }; } @@ -183,11 +253,74 @@ export const registerOrgRouter = async (server: FastifyZodProvider) => { } }); + server.route({ + // TODO: re-think endpoint structure in future so users only need to pass in membershipId because organizationId is redundant + method: "GET", + url: "/:organizationId/memberships/:membershipId/project-memberships", + config: { + rateLimit: writeLimit + }, + schema: { + description: "Get project memberships given organization membership", + security: [ + { + bearerAuth: [] + } + ], + params: z.object({ + organizationId: z.string().trim().describe(ORGANIZATIONS.DELETE_USER_MEMBERSHIP.organizationId), + membershipId: z.string().trim().describe(ORGANIZATIONS.DELETE_USER_MEMBERSHIP.membershipId) + }), + response: { + 200: z.object({ + memberships: ProjectMembershipsSchema.extend({ + user: UsersSchema.pick({ + email: true, + username: true, + firstName: true, + lastName: true, + id: true + }).merge(UserEncryptionKeysSchema.pick({ publicKey: true })), + project: ProjectsSchema.pick({ name: true, id: true }), + roles: z.array( + z.object({ + id: z.string(), + role: z.string(), + customRoleId: z.string().optional().nullable(), + customRoleName: z.string().optional().nullable(), + customRoleSlug: z.string().optional().nullable(), + isTemporary: z.boolean(), + temporaryMode: z.string().optional().nullable(), + temporaryRange: z.string().nullable().optional(), + temporaryAccessStartTime: z.date().nullable().optional(), + temporaryAccessEndTime: z.date().nullable().optional() + }) + ) + }) + .omit({ createdAt: true, updatedAt: true }) + .array() + }) + } + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + handler: async (req) => { + const memberships = await server.services.org.listProjectMembershipsByOrgMembershipId({ + actor: req.permission.type, + actorId: req.permission.id, + actorAuthMethod: req.permission.authMethod, + actorOrgId: req.permission.orgId, + orgId: req.params.organizationId, + orgMembershipId: req.params.membershipId + }); + return { memberships }; + } + }); + server.route({ method: "POST", url: "/", config: { - rateLimit: creationLimit + rateLimit: writeLimit }, schema: { body: z.object({ diff --git 
a/backend/src/server/routes/v2/project-membership-router.ts b/backend/src/server/routes/v2/project-membership-router.ts index a9592faab3..4aa03f33c5 100644 --- a/backend/src/server/routes/v2/project-membership-router.ts +++ b/backend/src/server/routes/v2/project-membership-router.ts @@ -1,6 +1,6 @@ import { z } from "zod"; -import { ProjectMembershipsSchema } from "@app/db/schemas"; +import { OrgMembershipRole, ProjectMembershipRole, ProjectMembershipsSchema } from "@app/db/schemas"; import { EventType } from "@app/ee/services/audit-log/audit-log-types"; import { PROJECT_USERS } from "@app/lib/api-docs"; import { writeLimit } from "@app/server/config/rateLimiter"; @@ -26,7 +26,8 @@ export const registerProjectMembershipRouter = async (server: FastifyZodProvider }), body: z.object({ emails: z.string().email().array().default([]).describe(PROJECT_USERS.INVITE_MEMBER.emails), - usernames: z.string().array().default([]).describe(PROJECT_USERS.INVITE_MEMBER.usernames) + usernames: z.string().array().default([]).describe(PROJECT_USERS.INVITE_MEMBER.usernames), + roleSlugs: z.string().array().optional().describe(PROJECT_USERS.INVITE_MEMBER.roleSlugs) }), response: { 200: z.object({ @@ -36,14 +37,21 @@ export const registerProjectMembershipRouter = async (server: FastifyZodProvider }, onRequest: verifyAuth([AuthMode.JWT, AuthMode.API_KEY, AuthMode.IDENTITY_ACCESS_TOKEN]), handler: async (req) => { - const memberships = await server.services.projectMembership.addUsersToProjectNonE2EE({ - projectId: req.params.projectId, + const usernamesAndEmails = [...req.body.emails, ...req.body.usernames]; + const { projectMemberships: memberships } = await server.services.org.inviteUserToOrganization({ actorAuthMethod: req.permission.authMethod, actorId: req.permission.id, actorOrgId: req.permission.orgId, actor: req.permission.type, - emails: req.body.emails, - usernames: req.body.usernames + inviteeEmails: usernamesAndEmails, + orgId: req.permission.orgId, + organizationRoleSlug: OrgMembershipRole.NoAccess, + projects: [ + { + id: req.params.projectId, + projectRoleSlug: [ProjectMembershipRole.Member] + } + ] }); await server.services.auditLog.createAuditLog({ diff --git a/backend/src/server/routes/v2/project-router.ts b/backend/src/server/routes/v2/project-router.ts index a199cf0d4f..c2aa446b47 100644 --- a/backend/src/server/routes/v2/project-router.ts +++ b/backend/src/server/routes/v2/project-router.ts @@ -1,22 +1,31 @@ import slugify from "@sindresorhus/slugify"; import { z } from "zod"; -import { ProjectKeysSchema, ProjectsSchema } from "@app/db/schemas"; +import { + CertificateAuthoritiesSchema, + CertificatesSchema, + PkiAlertsSchema, + PkiCollectionsSchema, + ProjectKeysSchema +} from "@app/db/schemas"; import { EventType } from "@app/ee/services/audit-log/audit-log-types"; +import { InfisicalProjectTemplate } from "@app/ee/services/project-template/project-template-types"; import { PROJECTS } from "@app/lib/api-docs"; -import { creationLimit, readLimit, writeLimit } from "@app/server/config/rateLimiter"; +import { readLimit, writeLimit } from "@app/server/config/rateLimiter"; import { getTelemetryDistinctId } from "@app/server/lib/telemetry"; import { verifyAuth } from "@app/server/plugins/auth/verify-auth"; import { AuthMode } from "@app/services/auth/auth-type"; +import { CaStatus } from "@app/services/certificate-authority/certificate-authority-types"; +import { sanitizedCertificateTemplate } from "@app/services/certificate-template/certificate-template-schema"; import { ProjectFilterType } from 
"@app/services/project/project-types"; import { PostHogEventTypes } from "@app/services/telemetry/telemetry-types"; -const projectWithEnv = ProjectsSchema.merge( - z.object({ - _id: z.string(), - environments: z.object({ name: z.string(), slug: z.string(), id: z.string() }).array() - }) -); +import { SanitizedProjectSchema } from "../sanitizedSchemas"; + +const projectWithEnv = SanitizedProjectSchema.extend({ + _id: z.string(), + environments: z.object({ name: z.string(), slug: z.string(), id: z.string() }).array() +}); const slugSchema = z .string() @@ -141,7 +150,7 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => { method: "POST", url: "/", config: { - rateLimit: creationLimit + rateLimit: writeLimit }, schema: { description: "Create a new project", @@ -160,7 +169,16 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => { message: "Slug must be a valid slug" }) .optional() - .describe(PROJECTS.CREATE.slug) + .describe(PROJECTS.CREATE.slug), + kmsKeyId: z.string().optional(), + template: z + .string() + .refine((v) => slugify(v) === v, { + message: "Template name must be in slug format" + }) + .optional() + .default(InfisicalProjectTemplate.Default) + .describe(PROJECTS.CREATE.template) }), response: { 200: z.object({ @@ -176,7 +194,9 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => { actorOrgId: req.permission.orgId, actorAuthMethod: req.permission.authMethod, workspaceName: req.body.projectName, - slug: req.body.slug + slug: req.body.slug, + kmsKeyId: req.body.kmsKeyId, + template: req.body.template }); await server.services.telemetry.sendPostHogEvents({ @@ -189,6 +209,20 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => { } }); + if (req.body.template) { + await server.services.auditLog.createAuditLog({ + ...req.auditLogInfo, + orgId: req.permission.orgId, + event: { + type: EventType.APPLY_PROJECT_TEMPLATE, + metadata: { + template: req.body.template, + projectId: project.id + } + } + }); + } + return { project }; } }); @@ -211,7 +245,7 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => { slug: slugSchema.describe("The slug of the project to delete.") }), response: { - 200: ProjectsSchema + 200: SanitizedProjectSchema } }, onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), @@ -282,7 +316,7 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => { autoCapitalization: z.boolean().optional().describe("The new auto-capitalization setting.") }), response: { - 200: ProjectsSchema + 200: SanitizedProjectSchema } }, @@ -307,4 +341,176 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => { return project; } }); + + server.route({ + method: "GET", + url: "/:slug/cas", + config: { + rateLimit: readLimit + }, + schema: { + params: z.object({ + slug: slugSchema.describe(PROJECTS.LIST_CAS.slug) + }), + querystring: z.object({ + status: z.enum([CaStatus.ACTIVE, CaStatus.PENDING_CERTIFICATE]).optional().describe(PROJECTS.LIST_CAS.status), + friendlyName: z.string().optional().describe(PROJECTS.LIST_CAS.friendlyName), + commonName: z.string().optional().describe(PROJECTS.LIST_CAS.commonName), + offset: z.coerce.number().min(0).max(100).default(0).describe(PROJECTS.LIST_CAS.offset), + limit: z.coerce.number().min(1).max(100).default(25).describe(PROJECTS.LIST_CAS.limit) + }), + response: { + 200: z.object({ + cas: z.array(CertificateAuthoritiesSchema) + }) + } + }, + onRequest: 
verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + handler: async (req) => { + const cas = await server.services.project.listProjectCas({ + filter: { + slug: req.params.slug, + orgId: req.permission.orgId, + type: ProjectFilterType.SLUG + }, + actorId: req.permission.id, + actorOrgId: req.permission.orgId, + actorAuthMethod: req.permission.authMethod, + actor: req.permission.type, + ...req.query + }); + return { cas }; + } + }); + + server.route({ + method: "GET", + url: "/:slug/certificates", + config: { + rateLimit: readLimit + }, + schema: { + params: z.object({ + slug: slugSchema.describe(PROJECTS.LIST_CERTIFICATES.slug) + }), + querystring: z.object({ + friendlyName: z.string().optional().describe(PROJECTS.LIST_CERTIFICATES.friendlyName), + commonName: z.string().optional().describe(PROJECTS.LIST_CERTIFICATES.commonName), + offset: z.coerce.number().min(0).max(100).default(0).describe(PROJECTS.LIST_CERTIFICATES.offset), + limit: z.coerce.number().min(1).max(100).default(25).describe(PROJECTS.LIST_CERTIFICATES.limit) + }), + response: { + 200: z.object({ + certificates: z.array(CertificatesSchema), + totalCount: z.number() + }) + } + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + handler: async (req) => { + const { certificates, totalCount } = await server.services.project.listProjectCertificates({ + filter: { + slug: req.params.slug, + orgId: req.permission.orgId, + type: ProjectFilterType.SLUG + }, + actorId: req.permission.id, + actorOrgId: req.permission.orgId, + actorAuthMethod: req.permission.authMethod, + actor: req.permission.type, + ...req.query + }); + return { certificates, totalCount }; + } + }); + + server.route({ + method: "GET", + url: "/:projectId/pki-alerts", + config: { + rateLimit: readLimit + }, + schema: { + params: z.object({ + projectId: z.string().trim() + }), + response: { + 200: z.object({ + alerts: z.array(PkiAlertsSchema) + }) + } + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + handler: async (req) => { + const { alerts } = await server.services.project.listProjectAlerts({ + projectId: req.params.projectId, + actorId: req.permission.id, + actorOrgId: req.permission.orgId, + actorAuthMethod: req.permission.authMethod, + actor: req.permission.type + }); + + return { alerts }; + } + }); + + server.route({ + method: "GET", + url: "/:projectId/pki-collections", + config: { + rateLimit: readLimit + }, + schema: { + params: z.object({ + projectId: z.string().trim() + }), + response: { + 200: z.object({ + collections: z.array(PkiCollectionsSchema) + }) + } + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + handler: async (req) => { + const { pkiCollections } = await server.services.project.listProjectPkiCollections({ + projectId: req.params.projectId, + actorId: req.permission.id, + actorOrgId: req.permission.orgId, + actorAuthMethod: req.permission.authMethod, + actor: req.permission.type + }); + + return { collections: pkiCollections }; + } + }); + + server.route({ + method: "GET", + url: "/:projectId/certificate-templates", + config: { + rateLimit: readLimit + }, + schema: { + params: z.object({ + projectId: z.string().trim() + }), + response: { + 200: z.object({ + certificateTemplates: sanitizedCertificateTemplate.array() + }) + } + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + handler: async (req) => { + const { certificateTemplates } = await server.services.project.listProjectCertificateTemplates({ + projectId: 
req.params.projectId, + actorId: req.permission.id, + actorOrgId: req.permission.orgId, + actorAuthMethod: req.permission.authMethod, + actor: req.permission.type + }); + + return { certificateTemplates }; + } + }); }; diff --git a/backend/src/server/routes/v2/user-router.ts b/backend/src/server/routes/v2/user-router.ts index 1f15008c7b..a52a45fa9a 100644 --- a/backend/src/server/routes/v2/user-router.ts +++ b/backend/src/server/routes/v2/user-router.ts @@ -4,7 +4,7 @@ import { AuthTokenSessionsSchema, OrganizationsSchema, UserEncryptionKeysSchema, import { ApiKeysSchema } from "@app/db/schemas/api-keys"; import { authRateLimit, readLimit, writeLimit } from "@app/server/config/rateLimiter"; import { verifyAuth } from "@app/server/plugins/auth/verify-auth"; -import { AuthMethod, AuthMode } from "@app/services/auth/auth-type"; +import { AuthMethod, AuthMode, MfaMethod } from "@app/services/auth/auth-type"; export const registerUserRouter = async (server: FastifyZodProvider) => { server.route({ @@ -56,7 +56,8 @@ export const registerUserRouter = async (server: FastifyZodProvider) => { }, schema: { body: z.object({ - isMfaEnabled: z.boolean() + isMfaEnabled: z.boolean().optional(), + selectedMfaMethod: z.nativeEnum(MfaMethod).optional() }), response: { 200: z.object({ @@ -66,7 +67,12 @@ export const registerUserRouter = async (server: FastifyZodProvider) => { }, preHandler: verifyAuth([AuthMode.JWT, AuthMode.API_KEY]), handler: async (req) => { - const user = await server.services.user.toggleUserMfa(req.permission.id, req.body.isMfaEnabled); + const user = await server.services.user.updateUserMfa({ + userId: req.permission.id, + isMfaEnabled: req.body.isMfaEnabled, + selectedMfaMethod: req.body.selectedMfaMethod + }); + return { user }; } }); @@ -255,7 +261,23 @@ export const registerUserRouter = async (server: FastifyZodProvider) => { description: "Retrieve the current user on the request", response: { 200: z.object({ - user: UsersSchema.merge(UserEncryptionKeysSchema.omit({ verifier: true })) + user: UsersSchema.merge( + UserEncryptionKeysSchema.pick({ + clientPublicKey: true, + serverPrivateKey: true, + encryptionVersion: true, + protectedKey: true, + protectedKeyIV: true, + protectedKeyTag: true, + publicKey: true, + encryptedPrivateKey: true, + iv: true, + tag: true, + salt: true, + verifier: true, + userId: true + }) + ) }) } }, @@ -281,7 +303,7 @@ export const registerUserRouter = async (server: FastifyZodProvider) => { }, onRequest: verifyAuth([AuthMode.JWT]), handler: async (req) => { - const user = await server.services.user.deleteMe(req.permission.id); + const user = await server.services.user.deleteUser(req.permission.id); return { user }; } }); diff --git a/backend/src/server/routes/v3/external-migration-router.ts b/backend/src/server/routes/v3/external-migration-router.ts new file mode 100644 index 0000000000..865287157e --- /dev/null +++ b/backend/src/server/routes/v3/external-migration-router.ts @@ -0,0 +1,55 @@ +import fastifyMultipart from "@fastify/multipart"; + +import { BadRequestError } from "@app/lib/errors"; +import { readLimit } from "@app/server/config/rateLimiter"; +import { verifyAuth } from "@app/server/plugins/auth/verify-auth"; +import { AuthMode } from "@app/services/auth/auth-type"; + +const MB25_IN_BYTES = 26214400; + +export const registerExternalMigrationRouter = async (server: FastifyZodProvider) => { + await server.register(fastifyMultipart); + + server.route({ + method: "POST", + bodyLimit: MB25_IN_BYTES, + url: "/env-key", + config: { + rateLimit: 
readLimit + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + handler: async (req) => { + const data = await req.file({ + limits: { + fileSize: MB25_IN_BYTES + } + }); + + if (!data) { + throw new BadRequestError({ message: "No file provided" }); + } + + const fullFile = Buffer.from(await data.toBuffer()).toString("utf8"); + const parsedJsonFile = JSON.parse(fullFile) as { nonce: string; data: string }; + + const decryptionKey = (data.fields.decryptionKey as { value: string }).value; + + if (!parsedJsonFile.nonce || !parsedJsonFile.data) { + throw new BadRequestError({ message: "Invalid file format. Nonce or data missing." }); + } + + if (!decryptionKey) { + throw new BadRequestError({ message: "Decryption key is required" }); + } + + await server.services.migration.importEnvKeyData({ + decryptionKey, + encryptedJson: parsedJsonFile, + actorId: req.permission.id, + actor: req.permission.type, + actorOrgId: req.permission.orgId, + actorAuthMethod: req.permission.authMethod + }); + } + }); +}; diff --git a/backend/src/server/routes/v3/index.ts b/backend/src/server/routes/v3/index.ts index 6a061a6350..ed8401560b 100644 --- a/backend/src/server/routes/v3/index.ts +++ b/backend/src/server/routes/v3/index.ts @@ -1,3 +1,4 @@ +import { registerExternalMigrationRouter } from "./external-migration-router"; import { registerLoginRouter } from "./login-router"; import { registerSecretBlindIndexRouter } from "./secret-blind-index-router"; import { registerSecretRouter } from "./secret-router"; @@ -10,4 +11,5 @@ export const registerV3Routes = async (server: FastifyZodProvider) => { await server.register(registerUserRouter, { prefix: "/users" }); await server.register(registerSecretRouter, { prefix: "/secrets" }); await server.register(registerSecretBlindIndexRouter, { prefix: "/workspaces" }); + await server.register(registerExternalMigrationRouter, { prefix: "/migrate" }); }; diff --git a/backend/src/server/routes/v3/login-router.ts b/backend/src/server/routes/v3/login-router.ts index 4c7df5612b..cddfc1c2b1 100644 --- a/backend/src/server/routes/v3/login-router.ts +++ b/backend/src/server/routes/v3/login-router.ts @@ -42,23 +42,34 @@ export const registerLoginRouter = async (server: FastifyZodProvider) => { }, schema: { body: z.object({ - organizationId: z.string().trim() + organizationId: z.string().trim(), + userAgent: z.enum(["cli"]).optional() }), response: { 200: z.object({ - token: z.string() + token: z.string(), + isMfaEnabled: z.boolean(), + mfaMethod: z.string().optional() }) } }, handler: async (req, res) => { const cfg = getConfig(); const tokens = await server.services.login.selectOrganization({ - userAgent: req.headers["user-agent"], + userAgent: req.body.userAgent ?? 
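For orientation, a client call against the new EnvKey import endpoint above might look like the following sketch. The /api/v3/migrate/env-key path is inferred from the "/migrate" prefix registered in the v3 index together with the "/env-key" route URL; the base URL, token, and file path are placeholders, not values from this patch:

// hypothetical client for POST /api/v3/migrate/env-key (multipart upload, max 25 MB)
import { readFile } from "node:fs/promises";

export const uploadEnvKeyExport = async (
  baseUrl: string,
  accessToken: string,
  exportPath: string,
  decryptionKey: string
) => {
  const fileBuffer = await readFile(exportPath);
  const form = new FormData();
  // the handler reads the encrypted export file plus a "decryptionKey" multipart field
  form.append("file", new Blob([fileBuffer]), "envkey-export.json");
  form.append("decryptionKey", decryptionKey);
  const res = await fetch(`${baseUrl}/api/v3/migrate/env-key`, {
    method: "POST",
    headers: { Authorization: `Bearer ${accessToken}` },
    body: form
  });
  if (!res.ok) throw new Error(`EnvKey import failed with status ${res.status}`);
};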
req.headers["user-agent"], authJwtToken: req.headers.authorization, organizationId: req.body.organizationId, ipAddress: req.realIp }); + if (tokens.isMfaEnabled) { + return { + token: tokens.mfa as string, + isMfaEnabled: true, + mfaMethod: tokens.mfaMethod + }; + } + void res.setCookie("jid", tokens.refresh, { httpOnly: true, path: "/", @@ -66,7 +77,7 @@ export const registerLoginRouter = async (server: FastifyZodProvider) => { secure: cfg.HTTPS_ENABLED }); - return { token: tokens.access }; + return { token: tokens.access, isMfaEnabled: false }; } }); @@ -81,24 +92,21 @@ export const registerLoginRouter = async (server: FastifyZodProvider) => { email: z.string().trim(), providerAuthToken: z.string().trim().optional(), clientProof: z.string().trim(), - captchaToken: z.string().trim().optional() + captchaToken: z.string().trim().optional(), + password: z.string().optional() }), response: { - 200: z.discriminatedUnion("mfaEnabled", [ - z.object({ mfaEnabled: z.literal(true), token: z.string() }), - z.object({ - mfaEnabled: z.literal(false), - encryptionVersion: z.number().default(1).nullable().optional(), - protectedKey: z.string().nullable(), - protectedKeyIV: z.string().nullable(), - protectedKeyTag: z.string().nullable(), - publicKey: z.string(), - encryptedPrivateKey: z.string(), - iv: z.string(), - tag: z.string(), - token: z.string() - }) - ]) + 200: z.object({ + encryptionVersion: z.number().default(1).nullable().optional(), + protectedKey: z.string().nullable(), + protectedKeyIV: z.string().nullable(), + protectedKeyTag: z.string().nullable(), + publicKey: z.string(), + encryptedPrivateKey: z.string(), + iv: z.string(), + tag: z.string(), + token: z.string() + }) } }, handler: async (req, res) => { @@ -112,13 +120,10 @@ export const registerLoginRouter = async (server: FastifyZodProvider) => { ip: req.realIp, userAgent, providerAuthToken: req.body.providerAuthToken, - clientProof: req.body.clientProof + clientProof: req.body.clientProof, + password: req.body.password }); - if (data.isMfaEnabled) { - return { mfaEnabled: true, token: data.token } as const; // for discriminated union - } - void res.setCookie("jid", data.token.refresh, { httpOnly: true, path: "/", @@ -127,7 +132,6 @@ export const registerLoginRouter = async (server: FastifyZodProvider) => { }); return { - mfaEnabled: false, encryptionVersion: data.user.encryptionVersion, token: data.token.access, publicKey: data.user.publicKey, diff --git a/backend/src/server/routes/v3/secret-router.ts b/backend/src/server/routes/v3/secret-router.ts index 05db617b9f..61981bef5a 100644 --- a/backend/src/server/routes/v3/secret-router.ts +++ b/backend/src/server/routes/v3/secret-router.ts @@ -8,9 +8,9 @@ import { SecretType, ServiceTokenScopes } from "@app/db/schemas"; -import { EventType } from "@app/ee/services/audit-log/audit-log-types"; +import { EventType, UserAgentType } from "@app/ee/services/audit-log/audit-log-types"; import { RAW_SECRETS, SECRETS } from "@app/lib/api-docs"; -import { BadRequestError } from "@app/lib/errors"; +import { BadRequestError, NotFoundError } from "@app/lib/errors"; import { removeTrailingSlash } from "@app/lib/fn"; import { secretsLimit, writeLimit } from "@app/server/config/rateLimiter"; import { getTelemetryDistinctId } from "@app/server/lib/telemetry"; @@ -18,11 +18,23 @@ import { getUserAgentType } from "@app/server/plugins/audit-log"; import { verifyAuth } from "@app/server/plugins/auth/verify-auth"; import { ActorType, AuthMode } from "@app/services/auth/auth-type"; import { ProjectFilterType } 
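Because select-organization can now short-circuit into an MFA challenge, a caller has to branch on isMfaEnabled before treating the returned token as an access token. A minimal sketch, assuming the route is mounted at /api/v3/auth/select-organization (the route URL itself is outside this hunk):

type SelectOrgResult = { token: string; isMfaEnabled: boolean; mfaMethod?: string };

export const selectOrganization = async (baseUrl: string, authToken: string, organizationId: string) => {
  const res = await fetch(`${baseUrl}/api/v3/auth/select-organization`, {
    method: "POST",
    headers: { Authorization: `Bearer ${authToken}`, "Content-Type": "application/json" },
    body: JSON.stringify({ organizationId })
  });
  const result = (await res.json()) as SelectOrgResult;
  if (result.isMfaEnabled) {
    // result.token is a short-lived MFA token here, not an access token;
    // collect an email OTP or TOTP code (per result.mfaMethod) and verify it first
  }
  return result;
};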
from "@app/services/project/project-types"; -import { SecretOperations } from "@app/services/secret/secret-types"; +import { SecretOperations, SecretProtectionType } from "@app/services/secret/secret-types"; import { PostHogEventTypes } from "@app/services/telemetry/telemetry-types"; import { secretRawSchema } from "../sanitizedSchemas"; +const SecretReferenceNode = z.object({ + key: z.string(), + value: z.string().optional(), + environment: z.string(), + secretPath: z.string() +}); +type TSecretReferenceNode = z.infer & { children: TSecretReferenceNode[] }; + +const SecretReferenceNodeTree: z.ZodType = SecretReferenceNode.extend({ + children: z.lazy(() => SecretReferenceNodeTree.array()) +}); + export const registerSecretRouter = async (server: FastifyZodProvider) => { server.route({ method: "POST", @@ -59,9 +71,10 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => { tags: SecretTagsSchema.pick({ id: true, slug: true, - name: true, color: true - }).array() + }) + .extend({ name: z.string() }) + .array() }) ) }) @@ -116,16 +129,15 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => { }), response: { 200: z.object({ - secret: SecretsSchema.omit({ secretBlindIndex: true }).merge( - z.object({ - tags: SecretTagsSchema.pick({ - id: true, - slug: true, - name: true, - color: true - }).array() + secret: SecretsSchema.omit({ secretBlindIndex: true }).extend({ + tags: SecretTagsSchema.pick({ + id: true, + slug: true, + color: true }) - ) + .extend({ name: z.string() }) + .array() + }) }) } }, @@ -180,13 +192,27 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => { .enum(["true", "false"]) .default("false") .transform((value) => value === "true") - .describe(RAW_SECRETS.LIST.includeImports) + .describe(RAW_SECRETS.LIST.includeImports), + tagSlugs: z + .string() + .describe(RAW_SECRETS.LIST.tagSlugs) + .optional() + // split by comma and trim the strings + .transform((el) => (el ? 
el.split(",").map((i) => i.trim()) : [])) }), response: { 200: z.object({ secrets: secretRawSchema .extend({ - secretPath: z.string().optional() + secretPath: z.string().optional(), + tags: SecretTagsSchema.pick({ + id: true, + slug: true, + color: true + }) + .extend({ name: z.string() }) + .array() + .optional() }) .array(), imports: z @@ -194,7 +220,7 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => { secretPath: z.string(), environment: z.string(), folderId: z.string().optional(), - secrets: secretRawSchema.array() + secrets: secretRawSchema.omit({ createdAt: true, updatedAt: true }).array() }) .array() .optional() @@ -226,7 +252,7 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => { actorOrgId: req.permission.orgId }); - if (!workspace) throw new BadRequestError({ message: `No project found with slug ${req.query.workspaceSlug}` }); + if (!workspace) throw new NotFoundError({ message: `No project found with slug ${req.query.workspaceSlug}` }); workspaceId = workspace.id; } @@ -243,7 +269,8 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => { projectId: workspaceId, path: secretPath, includeImports: req.query.include_imports, - recursive: req.query.recursive + recursive: req.query.recursive, + tagSlugs: req.query.tagSlugs }); await server.services.auditLog.createAuditLog({ @@ -259,18 +286,20 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => { } }); - await server.services.telemetry.sendPostHogEvents({ - event: PostHogEventTypes.SecretPulled, - distinctId: getTelemetryDistinctId(req), - properties: { - numberOfSecrets: secrets.length, - workspaceId, - environment, - secretPath: req.query.secretPath, - channel: getUserAgentType(req.headers["user-agent"]), - ...req.auditLogInfo - } - }); + if (getUserAgentType(req.headers["user-agent"]) !== UserAgentType.K8_OPERATOR) { + await server.services.telemetry.sendPostHogEvents({ + event: PostHogEventTypes.SecretPulled, + distinctId: getTelemetryDistinctId(req), + properties: { + numberOfSecrets: secrets.length, + workspaceId, + environment, + secretPath: req.query.secretPath, + channel: getUserAgentType(req.headers["user-agent"]), + ...req.auditLogInfo + } + }); + } return { secrets, imports }; } }); @@ -298,6 +327,11 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => { secretPath: z.string().trim().default("/").transform(removeTrailingSlash).describe(RAW_SECRETS.GET.secretPath), version: z.coerce.number().optional().describe(RAW_SECRETS.GET.version), type: z.nativeEnum(SecretType).default(SecretType.Shared).describe(RAW_SECRETS.GET.type), + expandSecretReferences: z + .enum(["true", "false"]) + .default("false") + .transform((value) => value === "true") + .describe(RAW_SECRETS.GET.expand), include_imports: z .enum(["true", "false"]) .default("false") @@ -306,7 +340,16 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => { }), response: { 200: z.object({ - secret: secretRawSchema + secret: secretRawSchema.extend({ + tags: SecretTagsSchema.pick({ + id: true, + slug: true, + color: true + }) + .extend({ name: z.string() }) + .array() + .optional() + }) }) } }, @@ -333,6 +376,7 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => { actor: req.permission.type, actorAuthMethod: req.permission.authMethod, actorOrgId: req.permission.orgId, + expandSecretReferences: req.query.expandSecretReferences, environment, projectId: workspaceId, projectSlug: workspaceSlug, @@ -358,18 
+402,20 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => { } }); - await server.services.telemetry.sendPostHogEvents({ - event: PostHogEventTypes.SecretPulled, - distinctId: getTelemetryDistinctId(req), - properties: { - numberOfSecrets: 1, - workspaceId: secret.workspace, - environment, - secretPath: req.query.secretPath, - channel: getUserAgentType(req.headers["user-agent"]), - ...req.auditLogInfo - } - }); + if (getUserAgentType(req.headers["user-agent"]) !== UserAgentType.K8_OPERATOR) { + await server.services.telemetry.sendPostHogEvents({ + event: PostHogEventTypes.SecretPulled, + distinctId: getTelemetryDistinctId(req), + properties: { + numberOfSecrets: 1, + workspaceId: secret.workspace, + environment, + secretPath: req.query.secretPath, + channel: getUserAgentType(req.headers["user-agent"]), + ...req.auditLogInfo + } + }); + } return { secret }; } }); @@ -404,18 +450,28 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => { .transform((val) => (val.at(-1) === "\n" ? `${val.trim()}\n` : val.trim())) .describe(RAW_SECRETS.CREATE.secretValue), secretComment: z.string().trim().optional().default("").describe(RAW_SECRETS.CREATE.secretComment), + tagIds: z.string().array().optional().describe(RAW_SECRETS.CREATE.tagIds), skipMultilineEncoding: z.boolean().optional().describe(RAW_SECRETS.CREATE.skipMultilineEncoding), - type: z.nativeEnum(SecretType).default(SecretType.Shared).describe(RAW_SECRETS.CREATE.type) + type: z.nativeEnum(SecretType).default(SecretType.Shared).describe(RAW_SECRETS.CREATE.type), + secretReminderRepeatDays: z + .number() + .optional() + .nullable() + .describe(RAW_SECRETS.CREATE.secretReminderRepeatDays), + secretReminderNote: z.string().optional().nullable().describe(RAW_SECRETS.CREATE.secretReminderNote) }), response: { - 200: z.object({ - secret: secretRawSchema - }) + 200: z.union([ + z.object({ + secret: secretRawSchema + }), + z.object({ approval: SecretApprovalRequestsSchema }).describe("When secret protection policy is enabled") + ]) } }, onRequest: verifyAuth([AuthMode.JWT, AuthMode.API_KEY, AuthMode.SERVICE_TOKEN, AuthMode.IDENTITY_ACCESS_TOKEN]), handler: async (req) => { - const secret = await server.services.secret.createSecretRaw({ + const secretOperation = await server.services.secret.createSecretRaw({ actorId: req.permission.id, actor: req.permission.type, actorOrgId: req.permission.orgId, @@ -427,9 +483,16 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => { type: req.body.type, secretValue: req.body.secretValue, skipMultilineEncoding: req.body.skipMultilineEncoding, - secretComment: req.body.secretComment + secretComment: req.body.secretComment, + tagIds: req.body.tagIds, + secretReminderNote: req.body.secretReminderNote, + secretReminderRepeatDays: req.body.secretReminderRepeatDays }); + if (secretOperation.type === SecretProtectionType.Approval) { + return { approval: secretOperation.approval }; + } + const { secret } = secretOperation; await server.services.auditLog.createAuditLog({ projectId: req.body.workspaceId, ...req.auditLogInfo, @@ -492,17 +555,30 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => { .transform(removeTrailingSlash) .describe(RAW_SECRETS.UPDATE.secretPath), skipMultilineEncoding: z.boolean().optional().describe(RAW_SECRETS.UPDATE.skipMultilineEncoding), - type: z.nativeEnum(SecretType).default(SecretType.Shared).describe(RAW_SECRETS.UPDATE.type) + type: 
z.nativeEnum(SecretType).default(SecretType.Shared).describe(RAW_SECRETS.UPDATE.type), + tagIds: z.string().array().optional().describe(RAW_SECRETS.UPDATE.tagIds), + metadata: z.record(z.string()).optional(), + secretReminderNote: z.string().optional().nullable().describe(RAW_SECRETS.UPDATE.secretReminderNote), + secretReminderRepeatDays: z + .number() + .optional() + .nullable() + .describe(RAW_SECRETS.UPDATE.secretReminderRepeatDays), + newSecretName: z.string().min(1).optional().describe(RAW_SECRETS.UPDATE.newSecretName), + secretComment: z.string().optional().describe(RAW_SECRETS.UPDATE.secretComment) }), response: { - 200: z.object({ - secret: secretRawSchema - }) + 200: z.union([ + z.object({ + secret: secretRawSchema + }), + z.object({ approval: SecretApprovalRequestsSchema }).describe("When secret protection policy is enabled") + ]) } }, onRequest: verifyAuth([AuthMode.JWT, AuthMode.API_KEY, AuthMode.SERVICE_TOKEN, AuthMode.IDENTITY_ACCESS_TOKEN]), handler: async (req) => { - const secret = await server.services.secret.updateSecretRaw({ + const secretOperation = await server.services.secret.updateSecretRaw({ actorId: req.permission.id, actor: req.permission.type, actorOrgId: req.permission.orgId, @@ -513,8 +589,18 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => { secretName: req.params.secretName, type: req.body.type, secretValue: req.body.secretValue, - skipMultilineEncoding: req.body.skipMultilineEncoding + skipMultilineEncoding: req.body.skipMultilineEncoding, + tagIds: req.body.tagIds, + secretReminderRepeatDays: req.body.secretReminderRepeatDays, + secretReminderNote: req.body.secretReminderNote, + metadata: req.body.metadata, + newSecretName: req.body.newSecretName, + secretComment: req.body.secretComment }); + if (secretOperation.type === SecretProtectionType.Approval) { + return { approval: secretOperation.approval }; + } + const { secret } = secretOperation; await server.services.auditLog.createAuditLog({ projectId: req.body.workspaceId, @@ -575,14 +661,17 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => { type: z.nativeEnum(SecretType).default(SecretType.Shared).describe(RAW_SECRETS.DELETE.type) }), response: { - 200: z.object({ - secret: secretRawSchema - }) + 200: z.union([ + z.object({ + secret: secretRawSchema + }), + z.object({ approval: SecretApprovalRequestsSchema }).describe("When secret protection policy is enabled") + ]) } }, onRequest: verifyAuth([AuthMode.JWT, AuthMode.API_KEY, AuthMode.SERVICE_TOKEN, AuthMode.IDENTITY_ACCESS_TOKEN]), handler: async (req) => { - const secret = await server.services.secret.deleteSecretRaw({ + const secretOperation = await server.services.secret.deleteSecretRaw({ actorId: req.permission.id, actor: req.permission.type, actorAuthMethod: req.permission.authMethod, @@ -593,6 +682,10 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => { secretName: req.params.secretName, type: req.body.type }); + if (secretOperation.type === SecretProtectionType.Approval) { + return { approval: secretOperation.approval }; + } + const { secret } = secretOperation; await server.services.auditLog.createAuditLog({ projectId: req.body.workspaceId, @@ -657,9 +750,10 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => { tags: SecretTagsSchema.pick({ id: true, slug: true, - name: true, color: true - }).array() + }) + .extend({ name: z.string() }) + .array() }) .array(), imports: z @@ -710,24 +804,22 @@ export const registerSecretRouter = async (server: 
FastifyZodProvider) => { }); // TODO: Move to telemetry plugin - let shouldRecordK8Event = false; - if (req.headers["user-agent"] === "k8-operatoer") { - const randomNumber = Math.random(); - if (randomNumber > 0.95) { - shouldRecordK8Event = true; - } - } + // let shouldRecordK8Event = false; + // if (req.headers["user-agent"] === "k8-operatoer") { + // const randomNumber = Math.random(); + // if (randomNumber > 0.95) { + // shouldRecordK8Event = true; + // } + // } const shouldCapture = - req.query.workspaceId !== "650e71fbae3e6c8572f436d4" && - (req.headers["user-agent"] !== "k8-operator" || shouldRecordK8Event); - const approximateNumberTotalSecrets = secrets.length * 20; + req.query.workspaceId !== "650e71fbae3e6c8572f436d4" && req.headers["user-agent"] !== "k8-operator"; if (shouldCapture) { await server.services.telemetry.sendPostHogEvents({ event: PostHogEventTypes.SecretPulled, distinctId: getTelemetryDistinctId(req), properties: { - numberOfSecrets: shouldRecordK8Event ? approximateNumberTotalSecrets : secrets.length, + numberOfSecrets: secrets.length, workspaceId: req.query.workspaceId, environment: req.query.environment, secretPath: req.query.secretPath, @@ -804,18 +896,20 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => { } }); - await server.services.telemetry.sendPostHogEvents({ - event: PostHogEventTypes.SecretPulled, - distinctId: getTelemetryDistinctId(req), - properties: { - numberOfSecrets: 1, - workspaceId: req.query.workspaceId, - environment: req.query.environment, - secretPath: req.query.secretPath, - channel: getUserAgentType(req.headers["user-agent"]), - ...req.auditLogInfo - } - }); + if (getUserAgentType(req.headers["user-agent"]) !== UserAgentType.K8_OPERATOR) { + await server.services.telemetry.sendPostHogEvents({ + event: PostHogEventTypes.SecretPulled, + distinctId: getTelemetryDistinctId(req), + properties: { + numberOfSecrets: 1, + workspaceId: req.query.workspaceId, + environment: req.query.environment, + secretPath: req.query.secretPath, + channel: getUserAgentType(req.headers["user-agent"]), + ...req.auditLogInfo + } + }); + } return { secret }; } }); @@ -926,7 +1020,7 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => { event: { type: EventType.SECRET_APPROVAL_REQUEST, metadata: { - committedBy: approval.committerId, + committedBy: approval.committerUserId, secretApprovalRequestId: approval.id, secretApprovalRequestSlug: approval.slug } @@ -1110,7 +1204,7 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => { event: { type: EventType.SECRET_APPROVAL_REQUEST, metadata: { - committedBy: approval.committerId, + committedBy: approval.committerUserId, secretApprovalRequestId: approval.id, secretApprovalRequestSlug: approval.slug } @@ -1248,7 +1342,7 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => { event: { type: EventType.SECRET_APPROVAL_REQUEST, metadata: { - committedBy: approval.committerId, + committedBy: approval.committerUserId, secretApprovalRequestId: approval.id, secretApprovalRequestSlug: approval.slug } @@ -1302,6 +1396,61 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => { } }); + server.route({ + method: "POST", + url: "/move", + config: { + rateLimit: secretsLimit + }, + schema: { + body: z.object({ + projectSlug: z.string().trim(), + sourceEnvironment: z.string().trim(), + sourceSecretPath: z.string().trim().default("/").transform(removeTrailingSlash), + destinationEnvironment: z.string().trim(), + 
destinationSecretPath: z.string().trim().default("/").transform(removeTrailingSlash), + secretIds: z.string().array(), + shouldOverwrite: z.boolean().default(false) + }), + response: { + 200: z.object({ + isSourceUpdated: z.boolean(), + isDestinationUpdated: z.boolean() + }) + } + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + handler: async (req) => { + const { projectId, isSourceUpdated, isDestinationUpdated } = await server.services.secret.moveSecrets({ + actorId: req.permission.id, + actor: req.permission.type, + actorAuthMethod: req.permission.authMethod, + actorOrgId: req.permission.orgId, + ...req.body + }); + + await server.services.auditLog.createAuditLog({ + projectId, + ...req.auditLogInfo, + event: { + type: EventType.MOVE_SECRETS, + metadata: { + sourceEnvironment: req.body.sourceEnvironment, + sourceSecretPath: req.body.sourceSecretPath, + destinationEnvironment: req.body.destinationEnvironment, + destinationSecretPath: req.body.destinationSecretPath, + secretIds: req.body.secretIds + } + } + }); + + return { + isSourceUpdated, + isDestinationUpdated + }; + } + }); + server.route({ method: "POST", url: "/batch", @@ -1374,7 +1523,7 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => { event: { type: EventType.SECRET_APPROVAL_REQUEST, metadata: { - committedBy: approval.committerId, + committedBy: approval.committerUserId, secretApprovalRequestId: approval.id, secretApprovalRequestSlug: approval.slug } @@ -1501,7 +1650,7 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => { event: { type: EventType.SECRET_APPROVAL_REQUEST, metadata: { - committedBy: approval.committerId, + committedBy: approval.committerUserId, secretApprovalRequestId: approval.id, secretApprovalRequestSlug: approval.slug } @@ -1615,7 +1764,7 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => { event: { type: EventType.SECRET_APPROVAL_REQUEST, metadata: { - committedBy: approval.committerId, + committedBy: approval.committerUserId, secretApprovalRequestId: approval.id, secretApprovalRequestSlug: approval.slug } @@ -1682,7 +1831,8 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => { } ], body: z.object({ - projectSlug: z.string().trim().describe(RAW_SECRETS.CREATE.projectSlug), + projectSlug: z.string().trim().optional().describe(RAW_SECRETS.UPDATE.projectSlug), + workspaceId: z.string().trim().optional().describe(RAW_SECRETS.UPDATE.workspaceId), environment: z.string().trim().describe(RAW_SECRETS.CREATE.environment), secretPath: z .string() @@ -1698,22 +1848,27 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => { .transform((val) => (val.at(-1) === "\n" ? 
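A client call to the new move endpoint above could look like this sketch. The /api/v3/secrets/move path follows from the router's "/secrets" prefix in the v3 index; the slug, environments, and IDs are placeholders:

// hypothetical caller for POST /api/v3/secrets/move
export const moveSecrets = async (baseUrl: string, token: string, secretIds: string[]) => {
  const res = await fetch(`${baseUrl}/api/v3/secrets/move`, {
    method: "POST",
    headers: { Authorization: `Bearer ${token}`, "Content-Type": "application/json" },
    body: JSON.stringify({
      projectSlug: "my-project", // placeholder slug
      sourceEnvironment: "dev",
      sourceSecretPath: "/",
      destinationEnvironment: "prod",
      destinationSecretPath: "/app",
      secretIds,
      shouldOverwrite: false // keep conflicting destination secrets intact
    })
  });
  // the route reports which side actually changed
  return (await res.json()) as { isSourceUpdated: boolean; isDestinationUpdated: boolean };
};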
`${val.trim()}\n` : val.trim())) .describe(RAW_SECRETS.CREATE.secretValue), secretComment: z.string().trim().optional().default("").describe(RAW_SECRETS.CREATE.secretComment), - skipMultilineEncoding: z.boolean().optional().describe(RAW_SECRETS.CREATE.skipMultilineEncoding) + skipMultilineEncoding: z.boolean().optional().describe(RAW_SECRETS.CREATE.skipMultilineEncoding), + metadata: z.record(z.string()).optional(), + tagIds: z.string().array().optional().describe(RAW_SECRETS.CREATE.tagIds) }) .array() .min(1) }), response: { - 200: z.object({ - secrets: secretRawSchema.array() - }) + 200: z.union([ + z.object({ + secrets: secretRawSchema.array() + }), + z.object({ approval: SecretApprovalRequestsSchema }).describe("When secret protection policy is enabled") + ]) } }, onRequest: verifyAuth([AuthMode.JWT, AuthMode.API_KEY, AuthMode.SERVICE_TOKEN, AuthMode.IDENTITY_ACCESS_TOKEN]), handler: async (req) => { const { environment, projectSlug, secretPath, secrets: inputSecrets } = req.body; - const secrets = await server.services.secret.createManySecretsRaw({ + const secretOperation = await server.services.secret.createManySecretsRaw({ actorId: req.permission.id, actor: req.permission.type, actorAuthMethod: req.permission.authMethod, @@ -1721,8 +1876,13 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => { secretPath, environment, projectSlug, + projectId: req.body.workspaceId, secrets: inputSecrets }); + if (secretOperation.type === SecretProtectionType.Approval) { + return { approval: secretOperation.approval }; + } + const { secrets } = secretOperation; await server.services.auditLog.createAuditLog({ projectId: secrets[0].workspace, @@ -1732,9 +1892,9 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => { metadata: { environment: req.body.environment, secretPath: req.body.secretPath, - secrets: secrets.map((secret, i) => ({ + secrets: secrets.map((secret) => ({ secretId: secret.id, - secretKey: inputSecrets[i].secretKey, + secretKey: secret.secretKey, secretVersion: secret.version })) } @@ -1771,7 +1931,8 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => { } ], body: z.object({ - projectSlug: z.string().trim().describe(RAW_SECRETS.UPDATE.projectSlug), + projectSlug: z.string().trim().optional().describe(RAW_SECRETS.DELETE.projectSlug), + workspaceId: z.string().trim().optional().describe(RAW_SECRETS.DELETE.workspaceId), environment: z.string().trim().describe(RAW_SECRETS.UPDATE.environment), secretPath: z .string() @@ -1787,21 +1948,32 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => { .transform((val) => (val.at(-1) === "\n" ? 
`${val.trim()}\n` : val.trim())) .describe(RAW_SECRETS.UPDATE.secretValue), secretComment: z.string().trim().optional().describe(RAW_SECRETS.UPDATE.secretComment), - skipMultilineEncoding: z.boolean().optional().describe(RAW_SECRETS.UPDATE.skipMultilineEncoding) + skipMultilineEncoding: z.boolean().optional().describe(RAW_SECRETS.UPDATE.skipMultilineEncoding), + newSecretName: z.string().min(1).optional().describe(RAW_SECRETS.UPDATE.newSecretName), + tagIds: z.string().array().optional().describe(RAW_SECRETS.UPDATE.tagIds), + secretReminderNote: z.string().optional().nullable().describe(RAW_SECRETS.UPDATE.secretReminderNote), + secretReminderRepeatDays: z + .number() + .optional() + .nullable() + .describe(RAW_SECRETS.UPDATE.secretReminderRepeatDays) }) .array() .min(1) }), response: { - 200: z.object({ - secrets: secretRawSchema.array() - }) + 200: z.union([ + z.object({ + secrets: secretRawSchema.array() + }), + z.object({ approval: SecretApprovalRequestsSchema }).describe("When secret protection policy is enabled") + ]) } }, onRequest: verifyAuth([AuthMode.JWT, AuthMode.API_KEY, AuthMode.SERVICE_TOKEN, AuthMode.IDENTITY_ACCESS_TOKEN]), handler: async (req) => { const { environment, projectSlug, secretPath, secrets: inputSecrets } = req.body; - const secrets = await server.services.secret.updateManySecretsRaw({ + const secretOperation = await server.services.secret.updateManySecretsRaw({ actorId: req.permission.id, actor: req.permission.type, actorAuthMethod: req.permission.authMethod, @@ -1809,8 +1981,13 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => { secretPath, environment, projectSlug, + projectId: req.body.workspaceId, secrets: inputSecrets }); + if (secretOperation.type === SecretProtectionType.Approval) { + return { approval: secretOperation.approval }; + } + const { secrets } = secretOperation; await server.services.auditLog.createAuditLog({ projectId: secrets[0].workspace, @@ -1820,9 +1997,9 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => { metadata: { environment: req.body.environment, secretPath: req.body.secretPath, - secrets: secrets.map((secret, i) => ({ + secrets: secrets.map((secret) => ({ secretId: secret.id, - secretKey: inputSecrets[i].secretKey, + secretKey: secret.secretKey, secretVersion: secret.version })) } @@ -1859,7 +2036,8 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => { } ], body: z.object({ - projectSlug: z.string().trim().describe(RAW_SECRETS.DELETE.projectSlug), + projectSlug: z.string().trim().optional().describe(RAW_SECRETS.DELETE.projectSlug), + workspaceId: z.string().trim().optional().describe(RAW_SECRETS.DELETE.workspaceId), environment: z.string().trim().describe(RAW_SECRETS.DELETE.environment), secretPath: z .string() @@ -1869,21 +2047,25 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => { .describe(RAW_SECRETS.DELETE.secretPath), secrets: z .object({ - secretKey: z.string().trim().describe(RAW_SECRETS.DELETE.secretName) + secretKey: z.string().trim().describe(RAW_SECRETS.DELETE.secretName), + type: z.nativeEnum(SecretType).default(SecretType.Shared) }) .array() .min(1) }), response: { - 200: z.object({ - secrets: secretRawSchema.array() - }) + 200: z.union([ + z.object({ + secrets: secretRawSchema.array() + }), + z.object({ approval: SecretApprovalRequestsSchema }).describe("When secret protection policy is enabled") + ]) } }, onRequest: verifyAuth([AuthMode.JWT, AuthMode.API_KEY, AuthMode.SERVICE_TOKEN, 
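Because the raw write endpoints now return either the affected secrets or a pending approval (when a secret protection policy applies), callers have to discriminate on the response shape. A hedged sketch of that branch, with both payload shapes abbreviated:

type RawSecretWriteResult =
  | { secrets: { id: string; version: number }[] } // abbreviated secret shape
  | { approval: { id: string; slug: string } }; // abbreviated approval request shape

export const handleRawWriteResponse = (body: RawSecretWriteResult) => {
  if ("approval" in body) {
    // the change is parked in a secret approval request until reviewers approve it
    return { pending: true, approvalId: body.approval.id };
  }
  return { pending: false, count: body.secrets.length };
};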
AuthMode.IDENTITY_ACCESS_TOKEN]), handler: async (req) => { const { environment, projectSlug, secretPath, secrets: inputSecrets } = req.body; - const secrets = await server.services.secret.deleteManySecretsRaw({ + const secretOperation = await server.services.secret.deleteManySecretsRaw({ actorId: req.permission.id, actor: req.permission.type, actorAuthMethod: req.permission.authMethod, @@ -1891,8 +2073,13 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => { environment, projectSlug, secretPath, + projectId: req.body.workspaceId, secrets: inputSecrets }); + if (secretOperation.type === SecretProtectionType.Approval) { + return { approval: secretOperation.approval }; + } + const { secrets } = secretOperation; await server.services.auditLog.createAuditLog({ projectId: secrets[0].workspace, @@ -1902,9 +2089,9 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => { metadata: { environment: req.body.environment, secretPath: req.body.secretPath, - secrets: secrets.map((secret, i) => ({ + secrets: secrets.map((secret) => ({ secretId: secret.id, - secretKey: inputSecrets[i].secretKey, + secretKey: secret.secretKey, secretVersion: secret.version })) } @@ -1927,6 +2114,58 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => { } }); + server.route({ + method: "GET", + url: "/raw/:secretName/secret-reference-tree", + config: { + rateLimit: secretsLimit + }, + schema: { + description: "Get secret reference tree", + security: [ + { + bearerAuth: [] + } + ], + params: z.object({ + secretName: z.string().trim().describe(RAW_SECRETS.GET_REFERENCE_TREE.secretName) + }), + querystring: z.object({ + workspaceId: z.string().trim().describe(RAW_SECRETS.GET_REFERENCE_TREE.workspaceId), + environment: z.string().trim().describe(RAW_SECRETS.GET_REFERENCE_TREE.environment), + secretPath: z + .string() + .trim() + .default("/") + .transform(removeTrailingSlash) + .describe(RAW_SECRETS.GET_REFERENCE_TREE.secretPath) + }), + response: { + 200: z.object({ + tree: SecretReferenceNodeTree, + value: z.string().optional() + }) + } + }, + onRequest: verifyAuth([AuthMode.JWT]), + handler: async (req) => { + const { secretName } = req.params; + const { secretPath, environment, workspaceId } = req.query; + const { tree, value } = await server.services.secret.getSecretReferenceTree({ + actorId: req.permission.id, + actor: req.permission.type, + actorAuthMethod: req.permission.authMethod, + actorOrgId: req.permission.orgId, + projectId: workspaceId, + secretName, + secretPath, + environment + }); + + return { tree, value }; + } + }); + server.route({ method: "POST", url: "/backfill-secret-references", diff --git a/backend/src/server/routes/v3/signup-router.ts b/backend/src/server/routes/v3/signup-router.ts index ac43df36de..d801e85ef0 100644 --- a/backend/src/server/routes/v3/signup-router.ts +++ b/backend/src/server/routes/v3/signup-router.ts @@ -2,7 +2,7 @@ import { z } from "zod"; import { UsersSchema } from "@app/db/schemas"; import { getConfig } from "@app/lib/config/env"; -import { BadRequestError } from "@app/lib/errors"; +import { ForbiddenRequestError } from "@app/lib/errors"; import { authRateLimit } from "@app/server/config/rateLimiter"; import { getServerCfg } from "@app/services/super-admin/super-admin-service"; import { PostHogEventTypes } from "@app/services/telemetry/telemetry-types"; @@ -29,8 +29,8 @@ export const registerSignupRouter = async (server: FastifyZodProvider) => { const serverCfg = await getServerCfg(); if 
(!serverCfg.allowSignUp) { - throw new BadRequestError({ - message: "Sign up is disabled" + throw new ForbiddenRequestError({ + message: "Signups are disabled" }); } @@ -38,7 +38,7 @@ export const registerSignupRouter = async (server: FastifyZodProvider) => { const domain = email.split("@")[1]; const allowedDomains = serverCfg.allowedSignUpDomain.split(",").map((e) => e.trim()); if (!allowedDomains.includes(domain)) { - throw new BadRequestError({ + throw new ForbiddenRequestError({ message: `Email with a domain (@${domain}) is not supported` }); } @@ -70,13 +70,13 @@ export const registerSignupRouter = async (server: FastifyZodProvider) => { handler: async (req) => { const serverCfg = await getServerCfg(); if (!serverCfg.allowSignUp) { - throw new BadRequestError({ - message: "Sign up is disabled" + throw new ForbiddenRequestError({ + message: "Signups are disabled" }); } const { token, user } = await server.services.signup.verifyEmailSignup(req.body.email, req.body.code); - return { message: "Successfuly verified email", token, user }; + return { message: "Successfully verified email", token, user }; } }); @@ -102,7 +102,8 @@ export const registerSignupRouter = async (server: FastifyZodProvider) => { verifier: z.string().trim(), organizationName: z.string().trim().min(1), providerAuthToken: z.string().trim().optional().nullish(), - attributionSource: z.string().trim().optional() + attributionSource: z.string().trim().optional(), + password: z.string() }), response: { 200: z.object({ @@ -120,8 +121,8 @@ export const registerSignupRouter = async (server: FastifyZodProvider) => { const serverCfg = await getServerCfg(); if (!serverCfg.allowSignUp) { - throw new BadRequestError({ - message: "Sign up is disabled" + throw new ForbiddenRequestError({ + message: "Signups are disabled" }); } @@ -167,6 +168,7 @@ export const registerSignupRouter = async (server: FastifyZodProvider) => { schema: { body: z.object({ email: z.string().email().trim(), + password: z.string(), firstName: z.string().trim(), lastName: z.string().trim().optional(), protectedKey: z.string().trim(), @@ -177,7 +179,8 @@ export const registerSignupRouter = async (server: FastifyZodProvider) => { encryptedPrivateKeyIV: z.string().trim(), encryptedPrivateKeyTag: z.string().trim(), salt: z.string().trim(), - verifier: z.string().trim() + verifier: z.string().trim(), + tokenMetadata: z.string().optional() }), response: { 200: z.object({ diff --git a/backend/src/services/access-token-queue/access-token-queue.ts b/backend/src/services/access-token-queue/access-token-queue.ts new file mode 100644 index 0000000000..bb1fefef60 --- /dev/null +++ b/backend/src/services/access-token-queue/access-token-queue.ts @@ -0,0 +1,125 @@ +import { z } from "zod"; + +import { KeyStorePrefixes, KeyStoreTtls, TKeyStoreFactory } from "@app/keystore/keystore"; +import { applyJitter, secondsToMillis } from "@app/lib/dates"; +import { logger } from "@app/lib/logger"; +import { QueueJobs, QueueName, TQueueServiceFactory } from "@app/queue"; + +import { TIdentityAccessTokenDALFactory } from "../identity-access-token/identity-access-token-dal"; +import { TServiceTokenDALFactory } from "../service-token/service-token-dal"; + +type TAccessTokenQueueServiceFactoryDep = { + queueService: TQueueServiceFactory; + keyStore: Pick<TKeyStoreFactory, "getItem" | "setItemWithExpiry">; + identityAccessTokenDAL: Pick<TIdentityAccessTokenDALFactory, "updateById">; + serviceTokenDAL: Pick<TServiceTokenDALFactory, "updateById">; +}; + +export type TAccessTokenQueueServiceFactory = ReturnType<typeof accessTokenQueueServiceFactory>; + +export const AccessTokenStatusSchema = z.object({ + lastUpdatedAt: z.string().datetime(), + numberOfUses: 
z.number() +}); + +export const accessTokenQueueServiceFactory = ({ + queueService, + keyStore, + identityAccessTokenDAL, + serviceTokenDAL +}: TAccessTokenQueueServiceFactoryDep) => { + const getIdentityTokenDetailsInCache = async (identityAccessTokenId: string) => { + const tokenDetailsInCache = await keyStore.getItem( + KeyStorePrefixes.IdentityAccessTokenStatusUpdate(identityAccessTokenId) + ); + if (tokenDetailsInCache) { + return AccessTokenStatusSchema.parseAsync(JSON.parse(tokenDetailsInCache)); + } + }; + + const updateServiceTokenStatus = async (serviceTokenId: string) => { + await keyStore.setItemWithExpiry( + KeyStorePrefixes.ServiceTokenStatusUpdate(serviceTokenId), + KeyStoreTtls.AccessTokenStatusUpdateInSeconds, + JSON.stringify({ lastUpdatedAt: new Date() }) + ); + await queueService.queue( + QueueName.AccessTokenStatusUpdate, + QueueJobs.ServiceTokenStatusUpdate, + { + serviceTokenId + }, + { + delay: applyJitter(secondsToMillis(KeyStoreTtls.AccessTokenStatusUpdateInSeconds / 2), secondsToMillis(10)), + // https://docs.bullmq.io/guide/jobs/job-ids + jobId: KeyStorePrefixes.ServiceTokenStatusUpdate(serviceTokenId).replaceAll(":", "_"), + removeOnFail: true, + removeOnComplete: true + } + ); + }; + + const updateIdentityAccessTokenStatus = async (identityAccessTokenId: string, numberOfUses: number) => { + await keyStore.setItemWithExpiry( + KeyStorePrefixes.IdentityAccessTokenStatusUpdate(identityAccessTokenId), + KeyStoreTtls.AccessTokenStatusUpdateInSeconds, + JSON.stringify({ lastUpdatedAt: new Date(), numberOfUses }) + ); + await queueService.queue( + QueueName.AccessTokenStatusUpdate, + QueueJobs.IdentityAccessTokenStatusUpdate, + { + identityAccessTokenId, + numberOfUses + }, + { + delay: applyJitter(secondsToMillis(KeyStoreTtls.AccessTokenStatusUpdateInSeconds / 2), secondsToMillis(10)), + jobId: KeyStorePrefixes.IdentityAccessTokenStatusUpdate(identityAccessTokenId).replaceAll(":", "_"), + removeOnFail: true, + removeOnComplete: true + } + ); + }; + + queueService.start(QueueName.AccessTokenStatusUpdate, async (job) => { + // for identity token update + if (job.name === QueueJobs.IdentityAccessTokenStatusUpdate && "identityAccessTokenId" in job.data) { + const { identityAccessTokenId } = job.data; + const tokenDetails = { lastUpdatedAt: new Date(job.timestamp), numberOfUses: job.data.numberOfUses }; + const tokenDetailsInCache = await getIdentityTokenDetailsInCache(identityAccessTokenId); + if (tokenDetailsInCache) { + tokenDetails.numberOfUses = tokenDetailsInCache.numberOfUses; + tokenDetails.lastUpdatedAt = new Date(tokenDetailsInCache.lastUpdatedAt); + } + + await identityAccessTokenDAL.updateById(identityAccessTokenId, { + accessTokenLastUsedAt: tokenDetails.lastUpdatedAt, + accessTokenNumUses: tokenDetails.numberOfUses + }); + return; + } + + // for service token + if (job.name === QueueJobs.ServiceTokenStatusUpdate && "serviceTokenId" in job.data) { + const { serviceTokenId } = job.data; + const tokenDetailsInCache = await keyStore.getItem(KeyStorePrefixes.ServiceTokenStatusUpdate(serviceTokenId)); + let lastUsed = new Date(job.timestamp); + if (tokenDetailsInCache) { + const tokenDetails = await AccessTokenStatusSchema.pick({ lastUpdatedAt: true }).parseAsync( + JSON.parse(tokenDetailsInCache) + ); + lastUsed = new Date(tokenDetails.lastUpdatedAt); + } + + await serviceTokenDAL.updateById(serviceTokenId, { + lastUsed + }); + } + }); + + queueService.listen(QueueName.AccessTokenStatusUpdate, "failed", (_, err) => { + logger.error(err, 
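The queue above spreads write-backs over time: each job is delayed by roughly half the cache TTL plus jitter, so multiple replicas do not all flush token-usage updates at once. applyJitter comes from @app/lib/dates and is not shown in this diff; the sketch below implements one plausible reading of its contract and is an assumption, not the library's code:

// assumed contract: shift a base delay by a random offset in [-maxJitterMs, +maxJitterMs]
const secondsToMillis = (seconds: number) => seconds * 1000;
const applyJitter = (baseMs: number, maxJitterMs: number) => {
  const offset = Math.round((Math.random() * 2 - 1) * maxJitterMs);
  return Math.max(0, baseMs + offset);
};

// e.g. with a 300s status-update TTL, jobs land ~150s out, +/- up to 10s
const delayMs = applyJitter(secondsToMillis(300 / 2), secondsToMillis(10));
console.log(`next status write-back in ${delayMs} ms`);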
`${QueueName.AccessTokenStatusUpdate}: Failed to update access token status`); + }); + + return { updateIdentityAccessTokenStatus, updateServiceTokenStatus, getIdentityTokenDetailsInCache }; +}; diff --git a/backend/src/services/api-key/api-key-service.ts b/backend/src/services/api-key/api-key-service.ts index 39ccbecce7..96fb90026e 100644 --- a/backend/src/services/api-key/api-key-service.ts +++ b/backend/src/services/api-key/api-key-service.ts @@ -4,7 +4,7 @@ import bcrypt from "bcrypt"; import { TApiKeys } from "@app/db/schemas/api-keys"; import { getConfig } from "@app/lib/config/env"; -import { BadRequestError, UnauthorizedError } from "@app/lib/errors"; +import { NotFoundError, UnauthorizedError } from "@app/lib/errors"; import { TUserDALFactory } from "../user/user-dal"; import { TApiKeyDALFactory } from "./api-key-dal"; @@ -45,7 +45,7 @@ export const apiKeyServiceFactory = ({ apiKeyDAL, userDAL }: TApiKeyServiceFacto const deleteApiKey = async (userId: string, apiKeyId: string) => { const [apiKeyData] = await apiKeyDAL.delete({ id: apiKeyId, userId }); - if (!apiKeyData) throw new BadRequestError({ message: "Failed to find api key", name: "delete api key" }); + if (!apiKeyData) throw new NotFoundError({ message: `API key with ID '${apiKeyId}' not found` }); return formatApiKey(apiKeyData); }; diff --git a/backend/src/services/auth-token/auth-token-dal.ts b/backend/src/services/auth-token/auth-token-dal.ts index 075ae73846..c058c13e8e 100644 --- a/backend/src/services/auth-token/auth-token-dal.ts +++ b/backend/src/services/auth-token/auth-token-dal.ts @@ -14,7 +14,7 @@ export const tokenDALFactory = (db: TDbClient) => { const findOneTokenSession = async (filter: Partial<TAuthTokenSessions>): Promise<TAuthTokenSessions | undefined> => { try { - const doc = await db(TableName.AuthTokenSession).where(filter).first(); + const doc = await db.replicaNode()(TableName.AuthTokenSession).where(filter).first(); return doc; } catch (error) { throw new DatabaseError({ error, name: "FindOneTokenSession" }); @@ -44,7 +44,7 @@ export const tokenDALFactory = (db: TDbClient) => { const findTokenSessions = async (filter: Partial<TAuthTokenSessions>, tx?: Knex) => { try { - const sessions = await (tx || db)(TableName.AuthTokenSession).where(filter); + const sessions = await (tx || db.replicaNode())(TableName.AuthTokenSession).where(filter); return sessions; } catch (error) { throw new DatabaseError({ name: "Find all token session", error }); diff --git a/backend/src/services/auth-token/auth-token-service.ts b/backend/src/services/auth-token/auth-token-service.ts index b1f8aa2f65..321abb5b3c 100644 --- a/backend/src/services/auth-token/auth-token-service.ts +++ b/backend/src/services/auth-token/auth-token-service.ts @@ -4,7 +4,8 @@ import bcrypt from "bcrypt"; import { TAuthTokens, TAuthTokenSessions } from "@app/db/schemas"; import { getConfig } from "@app/lib/config/env"; -import { UnauthorizedError } from "@app/lib/errors"; +import { ForbiddenRequestError, NotFoundError, UnauthorizedError } from "@app/lib/errors"; +import { TOrgMembershipDALFactory } from "@app/services/org-membership/org-membership-dal"; import { AuthModeJwtTokenPayload } from "../auth/auth-type"; import { TUserDALFactory } from "../user/user-dal"; @@ -14,6 +15,7 @@ import { TCreateTokenForUserDTO, TIssueAuthTokenDTO, TokenType, TValidateTokenFo type TAuthTokenServiceFactoryDep = { tokenDAL: TTokenDALFactory; userDAL: Pick<TUserDALFactory, "findById">; + orgMembershipDAL: Pick<TOrgMembershipDALFactory, "findOne">; }; export type TAuthTokenServiceFactory = ReturnType<typeof tokenServiceFactory>; @@ -67,7 +69,7 @@ export const getTokenConfig = (tokenType: TokenType) => { } }; -export const 
tokenServiceFactory = ({ tokenDAL, userDAL }: TAuthTokenServiceFactoryDep) => { +export const tokenServiceFactory = ({ tokenDAL, userDAL, orgMembershipDAL }: TAuthTokenServiceFactoryDep) => { const createTokenForUser = async ({ type, userId, orgId }: TCreateTokenForUserDTO) => { const { token, ...tkCfg } = getTokenConfig(type); const appCfg = getConfig(); @@ -148,11 +150,27 @@ export const tokenServiceFactory = ({ tokenDAL, userDAL }: TAuthTokenServiceFact id: token.tokenVersionId, userId: token.userId }); - if (!session) throw new UnauthorizedError({ name: "Session not found" }); - if (token.accessVersion !== session.accessVersion) throw new UnauthorizedError({ name: "Stale session" }); + if (!session) throw new NotFoundError({ name: "Session not found" }); + if (token.accessVersion !== session.accessVersion) { + throw new UnauthorizedError({ name: "StaleSession", message: "User session is stale, please re-authenticate" }); + } const user = await userDAL.findById(session.userId); - if (!user || !user.isAccepted) throw new UnauthorizedError({ name: "Token user not found" }); + if (!user || !user.isAccepted) throw new NotFoundError({ message: `User with ID '${session.userId}' not found` }); + + if (token.organizationId) { + const orgMembership = await orgMembershipDAL.findOne({ + userId: user.id, + orgId: token.organizationId + }); + + if (!orgMembership) { + throw new ForbiddenRequestError({ message: "User is not a member of the organization" }); + } + if (!orgMembership.isActive) { + throw new ForbiddenRequestError({ message: "User organization membership is inactive" }); + } + } return { user, tokenVersionId: token.tokenVersionId, orgId: token.organizationId }; }; diff --git a/backend/src/services/auth-token/auth-token-types.ts b/backend/src/services/auth-token/auth-token-types.ts index 8917bd6721..65d16850a9 100644 --- a/backend/src/services/auth-token/auth-token-types.ts +++ b/backend/src/services/auth-token/auth-token-types.ts @@ -1,3 +1,5 @@ +import { ProjectMembershipRole } from "@app/db/schemas"; + export enum TokenType { TOKEN_EMAIL_CONFIRMATION = "emailConfirmation", TOKEN_EMAIL_VERIFICATION = "emailVerification", // unverified -> verified @@ -49,3 +51,19 @@ export type TIssueAuthTokenDTO = { ip: string; userAgent: string; }; + +export enum TokenMetadataType { + InviteToProjects = "projects-invite" +} + +export type TTokenInviteToProjectsMetadataPayload = { + projectIds: string[]; + projectRoleSlug: ProjectMembershipRole; + userId: string; + orgId: string; +}; + +export type TTokenMetadata = { + type: TokenMetadataType.InviteToProjects; + payload: TTokenInviteToProjectsMetadataPayload; +}; diff --git a/backend/src/services/auth/auth-fns.ts b/backend/src/services/auth/auth-fns.ts index ecbf73a485..5f7aca8128 100644 --- a/backend/src/services/auth/auth-fns.ts +++ b/backend/src/services/auth/auth-fns.ts @@ -1,7 +1,7 @@ import jwt from "jsonwebtoken"; import { getConfig } from "@app/lib/config/env"; -import { BadRequestError, UnauthorizedError } from "@app/lib/errors"; +import { ForbiddenRequestError, UnauthorizedError } from "@app/lib/errors"; import { AuthModeProviderJwtTokenPayload, AuthModeProviderSignUpTokenPayload, AuthTokenType } from "./auth-type"; @@ -15,25 +15,25 @@ export const validateProviderAuthToken = (providerToken: string, username?: stri if (decodedToken.username !== username) throw new Error("Invalid auth credentials"); if (decodedToken.organizationId) { - return { orgId: 
decodedToken.organizationId, authMethod: decodedToken.authMethod }; + return { orgId: decodedToken.organizationId, authMethod: decodedToken.authMethod, userName: decodedToken.username }; } - return { authMethod: decodedToken.authMethod, orgId: null }; + return { authMethod: decodedToken.authMethod, orgId: null, userName: decodedToken.username }; }; export const validateSignUpAuthorization = (token: string, userId: string, validate = true) => { const appCfg = getConfig(); const [AUTH_TOKEN_TYPE, AUTH_TOKEN_VALUE] = <[string, string]>token?.split(" ", 2) ?? [null, null]; if (AUTH_TOKEN_TYPE === null) { - throw new BadRequestError({ message: "Missing Authorization Header in the request header." }); + throw new UnauthorizedError({ message: "Missing Authorization header in the request." }); } if (AUTH_TOKEN_TYPE.toLowerCase() !== "bearer") { - throw new BadRequestError({ + throw new UnauthorizedError({ message: `The provided authentication type '${AUTH_TOKEN_TYPE}' is not supported.` }); } if (AUTH_TOKEN_VALUE === null) { - throw new BadRequestError({ + throw new UnauthorizedError({ message: "Missing Authorization token in the request header" }); } @@ -47,8 +47,8 @@ export const validateSignUpAuthorization = (token: string, userId: string, valid export const enforceUserLockStatus = (isLocked: boolean, temporaryLockDateEnd?: Date | null) => { if (isLocked) { - throw new UnauthorizedError({ - name: "User Locked", + throw new ForbiddenRequestError({ + name: "UserLocked", message: "User is locked due to multiple failed login attempts. An email has been sent to you in order to unlock your account. You can also reset your password to unlock your account." }); @@ -61,8 +61,8 @@ export const enforceUserLockStatus = (isLocked: boolean, temporaryLockDateEnd?: const timeDisplay = secondsDiff > 60 ? `${Math.ceil(secondsDiff / 60)} minutes` : `${Math.ceil(secondsDiff)} seconds`; - throw new UnauthorizedError({ - name: "User Locked", + throw new ForbiddenRequestError({ + name: "UserLocked", message: `User is temporary locked due to multiple failed login attempts. Try again after ${timeDisplay}. 
You can also reset your password now to proceed.` }); } diff --git a/backend/src/services/auth/auth-login-service.ts b/backend/src/services/auth/auth-login-service.ts index a136508e7f..dea41e60b1 100644 --- a/backend/src/services/auth/auth-login-service.ts +++ b/backend/src/services/auth/auth-login-service.ts @@ -1,3 +1,4 @@ +import bcrypt from "bcrypt"; import jwt from "jsonwebtoken"; import { TUsers, UserDeviceSchema } from "@app/db/schemas"; @@ -5,30 +6,35 @@ import { isAuthMethodSaml } from "@app/ee/services/permission/permission-fns"; import { getConfig } from "@app/lib/config/env"; import { request } from "@app/lib/config/request"; import { generateSrpServerKey, srpCheckClientProof } from "@app/lib/crypto"; -import { BadRequestError, DatabaseError, UnauthorizedError } from "@app/lib/errors"; +import { infisicalSymmetricEncypt } from "@app/lib/crypto/encryption"; +import { getUserPrivateKey } from "@app/lib/crypto/srp"; +import { BadRequestError, DatabaseError, ForbiddenRequestError, UnauthorizedError } from "@app/lib/errors"; +import { logger } from "@app/lib/logger"; import { getServerCfg } from "@app/services/super-admin/super-admin-service"; -import { TTokenDALFactory } from "../auth-token/auth-token-dal"; import { TAuthTokenServiceFactory } from "../auth-token/auth-token-service"; import { TokenType } from "../auth-token/auth-token-types"; import { TOrgDALFactory } from "../org/org-dal"; import { SmtpTemplates, TSmtpService } from "../smtp/smtp-service"; +import { LoginMethod } from "../super-admin/super-admin-types"; +import { TTotpServiceFactory } from "../totp/totp-service"; import { TUserDALFactory } from "../user/user-dal"; import { enforceUserLockStatus, validateProviderAuthToken } from "./auth-fns"; import { TLoginClientProofDTO, TLoginGenServerPublicKeyDTO, TOauthLoginDTO, + TOauthTokenExchangeDTO, TVerifyMfaTokenDTO } from "./auth-login-type"; -import { AuthMethod, AuthModeJwtTokenPayload, AuthModeMfaJwtTokenPayload, AuthTokenType } from "./auth-type"; +import { AuthMethod, AuthModeJwtTokenPayload, AuthModeMfaJwtTokenPayload, AuthTokenType, MfaMethod } from "./auth-type"; type TAuthLoginServiceFactoryDep = { userDAL: TUserDALFactory; orgDAL: TOrgDALFactory; tokenService: TAuthTokenServiceFactory; smtpService: TSmtpService; - tokenDAL: TTokenDALFactory; + totpService: Pick<TTotpServiceFactory, "verifyUserTotp" | "verifyWithUserRecoveryCode">; }; export type TAuthLoginFactory = ReturnType<typeof authLoginServiceFactory>; @@ -37,7 +43,7 @@ export const authLoginServiceFactory = ({ tokenService, smtpService, orgDAL, - tokenDAL + totpService }: TAuthLoginServiceFactoryDep) => { /* * Private @@ -96,13 +102,17 @@ export const authLoginServiceFactory = ({ ip, userAgent, organizationId, - authMethod + authMethod, + isMfaVerified, + mfaMethod }: { user: TUsers; ip: string; userAgent: string; - organizationId: string | undefined; + organizationId?: string; authMethod: AuthMethod; + isMfaVerified?: boolean; + mfaMethod?: MfaMethod; }) => { const cfg = getConfig(); await updateUserDeviceSession(user, ip, userAgent); @@ -120,7 +130,9 @@ export const authLoginServiceFactory = ({ userId: user.id, tokenVersionId: tokenSession.id, accessVersion: tokenSession.accessVersion, - organizationId + organizationId, + isMfaVerified, + mfaMethod }, cfg.AUTH_SECRET, { expiresIn: cfg.JWT_AUTH_LIFETIME } @@ -133,7 +145,9 @@ export const authLoginServiceFactory = ({ userId: user.id, tokenVersionId: tokenSession.id, refreshVersion: tokenSession.refreshVersion, - organizationId + organizationId, + isMfaVerified, + mfaMethod }, cfg.AUTH_SECRET, { expiresIn: cfg.JWT_REFRESH_LIFETIME } @@ -153,9 +167,22 @@ 
export const authLoginServiceFactory = ({ const userEnc = await userDAL.findUserEncKeyByUsername({ username: email }); + const serverCfg = await getServerCfg(); + + if ( + serverCfg.enabledLoginMethods && + !serverCfg.enabledLoginMethods.includes(LoginMethod.EMAIL) && + !providerAuthToken + ) { + throw new BadRequestError({ + message: "Login with email is disabled by the administrator." + }); + } + if (!userEnc || (userEnc && !userEnc.isAccepted)) { throw new Error("Failed to find user"); } + if (!userEnc.authMethods?.includes(AuthMethod.EMAIL)) { validateProviderAuthToken(providerAuthToken as string, email); } @@ -178,7 +205,8 @@ export const authLoginServiceFactory = ({ ip, userAgent, providerAuthToken, - captchaToken + captchaToken, + password }: TLoginClientProofDTO) => { const appCfg = getConfig(); @@ -196,7 +224,10 @@ export const authLoginServiceFactory = ({ const decodedProviderToken = validateProviderAuthToken(providerAuthToken, email); authMethod = decodedProviderToken.authMethod; - if ((isAuthMethodSaml(authMethod) || authMethod === AuthMethod.LDAP) && decodedProviderToken.orgId) { + if ( + (isAuthMethodSaml(authMethod) || [AuthMethod.LDAP, AuthMethod.OIDC].includes(authMethod)) && + decodedProviderToken.orgId + ) { organizationId = decodedProviderToken.orgId; } } @@ -248,37 +279,34 @@ export const authLoginServiceFactory = ({ throw new Error("Failed to authenticate. Try again?"); } - await userDAL.updateUserEncryptionByUserId(userEnc.userId, { - serverPrivateKey: null, - clientPublicKey: null - }); - await userDAL.updateById(userEnc.userId, { consecutiveFailedPasswordAttempts: 0 }); - - // send multi factor auth token if they it enabled - if (userEnc.isMfaEnabled && userEnc.email) { - enforceUserLockStatus(Boolean(user.isLocked), user.temporaryLockDateEnd); - - const mfaToken = jwt.sign( - { - authMethod, - authTokenType: AuthTokenType.MFA_TOKEN, - userId: userEnc.userId - }, - cfg.AUTH_SECRET, - { - expiresIn: cfg.JWT_MFA_LIFETIME - } - ); - - await sendUserMfaCode({ - userId: userEnc.userId, - email: userEnc.email + // if a password was provided, decrypt the private key and store a server-encrypted copy + if (password) { + const privateKey = await getUserPrivateKey(password, userEnc).catch((err) => { + logger.error( + err, + `loginExchangeClientProof: private key generation failed for [userId=${user.id}] and [email=${user.email}]` + ); + return ""; + }); + const hashedPassword = await bcrypt.hash(password, cfg.BCRYPT_SALT_ROUND); + const { iv, tag, ciphertext, encoding } = infisicalSymmetricEncypt(privateKey); + await userDAL.updateUserEncryptionByUserId(userEnc.userId, { + serverPrivateKey: null, + clientPublicKey: null, + hashedPassword, + serverEncryptedPrivateKey: ciphertext, + serverEncryptedPrivateKeyIV: iv, + serverEncryptedPrivateKeyTag: tag, + serverEncryptedPrivateKeyEncoding: encoding + }); + } else { + await userDAL.updateUserEncryptionByUserId(userEnc.userId, { + serverPrivateKey: null, + clientPublicKey: null }); - - return { isMfaEnabled: true, token: mfaToken } as const; } const token = await generateUserTokens({ @@ -292,7 +320,7 @@ export const authLoginServiceFactory = ({ organizationId }); - return { token, isMfaEnabled: false, user: userEnc } as const; + return { token, user: userEnc } as const; }; const selectOrganization = async ({ @@ -309,7 +337,7 @@ export const authLoginServiceFactory = ({ const cfg = getConfig(); if (!authJwtToken) throw new UnauthorizedError({ name: "Authorization header is required" }); - if (!userAgent) throw new UnauthorizedError({ name: "user agent header is required" }); + if 
(!userAgent) throw new UnauthorizedError({ name: "User-Agent header is required" }); // eslint-disable-next-line no-param-reassign authJwtToken = authJwtToken.replace("Bearer ", ""); // remove bearer from token @@ -324,22 +352,58 @@ export const authLoginServiceFactory = ({ // Check if the user actually has access to the specified organization. const userOrgs = await orgDAL.findAllOrgsByUserId(user.id); const hasOrganizationMembership = userOrgs.some((org) => org.id === organizationId); + const selectedOrg = await orgDAL.findById(organizationId); if (!hasOrganizationMembership) { - throw new UnauthorizedError({ message: "User does not have access to the organization" }); + throw new ForbiddenRequestError({ + message: `User does not have access to the organization named ${selectedOrg?.name}` + }); } - await tokenDAL.incrementTokenSessionVersion(user.id, decodedToken.tokenVersionId); + const shouldCheckMfa = selectedOrg.enforceMfa || user.isMfaEnabled; + const orgMfaMethod = selectedOrg.enforceMfa ? selectedOrg.selectedMfaMethod ?? MfaMethod.EMAIL : undefined; + const userMfaMethod = user.isMfaEnabled ? user.selectedMfaMethod ?? MfaMethod.EMAIL : undefined; + const mfaMethod = orgMfaMethod ?? userMfaMethod; + + if (shouldCheckMfa && (!decodedToken.isMfaVerified || decodedToken.mfaMethod !== mfaMethod)) { + enforceUserLockStatus(Boolean(user.isLocked), user.temporaryLockDateEnd); + + const mfaToken = jwt.sign( + { + authMethod: decodedToken.authMethod, + authTokenType: AuthTokenType.MFA_TOKEN, + userId: user.id + }, + cfg.AUTH_SECRET, + { + expiresIn: cfg.JWT_MFA_LIFETIME + } + ); + + if (mfaMethod === MfaMethod.EMAIL && user.email) { + await sendUserMfaCode({ + userId: user.id, + email: user.email + }); + } + + return { isMfaEnabled: true, mfa: mfaToken, mfaMethod } as const; + } const tokens = await generateUserTokens({ authMethod: decodedToken.authMethod, user, userAgent, ip: ipAddress, - organizationId + organizationId, + isMfaVerified: decodedToken.isMfaVerified, + mfaMethod: decodedToken.mfaMethod }); - return tokens; + return { + ...tokens, + isMfaEnabled: false + }; }; /* @@ -408,17 +472,39 @@ export const authLoginServiceFactory = ({ * Multi factor authentication verification of code * Third step of login in which user completes with mfa * */ - const verifyMfaToken = async ({ userId, mfaToken, mfaJwtToken, ip, userAgent, orgId }: TVerifyMfaTokenDTO) => { + const verifyMfaToken = async ({ + userId, + mfaToken, + mfaMethod, + mfaJwtToken, + ip, + userAgent, + orgId + }: TVerifyMfaTokenDTO) => { const appCfg = getConfig(); const user = await userDAL.findById(userId); enforceUserLockStatus(Boolean(user.isLocked), user.temporaryLockDateEnd); try { - await tokenService.validateTokenForUser({ - type: TokenType.TOKEN_EMAIL_MFA, - userId, - code: mfaToken - }); + if (mfaMethod === MfaMethod.EMAIL) { + await tokenService.validateTokenForUser({ + type: TokenType.TOKEN_EMAIL_MFA, + userId, + code: mfaToken + }); + } else if (mfaMethod === MfaMethod.TOTP) { + if (mfaToken.length === 6) { + await totpService.verifyUserTotp({ + userId, + totp: mfaToken + }); + } else { + await totpService.verifyWithUserRecoveryCode({ + userId, + recoveryCode: mfaToken + }); + } + } } catch (err) { const updatedUser = await processFailedMfaAttempt(userId); if (updatedUser.isLocked) { @@ -462,7 +548,9 @@ export const authLoginServiceFactory = ({ ip, userAgent, organizationId: orgId, - authMethod: decodedToken.authMethod + authMethod: decodedToken.authMethod, + isMfaVerified: true, + mfaMethod }); return { token, 
user: userEnc }; @@ -474,6 +562,40 @@ export const authLoginServiceFactory = ({ let user = await userDAL.findUserByUsername(email); const serverCfg = await getServerCfg(); + if (serverCfg.enabledLoginMethods) { + switch (authMethod) { + case AuthMethod.GITHUB: { + if (!serverCfg.enabledLoginMethods.includes(LoginMethod.GITHUB)) { + throw new BadRequestError({ + message: "Login with Github is disabled by administrator.", + name: "Oauth 2 login" + }); + } + break; + } + case AuthMethod.GOOGLE: { + if (!serverCfg.enabledLoginMethods.includes(LoginMethod.GOOGLE)) { + throw new BadRequestError({ + message: "Login with Google is disabled by administrator.", + name: "Oauth 2 login" + }); + } + break; + } + case AuthMethod.GITLAB: { + if (!serverCfg.enabledLoginMethods.includes(LoginMethod.GITLAB)) { + throw new BadRequestError({ + message: "Login with Gitlab is disabled by administrator.", + name: "Oauth 2 login" + }); + } + break; + } + default: + break; + } + } + const appCfg = getConfig(); if (!user) { @@ -499,8 +621,20 @@ export const authLoginServiceFactory = ({ authMethods: [authMethod], isGhost: false }); + } else { + const isLinkingRequired = !user?.authMethods?.includes(authMethod); + if (isLinkingRequired) { + // we update the names here because upon org invitation, the names are set to be NULL + // if user is signing up with SSO after invitation, their names should be set based on their SSO profile + user = await userDAL.updateById(user.id, { + authMethods: [...(user.authMethods || []), authMethod], + firstName: !user.isAccepted ? firstName : undefined, + lastName: !user.isAccepted ? lastName : undefined + }); + } } - const isLinkingRequired = !user?.authMethods?.includes(authMethod); + + const userEnc = await userDAL.findUserEncKeyByUserId(user.id); const isUserCompleted = user.isAccepted; const providerAuthToken = jwt.sign( { @@ -511,9 +645,9 @@ export const authLoginServiceFactory = ({ isEmailVerified: user.isEmailVerified, firstName: user.firstName, lastName: user.lastName, + hasExchangedPrivateKey: Boolean(userEnc?.serverEncryptedPrivateKey), authMethod, isUserCompleted, - isLinkingRequired, ...(callbackPort ? { callbackPort @@ -525,10 +659,48 @@ export const authLoginServiceFactory = ({ expiresIn: appCfg.JWT_PROVIDER_AUTH_LIFETIME } ); - return { isUserCompleted, providerAuthToken }; }; + /** + * Handles OAuth2 token exchange for user login with private key handoff. + * + * The process involves exchanging a provider's authorization token for an Infisical access token. + * The provider token is returned to the client, who then sends it back to obtain the Infisical access token. + * + * This approach is used instead of directly sending the access token for the following reasons: + * 1. To facilitate easier logic changes from SRP OAuth to simple OAuth. + * 2. To avoid attaching the access token to the URL, which could be logged. The provider token has a very short lifespan, reducing security risks. + */ + const oauth2TokenExchange = async ({ userAgent, ip, providerAuthToken, email }: TOauthTokenExchangeDTO) => { + const decodedProviderToken = validateProviderAuthToken(providerAuthToken, email); + + const { authMethod, userName } = decodedProviderToken; + if (!userName) throw new BadRequestError({ message: "Missing user name" }); + const organizationId = + (isAuthMethodSaml(authMethod) || [AuthMethod.LDAP, AuthMethod.OIDC].includes(authMethod)) && + decodedProviderToken.orgId + ? 
decodedProviderToken.orgId + : undefined; + + const userEnc = await userDAL.findUserEncKeyByUsername({ + username: email + }); + if (!userEnc) throw new BadRequestError({ message: "Invalid token" }); + if (!userEnc.serverEncryptedPrivateKey) + throw new BadRequestError({ message: "Key handoff incomplete. Please try logging in again." }); + + const token = await generateUserTokens({ + user: { ...userEnc, id: userEnc.userId }, + ip, + userAgent, + authMethod, + organizationId + }); + + return { token, isMfaEnabled: false, user: userEnc } as const; + }; + /* * logout user by incrementing the token version by 1, meaning any old session will become invalid * as its version number is behind */ @@ -542,6 +714,7 @@ export const authLoginServiceFactory = ({ loginExchangeClientProof, logout, oauth2Login, + oauth2TokenExchange, resendMfaToken, verifyMfaToken, selectOrganization, diff --git a/backend/src/services/auth/auth-login-type.ts b/backend/src/services/auth/auth-login-type.ts index 4f73ec9961..d9d9520a83 100644 --- a/backend/src/services/auth/auth-login-type.ts +++ b/backend/src/services/auth/auth-login-type.ts @@ -1,4 +1,4 @@ -import { AuthMethod } from "./auth-type"; +import { AuthMethod, MfaMethod } from "./auth-type"; export type TLoginGenServerPublicKeyDTO = { email: string; @@ -13,11 +13,13 @@ export type TLoginClientProofDTO = { ip: string; userAgent: string; captchaToken?: string; + password?: string; }; export type TVerifyMfaTokenDTO = { userId: string; mfaToken: string; + mfaMethod: MfaMethod; mfaJwtToken: string; ip: string; userAgent: string; @@ -31,3 +33,10 @@ export type TOauthLoginDTO = { authMethod: AuthMethod; callbackPort?: string; }; + +export type TOauthTokenExchangeDTO = { + providerAuthToken: string; + ip: string; + userAgent: string; + email: string; +}; diff --git a/backend/src/services/auth/auth-password-service.ts b/backend/src/services/auth/auth-password-service.ts index a400c297b0..9ed9951fe1 100644 --- a/backend/src/services/auth/auth-password-service.ts +++ b/backend/src/services/auth/auth-password-service.ts @@ -1,3 +1,4 @@ +import bcrypt from "bcrypt"; import jwt from "jsonwebtoken"; import { SecretEncryptionAlgo, SecretKeyEncoding } from "@app/db/schemas"; @@ -7,6 +8,7 @@ import { generateSrpServerKey, srpCheckClientProof } from "@app/lib/crypto"; import { TAuthTokenServiceFactory } from "../auth-token/auth-token-service"; import { TokenType } from "../auth-token/auth-token-types"; import { SmtpTemplates, TSmtpService } from "../smtp/smtp-service"; +import { TTotpConfigDALFactory } from "../totp/totp-config-dal"; import { TUserDALFactory } from "../user/user-dal"; import { TAuthDALFactory } from "./auth-dal"; import { TChangePasswordDTO, TCreateBackupPrivateKeyDTO, TResetPasswordViaBackupKeyDTO } from "./auth-password-type"; @@ -17,6 +19,7 @@ type TAuthPasswordServiceFactoryDep = { userDAL: TUserDALFactory; tokenService: TAuthTokenServiceFactory; smtpService: TSmtpService; + totpConfigDAL: Pick<TTotpConfigDALFactory, "delete">; }; export type TAuthPasswordFactory = ReturnType<typeof authPaswordServiceFactory>; @@ -24,7 +27,8 @@ export const authPaswordServiceFactory = ({ authDAL, userDAL, tokenService, - smtpService + smtpService, + totpConfigDAL }: TAuthPasswordServiceFactoryDep) => { /* * Pre-setup for password change with the SRP protocol */ @@ -57,7 +61,8 @@ export const authPaswordServiceFactory = ({ encryptedPrivateKeyTag, salt, verifier, - tokenVersionId + tokenVersionId, + password }: TChangePasswordDTO) => { const userEnc = await userDAL.findUserEncKeyByUserId(userId); if (!userEnc) throw new Error("Failed to find user"); @@ -76,6 +81,8 @@
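A minimal client-side sketch of the two-step exchange the oauth2TokenExchange doc comment above describes: the browser receives the short-lived provider token from the OAuth callback and trades it for the Infisical access token. The endpoint path and response typing below are assumptions for illustration, not the actual route definition.

const exchangeProviderToken = async (providerAuthToken: string, email: string) => {
  const res = await fetch("/api/v1/auth/oauth2/token-exchange", {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({ providerAuthToken, email })
  });
  if (!res.ok) throw new Error("Token exchange failed");
  // mirrors the service's return value: { token, isMfaEnabled, user }
  return res.json() as Promise<{ token: unknown; isMfaEnabled: boolean }>;
};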
export const authPaswordServiceFactory = ({ ); if (!isValidClientProof) throw new Error("Failed to authenticate. Try again?"); + const appCfg = getConfig(); + const hashedPassword = await bcrypt.hash(password, appCfg.BCRYPT_SALT_ROUND); await userDAL.updateUserEncryptionByUserId(userId, { encryptionVersion: 2, protectedKey, @@ -87,7 +94,8 @@ export const authPaswordServiceFactory = ({ salt, verifier, serverPrivateKey: null, - clientPublicKey: null + clientPublicKey: null, + hashedPassword }); if (tokenVersionId) { @@ -180,6 +188,12 @@ export const authPaswordServiceFactory = ({ temporaryLockDateEnd: null, consecutiveFailedMfaAttempts: 0 }); + + /* we reset the mobile authenticator configs of the user + because we want this to be one of the recovery modes from account lockout */ + await totpConfigDAL.delete({ + userId + }); }; /* diff --git a/backend/src/services/auth/auth-password-type.ts b/backend/src/services/auth/auth-password-type.ts index cf2aac08d6..a52374506a 100644 --- a/backend/src/services/auth/auth-password-type.ts +++ b/backend/src/services/auth/auth-password-type.ts @@ -10,6 +10,7 @@ export type TChangePasswordDTO = { salt: string; verifier: string; tokenVersionId?: string; + password: string; }; export type TResetPasswordViaBackupKeyDTO = { diff --git a/backend/src/services/auth/auth-signup-service.ts b/backend/src/services/auth/auth-signup-service.ts index be7f5777db..b55d013081 100644 --- a/backend/src/services/auth/auth-signup-service.ts +++ b/backend/src/services/auth/auth-signup-service.ts @@ -1,12 +1,15 @@ +import bcrypt from "bcrypt"; import jwt from "jsonwebtoken"; -import { OrgMembershipStatus, TableName } from "@app/db/schemas"; +import { OrgMembershipStatus, SecretKeyEncoding, TableName } from "@app/db/schemas"; import { convertPendingGroupAdditionsToGroupMemberships } from "@app/ee/services/group/group-fns"; import { TUserGroupMembershipDALFactory } from "@app/ee/services/group/user-group-membership-dal"; import { TLicenseServiceFactory } from "@app/ee/services/license/license-service"; import { isAuthMethodSaml } from "@app/ee/services/permission/permission-fns"; import { getConfig } from "@app/lib/config/env"; -import { BadRequestError } from "@app/lib/errors"; +import { infisicalSymmetricDecrypt, infisicalSymmetricEncypt } from "@app/lib/crypto/encryption"; +import { generateUserSrpKeys, getUserPrivateKey } from "@app/lib/crypto/srp"; +import { NotFoundError } from "@app/lib/errors"; import { isDisposableEmail } from "@app/lib/validator"; import { TGroupProjectDALFactory } from "@app/services/group-project/group-project-dal"; import { TProjectDALFactory } from "@app/services/project/project-dal"; @@ -17,8 +20,11 @@ import { TAuthTokenServiceFactory } from "../auth-token/auth-token-service"; import { TokenType } from "../auth-token/auth-token-types"; import { TOrgDALFactory } from "../org/org-dal"; import { TOrgServiceFactory } from "../org/org-service"; +import { TProjectMembershipDALFactory } from "../project-membership/project-membership-dal"; +import { TProjectUserMembershipRoleDALFactory } from "../project-membership/project-user-membership-role-dal"; import { SmtpTemplates, TSmtpService } from "../smtp/smtp-service"; import { TUserDALFactory } from "../user/user-dal"; +import { UserEncryption } from "../user/user-types"; import { TAuthDALFactory } from "./auth-dal"; import { validateProviderAuthToken, validateSignUpAuthorization } from "./auth-fns"; import { TCompleteAccountInviteDTO, TCompleteAccountSignupDTO } from "./auth-signup-type"; @@ -29,10 +35,14 
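A minimal sketch, assuming only the bcrypt API, of the hashing pattern the password service now uses: hash once with the configured cost factor on change or signup, then compare on later checks. SALT_ROUNDS and the helper names are illustrative; the service reads appCfg.BCRYPT_SALT_ROUND instead.

import bcrypt from "bcrypt";

const SALT_ROUNDS = 12; // stands in for appCfg.BCRYPT_SALT_ROUND

// hash the plaintext once; the salt is generated and embedded in the result
const hashPassword = (password: string) => bcrypt.hash(password, SALT_ROUNDS);

// bcrypt.compare re-derives the hash from the salt embedded in storedHash
const verifyPassword = (password: string, storedHash: string) => bcrypt.compare(password, storedHash);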
@@ type TAuthSignupDep = { userDAL: TUserDALFactory; userGroupMembershipDAL: Pick< TUserGroupMembershipDALFactory, - "find" | "transaction" | "insertMany" | "deletePendingUserGroupMembershipsByUserIds" + | "find" + | "transaction" + | "insertMany" + | "deletePendingUserGroupMembershipsByUserIds" + | "findUserGroupMembershipsInProject" >; projectKeyDAL: Pick; - projectDAL: Pick; + projectDAL: Pick; projectBotDAL: Pick; groupProjectDAL: Pick; orgService: Pick; @@ -40,6 +50,8 @@ type TAuthSignupDep = { tokenService: TAuthTokenServiceFactory; smtpService: TSmtpService; licenseService: Pick; + projectMembershipDAL: Pick; + projectUserMembershipRoleDAL: Pick; }; export type TAuthSignupFactory = ReturnType<typeof authSignupServiceFactory>; @@ -119,6 +131,7 @@ export const authSignupServiceFactory = ({ const completeEmailAccountSignup = async ({ email, + password, firstName, lastName, providerAuthToken, @@ -137,6 +150,7 @@ export const authSignupServiceFactory = ({ userAgent, authorization }: TCompleteAccountSignupDTO) => { + const appCfg = getConfig(); const user = await userDAL.findOne({ username: email }); if (!user || (user && user.isAccepted)) { throw new Error("Failed to complete account for complete user"); @@ -152,26 +166,102 @@ export const authSignupServiceFactory = ({ validateSignUpAuthorization(authorization, user.id); } + const hashedPassword = await bcrypt.hash(password, appCfg.BCRYPT_SALT_ROUND); + const privateKey = await getUserPrivateKey(password, { + salt, + protectedKey, + protectedKeyIV, + protectedKeyTag, + encryptedPrivateKey, + iv: encryptedPrivateKeyIV, + tag: encryptedPrivateKeyTag, + encryptionVersion: UserEncryption.V2 + }); + const { tag, encoding, ciphertext, iv } = infisicalSymmetricEncypt(privateKey); const updateduser = await authDAL.transaction(async (tx) => { const us = await userDAL.updateById(user.id, { firstName, lastName, isAccepted: true }, tx); if (!us) throw new Error("User not found"); - const userEncKey = await userDAL.upsertUserEncryptionKey( - us.id, - { - salt, - verifier, - publicKey, - protectedKey, - protectedKeyIV, - protectedKeyTag, - encryptedPrivateKey, - iv: encryptedPrivateKeyIV, - tag: encryptedPrivateKeyTag - }, - tx - ); + const systemGeneratedUserEncryptionKey = await userDAL.findUserEncKeyByUserId(us.id, tx); + let userEncKey; + + // if the below condition is true, these are system generated credentials + // the private key is actually the system generated password + // thus we will re-encrypt the system generated private key with the new password + // akhilmhdh: you may wonder why. The reason is simple: we are moving away from E2EE and these are pieces of it + // without a dummy key in place some things will break, and backward compatibility too. In
2025 we will be removing all these things + if ( + systemGeneratedUserEncryptionKey && + !systemGeneratedUserEncryptionKey.hashedPassword && + systemGeneratedUserEncryptionKey.serverEncryptedPrivateKey && + systemGeneratedUserEncryptionKey.serverEncryptedPrivateKeyTag && + systemGeneratedUserEncryptionKey.serverEncryptedPrivateKeyIV && + systemGeneratedUserEncryptionKey.serverEncryptedPrivateKeyEncoding + ) { + // get server generated password + const serverGeneratedPassword = infisicalSymmetricDecrypt({ + iv: systemGeneratedUserEncryptionKey.serverEncryptedPrivateKeyIV, + tag: systemGeneratedUserEncryptionKey.serverEncryptedPrivateKeyTag, + ciphertext: systemGeneratedUserEncryptionKey.serverEncryptedPrivateKey, + keyEncoding: systemGeneratedUserEncryptionKey.serverEncryptedPrivateKeyEncoding as SecretKeyEncoding + }); + const serverGeneratedPrivateKey = await getUserPrivateKey(serverGeneratedPassword, { + ...systemGeneratedUserEncryptionKey + }); + const encKeys = await generateUserSrpKeys(email, password, { + publicKey: systemGeneratedUserEncryptionKey.publicKey, + privateKey: serverGeneratedPrivateKey + }); + // now reencrypt server generated key with user provided password + userEncKey = await userDAL.upsertUserEncryptionKey( + us.id, + { + encryptionVersion: UserEncryption.V2, + protectedKey: encKeys.protectedKey, + protectedKeyIV: encKeys.protectedKeyIV, + protectedKeyTag: encKeys.protectedKeyTag, + publicKey: encKeys.publicKey, + encryptedPrivateKey: encKeys.encryptedPrivateKey, + iv: encKeys.encryptedPrivateKeyIV, + tag: encKeys.encryptedPrivateKeyTag, + salt: encKeys.salt, + verifier: encKeys.verifier, + hashedPassword, + serverEncryptedPrivateKeyEncoding: encoding, + serverEncryptedPrivateKeyTag: tag, + serverEncryptedPrivateKeyIV: iv, + serverEncryptedPrivateKey: ciphertext + }, + tx + ); + } else { + userEncKey = await userDAL.upsertUserEncryptionKey( + us.id, + { + encryptionVersion: UserEncryption.V2, + salt, + verifier, + publicKey, + protectedKey, + protectedKeyIV, + protectedKeyTag, + encryptedPrivateKey, + iv: encryptedPrivateKeyIV, + tag: encryptedPrivateKeyTag, + hashedPassword, + serverEncryptedPrivateKeyEncoding: encoding, + serverEncryptedPrivateKeyTag: tag, + serverEncryptedPrivateKeyIV: iv, + serverEncryptedPrivateKey: ciphertext + }, + tx + ); + } + // If it's SAML Auth and the organization ID is present, we should check if the user has a pending invite for this org, and accept it - if ((isAuthMethodSaml(authMethod) || authMethod === AuthMethod.LDAP) && organizationId) { + if ( + (isAuthMethodSaml(authMethod) || [AuthMethod.LDAP, AuthMethod.OIDC].includes(authMethod as AuthMethod)) && + organizationId + ) { const [pendingOrgMembership] = await orgDAL.findMembership({ [`${TableName.OrgMembership}.userId` as "userId"]: user.id, status: OrgMembershipStatus.Invited, @@ -227,11 +317,10 @@ export const authSignupServiceFactory = ({ userId: updateduser.info.id }); if (!tokenSession) throw new Error("Failed to create token"); - const appCfg = getConfig(); const accessToken = jwt.sign( { - authMethod: AuthMethod.EMAIL, + authMethod: authMethod || AuthMethod.EMAIL, authTokenType: AuthTokenType.ACCESS_TOKEN, userId: updateduser.info.id, tokenVersionId: tokenSession.id, @@ -244,7 +333,7 @@ export const authSignupServiceFactory = ({ const refreshToken = jwt.sign( { - authMethod: AuthMethod.EMAIL, + authMethod: authMethod || AuthMethod.EMAIL, authTokenType: AuthTokenType.REFRESH_TOKEN, userId: updateduser.info.id, tokenVersionId: tokenSession.id, @@ -265,6 +354,7 @@ export 
const authSignupServiceFactory = ({ ip, salt, email, + password, verifier, firstName, publicKey, @@ -290,30 +380,98 @@ export const authSignupServiceFactory = ({ status: OrgMembershipStatus.Invited }); if (!orgMembership) - throw new BadRequestError({ + throw new NotFoundError({ message: "Failed to find invitation for email", name: "complete account invite" }); + const appCfg = getConfig(); + const hashedPassword = await bcrypt.hash(password, appCfg.BCRYPT_SALT_ROUND); + const privateKey = await getUserPrivateKey(password, { + salt, + protectedKey, + protectedKeyIV, + protectedKeyTag, + encryptedPrivateKey, + iv: encryptedPrivateKeyIV, + tag: encryptedPrivateKeyTag, + encryptionVersion: 2 + }); + const { tag, encoding, ciphertext, iv } = infisicalSymmetricEncypt(privateKey); const updateduser = await authDAL.transaction(async (tx) => { const us = await userDAL.updateById(user.id, { firstName, lastName, isAccepted: true }, tx); if (!us) throw new Error("User not found"); - const userEncKey = await userDAL.upsertUserEncryptionKey( - us.id, - { - salt, - encryptionVersion: 2, - verifier, - publicKey, - protectedKey, - protectedKeyIV, - protectedKeyTag, - encryptedPrivateKey, - iv: encryptedPrivateKeyIV, - tag: encryptedPrivateKeyTag - }, - tx - ); + const systemGeneratedUserEncryptionKey = await userDAL.findUserEncKeyByUserId(us.id, tx); + let userEncKey; + // this means this is system generated credentials + // now replace the private key + if ( + systemGeneratedUserEncryptionKey && + !systemGeneratedUserEncryptionKey.hashedPassword && + systemGeneratedUserEncryptionKey.serverEncryptedPrivateKey && + systemGeneratedUserEncryptionKey.serverEncryptedPrivateKeyTag && + systemGeneratedUserEncryptionKey.serverEncryptedPrivateKeyIV && + systemGeneratedUserEncryptionKey.serverEncryptedPrivateKeyEncoding + ) { + // get server generated password + const serverGeneratedPassword = infisicalSymmetricDecrypt({ + iv: systemGeneratedUserEncryptionKey.serverEncryptedPrivateKeyIV, + tag: systemGeneratedUserEncryptionKey.serverEncryptedPrivateKeyTag, + ciphertext: systemGeneratedUserEncryptionKey.serverEncryptedPrivateKey, + keyEncoding: systemGeneratedUserEncryptionKey.serverEncryptedPrivateKeyEncoding as SecretKeyEncoding + }); + const serverGeneratedPrivateKey = await getUserPrivateKey(serverGeneratedPassword, { + ...systemGeneratedUserEncryptionKey + }); + const encKeys = await generateUserSrpKeys(email, password, { + publicKey: systemGeneratedUserEncryptionKey.publicKey, + privateKey: serverGeneratedPrivateKey + }); + // now reencrypt server generated key with user provided password + userEncKey = await userDAL.upsertUserEncryptionKey( + us.id, + { + encryptionVersion: 2, + protectedKey: encKeys.protectedKey, + protectedKeyIV: encKeys.protectedKeyIV, + protectedKeyTag: encKeys.protectedKeyTag, + publicKey: encKeys.publicKey, + encryptedPrivateKey: encKeys.encryptedPrivateKey, + iv: encKeys.encryptedPrivateKeyIV, + tag: encKeys.encryptedPrivateKeyTag, + salt: encKeys.salt, + verifier: encKeys.verifier, + hashedPassword, + serverEncryptedPrivateKeyEncoding: encoding, + serverEncryptedPrivateKeyTag: tag, + serverEncryptedPrivateKeyIV: iv, + serverEncryptedPrivateKey: ciphertext + }, + tx + ); + } else { + userEncKey = await userDAL.upsertUserEncryptionKey( + us.id, + { + encryptionVersion: UserEncryption.V2, + salt, + verifier, + publicKey, + protectedKey, + protectedKeyIV, + protectedKeyTag, + encryptedPrivateKey, + iv: encryptedPrivateKeyIV, + tag: encryptedPrivateKeyTag, + hashedPassword, + 
serverEncryptedPrivateKeyEncoding: encoding, + serverEncryptedPrivateKeyTag: tag, + serverEncryptedPrivateKeyIV: iv, + serverEncryptedPrivateKey: ciphertext + }, + tx + ); + } const updatedMembersips = await orgDAL.updateMembership( { inviteEmail: email, status: OrgMembershipStatus.Invited }, @@ -321,7 +479,7 @@ export const authSignupServiceFactory = ({ tx ); const uniqueOrgId = [...new Set(updatedMembersips.map(({ orgId }) => orgId))]; - await Promise.allSettled(uniqueOrgId.map((orgId) => licenseService.updateSubscriptionOrgMemberCount(orgId))); + await Promise.allSettled(uniqueOrgId.map((orgId) => licenseService.updateSubscriptionOrgMemberCount(orgId, tx))); await convertPendingGroupAdditionsToGroupMemberships({ userIds: [user.id], @@ -343,7 +501,6 @@ export const authSignupServiceFactory = ({ userId: updateduser.info.id }); if (!tokenSession) throw new Error("Failed to create token"); - const appCfg = getConfig(); const accessToken = jwt.sign( { diff --git a/backend/src/services/auth/auth-signup-type.ts b/backend/src/services/auth/auth-signup-type.ts index a37a1cd96d..3308b9d129 100644 --- a/backend/src/services/auth/auth-signup-type.ts +++ b/backend/src/services/auth/auth-signup-type.ts @@ -1,5 +1,6 @@ export type TCompleteAccountSignupDTO = { email: string; + password: string; firstName: string; lastName?: string; protectedKey: string; @@ -21,6 +22,7 @@ export type TCompleteAccountSignupDTO = { export type TCompleteAccountInviteDTO = { email: string; + password: string; firstName: string; lastName?: string; protectedKey: string; @@ -35,4 +37,5 @@ export type TCompleteAccountInviteDTO = { ip: string; userAgent: string; authorization: string; + tokenMetadata?: string; }; diff --git a/backend/src/services/auth/auth-type.ts b/backend/src/services/auth/auth-type.ts index 8e7b922539..c1bf2b6fbd 100644 --- a/backend/src/services/auth/auth-type.ts +++ b/backend/src/services/auth/auth-type.ts @@ -8,7 +8,8 @@ export enum AuthMethod { JUMPCLOUD_SAML = "jumpcloud-saml", GOOGLE_SAML = "google-saml", KEYCLOAK_SAML = "keycloak-saml", - LDAP = "ldap" + LDAP = "ldap", + OIDC = "oidc" } export enum AuthTokenType { @@ -33,6 +34,7 @@ export enum AuthMode { } export enum ActorType { // would extend to AWS, Azure, ... + PLATFORM = "platform", // Useful for when we want to perform logging on automated actions such as integration syncs. 
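/*
 * Illustrative note, not part of the diff: the new PLATFORM actor lets automated
 * work (e.g. an integration sync) be attributed in audit logs without a real user
 * or machine identity. A hypothetical call could look like:
 *
 *   await auditLogService.createAuditLog({
 *     actor: { type: ActorType.PLATFORM, metadata: {} },
 *     event: { type: "integration-sync", metadata: { integrationId } }
 *   });
 *
 * auditLogService.createAuditLog and the event shape are assumptions for
 * illustration; the exact audit-log API is not shown in this diff.
 */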
USER = "user", // userIdentity SERVICE = "service", IDENTITY = "identity", @@ -50,6 +52,8 @@ export type AuthModeJwtTokenPayload = { tokenVersionId: string; accessVersion: number; organizationId?: string; + isMfaVerified?: boolean; + mfaMethod?: MfaMethod; }; export type AuthModeMfaJwtTokenPayload = { @@ -67,6 +71,8 @@ export type AuthModeRefreshJwtTokenPayload = { tokenVersionId: string; refreshVersion: number; organizationId?: string; + isMfaVerified?: boolean; + mfaMethod?: MfaMethod; }; export type AuthModeProviderJwtTokenPayload = { @@ -81,3 +87,8 @@ export type AuthModeProviderSignUpTokenPayload = { authTokenType: AuthTokenType.SIGNUP_TOKEN; userId: string; }; + +export enum MfaMethod { + EMAIL = "email", + TOTP = "totp" +} diff --git a/backend/src/services/certificate-authority/certificate-authority-cert-dal.ts b/backend/src/services/certificate-authority/certificate-authority-cert-dal.ts new file mode 100644 index 0000000000..763240986b --- /dev/null +++ b/backend/src/services/certificate-authority/certificate-authority-cert-dal.ts @@ -0,0 +1,10 @@ +import { TDbClient } from "@app/db"; +import { TableName } from "@app/db/schemas"; +import { ormify } from "@app/lib/knex"; + +export type TCertificateAuthorityCertDALFactory = ReturnType; + +export const certificateAuthorityCertDALFactory = (db: TDbClient) => { + const caCertOrm = ormify(db, TableName.CertificateAuthorityCert); + return caCertOrm; +}; diff --git a/backend/src/services/certificate-authority/certificate-authority-dal.ts b/backend/src/services/certificate-authority/certificate-authority-dal.ts new file mode 100644 index 0000000000..837bbcf378 --- /dev/null +++ b/backend/src/services/certificate-authority/certificate-authority-dal.ts @@ -0,0 +1,49 @@ +import { TDbClient } from "@app/db"; +import { TableName } from "@app/db/schemas"; +import { DatabaseError } from "@app/lib/errors"; +import { ormify } from "@app/lib/knex"; + +export type TCertificateAuthorityDALFactory = ReturnType; + +export const certificateAuthorityDALFactory = (db: TDbClient) => { + const caOrm = ormify(db, TableName.CertificateAuthority); + + // note: not used + const buildCertificateChain = async (caId: string) => { + try { + const result: { + caId: string; + parentCaId?: string; + encryptedCertificate: Buffer; + }[] = await db + .replicaNode() + .withRecursive("cte", (cte) => { + void cte + .select("ca.id as caId", "ca.parentCaId", "cert.encryptedCertificate") + .from({ ca: TableName.CertificateAuthority }) + .leftJoin({ cert: TableName.CertificateAuthorityCert }, "ca.id", "cert.caId") + .where("ca.id", caId) + .unionAll((builder) => { + void builder + .select("ca.id as caId", "ca.parentCaId", "cert.encryptedCertificate") + .from({ ca: TableName.CertificateAuthority }) + .leftJoin({ cert: TableName.CertificateAuthorityCert }, "ca.id", "cert.caId") + .innerJoin("cte", "cte.parentCaId", "ca.id"); + }); + }) + .select("*") + .from("cte"); + + // Extract certificates and reverse the order to have the root CA at the end + const certChain: Buffer[] = result.map((row) => row.encryptedCertificate); + return certChain; + } catch (error) { + throw new DatabaseError({ error, name: "BuildCertificateChain" }); + } + }; + + return { + ...caOrm, + buildCertificateChain + }; +}; diff --git a/backend/src/services/certificate-authority/certificate-authority-fns.ts b/backend/src/services/certificate-authority/certificate-authority-fns.ts new file mode 100644 index 0000000000..efb582d88d --- /dev/null +++ 
b/backend/src/services/certificate-authority/certificate-authority-fns.ts @@ -0,0 +1,320 @@ +import * as x509 from "@peculiar/x509"; +import crypto from "crypto"; + +import { NotFoundError } from "@app/lib/errors"; +import { getProjectKmsCertificateKeyId } from "@app/services/project/project-fns"; + +import { CertKeyAlgorithm, CertStatus } from "../certificate/certificate-types"; +import { + TDNParts, + TGetCaCertChainDTO, + TGetCaCertChainsDTO, + TGetCaCredentialsDTO, + TRebuildCaCrlDTO +} from "./certificate-authority-types"; + +/* eslint-disable no-bitwise */ +export const createSerialNumber = () => { + const randomBytes = crypto.randomBytes(20); + randomBytes[0] &= 0x7f; // ensure the first bit is 0 + return randomBytes.toString("hex"); +}; + +export const createDistinguishedName = (parts: TDNParts) => { + const dnParts = []; + if (parts.country) dnParts.push(`C=${parts.country}`); + if (parts.organization) dnParts.push(`O=${parts.organization}`); + if (parts.ou) dnParts.push(`OU=${parts.ou}`); + if (parts.province) dnParts.push(`ST=${parts.province}`); + if (parts.commonName) dnParts.push(`CN=${parts.commonName}`); + if (parts.locality) dnParts.push(`L=${parts.locality}`); + return dnParts.join(", "); +}; + +export const parseDistinguishedName = (dn: string): TDNParts => { + const parts: TDNParts = {}; + const dnParts = dn.split(/,\s*/); + + for (const part of dnParts) { + const [key, value] = part.split("="); + switch (key.toUpperCase()) { + case "C": + parts.country = value; + break; + case "O": + parts.organization = value; + break; + case "OU": + parts.ou = value; + break; + case "ST": + parts.province = value; + break; + case "CN": + parts.commonName = value; + break; + case "L": + parts.locality = value; + break; + default: + // Ignore unrecognized keys + break; + } + } + + return parts; +}; + +export const keyAlgorithmToAlgCfg = (keyAlgorithm: CertKeyAlgorithm) => { + switch (keyAlgorithm) { + case CertKeyAlgorithm.RSA_4096: + return { + name: "RSASSA-PKCS1-v1_5", + hash: "SHA-256", + publicExponent: new Uint8Array([1, 0, 1]), + modulusLength: 4096 + }; + case CertKeyAlgorithm.ECDSA_P256: + return { + name: "ECDSA", + namedCurve: "P-256", + hash: "SHA-256" + }; + case CertKeyAlgorithm.ECDSA_P384: + return { + name: "ECDSA", + namedCurve: "P-384", + hash: "SHA-384" + }; + default: { + // RSA_2048 + return { + name: "RSASSA-PKCS1-v1_5", + hash: "SHA-256", + publicExponent: new Uint8Array([1, 0, 1]), + modulusLength: 2048 + }; + } + } +}; + +/** + * Return the public and private key of CA with id [caId] + * Note: credentials are returned as crypto.webcrypto.CryptoKey + * suitable for use with @peculiar/x509 module + * + * TODO: Update to get latest CA Secret once support for CA renewal with new key pair is added + */ +export const getCaCredentials = async ({ + caId, + certificateAuthorityDAL, + certificateAuthoritySecretDAL, + projectDAL, + kmsService +}: TGetCaCredentialsDTO) => { + const ca = await certificateAuthorityDAL.findById(caId); + if (!ca) throw new NotFoundError({ message: `CA with ID '${caId}' not found` }); + + const caSecret = await certificateAuthoritySecretDAL.findOne({ caId }); + if (!caSecret) throw new NotFoundError({ message: `CA secret for CA with ID '${caId}' not found` }); + + const keyId = await getProjectKmsCertificateKeyId({ + projectId: ca.projectId, + projectDAL, + kmsService + }); + + const kmsDecryptor = await kmsService.decryptWithKmsKey({ + kmsId: keyId + }); + const decryptedPrivateKey = await kmsDecryptor({ + cipherTextBlob: 
caSecret.encryptedPrivateKey + }); + + const alg = keyAlgorithmToAlgCfg(ca.keyAlgorithm as CertKeyAlgorithm); + const skObj = crypto.createPrivateKey({ key: decryptedPrivateKey, format: "der", type: "pkcs8" }); + const caPrivateKey = await crypto.subtle.importKey( + "pkcs8", + skObj.export({ format: "der", type: "pkcs8" }), + alg, + true, + ["sign"] + ); + + const pkObj = crypto.createPublicKey(skObj); + const caPublicKey = await crypto.subtle.importKey("spki", pkObj.export({ format: "der", type: "spki" }), alg, true, [ + "verify" + ]); + + return { + caSecret, + caPrivateKey, + caPublicKey + }; +}; + +/** + * Return the list of decrypted pem-encoded certificates and certificate chains + * for CA with id [caId]. + */ +export const getCaCertChains = async ({ + caId, + certificateAuthorityDAL, + certificateAuthorityCertDAL, + projectDAL, + kmsService +}: TGetCaCertChainsDTO) => { + const ca = await certificateAuthorityDAL.findById(caId); + if (!ca) throw new NotFoundError({ message: `CA with ID '${caId}' not found` }); + + const keyId = await getProjectKmsCertificateKeyId({ + projectId: ca.projectId, + projectDAL, + kmsService + }); + + const kmsDecryptor = await kmsService.decryptWithKmsKey({ + kmsId: keyId + }); + + const caCerts = await certificateAuthorityCertDAL.find({ caId: ca.id }, { sort: [["version", "asc"]] }); + + const decryptedChains = await Promise.all( + caCerts.map(async (caCert) => { + const decryptedCaCert = await kmsDecryptor({ + cipherTextBlob: caCert.encryptedCertificate + }); + const caCertObj = new x509.X509Certificate(decryptedCaCert); + const decryptedChain = await kmsDecryptor({ + cipherTextBlob: caCert.encryptedCertificateChain + }); + return { + certificate: caCertObj.toString("pem"), + certificateChain: decryptedChain.toString("utf-8"), + serialNumber: caCertObj.serialNumber, + version: caCert.version + }; + }) + ); + + return decryptedChains; +}; + +/** + * Return the decrypted pem-encoded certificate and certificate chain + * corresponding to CA certificate with id [caCertId]. 
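 * (Illustrative usage, not part of the diff: the dependencies passed in are the
 * same DAL and KMS instances the surrounding factories inject.)
 *
 * @example
 * const { caCert, caCertChain, serialNumber } = await getCaCertChain({
 *   caCertId,
 *   certificateAuthorityDAL,
 *   certificateAuthorityCertDAL,
 *   projectDAL,
 *   kmsService
 * });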
+ */ +export const getCaCertChain = async ({ + caCertId, + certificateAuthorityDAL, + certificateAuthorityCertDAL, + projectDAL, + kmsService +}: TGetCaCertChainDTO) => { + const caCert = await certificateAuthorityCertDAL.findById(caCertId); + if (!caCert) throw new NotFoundError({ message: "CA certificate not found" }); + const ca = await certificateAuthorityDAL.findById(caCert.caId); + + const keyId = await getProjectKmsCertificateKeyId({ + projectId: ca.projectId, + projectDAL, + kmsService + }); + + const kmsDecryptor = await kmsService.decryptWithKmsKey({ + kmsId: keyId + }); + + const decryptedCaCert = await kmsDecryptor({ + cipherTextBlob: caCert.encryptedCertificate + }); + + const caCertObj = new x509.X509Certificate(decryptedCaCert); + + const decryptedChain = await kmsDecryptor({ + cipherTextBlob: caCert.encryptedCertificateChain + }); + + return { + caCert: caCertObj.toString("pem"), + caCertChain: decryptedChain.toString("utf-8"), + serialNumber: caCertObj.serialNumber + }; +}; + +/** + * Rebuilds the certificate revocation list (CRL) + * for CA with id [caId] + */ +export const rebuildCaCrl = async ({ + caId, + certificateAuthorityDAL, + certificateAuthorityCrlDAL, + certificateAuthoritySecretDAL, + projectDAL, + certificateDAL, + kmsService +}: TRebuildCaCrlDTO) => { + const ca = await certificateAuthorityDAL.findById(caId); + if (!ca) throw new NotFoundError({ message: `CA with ID '${caId}' not found` }); + + const caSecret = await certificateAuthoritySecretDAL.findOne({ caId: ca.id }); + + const alg = keyAlgorithmToAlgCfg(ca.keyAlgorithm as CertKeyAlgorithm); + + const keyId = await getProjectKmsCertificateKeyId({ + projectId: ca.projectId, + projectDAL, + kmsService + }); + + const kmsDecryptor = await kmsService.decryptWithKmsKey({ + kmsId: keyId + }); + + const privateKey = await kmsDecryptor({ + cipherTextBlob: caSecret.encryptedPrivateKey + }); + + const skObj = crypto.createPrivateKey({ key: privateKey, format: "der", type: "pkcs8" }); + const sk = await crypto.subtle.importKey("pkcs8", skObj.export({ format: "der", type: "pkcs8" }), alg, true, [ + "sign" + ]); + + const revokedCerts = await certificateDAL.find({ + caId: ca.id, + status: CertStatus.REVOKED + }); + + const crl = await x509.X509CrlGenerator.create({ + issuer: ca.dn, + thisUpdate: new Date(), + nextUpdate: new Date("2025/12/12"), + entries: revokedCerts.map((revokedCert) => { + const revocationDate = new Date(revokedCert.revokedAt as Date); + return { + serialNumber: revokedCert.serialNumber, + revocationDate, + reason: revokedCert.revocationReason as number + }; + }), + signingAlgorithm: alg, + signingKey: sk + }); + + const kmsEncryptor = await kmsService.encryptWithKmsKey({ + kmsId: keyId + }); + const { cipherTextBlob: encryptedCrl } = await kmsEncryptor({ + plainText: Buffer.from(new Uint8Array(crl.rawData)) + }); + + await certificateAuthorityCrlDAL.update( + { + caId: ca.id + }, + { + encryptedCrl + } + ); +}; diff --git a/backend/src/services/certificate-authority/certificate-authority-queue.ts b/backend/src/services/certificate-authority/certificate-authority-queue.ts new file mode 100644 index 0000000000..8c6d3906d0 --- /dev/null +++ b/backend/src/services/certificate-authority/certificate-authority-queue.ts @@ -0,0 +1,149 @@ +import * as x509 from "@peculiar/x509"; +import crypto from "crypto"; + +import { getConfig } from "@app/lib/config/env"; +import { daysToMillisecond, secondsToMillis } from "@app/lib/dates"; +import { NotFoundError } from "@app/lib/errors"; +import { logger } from 
"@app/lib/logger"; +import { QueueJobs, QueueName, TQueueServiceFactory } from "@app/queue"; +import { TCertificateDALFactory } from "@app/services/certificate/certificate-dal"; +import { CertKeyAlgorithm, CertStatus } from "@app/services/certificate/certificate-types"; +import { TKmsServiceFactory } from "@app/services/kms/kms-service"; +import { TProjectDALFactory } from "@app/services/project/project-dal"; +import { getProjectKmsCertificateKeyId } from "@app/services/project/project-fns"; + +import { TCertificateAuthorityCrlDALFactory } from "../../ee/services/certificate-authority-crl/certificate-authority-crl-dal"; +import { TCertificateAuthorityDALFactory } from "./certificate-authority-dal"; +import { keyAlgorithmToAlgCfg } from "./certificate-authority-fns"; +import { TCertificateAuthoritySecretDALFactory } from "./certificate-authority-secret-dal"; +import { TRotateCaCrlTriggerDTO } from "./certificate-authority-types"; + +type TCertificateAuthorityQueueFactoryDep = { + // TODO: Pick + certificateAuthorityDAL: TCertificateAuthorityDALFactory; + certificateAuthorityCrlDAL: TCertificateAuthorityCrlDALFactory; + certificateAuthoritySecretDAL: TCertificateAuthoritySecretDALFactory; + certificateDAL: TCertificateDALFactory; + projectDAL: Pick; + kmsService: Pick; + queueService: TQueueServiceFactory; +}; +export type TCertificateAuthorityQueueFactory = ReturnType; + +export const certificateAuthorityQueueFactory = ({ + certificateAuthorityCrlDAL, + certificateAuthorityDAL, + certificateAuthoritySecretDAL, + certificateDAL, + projectDAL, + kmsService, + queueService +}: TCertificateAuthorityQueueFactoryDep) => { + // TODO 1: auto-periodic rotation + // TODO 2: manual rotation + + const setCaCrlRotationInterval = async ({ caId, rotationIntervalDays }: TRotateCaCrlTriggerDTO) => { + const appCfg = getConfig(); + + // query for config + // const caCrl = await certificateAuthorityCrlDAL.findOne({ + // caId + // }); + + await queueService.queue( + // TODO: clarify queue + job naming + QueueName.CaCrlRotation, + QueueJobs.CaCrlRotation, + { + caId + }, + { + jobId: `ca-crl-rotation-${caId}`, + repeat: { + // on prod it this will be in days, in development this will be second + every: + appCfg.NODE_ENV === "development" + ? 
secondsToMillis(rotationIntervalDays) + : daysToMillisecond(rotationIntervalDays), + immediately: true + } + } + ); + }; + + queueService.start(QueueName.CaCrlRotation, async (job) => { + const { caId } = job.data; + logger.info(`secretReminderQueue.process: [secretDocument=${caId}]`); + + const ca = await certificateAuthorityDAL.findById(caId); + if (!ca) throw new NotFoundError({ message: `CA with ID '${caId}' not found` }); + + const caSecret = await certificateAuthoritySecretDAL.findOne({ caId: ca.id }); + + const alg = keyAlgorithmToAlgCfg(ca.keyAlgorithm as CertKeyAlgorithm); + + const keyId = await getProjectKmsCertificateKeyId({ + projectId: ca.projectId, + projectDAL, + kmsService + }); + + const kmsDecryptor = await kmsService.decryptWithKmsKey({ + kmsId: keyId + }); + const privateKey = await kmsDecryptor({ + cipherTextBlob: caSecret.encryptedPrivateKey + }); + + const skObj = crypto.createPrivateKey({ key: privateKey, format: "der", type: "pkcs8" }); + const sk = await crypto.subtle.importKey("pkcs8", skObj.export({ format: "der", type: "pkcs8" }), alg, true, [ + "sign" + ]); + + const revokedCerts = await certificateDAL.find({ + caId: ca.id, + status: CertStatus.REVOKED + }); + + const crl = await x509.X509CrlGenerator.create({ + issuer: ca.dn, + thisUpdate: new Date(), + nextUpdate: new Date("2025/12/12"), // TODO: depends on configured rebuild interval + entries: revokedCerts.map((revokedCert) => { + return { + serialNumber: revokedCert.serialNumber, + revocationDate: new Date(revokedCert.revokedAt as Date), + reason: revokedCert.revocationReason as number, + invalidity: new Date("2022/01/01"), + issuer: ca.dn + }; + }), + signingAlgorithm: alg, + signingKey: sk + }); + + const kmsEncryptor = await kmsService.encryptWithKmsKey({ + kmsId: keyId + }); + const { cipherTextBlob: encryptedCrl } = await kmsEncryptor({ + plainText: Buffer.from(new Uint8Array(crl.rawData)) + }); + + await certificateAuthorityCrlDAL.update( + { + caId: ca.id + }, + { + encryptedCrl + } + ); + }); + + queueService.listen(QueueName.CaCrlRotation, "failed", (job, err) => { + logger.error(err, "Failed to rotate CA CRL %s", job?.id); + }); + + return { + setCaCrlRotationInterval + }; +}; diff --git a/backend/src/services/certificate-authority/certificate-authority-secret-dal.ts b/backend/src/services/certificate-authority/certificate-authority-secret-dal.ts new file mode 100644 index 0000000000..2ade72e7e3 --- /dev/null +++ b/backend/src/services/certificate-authority/certificate-authority-secret-dal.ts @@ -0,0 +1,10 @@ +import { TDbClient } from "@app/db"; +import { TableName } from "@app/db/schemas"; +import { ormify } from "@app/lib/knex"; + +export type TCertificateAuthoritySecretDALFactory = ReturnType; + +export const certificateAuthoritySecretDALFactory = (db: TDbClient) => { + const caSecretOrm = ormify(db, TableName.CertificateAuthoritySecret); + return caSecretOrm; +}; diff --git a/backend/src/services/certificate-authority/certificate-authority-service.ts b/backend/src/services/certificate-authority/certificate-authority-service.ts new file mode 100644 index 0000000000..06efcf9e3d --- /dev/null +++ b/backend/src/services/certificate-authority/certificate-authority-service.ts @@ -0,0 +1,1852 @@ +/* eslint-disable no-bitwise */ +import { ForbiddenError } from "@casl/ability"; +import * as x509 from "@peculiar/x509"; +import crypto, { KeyObject } from "crypto"; +import ms from "ms"; +import { z } from "zod"; + +import { TCertificateAuthorities, TCertificateTemplates } from "@app/db/schemas"; 
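A self-contained sketch of the CRL-generation pattern the queue above uses with @peculiar/x509. The DN, validity window, serial number, and reason code are made-up values; the call shape mirrors the diff.

import * as x509 from "@peculiar/x509";
import crypto from "crypto";

const buildExampleCrl = async () => {
  // RSA-2048 config, the same shape keyAlgorithmToAlgCfg returns for RSA_2048
  const alg = { name: "RSASSA-PKCS1-v1_5", hash: "SHA-256", publicExponent: new Uint8Array([1, 0, 1]), modulusLength: 2048 };
  const keys = await crypto.subtle.generateKey(alg, true, ["sign", "verify"]);
  const crl = await x509.X509CrlGenerator.create({
    issuer: "CN=Example Issuing CA,O=Example Org",
    thisUpdate: new Date(),
    nextUpdate: new Date(Date.now() + 7 * 24 * 60 * 60 * 1000), // e.g. rebuild weekly
    entries: [
      { serialNumber: "00ab12cd34ef", revocationDate: new Date(), reason: 1 } // 1 = keyCompromise
    ],
    signingAlgorithm: alg,
    signingKey: keys.privateKey
  });
  // DER bytes, ready to be encrypted with the project KMS key and stored
  return Buffer.from(new Uint8Array(crl.rawData));
};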
+import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service"; +import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission"; +import { getConfig } from "@app/lib/config/env"; +import { BadRequestError, NotFoundError } from "@app/lib/errors"; +import { TCertificateBodyDALFactory } from "@app/services/certificate/certificate-body-dal"; +import { TCertificateDALFactory } from "@app/services/certificate/certificate-dal"; +import { TKmsServiceFactory } from "@app/services/kms/kms-service"; +import { TPkiCollectionDALFactory } from "@app/services/pki-collection/pki-collection-dal"; +import { TPkiCollectionItemDALFactory } from "@app/services/pki-collection/pki-collection-item-dal"; +import { TProjectDALFactory } from "@app/services/project/project-dal"; +import { getProjectKmsCertificateKeyId } from "@app/services/project/project-fns"; + +import { TCertificateAuthorityCrlDALFactory } from "../../ee/services/certificate-authority-crl/certificate-authority-crl-dal"; +import { + CertExtendedKeyUsage, + CertExtendedKeyUsageOIDToName, + CertKeyAlgorithm, + CertKeyUsage, + CertStatus +} from "../certificate/certificate-types"; +import { TCertificateTemplateDALFactory } from "../certificate-template/certificate-template-dal"; +import { validateCertificateDetailsAgainstTemplate } from "../certificate-template/certificate-template-fns"; +import { TCertificateAuthorityCertDALFactory } from "./certificate-authority-cert-dal"; +import { TCertificateAuthorityDALFactory } from "./certificate-authority-dal"; +import { + createDistinguishedName, + createSerialNumber, + getCaCertChain, // TODO: consider rename + getCaCertChains, + getCaCredentials, + keyAlgorithmToAlgCfg, + parseDistinguishedName +} from "./certificate-authority-fns"; +import { TCertificateAuthorityQueueFactory } from "./certificate-authority-queue"; +import { TCertificateAuthoritySecretDALFactory } from "./certificate-authority-secret-dal"; +import { + CaStatus, + CaType, + TCreateCaDTO, + TDeleteCaDTO, + TGetCaCertDTO, + TGetCaCertificateTemplatesDTO, + TGetCaCertsDTO, + TGetCaCsrDTO, + TGetCaDTO, + TImportCertToCaDTO, + TIssueCertFromCaDTO, + TRenewCaCertDTO, + TSignCertFromCaDTO, + TSignIntermediateDTO, + TUpdateCaDTO +} from "./certificate-authority-types"; +import { hostnameRegex } from "./certificate-authority-validators"; + +type TCertificateAuthorityServiceFactoryDep = { + certificateAuthorityDAL: Pick< + TCertificateAuthorityDALFactory, + "transaction" | "create" | "findById" | "updateById" | "deleteById" | "findOne" + >; + certificateAuthorityCertDAL: Pick< + TCertificateAuthorityCertDALFactory, + "create" | "findOne" | "transaction" | "find" | "findById" + >; + certificateAuthoritySecretDAL: Pick; + certificateAuthorityCrlDAL: Pick; + certificateTemplateDAL: Pick; + certificateAuthorityQueue: TCertificateAuthorityQueueFactory; // TODO: Pick + certificateDAL: Pick; + certificateBodyDAL: Pick; + pkiCollectionDAL: Pick; + pkiCollectionItemDAL: Pick; + projectDAL: Pick; + kmsService: Pick; + permissionService: Pick; +}; + +export type TCertificateAuthorityServiceFactory = ReturnType; + +export const certificateAuthorityServiceFactory = ({ + certificateAuthorityDAL, + certificateAuthorityCertDAL, + certificateAuthoritySecretDAL, + certificateAuthorityCrlDAL, + certificateTemplateDAL, + certificateDAL, + certificateBodyDAL, + pkiCollectionDAL, + pkiCollectionItemDAL, + projectDAL, + kmsService, + permissionService +}: TCertificateAuthorityServiceFactoryDep) 
=> { + /** + * Generates new root or intermediate CA + */ + const createCa = async ({ + projectSlug, + type, + friendlyName, + commonName, + organization, + ou, + country, + province, + locality, + notBefore, + notAfter, + maxPathLength, + keyAlgorithm, + requireTemplateForIssuance, + actorId, + actorAuthMethod, + actor, + actorOrgId + }: TCreateCaDTO) => { + const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId); + if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` }); + + const { permission } = await permissionService.getProjectPermission( + actor, + actorId, + project.id, + actorAuthMethod, + actorOrgId + ); + + ForbiddenError.from(permission).throwUnlessCan( + ProjectPermissionActions.Create, + ProjectPermissionSub.CertificateAuthorities + ); + + const dn = createDistinguishedName({ + commonName, + organization, + ou, + country, + province, + locality + }); + + const alg = keyAlgorithmToAlgCfg(keyAlgorithm); + const keys = await crypto.subtle.generateKey(alg, true, ["sign", "verify"]); + + const newCa = await certificateAuthorityDAL.transaction(async (tx) => { + const notBeforeDate = notBefore ? new Date(notBefore) : new Date(); + + // if undefined, set [notAfterDate] to 10 years from now + const notAfterDate = notAfter + ? new Date(notAfter) + : new Date(new Date().setFullYear(new Date().getFullYear() + 10)); + + const serialNumber = createSerialNumber(); + + const ca = await certificateAuthorityDAL.create( + { + projectId: project.id, + type, + organization, + ou, + country, + province, + locality, + friendlyName: friendlyName || dn, + commonName, + status: type === CaType.ROOT ? CaStatus.ACTIVE : CaStatus.PENDING_CERTIFICATE, + dn, + keyAlgorithm, + ...(type === CaType.ROOT && { + maxPathLength, + notBefore: notBeforeDate, + notAfter: notAfterDate, + serialNumber + }), + requireTemplateForIssuance + }, + tx + ); + + const certificateManagerKmsId = await getProjectKmsCertificateKeyId({ + projectId: project.id, + projectDAL, + kmsService + }); + const kmsEncryptor = await kmsService.encryptWithKmsKey({ + kmsId: certificateManagerKmsId + }); + + // https://nodejs.org/api/crypto.html#static-method-keyobjectfromkey + const skObj = KeyObject.from(keys.privateKey); + + const { cipherTextBlob: encryptedPrivateKey } = await kmsEncryptor({ + plainText: skObj.export({ + type: "pkcs8", + format: "der" + }) + }); + + const caSecret = await certificateAuthoritySecretDAL.create( + { + caId: ca.id, + encryptedPrivateKey + }, + tx + ); + + if (type === CaType.ROOT) { + // note: create self-signed cert only applicable for root CA + const cert = await x509.X509CertificateGenerator.createSelfSigned({ + name: dn, + serialNumber, + notBefore: notBeforeDate, + notAfter: notAfterDate, + signingAlgorithm: alg, + keys, + extensions: [ + new x509.BasicConstraintsExtension(true, maxPathLength === -1 ? 
undefined : maxPathLength, true), + // eslint-disable-next-line no-bitwise + new x509.KeyUsagesExtension(x509.KeyUsageFlags.keyCertSign | x509.KeyUsageFlags.cRLSign, true), + await x509.SubjectKeyIdentifierExtension.create(keys.publicKey) + ] + }); + + const { cipherTextBlob: encryptedCertificate } = await kmsEncryptor({ + plainText: Buffer.from(new Uint8Array(cert.rawData)) + }); + + const { cipherTextBlob: encryptedCertificateChain } = await kmsEncryptor({ + plainText: Buffer.alloc(0) + }); + + const caCert = await certificateAuthorityCertDAL.create( + { + caId: ca.id, + encryptedCertificate, + encryptedCertificateChain, + version: 1, + caSecretId: caSecret.id + }, + tx + ); + + await certificateAuthorityDAL.updateById( + ca.id, + { + activeCaCertId: caCert.id + }, + tx + ); + } + + // create empty CRL + const crl = await x509.X509CrlGenerator.create({ + issuer: ca.dn, + thisUpdate: new Date(), + nextUpdate: new Date("2025/12/12"), // TODO: change + entries: [], + signingAlgorithm: alg, + signingKey: keys.privateKey + }); + + const { cipherTextBlob: encryptedCrl } = await kmsEncryptor({ + plainText: Buffer.from(new Uint8Array(crl.rawData)) + }); + + await certificateAuthorityCrlDAL.create( + { + caId: ca.id, + encryptedCrl, + caSecretId: caSecret.id + }, + tx + ); + + return ca; + }); + + return newCa; + }; + + /** + * Return CA with id [caId] + */ + const getCaById = async ({ caId, actorId, actorAuthMethod, actor, actorOrgId }: TGetCaDTO) => { + const ca = await certificateAuthorityDAL.findById(caId); + if (!ca) throw new NotFoundError({ message: `CA with ID '${caId}' not found` }); + + const { permission } = await permissionService.getProjectPermission( + actor, + actorId, + ca.projectId, + actorAuthMethod, + actorOrgId + ); + ForbiddenError.from(permission).throwUnlessCan( + ProjectPermissionActions.Read, + ProjectPermissionSub.CertificateAuthorities + ); + + return ca; + }; + + /** + * Update CA with id [caId]. 
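 * (Illustrative usage, not part of the diff: argument names mirror the
 * TUpdateCaDTO fields destructured below, and CaStatus.DISABLED appears
 * elsewhere in this file.)
 *
 * @example
 * const updatedCa = await updateCaById({
 *   caId,
 *   status: CaStatus.DISABLED,
 *   actor,
 *   actorId,
 *   actorAuthMethod,
 *   actorOrgId
 * });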
+ * Note: Used to enable/disable CA + */ + const updateCaById = async ({ + caId, + status, + requireTemplateForIssuance, + actorId, + actorAuthMethod, + actor, + actorOrgId + }: TUpdateCaDTO) => { + const ca = await certificateAuthorityDAL.findById(caId); + if (!ca) throw new NotFoundError({ message: `CA with ID '${caId}' not found` }); + + const { permission } = await permissionService.getProjectPermission( + actor, + actorId, + ca.projectId, + actorAuthMethod, + actorOrgId + ); + + ForbiddenError.from(permission).throwUnlessCan( + ProjectPermissionActions.Edit, + ProjectPermissionSub.CertificateAuthorities + ); + + const updatedCa = await certificateAuthorityDAL.updateById(caId, { status, requireTemplateForIssuance }); + + return updatedCa; + }; + + /** + * Delete CA with id [caId] + */ + const deleteCaById = async ({ caId, actorId, actorAuthMethod, actor, actorOrgId }: TDeleteCaDTO) => { + const ca = await certificateAuthorityDAL.findById(caId); + if (!ca) throw new NotFoundError({ message: `CA with ID '${caId}' not found` }); + + const { permission } = await permissionService.getProjectPermission( + actor, + actorId, + ca.projectId, + actorAuthMethod, + actorOrgId + ); + + ForbiddenError.from(permission).throwUnlessCan( + ProjectPermissionActions.Delete, + ProjectPermissionSub.CertificateAuthorities + ); + + const deletedCa = await certificateAuthorityDAL.deleteById(caId); + + return deletedCa; + }; + + /** + * Return certificate signing request (CSR) made with CA with id [caId] + */ + const getCaCsr = async ({ caId, actorId, actorAuthMethod, actor, actorOrgId }: TGetCaCsrDTO) => { + const ca = await certificateAuthorityDAL.findById(caId); + if (!ca) throw new NotFoundError({ message: `CA with ID '${caId}' not found` }); + + const { permission } = await permissionService.getProjectPermission( + actor, + actorId, + ca.projectId, + actorAuthMethod, + actorOrgId + ); + + ForbiddenError.from(permission).throwUnlessCan( + ProjectPermissionActions.Create, + ProjectPermissionSub.CertificateAuthorities + ); + + if (ca.type === CaType.ROOT) throw new BadRequestError({ message: "Root CA cannot generate CSR" }); + + const { caPrivateKey, caPublicKey } = await getCaCredentials({ + caId, + certificateAuthorityDAL, + certificateAuthoritySecretDAL, + projectDAL, + kmsService + }); + + const alg = keyAlgorithmToAlgCfg(ca.keyAlgorithm as CertKeyAlgorithm); + + const csrObj = await x509.Pkcs10CertificateRequestGenerator.create({ + name: ca.dn, + keys: { + privateKey: caPrivateKey, + publicKey: caPublicKey + }, + signingAlgorithm: alg, + extensions: [ + // eslint-disable-next-line no-bitwise + new x509.KeyUsagesExtension( + x509.KeyUsageFlags.keyCertSign | + x509.KeyUsageFlags.cRLSign | + x509.KeyUsageFlags.digitalSignature | + x509.KeyUsageFlags.keyEncipherment + ) + ], + attributes: [new x509.ChallengePasswordAttribute("password")] + }); + + return { + csr: csrObj.toString("pem"), + ca + }; + }; + + /** + * Renew certificate for CA with id [caId] + * Note 1: This CA renewal method is only applicable to CAs with internal parent CAs + * Note 2: Currently implements CA renewal with same key-pair only + */ + const renewCaCert = async ({ caId, notAfter, actorId, actorAuthMethod, actor, actorOrgId }: TRenewCaCertDTO) => { + const ca = await certificateAuthorityDAL.findById(caId); + if (!ca) throw new NotFoundError({ message: `CA with ID '${caId}' not found` }); + + if (!ca.activeCaCertId) throw new BadRequestError({ message: "CA does not have a certificate installed" }); + + const { permission } = await 
permissionService.getProjectPermission( + actor, + actorId, + ca.projectId, + actorAuthMethod, + actorOrgId + ); + + ForbiddenError.from(permission).throwUnlessCan( + ProjectPermissionActions.Create, + ProjectPermissionSub.CertificateAuthorities + ); + + if (ca.status === CaStatus.DISABLED) throw new BadRequestError({ message: "CA is disabled" }); + + // get latest CA certificate + const caCert = await certificateAuthorityCertDAL.findById(ca.activeCaCertId); + + const serialNumber = createSerialNumber(); + + const certificateManagerKmsId = await getProjectKmsCertificateKeyId({ + projectId: ca.projectId, + projectDAL, + kmsService + }); + + const kmsEncryptor = await kmsService.encryptWithKmsKey({ + kmsId: certificateManagerKmsId + }); + + const { caPrivateKey, caPublicKey, caSecret } = await getCaCredentials({ + caId: ca.id, + certificateAuthorityDAL, + certificateAuthoritySecretDAL, + projectDAL, + kmsService + }); + + const alg = keyAlgorithmToAlgCfg(ca.keyAlgorithm as CertKeyAlgorithm); + + const kmsDecryptor = await kmsService.decryptWithKmsKey({ + kmsId: certificateManagerKmsId + }); + const decryptedCaCert = await kmsDecryptor({ + cipherTextBlob: caCert.encryptedCertificate + }); + + const caCertObj = new x509.X509Certificate(decryptedCaCert); + + let certificate = ""; + let certificateChain = ""; + + switch (ca.type) { + case CaType.ROOT: { + if (new Date(notAfter) <= new Date(caCertObj.notAfter)) { + throw new BadRequestError({ + message: + "New Root CA certificate must have notAfter date that is greater than the current certificate notAfter date" + }); + } + + const notBeforeDate = new Date(); + const cert = await x509.X509CertificateGenerator.createSelfSigned({ + name: ca.dn, + serialNumber, + notBefore: notBeforeDate, + notAfter: new Date(notAfter), + signingAlgorithm: alg, + keys: { + privateKey: caPrivateKey, + publicKey: caPublicKey + }, + extensions: [ + new x509.BasicConstraintsExtension( + true, + ca.maxPathLength === -1 || !ca.maxPathLength ? 
undefined : ca.maxPathLength,
+            true
+          ),
+          // eslint-disable-next-line no-bitwise
+          new x509.KeyUsagesExtension(x509.KeyUsageFlags.keyCertSign | x509.KeyUsageFlags.cRLSign, true),
+          await x509.SubjectKeyIdentifierExtension.create(caPublicKey)
+        ]
+      });
+
+      const { cipherTextBlob: encryptedCertificate } = await kmsEncryptor({
+        plainText: Buffer.from(new Uint8Array(cert.rawData))
+      });
+
+      const { cipherTextBlob: encryptedCertificateChain } = await kmsEncryptor({
+        plainText: Buffer.alloc(0)
+      });
+
+      await certificateAuthorityDAL.transaction(async (tx) => {
+        const newCaCert = await certificateAuthorityCertDAL.create(
+          {
+            caId: ca.id,
+            encryptedCertificate,
+            encryptedCertificateChain,
+            version: caCert.version + 1,
+            caSecretId: caSecret.id
+          },
+          tx
+        );
+
+        await certificateAuthorityDAL.updateById(
+          ca.id,
+          {
+            activeCaCertId: newCaCert.id,
+            notBefore: notBeforeDate,
+            notAfter: new Date(notAfter)
+          },
+          tx
+        );
+      });
+
+      certificate = cert.toString("pem");
+      break;
+    }
+    case CaType.INTERMEDIATE: {
+      if (!ca.parentCaId) {
+        // TODO: look into optimal way to support renewal of intermediate CA with external parent CA
+        throw new BadRequestError({
+          message: "Failed to renew intermediate CA certificate with external parent CA"
+        });
+      }
+
+      const parentCa = await certificateAuthorityDAL.findById(ca.parentCaId);
+      const { caPrivateKey: parentCaPrivateKey } = await getCaCredentials({
+        caId: parentCa.id,
+        certificateAuthorityDAL,
+        certificateAuthoritySecretDAL,
+        projectDAL,
+        kmsService
+      });
+
+      // get latest parent CA certificate
+      if (!parentCa.activeCaCertId)
+        throw new BadRequestError({ message: "Parent CA does not have a certificate installed" });
+      const parentCaCert = await certificateAuthorityCertDAL.findById(parentCa.activeCaCertId);
+
+      const decryptedParentCaCert = await kmsDecryptor({
+        cipherTextBlob: parentCaCert.encryptedCertificate
+      });
+
+      const parentCaCertObj = new x509.X509Certificate(decryptedParentCaCert);
+
+      if (new Date(notAfter) <= new Date(caCertObj.notAfter)) {
+        throw new BadRequestError({
+          message:
+            "New Intermediate CA certificate must have a notAfter date that is greater than the current certificate's notAfter date"
+        });
+      }
+
+      if (new Date(notAfter) > new Date(parentCaCertObj.notAfter)) {
+        throw new BadRequestError({
+          message:
+            "New Intermediate CA certificate must have a notAfter date that is equal to or earlier than the notAfter date of the parent CA's current certificate"
+        });
+      }
+
+      const csrObj = await x509.Pkcs10CertificateRequestGenerator.create({
+        name: ca.dn,
+        keys: {
+          privateKey: caPrivateKey,
+          publicKey: caPublicKey
+        },
+        signingAlgorithm: alg,
+        extensions: [
+          // eslint-disable-next-line no-bitwise
+          new x509.KeyUsagesExtension(
+            x509.KeyUsageFlags.keyCertSign |
+              x509.KeyUsageFlags.cRLSign |
+              x509.KeyUsageFlags.digitalSignature |
+              x509.KeyUsageFlags.keyEncipherment
+          )
+        ],
+        attributes: [new x509.ChallengePasswordAttribute("password")]
+      });
+
+      const notBeforeDate = new Date();
+      const intermediateCert = await x509.X509CertificateGenerator.create({
+        serialNumber,
+        subject: csrObj.subject,
+        issuer: parentCaCertObj.subject,
+        notBefore: notBeforeDate,
+        notAfter: new Date(notAfter),
+        signingKey: parentCaPrivateKey,
+        publicKey: csrObj.publicKey,
+        signingAlgorithm: alg,
+        extensions: [
+          // eslint-disable-next-line no-bitwise
+          new x509.KeyUsagesExtension(
+            x509.KeyUsageFlags.keyCertSign |
+              x509.KeyUsageFlags.cRLSign |
+              x509.KeyUsageFlags.digitalSignature |
+              x509.KeyUsageFlags.keyEncipherment,
+            true
+          ),
+          new
x509.BasicConstraintsExtension( + true, + ca.maxPathLength === -1 || !ca.maxPathLength ? undefined : ca.maxPathLength, + true + ), + await x509.AuthorityKeyIdentifierExtension.create(parentCaCertObj, false), + await x509.SubjectKeyIdentifierExtension.create(csrObj.publicKey) + ] + }); + + const { cipherTextBlob: encryptedCertificate } = await kmsEncryptor({ + plainText: Buffer.from(new Uint8Array(intermediateCert.rawData)) + }); + + const { caCert: parentCaCertificate, caCertChain: parentCaCertChain } = await getCaCertChain({ + caCertId: parentCa.activeCaCertId, + certificateAuthorityDAL, + certificateAuthorityCertDAL, + projectDAL, + kmsService + }); + + certificateChain = `${parentCaCertificate}\n${parentCaCertChain}`.trim(); + + const { cipherTextBlob: encryptedCertificateChain } = await kmsEncryptor({ + plainText: Buffer.from(certificateChain) + }); + + await certificateAuthorityDAL.transaction(async (tx) => { + const newCaCert = await certificateAuthorityCertDAL.create( + { + caId: ca.id, + encryptedCertificate, + encryptedCertificateChain, + version: caCert.version + 1, + caSecretId: caSecret.id + }, + tx + ); + + await certificateAuthorityDAL.updateById( + ca.id, + { + activeCaCertId: newCaCert.id, + notBefore: notBeforeDate, + notAfter: new Date(notAfter) + }, + tx + ); + }); + + certificate = intermediateCert.toString("pem"); + break; + } + default: { + throw new BadRequestError({ + message: "Unrecognized CA type" + }); + } + } + + return { + certificate, + certificateChain, + serialNumber, + ca + }; + }; + + const getCaCerts = async ({ caId, actorId, actorAuthMethod, actor, actorOrgId }: TGetCaCertsDTO) => { + const ca = await certificateAuthorityDAL.findById(caId); + if (!ca) throw new NotFoundError({ message: `CA with ID '${caId}' not found` }); + + const { permission } = await permissionService.getProjectPermission( + actor, + actorId, + ca.projectId, + actorAuthMethod, + actorOrgId + ); + + ForbiddenError.from(permission).throwUnlessCan( + ProjectPermissionActions.Read, + ProjectPermissionSub.CertificateAuthorities + ); + + const caCertChains = await getCaCertChains({ + caId, + certificateAuthorityDAL, + certificateAuthorityCertDAL, + projectDAL, + kmsService + }); + + return { + ca, + caCerts: caCertChains + }; + }; + + /** + * Return current certificate and certificate chain for CA + */ + const getCaCert = async ({ caId, actorId, actorAuthMethod, actor, actorOrgId }: TGetCaCertDTO) => { + const ca = await certificateAuthorityDAL.findById(caId); + if (!ca) throw new NotFoundError({ message: `CA with ID '${caId}' not found` }); + if (!ca.activeCaCertId) throw new BadRequestError({ message: "CA does not have a certificate installed" }); + + const { permission } = await permissionService.getProjectPermission( + actor, + actorId, + ca.projectId, + actorAuthMethod, + actorOrgId + ); + + ForbiddenError.from(permission).throwUnlessCan( + ProjectPermissionActions.Read, + ProjectPermissionSub.CertificateAuthorities + ); + + const { caCert, caCertChain, serialNumber } = await getCaCertChain({ + caCertId: ca.activeCaCertId, + certificateAuthorityDAL, + certificateAuthorityCertDAL, + projectDAL, + kmsService + }); + + return { + certificate: caCert, + certificateChain: caCertChain, + serialNumber, + ca + }; + }; + + /** + * Return CA certificate object by ID + */ + const getCaCertById = async ({ caId, caCertId }: { caId: string; caCertId: string }) => { + const caCert = await certificateAuthorityCertDAL.findOne({ + caId, + id: caCertId + }); + + if (!caCert) { + throw new 
NotFoundError({ message: `CA certificate with ID '${caCertId}' not found for CA with ID '${caId}'` });
+    }
+
+    const ca = await certificateAuthorityDAL.findById(caId);
+    const keyId = await getProjectKmsCertificateKeyId({
+      projectId: ca.projectId,
+      projectDAL,
+      kmsService
+    });
+
+    const kmsDecryptor = await kmsService.decryptWithKmsKey({
+      kmsId: keyId
+    });
+
+    const decryptedCaCert = await kmsDecryptor({
+      cipherTextBlob: caCert.encryptedCertificate
+    });
+
+    const caCertObj = new x509.X509Certificate(decryptedCaCert);
+
+    return caCertObj;
+  };
+
+  /**
+   * Issue a certificate intended to be imported back into an intermediate CA
+   */
+  const signIntermediate = async ({
+    caId,
+    actorId,
+    actorAuthMethod,
+    actor,
+    actorOrgId,
+    csr,
+    notBefore,
+    notAfter,
+    maxPathLength
+  }: TSignIntermediateDTO) => {
+    const appCfg = getConfig();
+    const ca = await certificateAuthorityDAL.findById(caId);
+    if (!ca) throw new NotFoundError({ message: `CA with ID '${caId}' not found` });
+
+    const { permission } = await permissionService.getProjectPermission(
+      actor,
+      actorId,
+      ca.projectId,
+      actorAuthMethod,
+      actorOrgId
+    );
+
+    ForbiddenError.from(permission).throwUnlessCan(
+      ProjectPermissionActions.Create,
+      ProjectPermissionSub.CertificateAuthorities
+    );
+
+    if (ca.status === CaStatus.DISABLED) throw new BadRequestError({ message: "CA is disabled" });
+    if (!ca.activeCaCertId) throw new BadRequestError({ message: "CA does not have a certificate installed" });
+
+    const caCert = await certificateAuthorityCertDAL.findById(ca.activeCaCertId);
+
+    if (ca.notAfter && new Date() > new Date(ca.notAfter)) {
+      throw new BadRequestError({ message: "CA is expired" });
+    }
+
+    const alg = keyAlgorithmToAlgCfg(ca.keyAlgorithm as CertKeyAlgorithm);
+
+    const certificateManagerKmsId = await getProjectKmsCertificateKeyId({
+      projectId: ca.projectId,
+      projectDAL,
+      kmsService
+    });
+    const kmsDecryptor = await kmsService.decryptWithKmsKey({
+      kmsId: certificateManagerKmsId
+    });
+
+    const decryptedCaCert = await kmsDecryptor({
+      cipherTextBlob: caCert.encryptedCertificate
+    });
+
+    const caCertObj = new x509.X509Certificate(decryptedCaCert);
+    const csrObj = new x509.Pkcs10CertificateRequest(csr);
+
+    // check path length constraint
+    const caPathLength = caCertObj.getExtension(x509.BasicConstraintsExtension)?.pathLength;
+    if (caPathLength !== undefined) {
+      if (caPathLength === 0)
+        throw new BadRequestError({
+          message: "Failed to issue intermediate certificate due to CA path length constraint"
+        });
+      if (maxPathLength >= caPathLength || (maxPathLength === -1 && caPathLength !== -1))
+        throw new BadRequestError({
+          message: "The requested path length constraint exceeds the CA's allowed path length"
+        });
+    }
+
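+    // The checks below enforce the standard envelope rule: the issued certificate's
+    // validity window must sit inside the signing CA certificate's own window
+    // (e.g. a request whose notAfter falls beyond the CA's notAfter is rejected).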
+    const notBeforeDate = notBefore ? new Date(notBefore) : new Date();
+    const notAfterDate = new Date(notAfter);
+
+    const caCertNotBeforeDate = new Date(caCertObj.notBefore);
+    const caCertNotAfterDate = new Date(caCertObj.notAfter);
+
+    // check not before constraint
+    if (notBeforeDate < caCertNotBeforeDate) {
+      throw new BadRequestError({ message: "notBefore date is before CA certificate's notBefore date" });
+    }
+
+    if (notBeforeDate > notAfterDate) throw new BadRequestError({ message: "notBefore date is after notAfter date" });
+
+    // check not after constraint
+    if (notAfterDate > caCertNotAfterDate) {
+      throw new BadRequestError({ message: "notAfter date is after CA certificate's notAfter date" });
+    }
+
+    const { caPrivateKey, caSecret } = await getCaCredentials({
+      caId: ca.id,
+      certificateAuthorityDAL,
+      certificateAuthoritySecretDAL,
+      projectDAL,
+      kmsService
+    });
+
+    const serialNumber = createSerialNumber();
+
+    const caCrl = await certificateAuthorityCrlDAL.findOne({ caSecretId: caSecret.id });
+    const distributionPointUrl = `${appCfg.SITE_URL}/api/v1/pki/crl/${caCrl.id}/der`;
+
+    const caIssuerUrl = `${appCfg.SITE_URL}/api/v1/pki/ca/${ca.id}/certificates/${caCert.id}/der`;
+    const intermediateCert = await x509.X509CertificateGenerator.create({
+      serialNumber,
+      subject: csrObj.subject,
+      issuer: caCertObj.subject,
+      notBefore: notBeforeDate,
+      notAfter: notAfterDate,
+      signingKey: caPrivateKey,
+      publicKey: csrObj.publicKey,
+      signingAlgorithm: alg,
+      extensions: [
+        // eslint-disable-next-line no-bitwise
+        new x509.KeyUsagesExtension(
+          x509.KeyUsageFlags.keyCertSign |
+            x509.KeyUsageFlags.cRLSign |
+            x509.KeyUsageFlags.digitalSignature |
+            x509.KeyUsageFlags.keyEncipherment,
+          true
+        ),
+        new x509.BasicConstraintsExtension(true, maxPathLength === -1 ? undefined : maxPathLength, true),
+        await x509.AuthorityKeyIdentifierExtension.create(caCertObj, false),
+        await x509.SubjectKeyIdentifierExtension.create(csrObj.publicKey),
+        new x509.CRLDistributionPointsExtension([distributionPointUrl]),
+        new x509.AuthorityInfoAccessExtension({
+          caIssuers: new x509.GeneralName("url", caIssuerUrl)
+        })
+      ]
+    });
+
+    const { caCert: issuingCaCertificate, caCertChain } = await getCaCertChain({
+      caCertId: ca.activeCaCertId,
+      certificateAuthorityDAL,
+      certificateAuthorityCertDAL,
+      projectDAL,
+      kmsService
+    });
+
+    return {
+      certificate: intermediateCert.toString("pem"),
+      issuingCaCertificate,
+      certificateChain: `${issuingCaCertificate}\n${caCertChain}`.trim(),
+      serialNumber: intermediateCert.serialNumber,
+      ca
+    };
+  };
+
+  /**
+   * Import certificate for CA with id [caId].
+   * Note: Can be used to import an external certificate and certificate chain
+   * into an installed or uninstalled CA.
+   */
+  const importCertToCa = async ({
+    caId,
+    actorId,
+    actorAuthMethod,
+    actor,
+    actorOrgId,
+    certificate,
+    certificateChain
+  }: TImportCertToCaDTO) => {
+    const ca = await certificateAuthorityDAL.findById(caId);
+    if (!ca) throw new NotFoundError({ message: `CA with ID '${caId}' not found` });
+
+    const { permission } = await permissionService.getProjectPermission(
+      actor,
+      actorId,
+      ca.projectId,
+      actorAuthMethod,
+      actorOrgId
+    );
+
+    ForbiddenError.from(permission).throwUnlessCan(
+      ProjectPermissionActions.Create,
+      ProjectPermissionSub.CertificateAuthorities
+    );
+
+    if (ca.parentCaId) {
+      /**
+       * re-evaluate in the future if we should allow users to import a new CA certificate for an intermediate
+       * CA chained to an internal parent CA. Doing so would allow users to re-chain the CA to a different
+       * internal CA.
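+       * For example, importing a certificate issued by a different internal CA would
+       * silently re-parent this CA while chains already issued under the old parent
+       * remain in circulation.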
+ */ + throw new BadRequestError({ + message: "Cannot import certificate to intermediate CA chained to internal parent CA" + }); + } + + const caCert = ca.activeCaCertId ? await certificateAuthorityCertDAL.findById(ca.activeCaCertId) : undefined; + + const certObj = new x509.X509Certificate(certificate); + const maxPathLength = certObj.getExtension(x509.BasicConstraintsExtension)?.pathLength; + + // validate imported certificate and certificate chain + const certificates = certificateChain + .match(/-----BEGIN CERTIFICATE-----[\s\S]+?-----END CERTIFICATE-----/g) + ?.map((cert) => new x509.X509Certificate(cert)); + + if (!certificates) throw new BadRequestError({ message: "Failed to parse certificate chain" }); + + const chain = new x509.X509ChainBuilder({ + certificates + }); + + const chainItems = await chain.build(certObj); + + // chain.build() implicitly verifies the chain + if (chainItems.length !== certificates.length + 1) + throw new BadRequestError({ message: "Invalid certificate chain" }); + + const parentCertObj = chainItems[1]; + const parentCertSubject = parentCertObj.subject; + + const parentCa = await certificateAuthorityDAL.findOne({ + projectId: ca.projectId, + dn: parentCertSubject + }); + + const certificateManagerKmsId = await getProjectKmsCertificateKeyId({ + projectId: ca.projectId, + projectDAL, + kmsService + }); + const kmsEncryptor = await kmsService.encryptWithKmsKey({ + kmsId: certificateManagerKmsId + }); + + const { cipherTextBlob: encryptedCertificate } = await kmsEncryptor({ + plainText: Buffer.from(new Uint8Array(certObj.rawData)) + }); + + const { cipherTextBlob: encryptedCertificateChain } = await kmsEncryptor({ + plainText: Buffer.from(certificateChain) + }); + + // TODO: validate that latest key-pair of CA is used to sign the certificate + // once renewal with new key pair is supported + const { caSecret, caPublicKey } = await getCaCredentials({ + caId: ca.id, + certificateAuthorityDAL, + certificateAuthoritySecretDAL, + projectDAL, + kmsService + }); + + const isCaAndCertPublicKeySame = Buffer.from(await crypto.subtle.exportKey("spki", caPublicKey)).equals( + Buffer.from(certObj.publicKey.rawData) + ); + + if (!isCaAndCertPublicKeySame) { + throw new BadRequestError({ message: "CA and certificate public key do not match" }); + } + + await certificateAuthorityCertDAL.transaction(async (tx) => { + const newCaCert = await certificateAuthorityCertDAL.create( + { + caId: ca.id, + encryptedCertificate, + encryptedCertificateChain, + version: caCert ? caCert.version + 1 : 1, + caSecretId: caSecret.id + }, + tx + ); + + await certificateAuthorityDAL.updateById( + ca.id, + { + status: CaStatus.ACTIVE, + maxPathLength: maxPathLength === undefined ? -1 : maxPathLength, + notBefore: new Date(certObj.notBefore), + notAfter: new Date(certObj.notAfter), + serialNumber: certObj.serialNumber, + parentCaId: parentCa?.id, + activeCaCertId: newCaCert.id + }, + tx + ); + }); + + return { ca }; + }; + + /** + * Return new leaf certificate issued by CA with id [caId] and private key. + * Note: private key and CSR are generated within Infisical. 
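+   *
+   * Example (illustrative only; parameter values are hypothetical):
+   *   const { certificate, privateKey, serialNumber } = await issueCertFromCa({
+   *     caId,
+   *     commonName: "svc.example.com",
+   *     altNames: "svc.example.com",
+   *     ttl: "90d",
+   *     ...actorContext // actor, actorId, actorAuthMethod, actorOrgId
+   *   });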
+ */ + const issueCertFromCa = async ({ + caId, + certificateTemplateId, + pkiCollectionId, + friendlyName, + commonName, + altNames, + ttl, + notBefore, + notAfter, + actorId, + actorAuthMethod, + actor, + actorOrgId, + keyUsages, + extendedKeyUsages + }: TIssueCertFromCaDTO) => { + let ca: TCertificateAuthorities | undefined; + let certificateTemplate: TCertificateTemplates | undefined; + let collectionId = pkiCollectionId; + + if (caId) { + ca = await certificateAuthorityDAL.findById(caId); + } else if (certificateTemplateId) { + certificateTemplate = await certificateTemplateDAL.getById(certificateTemplateId); + if (!certificateTemplate) { + throw new NotFoundError({ + message: `Certificate template with ID '${certificateTemplateId}' not found` + }); + } + + collectionId = certificateTemplate.pkiCollectionId as string; + ca = await certificateAuthorityDAL.findById(certificateTemplate.caId); + } + + if (!ca) { + throw new NotFoundError({ message: `CA with ID '${caId}' not found` }); + } + + const { permission } = await permissionService.getProjectPermission( + actor, + actorId, + ca.projectId, + actorAuthMethod, + actorOrgId + ); + + ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Create, ProjectPermissionSub.Certificates); + + if (ca.status === CaStatus.DISABLED) throw new BadRequestError({ message: "CA is disabled" }); + if (!ca.activeCaCertId) throw new BadRequestError({ message: "CA does not have a certificate installed" }); + if (ca.requireTemplateForIssuance && !certificateTemplate) { + throw new BadRequestError({ message: "Certificate template is required for issuance" }); + } + const caCert = await certificateAuthorityCertDAL.findById(ca.activeCaCertId); + + if (ca.notAfter && new Date() > new Date(ca.notAfter)) { + throw new BadRequestError({ message: "CA is expired" }); + } + + // check PKI collection + if (collectionId) { + const pkiCollection = await pkiCollectionDAL.findById(collectionId); + if (!pkiCollection) throw new NotFoundError({ message: "PKI collection not found" }); + if (pkiCollection.projectId !== ca.projectId) throw new BadRequestError({ message: "Invalid PKI collection" }); + } + + const certificateManagerKmsId = await getProjectKmsCertificateKeyId({ + projectId: ca.projectId, + projectDAL, + kmsService + }); + const kmsDecryptor = await kmsService.decryptWithKmsKey({ + kmsId: certificateManagerKmsId + }); + + const decryptedCaCert = await kmsDecryptor({ + cipherTextBlob: caCert.encryptedCertificate + }); + + const caCertObj = new x509.X509Certificate(decryptedCaCert); + + const notBeforeDate = notBefore ? 
new Date(notBefore) : new Date(); + + let notAfterDate = new Date(new Date().setFullYear(new Date().getFullYear() + 1)); + if (notAfter) { + notAfterDate = new Date(notAfter); + } else if (ttl) { + notAfterDate = new Date(new Date().getTime() + ms(ttl)); + } + + const caCertNotBeforeDate = new Date(caCertObj.notBefore); + const caCertNotAfterDate = new Date(caCertObj.notAfter); + + // check not before constraint + if (notBeforeDate < caCertNotBeforeDate) { + throw new BadRequestError({ message: "notBefore date is before CA certificate's notBefore date" }); + } + + if (notBeforeDate > notAfterDate) throw new BadRequestError({ message: "notBefore date is after notAfter date" }); + + // check not after constraint + if (notAfterDate > caCertNotAfterDate) { + throw new BadRequestError({ message: "notAfter date is after CA certificate's notAfter date" }); + } + + const alg = keyAlgorithmToAlgCfg(ca.keyAlgorithm as CertKeyAlgorithm); + const leafKeys = await crypto.subtle.generateKey(alg, true, ["sign", "verify"]); + + const csrObj = await x509.Pkcs10CertificateRequestGenerator.create({ + name: `CN=${commonName}`, + keys: leafKeys, + signingAlgorithm: alg, + extensions: [ + // eslint-disable-next-line no-bitwise + new x509.KeyUsagesExtension(x509.KeyUsageFlags.digitalSignature | x509.KeyUsageFlags.keyEncipherment) + ], + attributes: [new x509.ChallengePasswordAttribute("password")] + }); + + const { caPrivateKey, caSecret } = await getCaCredentials({ + caId: ca.id, + certificateAuthorityDAL, + certificateAuthoritySecretDAL, + projectDAL, + kmsService + }); + + const caCrl = await certificateAuthorityCrlDAL.findOne({ caSecretId: caSecret.id }); + const appCfg = getConfig(); + + const distributionPointUrl = `${appCfg.SITE_URL}/api/v1/pki/crl/${caCrl.id}/der`; + const caIssuerUrl = `${appCfg.SITE_URL}/api/v1/pki/ca/${ca.id}/certificates/${caCert.id}/der`; + + const extensions: x509.Extension[] = [ + new x509.BasicConstraintsExtension(false), + new x509.CRLDistributionPointsExtension([distributionPointUrl]), + await x509.AuthorityKeyIdentifierExtension.create(caCertObj, false), + await x509.SubjectKeyIdentifierExtension.create(csrObj.publicKey), + new x509.AuthorityInfoAccessExtension({ + caIssuers: new x509.GeneralName("url", caIssuerUrl) + }), + new x509.CertificatePolicyExtension(["2.5.29.32.0"]) // anyPolicy + ]; + + // handle key usages + let selectedKeyUsages: CertKeyUsage[] = keyUsages ?? []; + if (keyUsages === undefined && !certificateTemplate) { + selectedKeyUsages = [CertKeyUsage.DIGITAL_SIGNATURE, CertKeyUsage.KEY_ENCIPHERMENT]; + } + + if (keyUsages === undefined && certificateTemplate) { + selectedKeyUsages = (certificateTemplate.keyUsages ?? []) as CertKeyUsage[]; + } + + if (keyUsages?.length && certificateTemplate) { + const validKeyUsages = certificateTemplate.keyUsages || []; + if (keyUsages.some((keyUsage) => !validKeyUsages.includes(keyUsage))) { + throw new BadRequestError({ + message: "Invalid key usage value based on template policy" + }); + } + selectedKeyUsages = keyUsages; + } + + const keyUsagesBitValue = selectedKeyUsages.reduce((accum, keyUsage) => accum | x509.KeyUsageFlags[keyUsage], 0); + if (keyUsagesBitValue) { + extensions.push(new x509.KeyUsagesExtension(keyUsagesBitValue, true)); + } + + // handle extended key usages + let selectedExtendedKeyUsages: CertExtendedKeyUsage[] = extendedKeyUsages ?? []; + if (extendedKeyUsages === undefined && certificateTemplate) { + selectedExtendedKeyUsages = (certificateTemplate.extendedKeyUsages ?? 
[]) as CertExtendedKeyUsage[]; + } + + if (extendedKeyUsages?.length && certificateTemplate) { + const validExtendedKeyUsages = certificateTemplate.extendedKeyUsages || []; + if (extendedKeyUsages.some((eku) => !validExtendedKeyUsages.includes(eku))) { + throw new BadRequestError({ + message: "Invalid extended key usage value based on template policy" + }); + } + selectedExtendedKeyUsages = extendedKeyUsages; + } + + if (selectedExtendedKeyUsages.length) { + extensions.push( + new x509.ExtendedKeyUsageExtension( + selectedExtendedKeyUsages.map((eku) => x509.ExtendedKeyUsage[eku]), + true + ) + ); + } + + let altNamesArray: { + type: "email" | "dns"; + value: string; + }[] = []; + + if (altNames) { + altNamesArray = altNames + .split(",") + .map((name) => name.trim()) + .map((altName) => { + // check if the altName is a valid email + if (z.string().email().safeParse(altName).success) { + return { + type: "email", + value: altName + }; + } + + // check if the altName is a valid hostname + if (hostnameRegex.test(altName)) { + return { + type: "dns", + value: altName + }; + } + + // If altName is neither a valid email nor a valid hostname, throw an error or handle it accordingly + throw new Error(`Invalid altName: ${altName}`); + }); + + const altNamesExtension = new x509.SubjectAlternativeNameExtension(altNamesArray, false); + extensions.push(altNamesExtension); + } + + if (certificateTemplate) { + validateCertificateDetailsAgainstTemplate( + { + commonName, + notBeforeDate, + notAfterDate, + altNames: altNamesArray.map((entry) => entry.value) + }, + certificateTemplate + ); + } + + const serialNumber = createSerialNumber(); + const leafCert = await x509.X509CertificateGenerator.create({ + serialNumber, + subject: csrObj.subject, + issuer: caCertObj.subject, + notBefore: notBeforeDate, + notAfter: notAfterDate, + signingKey: caPrivateKey, + publicKey: csrObj.publicKey, + signingAlgorithm: alg, + extensions + }); + + const skLeafObj = KeyObject.from(leafKeys.privateKey); + const skLeaf = skLeafObj.export({ format: "pem", type: "pkcs8" }) as string; + + const kmsEncryptor = await kmsService.encryptWithKmsKey({ + kmsId: certificateManagerKmsId + }); + const { cipherTextBlob: encryptedCertificate } = await kmsEncryptor({ + plainText: Buffer.from(new Uint8Array(leafCert.rawData)) + }); + + await certificateDAL.transaction(async (tx) => { + const cert = await certificateDAL.create( + { + caId: (ca as TCertificateAuthorities).id, + caCertId: caCert.id, + certificateTemplateId: certificateTemplate?.id, + status: CertStatus.ACTIVE, + friendlyName: friendlyName || commonName, + commonName, + altNames, + serialNumber, + notBefore: notBeforeDate, + notAfter: notAfterDate, + keyUsages: selectedKeyUsages, + extendedKeyUsages: selectedExtendedKeyUsages + }, + tx + ); + + await certificateBodyDAL.create( + { + certId: cert.id, + encryptedCertificate + }, + tx + ); + + if (collectionId) { + await pkiCollectionItemDAL.create( + { + pkiCollectionId: collectionId, + certId: cert.id + }, + tx + ); + } + + return cert; + }); + + const { caCert: issuingCaCertificate, caCertChain } = await getCaCertChain({ + caCertId: caCert.id, + certificateAuthorityDAL, + certificateAuthorityCertDAL, + projectDAL, + kmsService + }); + + return { + certificate: leafCert.toString("pem"), + certificateChain: `${issuingCaCertificate}\n${caCertChain}`.trim(), + issuingCaCertificate, + privateKey: skLeaf, + serialNumber, + ca + }; + }; + + /** + * Return new leaf certificate issued by CA with id [caId]. 
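+   * Where not explicitly provided, the subject CN, SANs, and key usages are taken
+   * from the submitted CSR (subject to template policy).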
+ * Note: CSR is generated externally and submitted to Infisical. + */ + const signCertFromCa = async (dto: TSignCertFromCaDTO) => { + const appCfg = getConfig(); + let ca: TCertificateAuthorities | undefined; + let certificateTemplate: TCertificateTemplates | undefined; + + const { + caId, + certificateTemplateId, + csr, + pkiCollectionId, + friendlyName, + commonName, + altNames, + ttl, + notBefore, + notAfter, + keyUsages, + extendedKeyUsages + } = dto; + + let collectionId = pkiCollectionId; + + if (caId) { + ca = await certificateAuthorityDAL.findById(caId); + } else if (certificateTemplateId) { + certificateTemplate = await certificateTemplateDAL.getById(certificateTemplateId); + if (!certificateTemplate) { + throw new NotFoundError({ + message: `Certificate template with ID '${certificateTemplateId}' not found` + }); + } + + collectionId = certificateTemplate.pkiCollectionId as string; + ca = await certificateAuthorityDAL.findById(certificateTemplate.caId); + } + + if (!ca) { + throw new NotFoundError({ message: `CA with ID '${caId}' not found` }); + } + + if (!dto.isInternal) { + const { permission } = await permissionService.getProjectPermission( + dto.actor, + dto.actorId, + ca.projectId, + dto.actorAuthMethod, + dto.actorOrgId + ); + + ForbiddenError.from(permission).throwUnlessCan( + ProjectPermissionActions.Create, + ProjectPermissionSub.Certificates + ); + } + + if (ca.status === CaStatus.DISABLED) throw new BadRequestError({ message: "CA is disabled" }); + if (!ca.activeCaCertId) throw new BadRequestError({ message: "CA does not have a certificate installed" }); + if (ca.requireTemplateForIssuance && !certificateTemplate) { + throw new BadRequestError({ message: "Certificate template is required for issuance" }); + } + + const caCert = await certificateAuthorityCertDAL.findById(ca.activeCaCertId); + + if (ca.notAfter && new Date() > new Date(ca.notAfter)) { + throw new BadRequestError({ message: "CA is expired" }); + } + + // check PKI collection + if (pkiCollectionId) { + const pkiCollection = await pkiCollectionDAL.findById(pkiCollectionId); + if (!pkiCollection) throw new NotFoundError({ message: `PKI collection with ID '${pkiCollectionId}' not found` }); + if (pkiCollection.projectId !== ca.projectId) throw new BadRequestError({ message: "Invalid PKI collection" }); + } + + const certificateManagerKmsId = await getProjectKmsCertificateKeyId({ + projectId: ca.projectId, + projectDAL, + kmsService + }); + + const kmsDecryptor = await kmsService.decryptWithKmsKey({ + kmsId: certificateManagerKmsId + }); + + const decryptedCaCert = await kmsDecryptor({ + cipherTextBlob: caCert.encryptedCertificate + }); + + const caCertObj = new x509.X509Certificate(decryptedCaCert); + + const notBeforeDate = notBefore ? 
new Date(notBefore) : new Date(); + + let notAfterDate = new Date(new Date().setFullYear(new Date().getFullYear() + 1)); + if (notAfter) { + notAfterDate = new Date(notAfter); + } else if (ttl) { + notAfterDate = new Date(new Date().getTime() + ms(ttl)); + } else if (certificateTemplate?.ttl) { + notAfterDate = new Date(new Date().getTime() + ms(certificateTemplate.ttl)); + } + + const caCertNotBeforeDate = new Date(caCertObj.notBefore); + const caCertNotAfterDate = new Date(caCertObj.notAfter); + + // check not before constraint + if (notBeforeDate < caCertNotBeforeDate) { + throw new BadRequestError({ message: "notBefore date is before CA certificate's notBefore date" }); + } + + if (notBeforeDate > notAfterDate) throw new BadRequestError({ message: "notBefore date is after notAfter date" }); + + // check not after constraint + if (notAfterDate > caCertNotAfterDate) { + throw new BadRequestError({ message: "notAfter date is after CA certificate's notAfter date" }); + } + + const alg = keyAlgorithmToAlgCfg(ca.keyAlgorithm as CertKeyAlgorithm); + + const csrObj = new x509.Pkcs10CertificateRequest(csr); + + const dn = parseDistinguishedName(csrObj.subject); + const cn = commonName || dn.commonName; + + if (!cn) + throw new BadRequestError({ + message: "A common name (CN) is required in the CSR or as a parameter to this endpoint" + }); + + const { caPrivateKey, caSecret } = await getCaCredentials({ + caId: ca.id, + certificateAuthorityDAL, + certificateAuthoritySecretDAL, + projectDAL, + kmsService + }); + + const caCrl = await certificateAuthorityCrlDAL.findOne({ caSecretId: caSecret.id }); + const distributionPointUrl = `${appCfg.SITE_URL}/api/v1/pki/crl/${caCrl.id}/der`; + + const caIssuerUrl = `${appCfg.SITE_URL}/api/v1/pki/ca/${ca.id}/certificates/${caCert.id}/der`; + const extensions: x509.Extension[] = [ + new x509.BasicConstraintsExtension(false), + await x509.AuthorityKeyIdentifierExtension.create(caCertObj, false), + await x509.SubjectKeyIdentifierExtension.create(csrObj.publicKey), + new x509.CRLDistributionPointsExtension([distributionPointUrl]), + new x509.AuthorityInfoAccessExtension({ + caIssuers: new x509.GeneralName("url", caIssuerUrl) + }), + new x509.CertificatePolicyExtension(["2.5.29.32.0"]) // anyPolicy + ]; + + // handle key usages + const csrKeyUsageExtension = csrObj.getExtension("2.5.29.15") as x509.KeyUsagesExtension; + let csrKeyUsages: CertKeyUsage[] = []; + if (csrKeyUsageExtension) { + csrKeyUsages = Object.values(CertKeyUsage).filter( + (keyUsage) => (x509.KeyUsageFlags[keyUsage] & csrKeyUsageExtension.usages) !== 0 + ); + } + + let selectedKeyUsages: CertKeyUsage[] = keyUsages ?? []; + if (keyUsages === undefined && !certificateTemplate) { + if (csrKeyUsageExtension) { + selectedKeyUsages = csrKeyUsages; + } else { + selectedKeyUsages = [CertKeyUsage.DIGITAL_SIGNATURE, CertKeyUsage.KEY_ENCIPHERMENT]; + } + } + + if (keyUsages === undefined && certificateTemplate) { + if (csrKeyUsageExtension) { + const validKeyUsages = certificateTemplate.keyUsages || []; + if (csrKeyUsages.some((keyUsage) => !validKeyUsages.includes(keyUsage))) { + throw new BadRequestError({ + message: "Invalid key usage value based on template policy" + }); + } + selectedKeyUsages = csrKeyUsages; + } else { + selectedKeyUsages = (certificateTemplate.keyUsages ?? 
[]) as CertKeyUsage[]; + } + } + + if (keyUsages?.length && certificateTemplate) { + const validKeyUsages = certificateTemplate.keyUsages || []; + if (keyUsages.some((keyUsage) => !validKeyUsages.includes(keyUsage))) { + throw new BadRequestError({ + message: "Invalid key usage value based on template policy" + }); + } + selectedKeyUsages = keyUsages; + } + + const keyUsagesBitValue = selectedKeyUsages.reduce((accum, keyUsage) => accum | x509.KeyUsageFlags[keyUsage], 0); + if (keyUsagesBitValue) { + extensions.push(new x509.KeyUsagesExtension(keyUsagesBitValue, true)); + } + + // handle extended key usages + const csrExtendedKeyUsageExtension = csrObj.getExtension("2.5.29.37") as x509.ExtendedKeyUsageExtension; + let csrExtendedKeyUsages: CertExtendedKeyUsage[] = []; + if (csrExtendedKeyUsageExtension) { + csrExtendedKeyUsages = csrExtendedKeyUsageExtension.usages.map( + (ekuOid) => CertExtendedKeyUsageOIDToName[ekuOid as string] + ); + } + + let selectedExtendedKeyUsages: CertExtendedKeyUsage[] = extendedKeyUsages ?? []; + if (extendedKeyUsages === undefined && !certificateTemplate && csrExtendedKeyUsageExtension) { + selectedExtendedKeyUsages = csrExtendedKeyUsages; + } + + if (extendedKeyUsages === undefined && certificateTemplate) { + if (csrExtendedKeyUsageExtension) { + const validExtendedKeyUsages = certificateTemplate.extendedKeyUsages || []; + if (csrExtendedKeyUsages.some((eku) => !validExtendedKeyUsages.includes(eku))) { + throw new BadRequestError({ + message: "Invalid extended key usage value based on template policy" + }); + } + selectedExtendedKeyUsages = csrExtendedKeyUsages; + } else { + selectedExtendedKeyUsages = (certificateTemplate.extendedKeyUsages ?? []) as CertExtendedKeyUsage[]; + } + } + + if (extendedKeyUsages?.length && certificateTemplate) { + const validExtendedKeyUsages = certificateTemplate.extendedKeyUsages || []; + if (extendedKeyUsages.some((keyUsage) => !validExtendedKeyUsages.includes(keyUsage))) { + throw new BadRequestError({ + message: "Invalid extended key usage value based on template policy" + }); + } + selectedExtendedKeyUsages = extendedKeyUsages; + } + + if (selectedExtendedKeyUsages.length) { + extensions.push( + new x509.ExtendedKeyUsageExtension( + selectedExtendedKeyUsages.map((eku) => x509.ExtendedKeyUsage[eku]), + true + ) + ); + } + + let altNamesFromCsr: string = ""; + let altNamesArray: { + type: "email" | "dns"; + value: string; + }[] = []; + if (altNames) { + altNamesArray = altNames + .split(",") + .map((name) => name.trim()) + .map((altName) => { + // check if the altName is a valid email + if (z.string().email().safeParse(altName).success) { + return { + type: "email", + value: altName + }; + } + + // check if the altName is a valid hostname + if (hostnameRegex.test(altName)) { + return { + type: "dns", + value: altName + }; + } + + // If altName is neither a valid email nor a valid hostname, throw an error or handle it accordingly + throw new Error(`Invalid altName: ${altName}`); + }); + } else { + // attempt to read from CSR if altNames is not explicitly provided + const sanExtension = csrObj.extensions.find((ext) => ext.type === "2.5.29.17"); + if (sanExtension) { + const sanNames = new x509.GeneralNames(sanExtension.value); + + altNamesArray = sanNames.items + .filter((value) => value.type === "email" || value.type === "dns") + .map((name) => ({ + type: name.type as "email" | "dns", + value: name.value + })); + + altNamesFromCsr = sanNames.items.map((item) => item.value).join(","); + } + } + + if (altNamesArray.length) { + 
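+      // SAN entries here come from the altNames parameter or, if it was omitted, from the CSR's own SAN extension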
const altNamesExtension = new x509.SubjectAlternativeNameExtension(altNamesArray, false); + extensions.push(altNamesExtension); + } + + if (certificateTemplate) { + validateCertificateDetailsAgainstTemplate( + { + commonName: cn, + notBeforeDate, + notAfterDate, + altNames: altNamesArray.map((entry) => entry.value) + }, + certificateTemplate + ); + } + + const serialNumber = createSerialNumber(); + const leafCert = await x509.X509CertificateGenerator.create({ + serialNumber, + subject: csrObj.subject, + issuer: caCertObj.subject, + notBefore: notBeforeDate, + notAfter: notAfterDate, + signingKey: caPrivateKey, + publicKey: csrObj.publicKey, + signingAlgorithm: alg, + extensions + }); + + const kmsEncryptor = await kmsService.encryptWithKmsKey({ + kmsId: certificateManagerKmsId + }); + const { cipherTextBlob: encryptedCertificate } = await kmsEncryptor({ + plainText: Buffer.from(new Uint8Array(leafCert.rawData)) + }); + + await certificateDAL.transaction(async (tx) => { + const cert = await certificateDAL.create( + { + caId: (ca as TCertificateAuthorities).id, + caCertId: caCert.id, + certificateTemplateId: certificateTemplate?.id, + status: CertStatus.ACTIVE, + friendlyName: friendlyName || csrObj.subject, + commonName: cn, + altNames: altNamesFromCsr || altNames, + serialNumber, + notBefore: notBeforeDate, + notAfter: notAfterDate, + keyUsages: selectedKeyUsages, + extendedKeyUsages: selectedExtendedKeyUsages + }, + tx + ); + + await certificateBodyDAL.create( + { + certId: cert.id, + encryptedCertificate + }, + tx + ); + + if (collectionId) { + await pkiCollectionItemDAL.create( + { + pkiCollectionId: collectionId, + certId: cert.id + }, + tx + ); + } + + return cert; + }); + + const { caCert: issuingCaCertificate, caCertChain } = await getCaCertChain({ + caCertId: ca.activeCaCertId, + certificateAuthorityDAL, + certificateAuthorityCertDAL, + projectDAL, + kmsService + }); + + return { + certificate: leafCert, + certificateChain: `${issuingCaCertificate}\n${caCertChain}`.trim(), + issuingCaCertificate, + serialNumber, + ca + }; + }; + + /** + * Return list of certificate templates for CA with id [caId]. 
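+   * Note: requires read access on certificate templates within the CA's project.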
+   */
+  const getCaCertificateTemplates = async ({
+    caId,
+    actorId,
+    actorAuthMethod,
+    actor,
+    actorOrgId
+  }: TGetCaCertificateTemplatesDTO) => {
+    const ca = await certificateAuthorityDAL.findById(caId);
+    if (!ca) throw new NotFoundError({ message: `CA with ID '${caId}' not found` });
+
+    const { permission } = await permissionService.getProjectPermission(
+      actor,
+      actorId,
+      ca.projectId,
+      actorAuthMethod,
+      actorOrgId
+    );
+
+    ForbiddenError.from(permission).throwUnlessCan(
+      ProjectPermissionActions.Read,
+      ProjectPermissionSub.CertificateTemplates
+    );
+
+    const certificateTemplates = await certificateTemplateDAL.find({ caId });
+
+    return {
+      certificateTemplates,
+      ca
+    };
+  };
+
+  return {
+    createCa,
+    getCaById,
+    updateCaById,
+    deleteCaById,
+    getCaCsr,
+    renewCaCert,
+    getCaCerts,
+    getCaCert,
+    getCaCertById,
+    signIntermediate,
+    importCertToCa,
+    issueCertFromCa,
+    signCertFromCa,
+    getCaCertificateTemplates
+  };
+};
diff --git a/backend/src/services/certificate-authority/certificate-authority-types.ts b/backend/src/services/certificate-authority/certificate-authority-types.ts
new file mode 100644
index 0000000000..e2f5233482
--- /dev/null
+++ b/backend/src/services/certificate-authority/certificate-authority-types.ts
@@ -0,0 +1,186 @@
+import { TProjectPermission } from "@app/lib/types";
+import { TCertificateDALFactory } from "@app/services/certificate/certificate-dal";
+import { TKmsServiceFactory } from "@app/services/kms/kms-service";
+import { TProjectDALFactory } from "@app/services/project/project-dal";
+
+import { TCertificateAuthorityCrlDALFactory } from "../../ee/services/certificate-authority-crl/certificate-authority-crl-dal";
+import { CertExtendedKeyUsage, CertKeyAlgorithm, CertKeyUsage } from "../certificate/certificate-types";
+import { TCertificateAuthorityCertDALFactory } from "./certificate-authority-cert-dal";
+import { TCertificateAuthorityDALFactory } from "./certificate-authority-dal";
+import { TCertificateAuthoritySecretDALFactory } from "./certificate-authority-secret-dal";
+
+export enum CaType {
+  ROOT = "root",
+  INTERMEDIATE = "intermediate"
+}
+
+export enum CaStatus {
+  ACTIVE = "active",
+  DISABLED = "disabled",
+  PENDING_CERTIFICATE = "pending-certificate"
+}
+
+export enum CaRenewalType {
+  EXISTING = "existing"
+}
+
+export type TCreateCaDTO = {
+  projectSlug: string;
+  type: CaType;
+  friendlyName?: string;
+  commonName: string;
+  organization: string;
+  ou: string;
+  country: string;
+  province: string;
+  locality: string;
+  notBefore?: string;
+  notAfter?: string;
+  maxPathLength: number;
+  keyAlgorithm: CertKeyAlgorithm;
+  requireTemplateForIssuance: boolean;
+} & Omit<TProjectPermission, "projectId">;
+
+export type TGetCaDTO = {
+  caId: string;
+} & Omit<TProjectPermission, "projectId">;
+
+export type TUpdateCaDTO = {
+  caId: string;
+  status?: CaStatus;
+  requireTemplateForIssuance?: boolean;
+} & Omit<TProjectPermission, "projectId">;
+
+export type TDeleteCaDTO = {
+  caId: string;
+} & Omit<TProjectPermission, "projectId">;
+
+export type TGetCaCsrDTO = {
+  caId: string;
+} & Omit<TProjectPermission, "projectId">;
+
+export type TRenewCaCertDTO = {
+  caId: string;
+  notAfter: string;
+  type: CaRenewalType;
+} & Omit<TProjectPermission, "projectId">;
+
+export type TGetCaCertsDTO = {
+  caId: string;
+} & Omit<TProjectPermission, "projectId">;
+
+export type TGetCaCertDTO = {
+  caId: string;
+} & Omit<TProjectPermission, "projectId">;
+
+export type TSignIntermediateDTO = {
+  caId: string;
+  csr: string;
+  notBefore?: string;
+  notAfter: string;
+  maxPathLength: number;
+} & Omit<TProjectPermission, "projectId">;
+
+export type TImportCertToCaDTO = {
+  caId: string;
+  certificate: string;
+  certificateChain: string;
+} & Omit<TProjectPermission, "projectId">;
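+
+// These DTOs intersect Omit<TProjectPermission, "projectId"> because the project is
+// resolved from the CA (or certificate template) itself; callers only supply the
+// actor context (actor, actorId, actorAuthMethod, actorOrgId).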
+export type TIssueCertFromCaDTO = {
+  caId?: string;
+  certificateTemplateId?: string;
+  pkiCollectionId?: string;
+  friendlyName?: string;
+  commonName: string;
+  altNames: string;
+  ttl: string;
+  notBefore?: string;
+  notAfter?: string;
+  keyUsages?: CertKeyUsage[];
+  extendedKeyUsages?: CertExtendedKeyUsage[];
+} & Omit<TProjectPermission, "projectId">;
+
+export type TSignCertFromCaDTO =
+  | {
+      isInternal: true;
+      caId?: string;
+      csr: string;
+      certificateTemplateId?: string;
+      pkiCollectionId?: string;
+      friendlyName?: string;
+      commonName?: string;
+      altNames?: string;
+      ttl?: string;
+      notBefore?: string;
+      notAfter?: string;
+      keyUsages?: CertKeyUsage[];
+      extendedKeyUsages?: CertExtendedKeyUsage[];
+    }
+  | ({
+      isInternal: false;
+      caId?: string;
+      csr: string;
+      certificateTemplateId?: string;
+      pkiCollectionId?: string;
+      friendlyName?: string;
+      commonName?: string;
+      altNames: string;
+      ttl: string;
+      notBefore?: string;
+      notAfter?: string;
+      keyUsages?: CertKeyUsage[];
+      extendedKeyUsages?: CertExtendedKeyUsage[];
+    } & Omit<TProjectPermission, "projectId">);
+
+export type TGetCaCertificateTemplatesDTO = {
+  caId: string;
+} & Omit<TProjectPermission, "projectId">;
+
+export type TDNParts = {
+  commonName?: string;
+  organization?: string;
+  ou?: string;
+  country?: string;
+  province?: string;
+  locality?: string;
+};
+
+export type TGetCaCredentialsDTO = {
+  caId: string;
+  certificateAuthorityDAL: Pick;
+  certificateAuthoritySecretDAL: Pick;
+  projectDAL: Pick;
+  kmsService: Pick;
+};
+
+export type TGetCaCertChainsDTO = {
+  caId: string;
+  certificateAuthorityDAL: Pick;
+  certificateAuthorityCertDAL: Pick;
+  projectDAL: Pick;
+  kmsService: Pick;
+};
+
+export type TGetCaCertChainDTO = {
+  caCertId: string;
+  certificateAuthorityDAL: Pick;
+  certificateAuthorityCertDAL: Pick;
+  projectDAL: Pick;
+  kmsService: Pick;
+};
+
+export type TRebuildCaCrlDTO = {
+  caId: string;
+  certificateAuthorityDAL: Pick;
+  certificateAuthorityCrlDAL: Pick;
+  certificateAuthoritySecretDAL: Pick;
+  projectDAL: Pick;
+  certificateDAL: Pick;
+  kmsService: Pick;
+};
+
+export type TRotateCaCrlTriggerDTO = {
+  caId: string;
+  rotationIntervalDays: number;
+};
diff --git a/backend/src/services/certificate-authority/certificate-authority-validators.ts b/backend/src/services/certificate-authority/certificate-authority-validators.ts
new file mode 100644
index 0000000000..16e7dcf49d
--- /dev/null
+++ b/backend/src/services/certificate-authority/certificate-authority-validators.ts
@@ -0,0 +1,34 @@
+import { z } from "zod";
+
+const isValidDate = (dateString: string) => {
+  const date = new Date(dateString);
+  return !Number.isNaN(date.getTime());
+};
+
+export const validateCaDateField = z.string().trim().refine(isValidDate, { message: "Invalid date format" });
+
+export const hostnameRegex = /^(?!:\/\/)(\*\.)?([a-zA-Z0-9-_]{1,63}\.?)+(?!:\/\/)([a-zA-Z]{2,63})$/;
+export const validateAltNamesField = z
+  .string()
+  .trim()
+  .default("")
+  .transform((data) => {
+    if (data === "") return "";
+    // Trim each alt name and join with ', ' to ensure consistent formatting
+    return data
+      .split(",")
+      .map((id) => id.trim())
+      .join(", ");
+  })
+  .refine(
+    (data) => {
+      if (data === "") return true;
+      // Split and validate each alt name
+      return data.split(", ").every((name) => {
+        return hostnameRegex.test(name) || z.string().email().safeParse(name).success;
+      });
+    },
+    {
+      message: "Each alt name must be a valid hostname or email address"
+    }
+  );
diff --git a/backend/src/services/certificate-template/certificate-template-dal.ts b/backend/src/services/certificate-template/certificate-template-dal.ts
new file mode 100644
index 0000000000..c500833d1c
--- /dev/null
+++ 
b/backend/src/services/certificate-template/certificate-template-dal.ts @@ -0,0 +1,60 @@ +import { Knex } from "knex"; + +import { TDbClient } from "@app/db"; +import { TableName } from "@app/db/schemas"; +import { DatabaseError } from "@app/lib/errors"; +import { ormify, selectAllTableCols } from "@app/lib/knex"; + +export type TCertificateTemplateDALFactory = ReturnType; + +export const certificateTemplateDALFactory = (db: TDbClient) => { + const certificateTemplateOrm = ormify(db, TableName.CertificateTemplate); + + const getCertTemplatesByProjectId = async (projectId: string) => { + try { + const certTemplates = await db + .replicaNode()(TableName.CertificateTemplate) + .join( + TableName.CertificateAuthority, + `${TableName.CertificateAuthority}.id`, + `${TableName.CertificateTemplate}.caId` + ) + .where(`${TableName.CertificateAuthority}.projectId`, "=", projectId) + .select(selectAllTableCols(TableName.CertificateTemplate)) + .select( + db.ref("friendlyName").as("caName").withSchema(TableName.CertificateAuthority), + db.ref("projectId").withSchema(TableName.CertificateAuthority) + ); + + return certTemplates; + } catch (error) { + throw new DatabaseError({ error, name: "Get certificate templates by project ID" }); + } + }; + + const getById = async (id: string, tx?: Knex) => { + try { + const certTemplate = await (tx || db.replicaNode())(TableName.CertificateTemplate) + .join( + TableName.CertificateAuthority, + `${TableName.CertificateAuthority}.id`, + `${TableName.CertificateTemplate}.caId` + ) + .join(TableName.Project, `${TableName.Project}.id`, `${TableName.CertificateAuthority}.projectId`) + .where(`${TableName.CertificateTemplate}.id`, "=", id) + .select(selectAllTableCols(TableName.CertificateTemplate)) + .select( + db.ref("projectId").withSchema(TableName.CertificateAuthority), + db.ref("friendlyName").as("caName").withSchema(TableName.CertificateAuthority), + db.ref("orgId").withSchema(TableName.Project) + ) + .first(); + + return certTemplate; + } catch (error) { + throw new DatabaseError({ error, name: "Get certificate template by ID" }); + } + }; + + return { ...certificateTemplateOrm, getCertTemplatesByProjectId, getById }; +}; diff --git a/backend/src/services/certificate-template/certificate-template-est-config-dal.ts b/backend/src/services/certificate-template/certificate-template-est-config-dal.ts new file mode 100644 index 0000000000..99d137c5a4 --- /dev/null +++ b/backend/src/services/certificate-template/certificate-template-est-config-dal.ts @@ -0,0 +1,11 @@ +import { TDbClient } from "@app/db"; +import { TableName } from "@app/db/schemas"; +import { ormify } from "@app/lib/knex"; + +export type TCertificateTemplateEstConfigDALFactory = ReturnType; + +export const certificateTemplateEstConfigDALFactory = (db: TDbClient) => { + const certificateTemplateEstConfigOrm = ormify(db, TableName.CertificateTemplateEstConfig); + + return certificateTemplateEstConfigOrm; +}; diff --git a/backend/src/services/certificate-template/certificate-template-fns.ts b/backend/src/services/certificate-template/certificate-template-fns.ts new file mode 100644 index 0000000000..597be7eb21 --- /dev/null +++ b/backend/src/services/certificate-template/certificate-template-fns.ts @@ -0,0 +1,36 @@ +import ms from "ms"; + +import { TCertificateTemplates } from "@app/db/schemas"; +import { BadRequestError } from "@app/lib/errors"; + +export const validateCertificateDetailsAgainstTemplate = ( + cert: { + commonName: string; + notBeforeDate: Date; + notAfterDate: Date; + altNames: string[]; + 
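+    // each entry is checked against template.subjectAlternativeName below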
}, + template: TCertificateTemplates +) => { + const commonNameRegex = new RegExp(template.commonName); + if (!commonNameRegex.test(cert.commonName)) { + throw new BadRequestError({ + message: "Invalid common name based on template policy" + }); + } + + if (cert.notAfterDate.getTime() - cert.notBeforeDate.getTime() > ms(template.ttl)) { + throw new BadRequestError({ + message: "Invalid validity date based on template policy" + }); + } + + const subjectAlternativeNameRegex = new RegExp(template.subjectAlternativeName); + cert.altNames.forEach((altName) => { + if (!subjectAlternativeNameRegex.test(altName)) { + throw new BadRequestError({ + message: "Invalid subject alternative name based on template policy" + }); + } + }); +}; diff --git a/backend/src/services/certificate-template/certificate-template-schema.ts b/backend/src/services/certificate-template/certificate-template-schema.ts new file mode 100644 index 0000000000..7a87daddf0 --- /dev/null +++ b/backend/src/services/certificate-template/certificate-template-schema.ts @@ -0,0 +1,20 @@ +import z from "zod"; + +import { CertificateTemplatesSchema } from "@app/db/schemas"; + +export const sanitizedCertificateTemplate = CertificateTemplatesSchema.pick({ + id: true, + caId: true, + name: true, + commonName: true, + subjectAlternativeName: true, + pkiCollectionId: true, + ttl: true, + keyUsages: true, + extendedKeyUsages: true +}).merge( + z.object({ + projectId: z.string(), + caName: z.string() + }) +); diff --git a/backend/src/services/certificate-template/certificate-template-service.ts b/backend/src/services/certificate-template/certificate-template-service.ts new file mode 100644 index 0000000000..ead3e85cb3 --- /dev/null +++ b/backend/src/services/certificate-template/certificate-template-service.ts @@ -0,0 +1,484 @@ +import { ForbiddenError } from "@casl/ability"; +import * as x509 from "@peculiar/x509"; +import bcrypt from "bcrypt"; + +import { TCertificateTemplateEstConfigsUpdate } from "@app/db/schemas"; +import { TLicenseServiceFactory } from "@app/ee/services/license/license-service"; +import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service"; +import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission"; +import { getConfig } from "@app/lib/config/env"; +import { BadRequestError, NotFoundError } from "@app/lib/errors"; + +import { isCertChainValid } from "../certificate/certificate-fns"; +import { TCertificateAuthorityDALFactory } from "../certificate-authority/certificate-authority-dal"; +import { TKmsServiceFactory } from "../kms/kms-service"; +import { TProjectDALFactory } from "../project/project-dal"; +import { getProjectKmsCertificateKeyId } from "../project/project-fns"; +import { TCertificateTemplateDALFactory } from "./certificate-template-dal"; +import { TCertificateTemplateEstConfigDALFactory } from "./certificate-template-est-config-dal"; +import { + TCreateCertTemplateDTO, + TCreateEstConfigurationDTO, + TDeleteCertTemplateDTO, + TGetCertTemplateDTO, + TGetEstConfigurationDTO, + TUpdateCertTemplateDTO, + TUpdateEstConfigurationDTO +} from "./certificate-template-types"; + +type TCertificateTemplateServiceFactoryDep = { + certificateTemplateDAL: TCertificateTemplateDALFactory; + certificateTemplateEstConfigDAL: TCertificateTemplateEstConfigDALFactory; + projectDAL: Pick; + kmsService: Pick; + certificateAuthorityDAL: Pick; + permissionService: Pick; + licenseService: Pick; +}; + +export type TCertificateTemplateServiceFactory = 
ReturnType; + +export const certificateTemplateServiceFactory = ({ + certificateTemplateDAL, + certificateTemplateEstConfigDAL, + certificateAuthorityDAL, + permissionService, + kmsService, + projectDAL, + licenseService +}: TCertificateTemplateServiceFactoryDep) => { + const createCertTemplate = async ({ + caId, + pkiCollectionId, + name, + commonName, + subjectAlternativeName, + ttl, + actorId, + actorAuthMethod, + actor, + actorOrgId, + keyUsages, + extendedKeyUsages + }: TCreateCertTemplateDTO) => { + const ca = await certificateAuthorityDAL.findById(caId); + if (!ca) { + throw new NotFoundError({ + message: `CA with ID ${caId} not found` + }); + } + const { permission } = await permissionService.getProjectPermission( + actor, + actorId, + ca.projectId, + actorAuthMethod, + actorOrgId + ); + + ForbiddenError.from(permission).throwUnlessCan( + ProjectPermissionActions.Create, + ProjectPermissionSub.CertificateTemplates + ); + + return certificateTemplateDAL.transaction(async (tx) => { + const { id } = await certificateTemplateDAL.create( + { + caId, + pkiCollectionId, + name, + commonName, + subjectAlternativeName, + ttl, + keyUsages, + extendedKeyUsages + }, + tx + ); + + const certificateTemplate = await certificateTemplateDAL.getById(id, tx); + if (!certificateTemplate) { + throw new NotFoundError({ + message: `Certificate template with ID ${id} not found` + }); + } + + return certificateTemplate; + }); + }; + + const updateCertTemplate = async ({ + id, + caId, + pkiCollectionId, + name, + commonName, + subjectAlternativeName, + ttl, + actorId, + actorAuthMethod, + actor, + actorOrgId, + keyUsages, + extendedKeyUsages + }: TUpdateCertTemplateDTO) => { + const certTemplate = await certificateTemplateDAL.getById(id); + if (!certTemplate) { + throw new NotFoundError({ + message: `Certificate template with ID ${id} not found` + }); + } + + const { permission } = await permissionService.getProjectPermission( + actor, + actorId, + certTemplate.projectId, + actorAuthMethod, + actorOrgId + ); + + ForbiddenError.from(permission).throwUnlessCan( + ProjectPermissionActions.Edit, + ProjectPermissionSub.CertificateTemplates + ); + + if (caId) { + const ca = await certificateAuthorityDAL.findById(caId); + if (!ca || ca.projectId !== certTemplate.projectId) { + throw new BadRequestError({ + message: "Invalid CA" + }); + } + } + + return certificateTemplateDAL.transaction(async (tx) => { + await certificateTemplateDAL.updateById( + certTemplate.id, + { + caId, + pkiCollectionId, + commonName, + subjectAlternativeName, + name, + ttl, + keyUsages, + extendedKeyUsages + }, + tx + ); + + const updatedTemplate = await certificateTemplateDAL.getById(id, tx); + if (!updatedTemplate) { + throw new NotFoundError({ + message: `Certificate template with ID ${id} not found` + }); + } + + return updatedTemplate; + }); + }; + + const deleteCertTemplate = async ({ id, actorId, actorAuthMethod, actor, actorOrgId }: TDeleteCertTemplateDTO) => { + const certTemplate = await certificateTemplateDAL.getById(id); + if (!certTemplate) { + throw new NotFoundError({ + message: `Certificate template with ID ${id} not found` + }); + } + + const { permission } = await permissionService.getProjectPermission( + actor, + actorId, + certTemplate.projectId, + actorAuthMethod, + actorOrgId + ); + + ForbiddenError.from(permission).throwUnlessCan( + ProjectPermissionActions.Delete, + ProjectPermissionSub.CertificateTemplates + ); + + await certificateTemplateDAL.deleteById(certTemplate.id); + + return certTemplate; + }; + + const 
getCertTemplate = async ({ id, actorId, actorAuthMethod, actor, actorOrgId }: TGetCertTemplateDTO) => { + const certTemplate = await certificateTemplateDAL.getById(id); + if (!certTemplate) { + throw new NotFoundError({ + message: `Certificate template with ID ${id} not found` + }); + } + + const { permission } = await permissionService.getProjectPermission( + actor, + actorId, + certTemplate.projectId, + actorAuthMethod, + actorOrgId + ); + + ForbiddenError.from(permission).throwUnlessCan( + ProjectPermissionActions.Read, + ProjectPermissionSub.CertificateTemplates + ); + + return certTemplate; + }; + + const createEstConfiguration = async ({ + certificateTemplateId, + caChain, + passphrase, + isEnabled, + actorId, + actorAuthMethod, + actor, + actorOrgId, + disableBootstrapCertValidation + }: TCreateEstConfigurationDTO) => { + const plan = await licenseService.getPlan(actorOrgId); + if (!plan.pkiEst) { + throw new BadRequestError({ + message: "Failed to create EST configuration due to plan restriction. Upgrade to the Enterprise plan." + }); + } + + const certTemplate = await certificateTemplateDAL.getById(certificateTemplateId); + if (!certTemplate) { + throw new NotFoundError({ + message: `Certificate template with ID ${certificateTemplateId} not found` + }); + } + + const { permission } = await permissionService.getProjectPermission( + actor, + actorId, + certTemplate.projectId, + actorAuthMethod, + actorOrgId + ); + + ForbiddenError.from(permission).throwUnlessCan( + ProjectPermissionActions.Edit, + ProjectPermissionSub.CertificateTemplates + ); + + const appCfg = getConfig(); + + let encryptedCaChain: Buffer | undefined; + if (caChain) { + const certificateManagerKmsId = await getProjectKmsCertificateKeyId({ + projectId: certTemplate.projectId, + projectDAL, + kmsService + }); + + // validate CA chain + const certificates = caChain + .match(/-----BEGIN CERTIFICATE-----[\s\S]+?-----END CERTIFICATE-----/g) + ?.map((cert) => new x509.X509Certificate(cert)); + + if (!certificates) { + throw new BadRequestError({ message: "Failed to parse certificate chain" }); + } + + if (!(await isCertChainValid(certificates))) { + throw new BadRequestError({ message: "Invalid certificate chain" }); + } + + const kmsEncryptor = await kmsService.encryptWithKmsKey({ + kmsId: certificateManagerKmsId + }); + + const { cipherTextBlob } = await kmsEncryptor({ + plainText: Buffer.from(caChain) + }); + + encryptedCaChain = cipherTextBlob; + } + + const hashedPassphrase = await bcrypt.hash(passphrase, appCfg.SALT_ROUNDS); + const estConfig = await certificateTemplateEstConfigDAL.create({ + certificateTemplateId, + hashedPassphrase, + encryptedCaChain, + isEnabled, + disableBootstrapCertValidation + }); + + return { ...estConfig, projectId: certTemplate.projectId }; + }; + + const updateEstConfiguration = async ({ + certificateTemplateId, + caChain, + passphrase, + isEnabled, + actorId, + actorAuthMethod, + actor, + actorOrgId, + disableBootstrapCertValidation + }: TUpdateEstConfigurationDTO) => { + const plan = await licenseService.getPlan(actorOrgId); + if (!plan.pkiEst) { + throw new BadRequestError({ + message: "Failed to update EST configuration due to plan restriction. Upgrade to the Enterprise plan." 
+ }); + } + + const certTemplate = await certificateTemplateDAL.getById(certificateTemplateId); + if (!certTemplate) { + throw new NotFoundError({ + message: `Certificate template with ID ${certificateTemplateId} not found` + }); + } + + const { permission } = await permissionService.getProjectPermission( + actor, + actorId, + certTemplate.projectId, + actorAuthMethod, + actorOrgId + ); + + ForbiddenError.from(permission).throwUnlessCan( + ProjectPermissionActions.Edit, + ProjectPermissionSub.CertificateTemplates + ); + + const originalCaEstConfig = await certificateTemplateEstConfigDAL.findOne({ + certificateTemplateId + }); + + if (!originalCaEstConfig) { + throw new NotFoundError({ + message: `EST configuration with certificate template ID ${certificateTemplateId} not found` + }); + } + + const appCfg = getConfig(); + + const certificateManagerKmsId = await getProjectKmsCertificateKeyId({ + projectId: certTemplate.projectId, + projectDAL, + kmsService + }); + + const updatedData: TCertificateTemplateEstConfigsUpdate = { + isEnabled, + disableBootstrapCertValidation + }; + + if (caChain) { + const certificates = caChain + .match(/-----BEGIN CERTIFICATE-----[\s\S]+?-----END CERTIFICATE-----/g) + ?.map((cert) => new x509.X509Certificate(cert)); + + if (!certificates) { + throw new BadRequestError({ message: "Failed to parse certificate chain" }); + } + + if (!(await isCertChainValid(certificates))) { + throw new BadRequestError({ message: "Invalid certificate chain" }); + } + + const kmsEncryptor = await kmsService.encryptWithKmsKey({ + kmsId: certificateManagerKmsId + }); + + const { cipherTextBlob: encryptedCaChain } = await kmsEncryptor({ + plainText: Buffer.from(caChain) + }); + + updatedData.encryptedCaChain = encryptedCaChain; + } + + if (passphrase) { + const hashedPassphrase = await bcrypt.hash(passphrase, appCfg.SALT_ROUNDS); + updatedData.hashedPassphrase = hashedPassphrase; + } + + const estConfig = await certificateTemplateEstConfigDAL.updateById(originalCaEstConfig.id, updatedData); + + return { ...estConfig, projectId: certTemplate.projectId }; + }; + + const getEstConfiguration = async (dto: TGetEstConfigurationDTO) => { + const { certificateTemplateId } = dto; + + const certTemplate = await certificateTemplateDAL.getById(certificateTemplateId); + if (!certTemplate) { + throw new NotFoundError({ + message: `Certificate template with ID ${certificateTemplateId} not found` + }); + } + + if (!dto.isInternal) { + const { permission } = await permissionService.getProjectPermission( + dto.actor, + dto.actorId, + certTemplate.projectId, + dto.actorAuthMethod, + dto.actorOrgId + ); + + ForbiddenError.from(permission).throwUnlessCan( + ProjectPermissionActions.Edit, + ProjectPermissionSub.CertificateTemplates + ); + } + + const estConfig = await certificateTemplateEstConfigDAL.findOne({ + certificateTemplateId + }); + + if (!estConfig) { + throw new NotFoundError({ + message: `EST configuration with certificate template ID ${certificateTemplateId} not found` + }); + } + + const certificateManagerKmsId = await getProjectKmsCertificateKeyId({ + projectId: certTemplate.projectId, + projectDAL, + kmsService + }); + + const kmsDecryptor = await kmsService.decryptWithKmsKey({ + kmsId: certificateManagerKmsId + }); + + let decryptedCaChain = ""; + if (estConfig.encryptedCaChain) { + decryptedCaChain = ( + await kmsDecryptor({ + cipherTextBlob: estConfig.encryptedCaChain + }) + ).toString(); + } + + return { + certificateTemplateId, + id: estConfig.id, + isEnabled: estConfig.isEnabled, + 
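+ // (decryptedCaChain stays "" when no encrypted CA chain was stored for this EST config)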
caChain: decryptedCaChain, + hashedPassphrase: estConfig.hashedPassphrase, + projectId: certTemplate.projectId, + orgId: certTemplate.orgId, + disableBootstrapCertValidation: estConfig.disableBootstrapCertValidation + }; + }; + + return { + createCertTemplate, + getCertTemplate, + deleteCertTemplate, + updateCertTemplate, + createEstConfiguration, + updateEstConfiguration, + getEstConfiguration + }; +}; diff --git a/backend/src/services/certificate-template/certificate-template-types.ts b/backend/src/services/certificate-template/certificate-template-types.ts new file mode 100644 index 0000000000..cdccb6a2d1 --- /dev/null +++ b/backend/src/services/certificate-template/certificate-template-types.ts @@ -0,0 +1,59 @@ +import { TProjectPermission } from "@app/lib/types"; +import { CertExtendedKeyUsage, CertKeyUsage } from "@app/services/certificate/certificate-types"; + +export type TCreateCertTemplateDTO = { + caId: string; + pkiCollectionId?: string; + name: string; + commonName: string; + subjectAlternativeName: string; + ttl: string; + keyUsages: CertKeyUsage[]; + extendedKeyUsages: CertExtendedKeyUsage[]; +} & Omit; + +export type TUpdateCertTemplateDTO = { + id: string; + caId?: string; + pkiCollectionId?: string; + name?: string; + commonName?: string; + subjectAlternativeName?: string; + ttl?: string; + keyUsages?: CertKeyUsage[]; + extendedKeyUsages?: CertExtendedKeyUsage[]; +} & Omit; + +export type TGetCertTemplateDTO = { + id: string; +} & Omit; + +export type TDeleteCertTemplateDTO = { + id: string; +} & Omit; + +export type TCreateEstConfigurationDTO = { + certificateTemplateId: string; + caChain?: string; + passphrase: string; + isEnabled: boolean; + disableBootstrapCertValidation: boolean; +} & Omit; + +export type TUpdateEstConfigurationDTO = { + certificateTemplateId: string; + caChain?: string; + passphrase?: string; + isEnabled?: boolean; + disableBootstrapCertValidation?: boolean; +} & Omit; + +export type TGetEstConfigurationDTO = + | { + isInternal: true; + certificateTemplateId: string; + } + | ({ + isInternal: false; + certificateTemplateId: string; + } & Omit); diff --git a/backend/src/services/certificate-template/certificate-template-validators.ts b/backend/src/services/certificate-template/certificate-template-validators.ts new file mode 100644 index 0000000000..41a06b05d9 --- /dev/null +++ b/backend/src/services/certificate-template/certificate-template-validators.ts @@ -0,0 +1,14 @@ +import safe from "safe-regex"; +import z from "zod"; + +export const validateTemplateRegexField = z + .string() + .min(1) + .max(100) + .regex(/^[a-zA-Z0-9 *@\-\\.\\]+$/, { + message: "Invalid pattern: only alphanumeric characters, spaces, *, ., @, -, and \\ are allowed." 
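+ // e.g. "*.acme.com" or "svc-*@acme.com" satisfy this whitelist; anything outside it is rejected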
+ }) + // we ensure that the inputted pattern is computationally safe by limiting star height to 1 + .refine((v) => safe(v), { + message: "Unsafe REGEX pattern" + }); diff --git a/backend/src/services/certificate/certificate-body-dal.ts b/backend/src/services/certificate/certificate-body-dal.ts new file mode 100644 index 0000000000..9ddc989664 --- /dev/null +++ b/backend/src/services/certificate/certificate-body-dal.ts @@ -0,0 +1,10 @@ +import { TDbClient } from "@app/db"; +import { TableName } from "@app/db/schemas"; +import { ormify } from "@app/lib/knex"; + +export type TCertificateBodyDALFactory = ReturnType; + +export const certificateBodyDALFactory = (db: TDbClient) => { + const certificateBodyOrm = ormify(db, TableName.CertificateBody); + return certificateBodyOrm; +}; diff --git a/backend/src/services/certificate/certificate-dal.ts b/backend/src/services/certificate/certificate-dal.ts new file mode 100644 index 0000000000..71c70838c8 --- /dev/null +++ b/backend/src/services/certificate/certificate-dal.ts @@ -0,0 +1,51 @@ +import { TDbClient } from "@app/db"; +import { TableName } from "@app/db/schemas"; +import { DatabaseError } from "@app/lib/errors"; +import { ormify } from "@app/lib/knex"; + +export type TCertificateDALFactory = ReturnType; + +export const certificateDALFactory = (db: TDbClient) => { + const certificateOrm = ormify(db, TableName.Certificate); + + const countCertificatesInProject = async ({ + projectId, + friendlyName, + commonName + }: { + projectId: string; + friendlyName?: string; + commonName?: string; + }) => { + try { + interface CountResult { + count: string; + } + + let query = db + .replicaNode()(TableName.Certificate) + .join(TableName.CertificateAuthority, `${TableName.Certificate}.caId`, `${TableName.CertificateAuthority}.id`) + .join(TableName.Project, `${TableName.CertificateAuthority}.projectId`, `${TableName.Project}.id`) + .where(`${TableName.Project}.id`, projectId); + + if (friendlyName) { + query = query.andWhere(`${TableName.Certificate}.friendlyName`, friendlyName); + } + + if (commonName) { + query = query.andWhere(`${TableName.Certificate}.commonName`, commonName); + } + + const count = await query.count("*").first(); + + return parseInt((count as unknown as CountResult).count || "0", 10); + } catch (error) { + throw new DatabaseError({ error, name: "Count all project certificates" }); + } + }; + + return { + ...certificateOrm, + countCertificatesInProject + }; +}; diff --git a/backend/src/services/certificate/certificate-fns.ts b/backend/src/services/certificate/certificate-fns.ts new file mode 100644 index 0000000000..1768a50117 --- /dev/null +++ b/backend/src/services/certificate/certificate-fns.ts @@ -0,0 +1,42 @@ +import * as x509 from "@peculiar/x509"; + +import { CrlReason } from "./certificate-types"; + +export const revocationReasonToCrlCode = (crlReason: CrlReason) => { + switch (crlReason) { + case CrlReason.KEY_COMPROMISE: + return x509.X509CrlReason.keyCompromise; + case CrlReason.CA_COMPROMISE: + return x509.X509CrlReason.cACompromise; + case CrlReason.AFFILIATION_CHANGED: + return x509.X509CrlReason.affiliationChanged; + case CrlReason.SUPERSEDED: + return x509.X509CrlReason.superseded; + case CrlReason.CESSATION_OF_OPERATION: + return x509.X509CrlReason.cessationOfOperation; + case CrlReason.CERTIFICATE_HOLD: + return x509.X509CrlReason.certificateHold; + case CrlReason.PRIVILEGE_WITHDRAWN: + return x509.X509CrlReason.privilegeWithdrawn; + case CrlReason.A_A_COMPROMISE: + return x509.X509CrlReason.aACompromise; + default: + 
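+ // CrlReason.UNSPECIFIED (and anything left unmapped) falls through to the catch-all CRL code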
return x509.X509CrlReason.unspecified; + } +}; + +export const isCertChainValid = async (certificates: x509.X509Certificate[]) => { + if (certificates.length === 1) { + return true; + } + + const leafCert = certificates[0]; + const chain = new x509.X509ChainBuilder({ + certificates: certificates.slice(1) + }); + + const chainItems = await chain.build(leafCert); + + // chain.build() implicitly verifies the chain + return chainItems.length === certificates.length; +}; diff --git a/backend/src/services/certificate/certificate-service.ts b/backend/src/services/certificate/certificate-service.ts new file mode 100644 index 0000000000..8dc2de9013 --- /dev/null +++ b/backend/src/services/certificate/certificate-service.ts @@ -0,0 +1,205 @@ +import { ForbiddenError } from "@casl/ability"; +import * as x509 from "@peculiar/x509"; + +import { TCertificateAuthorityCrlDALFactory } from "@app/ee/services/certificate-authority-crl/certificate-authority-crl-dal"; +import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service"; +import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission"; +import { TCertificateBodyDALFactory } from "@app/services/certificate/certificate-body-dal"; +import { TCertificateDALFactory } from "@app/services/certificate/certificate-dal"; +import { TCertificateAuthorityCertDALFactory } from "@app/services/certificate-authority/certificate-authority-cert-dal"; +import { TCertificateAuthorityDALFactory } from "@app/services/certificate-authority/certificate-authority-dal"; +import { TCertificateAuthoritySecretDALFactory } from "@app/services/certificate-authority/certificate-authority-secret-dal"; +import { TKmsServiceFactory } from "@app/services/kms/kms-service"; +import { TProjectDALFactory } from "@app/services/project/project-dal"; +import { getProjectKmsCertificateKeyId } from "@app/services/project/project-fns"; + +import { getCaCertChain, rebuildCaCrl } from "../certificate-authority/certificate-authority-fns"; +import { revocationReasonToCrlCode } from "./certificate-fns"; +import { CertStatus, TDeleteCertDTO, TGetCertBodyDTO, TGetCertDTO, TRevokeCertDTO } from "./certificate-types"; + +type TCertificateServiceFactoryDep = { + certificateDAL: Pick; + certificateBodyDAL: Pick; + certificateAuthorityDAL: Pick; + certificateAuthorityCertDAL: Pick; + certificateAuthorityCrlDAL: Pick; + certificateAuthoritySecretDAL: Pick; + projectDAL: Pick; + kmsService: Pick; + permissionService: Pick; +}; + +export type TCertificateServiceFactory = ReturnType; + +export const certificateServiceFactory = ({ + certificateDAL, + certificateBodyDAL, + certificateAuthorityDAL, + certificateAuthorityCertDAL, + certificateAuthorityCrlDAL, + certificateAuthoritySecretDAL, + projectDAL, + kmsService, + permissionService +}: TCertificateServiceFactoryDep) => { + /** + * Return details for certificate with serial number [serialNumber] + */ + const getCert = async ({ serialNumber, actorId, actorAuthMethod, actor, actorOrgId }: TGetCertDTO) => { + const cert = await certificateDAL.findOne({ serialNumber }); + const ca = await certificateAuthorityDAL.findById(cert.caId); + + const { permission } = await permissionService.getProjectPermission( + actor, + actorId, + ca.projectId, + actorAuthMethod, + actorOrgId + ); + + ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Certificates); + + return { + cert, + ca + }; + }; + + /** + * Delete certificate with serial number 
[serialNumber] + */ + const deleteCert = async ({ serialNumber, actorId, actorAuthMethod, actor, actorOrgId }: TDeleteCertDTO) => { + const cert = await certificateDAL.findOne({ serialNumber }); + const ca = await certificateAuthorityDAL.findById(cert.caId); + + const { permission } = await permissionService.getProjectPermission( + actor, + actorId, + ca.projectId, + actorAuthMethod, + actorOrgId + ); + + ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Delete, ProjectPermissionSub.Certificates); + + const deletedCert = await certificateDAL.deleteById(cert.id); + + return { + deletedCert, + ca + }; + }; + + /** + * Revoke certificate with serial number [serialNumber]. + * Note: Revoking a certificate adds it to the certificate revocation list (CRL) + * of its issuing CA + */ + const revokeCert = async ({ + serialNumber, + revocationReason, + actorId, + actorAuthMethod, + actor, + actorOrgId + }: TRevokeCertDTO) => { + const cert = await certificateDAL.findOne({ serialNumber }); + const ca = await certificateAuthorityDAL.findById(cert.caId); + + const { permission } = await permissionService.getProjectPermission( + actor, + actorId, + ca.projectId, + actorAuthMethod, + actorOrgId + ); + + ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Delete, ProjectPermissionSub.Certificates); + + if (cert.status === CertStatus.REVOKED) throw new Error("Certificate already revoked"); + + const revokedAt = new Date(); + await certificateDAL.update( + { + id: cert.id + }, + { + status: CertStatus.REVOKED, + revokedAt, + revocationReason: revocationReasonToCrlCode(revocationReason) + } + ); + + // rebuild CRL (TODO: move to interval-based cron job) + await rebuildCaCrl({ + caId: ca.id, + certificateAuthorityDAL, + certificateAuthorityCrlDAL, + certificateAuthoritySecretDAL, + projectDAL, + certificateDAL, + kmsService + }); + + return { revokedAt, cert, ca }; + }; + + /** + * Return certificate body and certificate chain for certificate with + * serial number [serialNumber] + */ + const getCertBody = async ({ serialNumber, actorId, actorAuthMethod, actor, actorOrgId }: TGetCertBodyDTO) => { + const cert = await certificateDAL.findOne({ serialNumber }); + const ca = await certificateAuthorityDAL.findById(cert.caId); + + const { permission } = await permissionService.getProjectPermission( + actor, + actorId, + ca.projectId, + actorAuthMethod, + actorOrgId + ); + + ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Certificates); + + const certBody = await certificateBodyDAL.findOne({ certId: cert.id }); + + const certificateManagerKeyId = await getProjectKmsCertificateKeyId({ + projectId: ca.projectId, + projectDAL, + kmsService + }); + + const kmsDecryptor = await kmsService.decryptWithKmsKey({ + kmsId: certificateManagerKeyId + }); + const decryptedCert = await kmsDecryptor({ + cipherTextBlob: certBody.encryptedCertificate + }); + + const certObj = new x509.X509Certificate(decryptedCert); + + const { caCert, caCertChain } = await getCaCertChain({ + caCertId: cert.caCertId, + certificateAuthorityDAL, + certificateAuthorityCertDAL, + projectDAL, + kmsService + }); + + return { + certificate: certObj.toString("pem"), + certificateChain: `${caCert}\n${caCertChain}`.trim(), + serialNumber: certObj.serialNumber, + cert, + ca + }; + }; + + return { + getCert, + deleteCert, + revokeCert, + getCertBody + }; +}; diff --git a/backend/src/services/certificate/certificate-types.ts 
b/backend/src/services/certificate/certificate-types.ts new file mode 100644 index 0000000000..ef63f142d3 --- /dev/null +++ b/backend/src/services/certificate/certificate-types.ts @@ -0,0 +1,75 @@ +import * as x509 from "@peculiar/x509"; + +import { TProjectPermission } from "@app/lib/types"; + +export enum CertStatus { + ACTIVE = "active", + REVOKED = "revoked" +} + +export enum CertKeyAlgorithm { + RSA_2048 = "RSA_2048", + RSA_4096 = "RSA_4096", + ECDSA_P256 = "EC_prime256v1", + ECDSA_P384 = "EC_secp384r1" +} + +export enum CertKeyUsage { + DIGITAL_SIGNATURE = "digitalSignature", + KEY_ENCIPHERMENT = "keyEncipherment", + NON_REPUDIATION = "nonRepudiation", + DATA_ENCIPHERMENT = "dataEncipherment", + KEY_AGREEMENT = "keyAgreement", + KEY_CERT_SIGN = "keyCertSign", + CRL_SIGN = "cRLSign", + ENCIPHER_ONLY = "encipherOnly", + DECIPHER_ONLY = "decipherOnly" +} + +export enum CertExtendedKeyUsage { + CLIENT_AUTH = "clientAuth", + SERVER_AUTH = "serverAuth", + CODE_SIGNING = "codeSigning", + EMAIL_PROTECTION = "emailProtection", + TIMESTAMPING = "timeStamping", + OCSP_SIGNING = "ocspSigning" +} + +export const CertExtendedKeyUsageOIDToName: Record = { + [x509.ExtendedKeyUsage.clientAuth]: CertExtendedKeyUsage.CLIENT_AUTH, + [x509.ExtendedKeyUsage.serverAuth]: CertExtendedKeyUsage.SERVER_AUTH, + [x509.ExtendedKeyUsage.codeSigning]: CertExtendedKeyUsage.CODE_SIGNING, + [x509.ExtendedKeyUsage.emailProtection]: CertExtendedKeyUsage.EMAIL_PROTECTION, + [x509.ExtendedKeyUsage.ocspSigning]: CertExtendedKeyUsage.OCSP_SIGNING, + [x509.ExtendedKeyUsage.timeStamping]: CertExtendedKeyUsage.TIMESTAMPING +}; + +export enum CrlReason { + UNSPECIFIED = "UNSPECIFIED", + KEY_COMPROMISE = "KEY_COMPROMISE", + CA_COMPROMISE = "CA_COMPROMISE", + AFFILIATION_CHANGED = "AFFILIATION_CHANGED", + SUPERSEDED = "SUPERSEDED", + CESSATION_OF_OPERATION = "CESSATION_OF_OPERATION", + CERTIFICATE_HOLD = "CERTIFICATE_HOLD", + // REMOVE_FROM_CRL = "REMOVE_FROM_CRL", + PRIVILEGE_WITHDRAWN = "PRIVILEGE_WITHDRAWN", + A_A_COMPROMISE = "A_A_COMPROMISE" +} + +export type TGetCertDTO = { + serialNumber: string; +} & Omit; + +export type TDeleteCertDTO = { + serialNumber: string; +} & Omit; + +export type TRevokeCertDTO = { + serialNumber: string; + revocationReason: CrlReason; +} & Omit; + +export type TGetCertBodyDTO = { + serialNumber: string; +} & Omit; diff --git a/backend/src/services/cmek/cmek-service.ts b/backend/src/services/cmek/cmek-service.ts new file mode 100644 index 0000000000..c8e1b932a9 --- /dev/null +++ b/backend/src/services/cmek/cmek-service.ts @@ -0,0 +1,166 @@ +import { ForbiddenError } from "@casl/ability"; + +import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service"; +import { ProjectPermissionCmekActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission"; +import { BadRequestError, NotFoundError } from "@app/lib/errors"; +import { OrgServiceActor } from "@app/lib/types"; +import { + TCmekDecryptDTO, + TCmekEncryptDTO, + TCreateCmekDTO, + TListCmeksByProjectIdDTO, + TUpdabteCmekByIdDTO +} from "@app/services/cmek/cmek-types"; +import { TKmsKeyDALFactory } from "@app/services/kms/kms-key-dal"; +import { TKmsServiceFactory } from "@app/services/kms/kms-service"; + +type TCmekServiceFactoryDep = { + kmsService: TKmsServiceFactory; + kmsDAL: TKmsKeyDALFactory; + permissionService: TPermissionServiceFactory; +}; + +export type TCmekServiceFactory = ReturnType; + +export const cmekServiceFactory = ({ kmsService, kmsDAL, permissionService }: 
TCmekServiceFactoryDep) => { + const createCmek = async ({ projectId, ...dto }: TCreateCmekDTO, actor: OrgServiceActor) => { + const { permission } = await permissionService.getProjectPermission( + actor.type, + actor.id, + projectId, + actor.authMethod, + actor.orgId + ); + + ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionCmekActions.Create, ProjectPermissionSub.Cmek); + + const cmek = await kmsService.generateKmsKey({ + ...dto, + projectId, + isReserved: false + }); + + return cmek; + }; + + const updateCmekById = async ({ keyId, ...data }: TUpdabteCmekByIdDTO, actor: OrgServiceActor) => { + const key = await kmsDAL.findById(keyId); + + if (!key) throw new NotFoundError({ message: `Key with ID ${keyId} not found` }); + + if (!key.projectId || key.isReserved) throw new BadRequestError({ message: "Key is not customer managed" }); + + const { permission } = await permissionService.getProjectPermission( + actor.type, + actor.id, + key.projectId, + actor.authMethod, + actor.orgId + ); + + ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionCmekActions.Edit, ProjectPermissionSub.Cmek); + + const cmek = await kmsDAL.updateById(keyId, data); + + return cmek; + }; + + const deleteCmekById = async (keyId: string, actor: OrgServiceActor) => { + const key = await kmsDAL.findById(keyId); + + if (!key) throw new NotFoundError({ message: `Key with ID ${keyId} not found` }); + + if (!key.projectId || key.isReserved) throw new BadRequestError({ message: "Key is not customer managed" }); + + const { permission } = await permissionService.getProjectPermission( + actor.type, + actor.id, + key.projectId, + actor.authMethod, + actor.orgId + ); + + ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionCmekActions.Delete, ProjectPermissionSub.Cmek); + + const cmek = kmsDAL.deleteById(keyId); + + return cmek; + }; + + const listCmeksByProjectId = async ({ projectId, ...filters }: TListCmeksByProjectIdDTO, actor: OrgServiceActor) => { + const { permission } = await permissionService.getProjectPermission( + actor.type, + actor.id, + projectId, + actor.authMethod, + actor.orgId + ); + + ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionCmekActions.Read, ProjectPermissionSub.Cmek); + + const { keys: cmeks, totalCount } = await kmsDAL.findKmsKeysByProjectId({ projectId, ...filters }); + + return { cmeks, totalCount }; + }; + + const cmekEncrypt = async ({ keyId, plaintext }: TCmekEncryptDTO, actor: OrgServiceActor) => { + const key = await kmsDAL.findById(keyId); + + if (!key) throw new NotFoundError({ message: `Key with ID ${keyId} not found` }); + + if (!key.projectId || key.isReserved) throw new BadRequestError({ message: "Key is not customer managed" }); + + if (key.isDisabled) throw new BadRequestError({ message: "Key is disabled" }); + + const { permission } = await permissionService.getProjectPermission( + actor.type, + actor.id, + key.projectId, + actor.authMethod, + actor.orgId + ); + + ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionCmekActions.Encrypt, ProjectPermissionSub.Cmek); + + const encrypt = await kmsService.encryptWithKmsKey({ kmsId: keyId }); + + const { cipherTextBlob } = await encrypt({ plainText: Buffer.from(plaintext, "base64") }); + + return cipherTextBlob.toString("base64"); + }; + + const cmekDecrypt = async ({ keyId, ciphertext }: TCmekDecryptDTO, actor: OrgServiceActor) => { + const key = await kmsDAL.findById(keyId); + + if (!key) throw new NotFoundError({ message: `Key with ID ${keyId} not found` }); + + if 
(!key.projectId || key.isReserved) throw new BadRequestError({ message: "Key is not customer managed" }); + + if (key.isDisabled) throw new BadRequestError({ message: "Key is disabled" }); + + const { permission } = await permissionService.getProjectPermission( + actor.type, + actor.id, + key.projectId, + actor.authMethod, + actor.orgId + ); + + ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionCmekActions.Decrypt, ProjectPermissionSub.Cmek); + + const decrypt = await kmsService.decryptWithKmsKey({ kmsId: keyId }); + + const plaintextBlob = await decrypt({ cipherTextBlob: Buffer.from(ciphertext, "base64") }); + + return plaintextBlob.toString("base64"); + }; + + return { + createCmek, + updateCmekById, + deleteCmekById, + listCmeksByProjectId, + cmekEncrypt, + cmekDecrypt + }; +}; diff --git a/backend/src/services/cmek/cmek-types.ts b/backend/src/services/cmek/cmek-types.ts new file mode 100644 index 0000000000..b99ff1d6e3 --- /dev/null +++ b/backend/src/services/cmek/cmek-types.ts @@ -0,0 +1,40 @@ +import { SymmetricEncryption } from "@app/lib/crypto/cipher"; +import { OrderByDirection } from "@app/lib/types"; + +export type TCreateCmekDTO = { + orgId: string; + projectId: string; + name: string; + description?: string; + encryptionAlgorithm: SymmetricEncryption; +}; + +export type TUpdabteCmekByIdDTO = { + keyId: string; + name?: string; + isDisabled?: boolean; + description?: string; +}; + +export type TListCmeksByProjectIdDTO = { + projectId: string; + offset?: number; + limit?: number; + orderBy?: CmekOrderBy; + orderDirection?: OrderByDirection; + search?: string; +}; + +export type TCmekEncryptDTO = { + keyId: string; + plaintext: string; +}; + +export type TCmekDecryptDTO = { + keyId: string; + ciphertext: string; +}; + +export enum CmekOrderBy { + Name = "name" +} diff --git a/backend/src/services/external-group-org-role-mapping/external-group-org-role-mapping-dal.ts b/backend/src/services/external-group-org-role-mapping/external-group-org-role-mapping-dal.ts new file mode 100644 index 0000000000..6f8f5973c2 --- /dev/null +++ b/backend/src/services/external-group-org-role-mapping/external-group-org-role-mapping-dal.ts @@ -0,0 +1,46 @@ +import { Tables } from "knex/types/tables"; + +import { TDbClient } from "@app/db"; +import { TableName } from "@app/db/schemas"; +import { TExternalGroupOrgRoleMappings } from "@app/db/schemas/external-group-org-role-mappings"; +import { ormify } from "@app/lib/knex"; + +export type TExternalGroupOrgRoleMappingDALFactory = ReturnType; + +export const externalGroupOrgRoleMappingDALFactory = (db: TDbClient) => { + const externalGroupOrgRoleMappingOrm = ormify(db, TableName.ExternalGroupOrgRoleMapping); + + const updateExternalGroupOrgRoleMappingForOrg = async ( + orgId: string, + newMappings: readonly Tables[TableName.ExternalGroupOrgRoleMapping]["insert"][] + ) => { + const currentMappings = await externalGroupOrgRoleMappingOrm.find({ orgId }); + + const newMap = new Map(newMappings.map((mapping) => [mapping.groupName, mapping])); + const currentMap = new Map(currentMappings.map((mapping) => [mapping.groupName, mapping])); + + const mappingsToDelete = currentMappings.filter((mapping) => !newMap.has(mapping.groupName)); + const mappingsToUpdate = currentMappings + .filter((mapping) => newMap.has(mapping.groupName)) + .map((mapping) => ({ id: mapping.id, ...newMap.get(mapping.groupName) })); + const mappingsToInsert = newMappings.filter((mapping) => !currentMap.has(mapping.groupName)); + + const mappings = await 
externalGroupOrgRoleMappingOrm.transaction(async (tx) => { + await externalGroupOrgRoleMappingOrm.delete({ $in: { id: mappingsToDelete.map((mapping) => mapping.id) } }, tx); + + const updatedMappings: TExternalGroupOrgRoleMappings[] = []; + for await (const { id, ...mappingData } of mappingsToUpdate) { + const updatedMapping = await externalGroupOrgRoleMappingOrm.update({ id }, mappingData, tx); + updatedMappings.push(updatedMapping[0]); + } + + const insertedMappings = await externalGroupOrgRoleMappingOrm.insertMany(mappingsToInsert, tx); + + return [...updatedMappings, ...insertedMappings]; + }); + + return mappings; + }; + + return { ...externalGroupOrgRoleMappingOrm, updateExternalGroupOrgRoleMappingForOrg }; +}; diff --git a/backend/src/services/external-group-org-role-mapping/external-group-org-role-mapping-fns.ts b/backend/src/services/external-group-org-role-mapping/external-group-org-role-mapping-fns.ts new file mode 100644 index 0000000000..fe67242512 --- /dev/null +++ b/backend/src/services/external-group-org-role-mapping/external-group-org-role-mapping-fns.ts @@ -0,0 +1,67 @@ +import { OrgMembershipRole, TOrgRoles } from "@app/db/schemas"; +import { TLicenseServiceFactory } from "@app/ee/services/license/license-service"; +import { BadRequestError, NotFoundError } from "@app/lib/errors"; +import { TOrgRoleDALFactory } from "@app/services/org/org-role-dal"; +import { isCustomOrgRole } from "@app/services/org/org-role-fns"; + +import { TExternalGroupOrgMembershipRoleMappingDTO } from "./external-group-org-role-mapping-types"; + +export const constructGroupOrgMembershipRoleMappings = async ({ + mappingsDTO, + orgId, + orgRoleDAL, + licenseService +}: { + mappingsDTO: TExternalGroupOrgMembershipRoleMappingDTO[]; + orgRoleDAL: TOrgRoleDALFactory; + licenseService: TLicenseServiceFactory; + orgId: string; +}) => { + const plan = await licenseService.getPlan(orgId); + + // prevent setting custom values if not in plan + if (mappingsDTO.some((map) => isCustomOrgRole(map.roleSlug)) && !plan?.rbac) + throw new BadRequestError({ + message: + "Failed to set group organization role mapping due to plan RBAC restriction. Upgrade plan to set custom role mapping." 
+ }); + + const customRoleSlugs = mappingsDTO + .filter((mapping) => isCustomOrgRole(mapping.roleSlug)) + .map((mapping) => mapping.roleSlug); + + let customRolesMap: Map = new Map(); + if (customRoleSlugs.length > 0) { + const customRoles = await orgRoleDAL.find({ + $in: { + slug: customRoleSlugs + } + }); + + customRolesMap = new Map(customRoles.map((role) => [role.slug, role])); + } + + const mappings = mappingsDTO.map(({ roleSlug, groupName }) => { + if (isCustomOrgRole(roleSlug)) { + const customRole = customRolesMap.get(roleSlug); + + if (!customRole) throw new NotFoundError({ message: `Custom role ${roleSlug} not found.` }); + + return { + groupName, + role: OrgMembershipRole.Custom, + roleId: customRole.id, + orgId + }; + } + + return { + groupName, + role: roleSlug, + roleId: null, // need to set explicitly null for updates + orgId + }; + }); + + return mappings; +}; diff --git a/backend/src/services/external-group-org-role-mapping/external-group-org-role-mapping-service.ts b/backend/src/services/external-group-org-role-mapping/external-group-org-role-mapping-service.ts new file mode 100644 index 0000000000..b386114199 --- /dev/null +++ b/backend/src/services/external-group-org-role-mapping/external-group-org-role-mapping-service.ts @@ -0,0 +1,78 @@ +import { ForbiddenError } from "@casl/ability"; + +import { TLicenseServiceFactory } from "@app/ee/services/license/license-service"; +import { OrgPermissionActions, OrgPermissionSubjects } from "@app/ee/services/permission/org-permission"; +import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service"; +import { OrgServiceActor } from "@app/lib/types"; +import { constructGroupOrgMembershipRoleMappings } from "@app/services/external-group-org-role-mapping/external-group-org-role-mapping-fns"; +import { TSyncExternalGroupOrgMembershipRoleMappingsDTO } from "@app/services/external-group-org-role-mapping/external-group-org-role-mapping-types"; +import { TOrgRoleDALFactory } from "@app/services/org/org-role-dal"; + +import { TExternalGroupOrgRoleMappingDALFactory } from "./external-group-org-role-mapping-dal"; + +type TExternalGroupOrgRoleMappingServiceFactoryDep = { + externalGroupOrgRoleMappingDAL: TExternalGroupOrgRoleMappingDALFactory; + permissionService: TPermissionServiceFactory; + licenseService: TLicenseServiceFactory; + orgRoleDAL: TOrgRoleDALFactory; +}; + +export type TExternalGroupOrgRoleMappingServiceFactory = ReturnType; + +export const externalGroupOrgRoleMappingServiceFactory = ({ + externalGroupOrgRoleMappingDAL, + licenseService, + permissionService, + orgRoleDAL +}: TExternalGroupOrgRoleMappingServiceFactoryDep) => { + const listExternalGroupOrgRoleMappings = async (actor: OrgServiceActor) => { + const { permission } = await permissionService.getOrgPermission( + actor.type, + actor.id, + actor.orgId, + actor.authMethod, + actor.orgId + ); + + // TODO: will need to change if we add support for ldap, oidc, etc. + ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Read, OrgPermissionSubjects.Scim); + + const mappings = await externalGroupOrgRoleMappingDAL.find({ + orgId: actor.orgId + }); + + return mappings; + }; + + const updateExternalGroupOrgRoleMappings = async ( + dto: TSyncExternalGroupOrgMembershipRoleMappingsDTO, + actor: OrgServiceActor + ) => { + const { permission } = await permissionService.getOrgPermission( + actor.type, + actor.id, + actor.orgId, + actor.authMethod, + actor.orgId + ); + + // TODO: will need to change if we add support for ldap, oidc, etc. 
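+ // dto.mappings entries have the shape { groupName, roleSlug } (TExternalGroupOrgMembershipRoleMappingDTO), + // e.g. { groupName: "engineering", roleSlug: "admin" }; custom role slugs are resolved to concrete role IDs below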
+ ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Edit, OrgPermissionSubjects.Scim); + + const mappings = await constructGroupOrgMembershipRoleMappings({ + mappingsDTO: dto.mappings, + orgRoleDAL, + licenseService, + orgId: actor.orgId + }); + + const data = await externalGroupOrgRoleMappingDAL.updateExternalGroupOrgRoleMappingForOrg(actor.orgId, mappings); + + return data; + }; + + return { + updateExternalGroupOrgRoleMappings, + listExternalGroupOrgRoleMappings + }; +}; diff --git a/backend/src/services/external-group-org-role-mapping/external-group-org-role-mapping-types.ts b/backend/src/services/external-group-org-role-mapping/external-group-org-role-mapping-types.ts new file mode 100644 index 0000000000..2b2ccc10c6 --- /dev/null +++ b/backend/src/services/external-group-org-role-mapping/external-group-org-role-mapping-types.ts @@ -0,0 +1,8 @@ +export type TExternalGroupOrgMembershipRoleMappingDTO = { + groupName: string; + roleSlug: string; +}; + +export type TSyncExternalGroupOrgMembershipRoleMappingsDTO = { + mappings: TExternalGroupOrgMembershipRoleMappingDTO[]; +}; diff --git a/backend/src/services/external-migration/external-migration-fns.ts b/backend/src/services/external-migration/external-migration-fns.ts new file mode 100644 index 0000000000..7ae0d0aad8 --- /dev/null +++ b/backend/src/services/external-migration/external-migration-fns.ts @@ -0,0 +1,777 @@ +import slugify from "@sindresorhus/slugify"; +import { randomUUID } from "crypto"; +import sjcl from "sjcl"; +import tweetnacl from "tweetnacl"; +import tweetnaclUtil from "tweetnacl-util"; + +import { SecretType, TSecretFolders } from "@app/db/schemas"; +import { BadRequestError, NotFoundError } from "@app/lib/errors"; +import { chunkArray } from "@app/lib/fn"; +import { logger } from "@app/lib/logger"; +import { alphaNumericNanoId } from "@app/lib/nanoid"; + +import { TKmsServiceFactory } from "../kms/kms-service"; +import { KmsDataKey } from "../kms/kms-types"; +import { TProjectDALFactory } from "../project/project-dal"; +import { TProjectServiceFactory } from "../project/project-service"; +import { TProjectEnvDALFactory } from "../project-env/project-env-dal"; +import { TProjectEnvServiceFactory } from "../project-env/project-env-service"; +import { TSecretFolderDALFactory } from "../secret-folder/secret-folder-dal"; +import { TSecretTagDALFactory } from "../secret-tag/secret-tag-dal"; +import { TSecretV2BridgeDALFactory } from "../secret-v2-bridge/secret-v2-bridge-dal"; +import { fnSecretBulkInsert, getAllSecretReferences } from "../secret-v2-bridge/secret-v2-bridge-fns"; +import type { TSecretV2BridgeServiceFactory } from "../secret-v2-bridge/secret-v2-bridge-service"; +import { TSecretVersionV2DALFactory } from "../secret-v2-bridge/secret-version-dal"; +import { TSecretVersionV2TagDALFactory } from "../secret-v2-bridge/secret-version-tag-dal"; +import { InfisicalImportData, TEnvKeyExportJSON, TImportInfisicalDataCreate } from "./external-migration-types"; + +export type TImportDataIntoInfisicalDTO = { + projectDAL: Pick; + projectEnvDAL: Pick; + kmsService: Pick; + + secretDAL: Pick; + secretVersionDAL: Pick; + secretTagDAL: Pick; + secretVersionTagDAL: Pick; + + folderDAL: Pick; + projectService: Pick; + projectEnvService: Pick; + secretV2BridgeService: Pick; + + input: TImportInfisicalDataCreate; +}; + +const { codec, hash } = sjcl; +const { secretbox } = tweetnacl; + +export const decryptEnvKeyDataFn = async (decryptionKey: string, encryptedJson: { nonce: string; data: string }) => { + const 
key = tweetnaclUtil.decodeBase64(codec.base64.fromBits(hash.sha256.hash(decryptionKey))); + const nonce = tweetnaclUtil.decodeBase64(encryptedJson.nonce); + const encryptedData = tweetnaclUtil.decodeBase64(encryptedJson.data); + + const decrypted = secretbox.open(encryptedData, nonce, key); + + if (!decrypted) { + throw new BadRequestError({ message: "Decryption failed, please check the entered encryption key" }); + } + + const decryptedJson = tweetnaclUtil.encodeUTF8(decrypted); + return decryptedJson; +}; + +export const parseEnvKeyDataFn = async (decryptedJson: string): Promise => { + const parsedJson: TEnvKeyExportJSON = JSON.parse(decryptedJson) as TEnvKeyExportJSON; + + const infisicalImportData: InfisicalImportData = { + projects: [], + environments: [], + folders: [], + secrets: [] + }; + + parsedJson.apps.forEach((app: { name: string; id: string }) => { + infisicalImportData.projects.push({ name: app.name, id: app.id }); + }); + + // string to string map for env templates + const envTemplates = new Map(); + for (const env of parsedJson.defaultEnvironmentRoles) { + envTemplates.set(env.id, env.defaultName); + } + + // custom base environments + for (const env of parsedJson.nonDefaultEnvironmentRoles) { + envTemplates.set(env.id, env.name); + } + + // environments + for (const env of parsedJson.baseEnvironments) { + const appId = parsedJson.apps.find((a) => a.id === env.envParentId)?.id; + + // If we find the app from the envParentId, we know this is a root-level environment. + if (appId) { + infisicalImportData.environments.push({ + id: env.id, + name: envTemplates.get(env.environmentRoleId)!, + projectId: appId + }); + } + } + + const findRootInheritedSecret = ( + secret: { val?: string; inheritsEnvironmentId?: string }, + secretName: string, + envs: typeof parsedJson.envs + ): { val?: string } => { + if (!secret) { + return { + val: "" + }; + } + + // If we have a direct value, return it + if (secret.val !== undefined) { + return secret; + } + + // If there's no inheritance, return the secret as is + if (!secret.inheritsEnvironmentId) { + return secret; + } + + const inheritedEnv = envs[secret.inheritsEnvironmentId]; + if (!inheritedEnv) return secret; + return findRootInheritedSecret(inheritedEnv.variables[secretName], secretName, envs); + }; + + const targetIdToFolderIdsMap = new Map(); + + const processBranches = () => { + for (const subEnv of parsedJson.subEnvironments) { + const app = parsedJson.apps.find((a) => a.id === subEnv.envParentId); + const block = parsedJson.blocks.find((b) => b.id === subEnv.envParentId); + + if (app) { + // Handle regular app branches + const branchEnvironment = infisicalImportData.environments.find((e) => e.id === subEnv.parentEnvironmentId); + + // check if the folder already exists in the same parent environment with the same name + + const folderExists = infisicalImportData.folders.some( + (f) => f.name === subEnv.subName && f.parentFolderId === subEnv.parentEnvironmentId + ); + + // No need to map to target ID's here, because we are not dealing with blocks + if (!folderExists) { + infisicalImportData.folders.push({ + name: subEnv.subName, + parentFolderId: subEnv.parentEnvironmentId, + environmentId: branchEnvironment!.id, + id: subEnv.id + }); + } + } + + if (block) { + // Handle block branches + // 1. Find all apps that use this block + const appsUsingBlock = parsedJson.appBlocks.filter((ab) => ab.blockId === block.id); + + for (const { appId, orderIndex } of appsUsingBlock) { + // 2. 
Find the matching environment in the app based on the environment role + const blockBaseEnv = parsedJson.baseEnvironments.find((be) => be.id === subEnv.parentEnvironmentId); + + // eslint-disable-next-line no-continue + if (!blockBaseEnv) continue; + + const matchingAppEnv = parsedJson.baseEnvironments.find( + (be) => be.envParentId === appId && be.environmentRoleId === blockBaseEnv.environmentRoleId + ); + + // eslint-disable-next-line no-continue + if (!matchingAppEnv) continue; + + const folderExists = infisicalImportData.folders.some( + (f) => f.name === subEnv.subName && f.parentFolderId === matchingAppEnv.id + ); + + if (!folderExists) { + // 3. Create a folder in the matching app environment + infisicalImportData.folders.push({ + name: subEnv.subName, + parentFolderId: matchingAppEnv.id, + environmentId: matchingAppEnv.id, + id: `${subEnv.id}-${appId}` // Create unique ID for each app's copy of the branch + }); + } else { + // folder already exists, so let's map the old folder id to the new folder id + targetIdToFolderIdsMap.set(subEnv.id, `${subEnv.id}-${appId}`); + } + + // 4. Process secrets in the block branch for this app + const branchSecrets = parsedJson.envs[subEnv.id]?.variables || {}; + for (const [secretName, secretData] of Object.entries(branchSecrets)) { + if (secretData.inheritsEnvironmentId) { + const resolvedSecret = findRootInheritedSecret(secretData, secretName, parsedJson.envs); + + // If the secret already exists in the environment, we need to check the orderIndex of the appBlock. The appBlock with the highest orderIndex should take precedence.
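+ // e.g. if block A (orderIndex 0) and block B (orderIndex 2) both define DB_URL for this app environment, + // block B's value wins and replaces the earlier entry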
+ const preExistingSecretIndex = infisicalImportData.secrets.findIndex( + (s) => s.name === secretName && s.environmentId === matchingAppEnv.id + ); + + if (preExistingSecretIndex !== -1) { + const preExistingSecret = infisicalImportData.secrets[preExistingSecretIndex]; + + if ( + preExistingSecret.appBlockOrderIndex !== undefined && + orderIndex > preExistingSecret.appBlockOrderIndex + ) { + // if the existing secret has a lower orderIndex, we should replace it + infisicalImportData.secrets[preExistingSecretIndex] = { + ...preExistingSecret, + value: secretData.val || "", + appBlockOrderIndex: orderIndex + }; + } + + // eslint-disable-next-line no-continue + continue; + } + + infisicalImportData.secrets.push({ + id: randomUUID(), + name: secretName, + environmentId: matchingAppEnv.id, + value: secretData.val || "", + folderId: `${subEnv.id}-${appId}`, + appBlockOrderIndex: orderIndex + }); + } + } + } + } + } + }; + + const processBlocksForApp = (appIds: string[]) => { + for (const appId of appIds) { + const blocksInApp = parsedJson.appBlocks.filter((ab) => ab.appId === appId); + logger.info( + { + blocksInApp + }, + "[processBlocksForApp]: Processing blocks for app" + ); + + for (const appBlock of blocksInApp) { + // 1. find all base environments for this block + const blockBaseEnvironments = parsedJson.baseEnvironments.filter((env) => env.envParentId === appBlock.blockId); + logger.info( + { + blockBaseEnvironments + }, + "[processBlocksForApp]: Processing block base environments" + ); + + for (const blockBaseEnvironment of blockBaseEnvironments) { + // 2. find the corresponding environment that is not from the block + const matchingEnv = parsedJson.baseEnvironments.find( + (be) => + be.environmentRoleId === blockBaseEnvironment.environmentRoleId && be.envParentId !== appBlock.blockId + ); + + if (!matchingEnv) { + throw new Error(`Could not find environment for block ${appBlock.blockId}`); + } + + // 3. find all the secrets for this environment block + const blockSecrets = parsedJson.envs[blockBaseEnvironment.id].variables; + + logger.info( + { + blockSecretsLength: Object.keys(blockSecrets).length + }, + "[processBlocksForApp]: Processing block secrets" + ); + + // 4. process each secret + for (const secret of Object.keys(blockSecrets)) { + const selectedSecret = blockSecrets[secret]; + + if (selectedSecret.inheritsEnvironmentId) { + const resolvedSecret = findRootInheritedSecret(selectedSecret, secret, parsedJson.envs); + + // If the secret already exists in the environment, we need to check the orderIndex of the appBlock. The appBlock with the highest orderIndex should take precedence. 
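+ // (same highest-orderIndex-wins rule as in processBranches above)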
+ const preExistingSecretIndex = infisicalImportData.secrets.findIndex( + (s) => s.name === secret && s.environmentId === matchingEnv.id + ); + + if (preExistingSecretIndex !== -1) { + const preExistingSecret = infisicalImportData.secrets[preExistingSecretIndex]; + + if ( + preExistingSecret.appBlockOrderIndex !== undefined && + appBlock.orderIndex > preExistingSecret.appBlockOrderIndex + ) { + // if the existing secret has a lower orderIndex, we should replace it + infisicalImportData.secrets[preExistingSecretIndex] = { + ...preExistingSecret, + value: selectedSecret.val || "", + appBlockOrderIndex: appBlock.orderIndex + }; + } + + // eslint-disable-next-line no-continue + continue; + } + + infisicalImportData.secrets.push({ + id: randomUUID(), + name: secret, + environmentId: matchingEnv.id, + value: resolvedSecret.val || "", + appBlockOrderIndex: appBlock.orderIndex + }); + } else { + // If the secret already exists in the environment, we need to check the orderIndex of the appBlock. The appBlock with the highest orderIndex should take precedence. + const preExistingSecretIndex = infisicalImportData.secrets.findIndex( + (s) => s.name === secret && s.environmentId === matchingEnv.id + ); + + if (preExistingSecretIndex !== -1) { + const preExistingSecret = infisicalImportData.secrets[preExistingSecretIndex]; + + if ( + preExistingSecret.appBlockOrderIndex !== undefined && + appBlock.orderIndex > preExistingSecret.appBlockOrderIndex + ) { + // if the existing secret has a lower orderIndex, we should replace it + infisicalImportData.secrets[preExistingSecretIndex] = { + ...preExistingSecret, + value: selectedSecret.val || "", + appBlockOrderIndex: appBlock.orderIndex + }; + } + + // eslint-disable-next-line no-continue + continue; + } + + infisicalImportData.secrets.push({ + id: randomUUID(), + name: secret, + environmentId: matchingEnv.id, + value: selectedSecret.val || "", + appBlockOrderIndex: appBlock.orderIndex + }); + } + } + } + } + } + }; + + processBranches(); + processBlocksForApp(infisicalImportData.projects.map((app) => app.id)); + + for (const env of Object.keys(parsedJson.envs)) { + // Skip user-specific environments + // eslint-disable-next-line no-continue + if (env.includes("|")) continue; + + const envData = parsedJson.envs[env]; + const baseEnv = parsedJson.baseEnvironments.find((be) => be.id === env); + const subEnv = parsedJson.subEnvironments.find((se) => se.id === env); + + // Skip if we can't find either a base environment or sub-environment + if (!baseEnv && !subEnv) { + logger.info( + { + envId: env + }, + "[parseEnvKeyDataFn]: Could not find base or sub environment for env, skipping" + ); + // eslint-disable-next-line no-continue + continue; + } + + // If this is a base environment of a block, skip it (handled by processBlocksForApp) + if (baseEnv) { + const isBlock = parsedJson.appBlocks.some((block) => block.blockId === baseEnv.envParentId); + if (isBlock) { + logger.info( + { + envId: env, + baseEnv + }, + "[parseEnvKeyDataFn]: Skipping block environment (handled separately)" + ); + // eslint-disable-next-line no-continue + continue; + } + } + + // Process each secret in this environment or branch + for (const [secretName, secretData] of Object.entries(envData.variables)) { + const indexOfExistingSecret = infisicalImportData.secrets.findIndex( + (s) => + s.name === secretName && + (s.environmentId === subEnv?.parentEnvironmentId || s.environmentId === env) && + (s.folderId ? s.folderId === subEnv?.id : true) && + (secretData.val ? 
s.value === secretData.val : true) + ); + + if (secretData.inheritsEnvironmentId) { + const resolvedSecret = findRootInheritedSecret(secretData, secretName, parsedJson.envs); + // Check if there's already a secret with this name in the environment; if there is, we should override it, because if there's already one, we know it's coming from a block. + // Variables from the normal environment should take precedence over variables from the block. + if (indexOfExistingSecret !== -1) { + // if an existing secret is found, we should replace it directly + const newSecret: (typeof infisicalImportData.secrets)[number] = { + ...infisicalImportData.secrets[indexOfExistingSecret], + value: resolvedSecret.val || "" + }; + + infisicalImportData.secrets[indexOfExistingSecret] = newSecret; + + // eslint-disable-next-line no-continue + continue; + } + + infisicalImportData.secrets.push({ + id: randomUUID(), + name: secretName, + environmentId: subEnv ? subEnv.parentEnvironmentId : env, + value: resolvedSecret.val || "", + ...(subEnv && { folderId: subEnv.id }) // Add folderId if this is a branch secret + }); + } else { + // Check if there's already a secret with this name in the environment; if there is, we should override it, because if there's already one, we know it's coming from a block. + // Variables from the normal environment should take precedence over variables from the block. + + if (indexOfExistingSecret !== -1) { + // if an existing secret is found, we should replace it directly + const newSecret: (typeof infisicalImportData.secrets)[number] = { + ...infisicalImportData.secrets[indexOfExistingSecret], + value: secretData.val || "" + }; + + infisicalImportData.secrets[indexOfExistingSecret] = newSecret; + + // eslint-disable-next-line no-continue + continue; + } + + const folderId = targetIdToFolderIdsMap.get(subEnv?.id || "") || subEnv?.id; + + infisicalImportData.secrets.push({ + id: randomUUID(), + name: secretName, + environmentId: subEnv ?
subEnv.parentEnvironmentId : env, + value: secretData.val || "", + ...(folderId && { folderId }) + }); + } + } + } + + return infisicalImportData; +}; + +export const importDataIntoInfisicalFn = async ({ + projectService, + projectEnvDAL, + projectDAL, + secretDAL, + kmsService, + secretVersionDAL, + secretTagDAL, + secretVersionTagDAL, + folderDAL, + input: { data, actor, actorId, actorOrgId, actorAuthMethod } +}: TImportDataIntoInfisicalDTO) => { + // Import data to infisical + if (!data || !data.projects) { + throw new BadRequestError({ message: "No projects found in data" }); + } + + const originalToNewProjectId = new Map(); + const originalToNewEnvironmentId = new Map< + string, + { envId: string; envSlug: string; rootFolderId: string; projectId: string } + >(); + const originalToNewFolderId = new Map< + string, + { + folderId: string; + projectId: string; + } + >(); + const projectsNotImported: string[] = []; + + await projectDAL.transaction(async (tx) => { + for await (const project of data.projects) { + const newProject = await projectService + .createProject({ + actor, + actorId, + actorOrgId, + actorAuthMethod, + workspaceName: project.name, + createDefaultEnvs: false, + tx + }) + .catch((e) => { + logger.error(e, `Failed to import to project [name:${project.name}]`); + throw new BadRequestError({ message: `Failed to import to project [name:${project.name}]` }); + }); + originalToNewProjectId.set(project.id, newProject.id); + } + + // Import environments + if (data.environments) { + for await (const environment of data.environments) { + const projectId = originalToNewProjectId.get(environment.projectId); + const slug = slugify(`${environment.name}-${alphaNumericNanoId(4)}`); + + if (!projectId) { + projectsNotImported.push(environment.projectId); + // eslint-disable-next-line no-continue + continue; + } + + const existingEnv = await projectEnvDAL.findOne({ projectId, slug }, tx); + + if (existingEnv) { + throw new BadRequestError({ + message: `Environment with slug '${slug}' already exist`, + name: "CreateEnvironment" + }); + } + + const lastPos = await projectEnvDAL.findLastEnvPosition(projectId, tx); + const doc = await projectEnvDAL.create({ slug, name: environment.name, projectId, position: lastPos + 1 }, tx); + const folder = await folderDAL.create({ name: "root", parentId: null, envId: doc.id, version: 1 }, tx); + + originalToNewEnvironmentId.set(environment.id, { + envSlug: doc.slug, + envId: doc.id, + rootFolderId: folder.id, + projectId + }); + } + } + + if (data.folders) { + for await (const folder of data.folders) { + const parentEnv = originalToNewEnvironmentId.get(folder.parentFolderId as string); + + if (!parentEnv) { + // eslint-disable-next-line no-continue + continue; + } + + const newFolder = await folderDAL.create( + { + name: folder.name, + envId: parentEnv.envId, + parentId: parentEnv.rootFolderId + }, + tx + ); + + originalToNewFolderId.set(folder.id, { + folderId: newFolder.id, + projectId: parentEnv.projectId + }); + } + } + + // Useful for debugging: + // console.log("data.secrets", data.secrets); + // console.log("data.folders", data.folders); + // console.log("data.environment", data.environments); + + if (data.secrets && data.secrets.length > 0) { + const mappedToEnvironmentId = new Map< + string, + { + secretKey: string; + secretValue: string; + folderId?: string; + isFromBlock?: boolean; + }[] + >(); + + for (const secret of data.secrets) { + const targetId = secret.folderId || secret.environmentId; + + // Skip if we can't find either an environment 
or folder mapping for this secret + if (!originalToNewEnvironmentId.get(secret.environmentId) && !originalToNewFolderId.get(targetId)) { + logger.info({ secret }, "[importDataIntoInfisicalFn]: Could not find environment or folder for secret"); + + // eslint-disable-next-line no-continue + continue; + } + + if (!mappedToEnvironmentId.has(targetId)) { + mappedToEnvironmentId.set(targetId, []); + } + + const alreadyHasSecret = mappedToEnvironmentId + .get(targetId)! + .find((el) => el.secretKey === secret.name && el.folderId === secret.folderId); + + if (alreadyHasSecret && alreadyHasSecret.isFromBlock) { + // remove the existing secret if any + mappedToEnvironmentId + .get(targetId)! + .splice(mappedToEnvironmentId.get(targetId)!.indexOf(alreadyHasSecret), 1); + } + mappedToEnvironmentId.get(targetId)!.push({ + secretKey: secret.name, + secretValue: secret.value || "", + folderId: secret.folderId, + isFromBlock: secret.appBlockOrderIndex !== undefined + }); + } + + // for each of the mappedEnvironmentId + for await (const [targetId, secrets] of mappedToEnvironmentId) { + logger.info("[importDataIntoInfisicalFn]: Processing secrets for targetId", targetId); + + let selectedFolder: TSecretFolders | undefined; + let selectedProjectId: string | undefined; + + // Case 1: Secret belongs to a folder / branch / branch of a block + const foundFolder = originalToNewFolderId.get(targetId); + if (foundFolder) { + logger.info("[importDataIntoInfisicalFn]: Processing secrets for folder"); + selectedFolder = await folderDAL.findById(foundFolder.folderId, tx); + selectedProjectId = foundFolder.projectId; + } else { + logger.info("[importDataIntoInfisicalFn]: Processing secrets for normal environment"); + const environment = data.environments.find((env) => env.id === targetId); + if (!environment) { + logger.info( + { + targetId + }, + "[importDataIntoInfisicalFn]: Could not find environment for secret" + ); + // eslint-disable-next-line no-continue + continue; + } + + const projectId = originalToNewProjectId.get(environment.projectId)!; + + if (!projectId) { + throw new BadRequestError({ message: `Failed to import secret, project not found` }); + } + + const env = originalToNewEnvironmentId.get(targetId); + if (!env) { + logger.info( + { + targetId + }, + "[importDataIntoInfisicalFn]: Could not find environment for secret" + ); + + // eslint-disable-next-line no-continue + continue; + } + + const folder = await folderDAL.findBySecretPath(projectId, env.envSlug, "/", tx); + + if (!folder) { + throw new NotFoundError({ + message: `Folder not found for the given environment slug (${env.envSlug}) & secret path (/)`, + name: "Create secret" + }); + } + + selectedFolder = folder; + selectedProjectId = projectId; + } + + if (!selectedFolder) { + throw new NotFoundError({ + message: `Folder not found for the given environment slug & secret path`, + name: "CreateSecret" + }); + } + + if (!selectedProjectId) { + throw new NotFoundError({ + message: `Project not found for the given environment slug & secret path`, + name: "CreateSecret" + }); + } + + const { encryptor: secretManagerEncrypt } = await kmsService.createCipherPairWithDataKey( + { + type: KmsDataKey.SecretManager, + projectId: selectedProjectId + }, + tx + ); + + const secretBatches = chunkArray(secrets, 2500); + for await (const secretBatch of secretBatches) { + const secretsByKeys = await secretDAL.findBySecretKeys( + selectedFolder.id, + secretBatch.map((el) => ({ + key: el.secretKey, + type: SecretType.Shared + })), + tx + ); + if 
(secretsByKeys.length) {
+          throw new BadRequestError({
+            message: `Secrets already exist: ${secretsByKeys.map((el) => el.key).join(",")}`
+          });
+        }
+        await fnSecretBulkInsert({
+          inputSecrets: secretBatch.map((el) => {
+            const references = getAllSecretReferences(el.secretValue).nestedReferences;
+
+            return {
+              version: 1,
+              encryptedValue: el.secretValue
+                ? secretManagerEncrypt({ plainText: Buffer.from(el.secretValue) }).cipherTextBlob
+                : undefined,
+              key: el.secretKey,
+              references,
+              type: SecretType.Shared
+            };
+          }),
+          folderId: selectedFolder.id,
+          secretDAL,
+          secretVersionDAL,
+          secretTagDAL,
+          secretVersionTagDAL,
+          tx
+        });
+      }
+    }
+  }
+  });
+
+  return { projectsNotImported };
+};
diff --git a/backend/src/services/external-migration/external-migration-queue.ts b/backend/src/services/external-migration/external-migration-queue.ts
new file mode 100644
index 0000000000..3cbe8b616d
--- /dev/null
+++ b/backend/src/services/external-migration/external-migration-queue.ts
@@ -0,0 +1,152 @@
+import { SecretEncryptionAlgo, SecretKeyEncoding } from "@app/db/schemas";
+import { infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
+import { logger } from "@app/lib/logger";
+import { QueueJobs, QueueName, TQueueServiceFactory } from "@app/queue";
+
+import { TKmsServiceFactory } from "../kms/kms-service";
+import { TProjectDALFactory } from "../project/project-dal";
+import { TProjectServiceFactory } from "../project/project-service";
+import { TProjectEnvDALFactory } from "../project-env/project-env-dal";
+import { TProjectEnvServiceFactory } from "../project-env/project-env-service";
+import { TSecretFolderDALFactory } from "../secret-folder/secret-folder-dal";
+import { TSecretTagDALFactory } from "../secret-tag/secret-tag-dal";
+import { TSecretV2BridgeDALFactory } from "../secret-v2-bridge/secret-v2-bridge-dal";
+import { TSecretV2BridgeServiceFactory } from "../secret-v2-bridge/secret-v2-bridge-service";
+import { TSecretVersionV2DALFactory } from "../secret-v2-bridge/secret-version-dal";
+import { TSecretVersionV2TagDALFactory } from "../secret-v2-bridge/secret-version-tag-dal";
+import { SmtpTemplates, TSmtpService } from "../smtp/smtp-service";
+import { importDataIntoInfisicalFn } from "./external-migration-fns";
+import { ExternalPlatforms, TImportInfisicalDataCreate } from "./external-migration-types";
+
+export type TExternalMigrationQueueFactoryDep = {
+  smtpService: TSmtpService;
+  queueService: TQueueServiceFactory;
+
+  projectDAL: Pick;
+  projectEnvDAL: Pick;
+  kmsService: Pick;
+
+  secretDAL: Pick;
+  secretVersionDAL: Pick;
+  secretTagDAL: Pick;
+  secretVersionTagDAL: Pick;
+
+  folderDAL: Pick;
+  projectService: Pick;
+  projectEnvService: Pick;
+  secretV2BridgeService: Pick;
+};
+
+export type TExternalMigrationQueueFactory = ReturnType;
+
+export const externalMigrationQueueFactory = ({
+  queueService,
+  projectService,
+  smtpService,
+  projectDAL,
+  projectEnvService,
+  secretV2BridgeService,
+  kmsService,
+  projectEnvDAL,
+  secretDAL,
+  secretVersionDAL,
+  secretTagDAL,
+  secretVersionTagDAL,
+  folderDAL
+}: TExternalMigrationQueueFactoryDep) => {
+  const startImport = async (dto: {
+    actorEmail: string;
+    data: {
+      iv: string;
+      tag: string;
+      ciphertext: string;
+      algorithm: SecretEncryptionAlgo;
+      encoding: SecretKeyEncoding;
+    };
+  }) => {
+    await queueService.queue(
+      QueueName.ImportSecretsFromExternalSource,
+      QueueJobs.ImportSecretsFromExternalSource,
+      dto,
+      {
+        removeOnComplete: true,
+        removeOnFail: true
+      }
+    );
+  };
+
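+  // `startImport` is the producer half of this queue; the `queueService.start`
+  // registration below is the consumer. A minimal caller sketch (hypothetical
+  // payload and email; the real producer is the external migration service,
+  // which encrypts the parsed EnvKey payload with infisicalSymmetricEncypt
+  // before enqueueing):
+  //
+  //   const encrypted = infisicalSymmetricEncypt(JSON.stringify(importPayload));
+  //   await externalMigrationQueue.startImport({ actorEmail: "admin@example.com", data: encrypted });
+  //
+  // The job is enqueued with removeOnComplete/removeOnFail and no retry options,
+  // so a failed import is reported once by email (see the catch block below)
+  // rather than retried.
+
+  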
queueService.start(QueueName.ImportSecretsFromExternalSource, async (job) => { + try { + const { data, actorEmail } = job.data; + + await smtpService.sendMail({ + recipients: [actorEmail], + subjectLine: "Infisical import started", + substitutions: { + provider: ExternalPlatforms.EnvKey + }, + template: SmtpTemplates.ExternalImportStarted + }); + + const decrypted = infisicalSymmetricDecrypt({ + ciphertext: data.ciphertext, + iv: data.iv, + keyEncoding: data.encoding, + tag: data.tag + }); + + const decryptedJson = JSON.parse(decrypted) as TImportInfisicalDataCreate; + + const { projectsNotImported } = await importDataIntoInfisicalFn({ + input: decryptedJson, + projectDAL, + projectEnvDAL, + secretDAL, + secretVersionDAL, + secretTagDAL, + secretVersionTagDAL, + folderDAL, + kmsService, + projectService, + projectEnvService, + secretV2BridgeService + }); + + if (projectsNotImported.length) { + logger.info( + { + actorEmail, + actorOrgId: decryptedJson.actorOrgId, + projectsNotImported + }, + "One or more projects were not imported during import from external source" + ); + } + + await smtpService.sendMail({ + recipients: [actorEmail], + subjectLine: "Infisical import successful", + substitutions: { + provider: ExternalPlatforms.EnvKey + }, + template: SmtpTemplates.ExternalImportSuccessful + }); + } catch (err) { + await smtpService.sendMail({ + recipients: [job.data.actorEmail], + subjectLine: "Infisical import failed", + substitutions: { + provider: ExternalPlatforms.EnvKey, + // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access, @typescript-eslint/no-explicit-any, @typescript-eslint/no-unsafe-assignment + error: (err as any)?.message || "Unknown error" + }, + template: SmtpTemplates.ExternalImportFailed + }); + + logger.error(err, "Failed to import data from external source"); + } + }); + return { + startImport + }; +}; diff --git a/backend/src/services/external-migration/external-migration-service.ts b/backend/src/services/external-migration/external-migration-service.ts new file mode 100644 index 0000000000..700819022c --- /dev/null +++ b/backend/src/services/external-migration/external-migration-service.ts @@ -0,0 +1,66 @@ +import { OrgMembershipRole } from "@app/db/schemas"; +import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service"; +import { infisicalSymmetricEncypt } from "@app/lib/crypto/encryption"; +import { ForbiddenRequestError } from "@app/lib/errors"; + +import { TUserDALFactory } from "../user/user-dal"; +import { decryptEnvKeyDataFn, parseEnvKeyDataFn } from "./external-migration-fns"; +import { TExternalMigrationQueueFactory } from "./external-migration-queue"; +import { TImportEnvKeyDataCreate } from "./external-migration-types"; + +type TExternalMigrationServiceFactoryDep = { + permissionService: TPermissionServiceFactory; + externalMigrationQueue: TExternalMigrationQueueFactory; + userDAL: Pick; +}; + +export type TExternalMigrationServiceFactory = ReturnType; + +export const externalMigrationServiceFactory = ({ + permissionService, + externalMigrationQueue, + userDAL +}: TExternalMigrationServiceFactoryDep) => { + const importEnvKeyData = async ({ + decryptionKey, + encryptedJson, + actor, + actorId, + actorOrgId, + actorAuthMethod + }: TImportEnvKeyDataCreate) => { + const { membership } = await permissionService.getOrgPermission( + actor, + actorId, + actorOrgId, + actorAuthMethod, + actorOrgId + ); + if (membership.role !== OrgMembershipRole.Admin) { + throw new ForbiddenRequestError({ message: "Only admins can 
import data" }); + } + + const user = await userDAL.findById(actorId); + const json = await decryptEnvKeyDataFn(decryptionKey, encryptedJson); + const envKeyData = await parseEnvKeyDataFn(json); + + const stringifiedJson = JSON.stringify({ + data: envKeyData, + actor, + actorId, + actorOrgId, + actorAuthMethod + }); + + const encrypted = infisicalSymmetricEncypt(stringifiedJson); + + await externalMigrationQueue.startImport({ + actorEmail: user.email!, + data: encrypted + }); + }; + + return { + importEnvKeyData + }; +}; diff --git a/backend/src/services/external-migration/external-migration-types.ts b/backend/src/services/external-migration/external-migration-types.ts new file mode 100644 index 0000000000..32c70a6885 --- /dev/null +++ b/backend/src/services/external-migration/external-migration-types.ts @@ -0,0 +1,101 @@ +import { ActorAuthMethod, ActorType } from "../auth/auth-type"; + +export type InfisicalImportData = { + projects: Array<{ name: string; id: string }>; + environments: Array<{ name: string; id: string; projectId: string; envParentId?: string }>; + folders: Array<{ id: string; name: string; environmentId: string; parentFolderId?: string }>; + secrets: Array<{ + id: string; + name: string; + environmentId: string; + value: string; + folderId?: string; + appBlockOrderIndex?: number; // Not used for infisical import, only used for building the import structure to determine which block(s) take precedence. + }>; +}; + +export type TImportEnvKeyDataCreate = { + decryptionKey: string; + encryptedJson: { nonce: string; data: string }; + actor: ActorType; + actorId: string; + actorOrgId: string; + actorAuthMethod: ActorAuthMethod; +}; + +export type TImportInfisicalDataCreate = { + data: InfisicalImportData; + actor: ActorType; + actorId: string; + actorOrgId: string; + actorAuthMethod: ActorAuthMethod; +}; + +export type TEnvKeyExportJSON = { + schemaVersion: string; + org: { + id: string; + name: string; + }; + + // Apps are projects + apps: { + id: string; + name: string; + }[]; + // Blocks are basically global projects that can be imported in other projects + blocks: { + id: string; + name: string; + }[]; + + appBlocks: { + appId: string; + blockId: string; + orderIndex: number; + }[]; + + defaultEnvironmentRoles: { + id: string; + defaultName: string; + }[]; + + nonDefaultEnvironmentRoles: { + id: string; + name: string; + }[]; + + baseEnvironments: { + id: string; + envParentId: string; + environmentRoleId: string; + }[]; + + // Branches for both blocks and apps + subEnvironments: { + id: string; + envParentId: string; + environmentRoleId: string; + parentEnvironmentId: string; + subName: string; + }[]; + + envs: Record< + string, + { + variables: Record< + string, + { + val?: string; + inheritsEnvironmentId?: string; + } + >; + + inherits: Record; + } + >; +}; + +export enum ExternalPlatforms { + EnvKey = "EnvKey" +} diff --git a/backend/src/services/group-project/group-project-dal.ts b/backend/src/services/group-project/group-project-dal.ts index 3b0523dde3..a776f7245f 100644 --- a/backend/src/services/group-project/group-project-dal.ts +++ b/backend/src/services/group-project/group-project-dal.ts @@ -1,7 +1,7 @@ import { Knex } from "knex"; import { TDbClient } from "@app/db"; -import { TableName } from "@app/db/schemas"; +import { TableName, TUserEncryptionKeys } from "@app/db/schemas"; import { DatabaseError } from "@app/lib/errors"; import { ormify, sqlNestRelationships } from "@app/lib/knex"; @@ -10,10 +10,15 @@ export type TGroupProjectDALFactory = ReturnType; export 
const groupProjectDALFactory = (db: TDbClient) => { const groupProjectOrm = ormify(db, TableName.GroupProjectMembership); - const findByProjectId = async (projectId: string, tx?: Knex) => { + const findByProjectId = async (projectId: string, filter?: { groupId?: string }, tx?: Knex) => { try { - const docs = await (tx || db)(TableName.GroupProjectMembership) + const docs = await (tx || db.replicaNode())(TableName.GroupProjectMembership) .where(`${TableName.GroupProjectMembership}.projectId`, projectId) + .where((qb) => { + if (filter?.groupId) { + void qb.where(`${TableName.Groups}.id`, "=", filter.groupId); + } + }) .join(TableName.Groups, `${TableName.GroupProjectMembership}.groupId`, `${TableName.Groups}.id`) .join( TableName.GroupProjectMembershipRole, @@ -95,5 +100,144 @@ export const groupProjectDALFactory = (db: TDbClient) => { } }; - return { ...groupProjectOrm, findByProjectId }; + const findByUserId = async (userId: string, orgId: string, tx?: Knex) => { + try { + const docs = await (tx || db.replicaNode())(TableName.UserGroupMembership) + .where(`${TableName.UserGroupMembership}.userId`, userId) + .join(TableName.Groups, function () { + this.on(`${TableName.UserGroupMembership}.groupId`, "=", `${TableName.Groups}.id`).andOn( + `${TableName.Groups}.orgId`, + "=", + db.raw("?", [orgId]) + ); + }) + .select( + db.ref("id").withSchema(TableName.Groups), + db.ref("name").withSchema(TableName.Groups), + db.ref("slug").withSchema(TableName.Groups), + db.ref("orgId").withSchema(TableName.Groups) + ); + + return docs; + } catch (error) { + throw new DatabaseError({ error, name: "FindByUserId" }); + } + }; + + // The GroupProjectMembership table has a reference to the project (projectId) AND the group (groupId). + // We need to join the GroupProjectMembership table with the Groups table to get the group name and slug. + // We also need to join the GroupProjectMembershipRole table to get the role of the group in the project. + const findAllProjectGroupMembers = async (projectId: string) => { + const docs = await db(TableName.UserGroupMembership) + // Join the GroupProjectMembership table with the Groups table to get the group name and slug. 
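+      // Rows come back keyed by the user-group membership id (selected below), so a
+      // user who belongs to several groups granting access to this project yields one
+      // member entry per group membership; sqlNestRelationships then folds the
+      // per-membership role rows into a nested `roles` array.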
+ .join( + TableName.GroupProjectMembership, + `${TableName.UserGroupMembership}.groupId`, + `${TableName.GroupProjectMembership}.groupId` // this gives us access to the project id in the group membership + ) + + .join(TableName.Project, `${TableName.GroupProjectMembership}.projectId`, `${TableName.Project}.id`) + + .where(`${TableName.GroupProjectMembership}.projectId`, projectId) + + .join(TableName.Users, `${TableName.UserGroupMembership}.userId`, `${TableName.Users}.id`) + .join( + TableName.UserEncryptionKey, + `${TableName.UserEncryptionKey}.userId`, + `${TableName.Users}.id` + ) + .join( + TableName.GroupProjectMembershipRole, + `${TableName.GroupProjectMembershipRole}.projectMembershipId`, + `${TableName.GroupProjectMembership}.id` + ) + .leftJoin( + TableName.ProjectRoles, + `${TableName.GroupProjectMembershipRole}.customRoleId`, + `${TableName.ProjectRoles}.id` + ) + .select( + db.ref("id").withSchema(TableName.UserGroupMembership), + db.ref("createdAt").withSchema(TableName.UserGroupMembership), + db.ref("isGhost").withSchema(TableName.Users), + db.ref("username").withSchema(TableName.Users), + db.ref("email").withSchema(TableName.Users), + db.ref("publicKey").withSchema(TableName.UserEncryptionKey), + db.ref("firstName").withSchema(TableName.Users), + db.ref("lastName").withSchema(TableName.Users), + db.ref("id").withSchema(TableName.Users).as("userId"), + db.ref("role").withSchema(TableName.GroupProjectMembershipRole), + db.ref("id").withSchema(TableName.GroupProjectMembershipRole).as("membershipRoleId"), + db.ref("customRoleId").withSchema(TableName.GroupProjectMembershipRole), + db.ref("name").withSchema(TableName.ProjectRoles).as("customRoleName"), + db.ref("slug").withSchema(TableName.ProjectRoles).as("customRoleSlug"), + db.ref("temporaryMode").withSchema(TableName.GroupProjectMembershipRole), + db.ref("isTemporary").withSchema(TableName.GroupProjectMembershipRole), + db.ref("temporaryRange").withSchema(TableName.GroupProjectMembershipRole), + db.ref("temporaryAccessStartTime").withSchema(TableName.GroupProjectMembershipRole), + db.ref("temporaryAccessEndTime").withSchema(TableName.GroupProjectMembershipRole), + db.ref("name").as("projectName").withSchema(TableName.Project) + ) + .where({ isGhost: false }); + + const members = sqlNestRelationships({ + data: docs, + parentMapper: ({ + email, + firstName, + username, + lastName, + publicKey, + isGhost, + id, + userId, + projectName, + createdAt + }) => ({ + isGroupMember: true, + id, + userId, + projectId, + project: { + id: projectId, + name: projectName + }, + user: { email, username, firstName, lastName, id: userId, publicKey, isGhost }, + createdAt + }), + key: "id", + childrenMapper: [ + { + label: "roles" as const, + key: "membershipRoleId", + mapper: ({ + role, + customRoleId, + customRoleName, + customRoleSlug, + membershipRoleId, + temporaryRange, + temporaryMode, + temporaryAccessEndTime, + temporaryAccessStartTime, + isTemporary + }) => ({ + id: membershipRoleId, + role, + customRoleId, + customRoleName, + customRoleSlug, + temporaryRange, + temporaryMode, + temporaryAccessEndTime, + temporaryAccessStartTime, + isTemporary + }) + } + ] + }); + return members; + }; + + return { ...groupProjectOrm, findByProjectId, findByUserId, findAllProjectGroupMembers }; }; diff --git a/backend/src/services/group-project/group-project-service.ts b/backend/src/services/group-project/group-project-service.ts index 17862dd6f6..896afe93df 100644 --- a/backend/src/services/group-project/group-project-service.ts +++ 
b/backend/src/services/group-project/group-project-service.ts @@ -7,7 +7,7 @@ import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services import { isAtLeastAsPrivileged } from "@app/lib/casl"; import { decryptAsymmetric, encryptAsymmetric } from "@app/lib/crypto"; import { infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption"; -import { BadRequestError, ForbiddenRequestError } from "@app/lib/errors"; +import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors"; import { groupBy } from "@app/lib/fn"; import { TGroupDALFactory } from "../../ee/services/group/group-dal"; @@ -22,6 +22,7 @@ import { TGroupProjectMembershipRoleDALFactory } from "./group-project-membershi import { TCreateProjectGroupDTO, TDeleteProjectGroupDTO, + TGetGroupInProjectDTO, TListProjectGroupDTO, TUpdateProjectGroupDTO } from "./group-project-types"; @@ -33,7 +34,7 @@ type TGroupProjectServiceFactoryDep = { "create" | "transaction" | "insertMany" | "delete" >; userGroupMembershipDAL: Pick; - projectDAL: Pick; + projectDAL: Pick; projectKeyDAL: Pick; projectRoleDAL: Pick; projectBotDAL: TProjectBotDALFactory; @@ -55,19 +56,17 @@ export const groupProjectServiceFactory = ({ permissionService }: TGroupProjectServiceFactoryDep) => { const addGroupToProject = async ({ - groupSlug, actor, actorId, actorOrgId, actorAuthMethod, - projectSlug, - role + roles, + projectId, + groupId }: TCreateProjectGroupDTO) => { - const project = await projectDAL.findOne({ - slug: projectSlug - }); + const project = await projectDAL.findById(projectId); - if (!project) throw new BadRequestError({ message: `Failed to find project with slug ${projectSlug}` }); + if (!project) throw new NotFoundError({ message: `Failed to find project with ID ${projectId}` }); if (project.version < 2) throw new BadRequestError({ message: `Failed to add group to E2EE project` }); const { permission } = await permissionService.getProjectPermission( @@ -79,25 +78,51 @@ export const groupProjectServiceFactory = ({ ); ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Create, ProjectPermissionSub.Groups); - const group = await groupDAL.findOne({ orgId: actorOrgId, slug: groupSlug }); - if (!group) throw new BadRequestError({ message: `Failed to find group with slug ${groupSlug}` }); + const group = await groupDAL.findOne({ orgId: actorOrgId, id: groupId }); + if (!group) throw new NotFoundError({ message: `Failed to find group with ID ${groupId}` }); const existingGroup = await groupProjectDAL.findOne({ groupId: group.id, projectId: project.id }); if (existingGroup) throw new BadRequestError({ - message: `Group with slug ${groupSlug} already exists in project with id ${project.id}` + message: `Group with ID ${groupId} already exists in project with id ${project.id}` }); - const { permission: rolePermission, role: customRole } = await permissionService.getProjectPermissionByRole( - role, - project.id + for await (const { role: requestedRoleChange } of roles) { + const { permission: rolePermission } = await permissionService.getProjectPermissionByRole( + requestedRoleChange, + project.id + ); + + const hasRequiredPrivileges = isAtLeastAsPrivileged(permission, rolePermission); + + if (!hasRequiredPrivileges) { + throw new ForbiddenRequestError({ message: "Failed to assign group to a more privileged role" }); + } + } + + // validate custom roles input + const customInputRoles = roles.filter( + ({ role }) => !Object.values(ProjectMembershipRole).includes(role as ProjectMembershipRole) ); - const 
hasPrivilege = isAtLeastAsPrivileged(permission, rolePermission); - if (!hasPrivilege) - throw new ForbiddenRequestError({ - message: "Failed to add group to project with more privileged role" + const hasCustomRole = Boolean(customInputRoles.length); + const customRoles = hasCustomRole + ? await projectRoleDAL.find({ + projectId: project.id, + $in: { slug: customInputRoles.map(({ role }) => role) } + }) + : []; + + if (customRoles.length !== customInputRoles.length) { + const customRoleSlugs = customRoles.map((customRole) => customRole.slug); + const missingInputRoles = customInputRoles + .filter((inputRole) => !customRoleSlugs.includes(inputRole.role)) + .map((role) => role.role); + + throw new NotFoundError({ + message: `Custom role/s not found: ${missingInputRoles.join(", ")}` }); - const isCustomRole = Boolean(customRole); + } + const customRolesGroupBySlug = groupBy(customRoles, ({ slug }) => slug); const projectGroup = await groupProjectDAL.transaction(async (tx) => { const groupProjectMembership = await groupProjectDAL.create( @@ -108,14 +133,31 @@ export const groupProjectServiceFactory = ({ tx ); - await groupProjectMembershipRoleDAL.create( - { + const sanitizedProjectMembershipRoles = roles.map((inputRole) => { + const isCustomRole = Boolean(customRolesGroupBySlug?.[inputRole.role]?.[0]); + if (!inputRole.isTemporary) { + return { + projectMembershipId: groupProjectMembership.id, + role: isCustomRole ? ProjectMembershipRole.Custom : inputRole.role, + customRoleId: customRolesGroupBySlug[inputRole.role] ? customRolesGroupBySlug[inputRole.role][0].id : null + }; + } + + // check cron or relative here later for now its just relative + const relativeTimeInMs = ms(inputRole.temporaryRange); + return { projectMembershipId: groupProjectMembership.id, - role: isCustomRole ? ProjectMembershipRole.Custom : role, - customRoleId: customRole?.id - }, - tx - ); + role: isCustomRole ? ProjectMembershipRole.Custom : inputRole.role, + customRoleId: customRolesGroupBySlug[inputRole.role] ? 
customRolesGroupBySlug[inputRole.role][0].id : null, + isTemporary: true, + temporaryMode: ProjectUserMembershipTemporaryMode.Relative, + temporaryRange: inputRole.temporaryRange, + temporaryAccessStartTime: new Date(inputRole.temporaryAccessStartTime), + temporaryAccessEndTime: new Date(new Date(inputRole.temporaryAccessStartTime).getTime() + relativeTimeInMs) + }; + }); + + await groupProjectMembershipRoleDAL.insertMany(sanitizedProjectMembershipRoles, tx); // share project key with users in group that have not // individually been added to the project and that are not part of @@ -126,24 +168,24 @@ export const groupProjectServiceFactory = ({ const ghostUser = await projectDAL.findProjectGhostUser(project.id, tx); if (!ghostUser) { - throw new BadRequestError({ - message: "Failed to find sudo user" + throw new NotFoundError({ + message: `Failed to find project owner of project with name ${project.name}` }); } const ghostUserLatestKey = await projectKeyDAL.findLatestProjectKey(ghostUser.id, project.id, tx); if (!ghostUserLatestKey) { - throw new BadRequestError({ - message: "Failed to find sudo user latest key" + throw new NotFoundError({ + message: `Failed to find project owner's latest key in project with name ${project.name}` }); } const bot = await projectBotDAL.findOne({ projectId: project.id }, tx); if (!bot) { - throw new BadRequestError({ - message: "Failed to find bot" + throw new NotFoundError({ + message: `Failed to find project bot in project with name ${project.name}` }); } @@ -183,19 +225,17 @@ export const groupProjectServiceFactory = ({ }; const updateGroupInProject = async ({ - projectSlug, - groupSlug, + projectId, + groupId, roles, actor, actorId, actorAuthMethod, actorOrgId }: TUpdateProjectGroupDTO) => { - const project = await projectDAL.findOne({ - slug: projectSlug - }); + const project = await projectDAL.findById(projectId); - if (!project) throw new BadRequestError({ message: `Failed to find project with slug ${projectSlug}` }); + if (!project) throw new NotFoundError({ message: `Failed to find project with ID ${projectId}` }); const { permission } = await permissionService.getProjectPermission( actor, @@ -206,11 +246,24 @@ export const groupProjectServiceFactory = ({ ); ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Edit, ProjectPermissionSub.Groups); - const group = await groupDAL.findOne({ orgId: actorOrgId, slug: groupSlug }); - if (!group) throw new BadRequestError({ message: `Failed to find group with slug ${groupSlug}` }); + const group = await groupDAL.findOne({ orgId: actorOrgId, id: groupId }); + if (!group) throw new NotFoundError({ message: `Failed to find group with ID ${groupId}` }); const projectGroup = await groupProjectDAL.findOne({ groupId: group.id, projectId: project.id }); - if (!projectGroup) throw new BadRequestError({ message: `Failed to find group with slug ${groupSlug}` }); + if (!projectGroup) throw new NotFoundError({ message: `Failed to find group with ID ${groupId}` }); + + for await (const { role: requestedRoleChange } of roles) { + const { permission: rolePermission } = await permissionService.getProjectPermissionByRole( + requestedRoleChange, + project.id + ); + + const hasRequiredPrivileges = isAtLeastAsPrivileged(permission, rolePermission); + + if (!hasRequiredPrivileges) { + throw new ForbiddenRequestError({ message: "Failed to assign group to a more privileged role" }); + } + } // validate custom roles input const customInputRoles = roles.filter( @@ -223,7 +276,16 @@ export const 
groupProjectServiceFactory = ({ $in: { slug: customInputRoles.map(({ role }) => role) } }) : []; - if (customRoles.length !== customInputRoles.length) throw new BadRequestError({ message: "Custom role not found" }); + if (customRoles.length !== customInputRoles.length) { + const customRoleSlugs = customRoles.map((customRole) => customRole.slug); + const missingInputRoles = customInputRoles + .filter((inputRole) => !customRoleSlugs.includes(inputRole.role)) + .map((role) => role.role); + + throw new NotFoundError({ + message: `Custom role/s not found: ${missingInputRoles.join(", ")}` + }); + } const customRolesGroupBySlug = groupBy(customRoles, ({ slug }) => slug); @@ -260,24 +322,22 @@ export const groupProjectServiceFactory = ({ }; const removeGroupFromProject = async ({ - projectSlug, - groupSlug, + projectId, + groupId, actorId, actor, actorOrgId, actorAuthMethod }: TDeleteProjectGroupDTO) => { - const project = await projectDAL.findOne({ - slug: projectSlug - }); + const project = await projectDAL.findById(projectId); - if (!project) throw new BadRequestError({ message: `Failed to find project with slug ${projectSlug}` }); + if (!project) throw new NotFoundError({ message: `Failed to find project with ID ${projectId}` }); - const group = await groupDAL.findOne({ orgId: actorOrgId, slug: groupSlug }); - if (!group) throw new BadRequestError({ message: `Failed to find group with slug ${groupSlug}` }); + const group = await groupDAL.findOne({ orgId: actorOrgId, id: groupId }); + if (!group) throw new NotFoundError({ message: `Failed to find group with ID ${groupId}` }); const groupProjectMembership = await groupProjectDAL.findOne({ groupId: group.id, projectId: project.id }); - if (!groupProjectMembership) throw new BadRequestError({ message: `Failed to find group with slug ${groupSlug}` }); + if (!groupProjectMembership) throw new NotFoundError({ message: `Failed to find group with ID ${groupId}` }); const { permission } = await permissionService.getProjectPermission( actor, @@ -311,17 +371,17 @@ export const groupProjectServiceFactory = ({ }; const listGroupsInProject = async ({ - projectSlug, + projectId, actor, actorId, actorAuthMethod, actorOrgId }: TListProjectGroupDTO) => { - const project = await projectDAL.findOne({ - slug: projectSlug - }); + const project = await projectDAL.findById(projectId); - if (!project) throw new BadRequestError({ message: `Failed to find project with slug ${projectSlug}` }); + if (!project) { + throw new NotFoundError({ message: `Failed to find project with ID ${projectId}` }); + } const { permission } = await permissionService.getProjectPermission( actor, @@ -336,10 +396,47 @@ export const groupProjectServiceFactory = ({ return groupMemberships; }; + const getGroupInProject = async ({ + actor, + actorId, + actorAuthMethod, + actorOrgId, + groupId, + projectId + }: TGetGroupInProjectDTO) => { + const project = await projectDAL.findById(projectId); + + if (!project) { + throw new NotFoundError({ message: `Failed to find project with ID ${projectId}` }); + } + + const { permission } = await permissionService.getProjectPermission( + actor, + actorId, + project.id, + actorAuthMethod, + actorOrgId + ); + ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Groups); + + const [groupMembership] = await groupProjectDAL.findByProjectId(project.id, { + groupId + }); + + if (!groupMembership) { + throw new NotFoundError({ + message: `Group membership with ID ${groupId} not found in project with ID ${projectId}` + }); + 
} + + return groupMembership; + }; + return { addGroupToProject, updateGroupInProject, removeGroupFromProject, - listGroupsInProject + listGroupsInProject, + getGroupInProject }; }; diff --git a/backend/src/services/group-project/group-project-types.ts b/backend/src/services/group-project/group-project-types.ts index c867b75c04..1e17949637 100644 --- a/backend/src/services/group-project/group-project-types.ts +++ b/backend/src/services/group-project/group-project-types.ts @@ -1,11 +1,23 @@ -import { TProjectSlugPermission } from "@app/lib/types"; +import { TProjectPermission } from "@app/lib/types"; import { ProjectUserMembershipTemporaryMode } from "../project-membership/project-membership-types"; export type TCreateProjectGroupDTO = { - groupSlug: string; - role: string; -} & TProjectSlugPermission; + groupId: string; + roles: ( + | { + role: string; + isTemporary?: false; + } + | { + role: string; + isTemporary: true; + temporaryMode: ProjectUserMembershipTemporaryMode.Relative; + temporaryRange: string; + temporaryAccessStartTime: string; + } + )[]; +} & TProjectPermission; export type TUpdateProjectGroupDTO = { roles: ( @@ -21,11 +33,13 @@ export type TUpdateProjectGroupDTO = { temporaryAccessStartTime: string; } )[]; - groupSlug: string; -} & TProjectSlugPermission; + groupId: string; +} & TProjectPermission; export type TDeleteProjectGroupDTO = { - groupSlug: string; -} & TProjectSlugPermission; + groupId: string; +} & TProjectPermission; -export type TListProjectGroupDTO = TProjectSlugPermission; +export type TListProjectGroupDTO = TProjectPermission; + +export type TGetGroupInProjectDTO = TProjectPermission & { groupId: string }; diff --git a/backend/src/services/identity-access-token/identity-access-token-dal.ts b/backend/src/services/identity-access-token/identity-access-token-dal.ts index a0f9fbc273..f12bd8c158 100644 --- a/backend/src/services/identity-access-token/identity-access-token-dal.ts +++ b/backend/src/services/identity-access-token/identity-access-token-dal.ts @@ -1,9 +1,11 @@ import { Knex } from "knex"; import { TDbClient } from "@app/db"; -import { IdentityAuthMethod, TableName, TIdentityAccessTokens } from "@app/db/schemas"; +import { TableName, TIdentityAccessTokens } from "@app/db/schemas"; import { DatabaseError } from "@app/lib/errors"; import { ormify, selectAllTableCols } from "@app/lib/knex"; +import { logger } from "@app/lib/logger"; +import { QueueName } from "@app/queue"; export type TIdentityAccessTokenDALFactory = ReturnType; @@ -12,45 +14,30 @@ export const identityAccessTokenDALFactory = (db: TDbClient) => { const findOne = async (filter: Partial, tx?: Knex) => { try { - const doc = await (tx || db)(TableName.IdentityAccessToken) + const doc = await (tx || db.replicaNode())(TableName.IdentityAccessToken) .where(filter) .join(TableName.Identity, `${TableName.Identity}.id`, `${TableName.IdentityAccessToken}.identityId`) - .leftJoin(TableName.IdentityUaClientSecret, (qb) => { - qb.on(`${TableName.Identity}.authMethod`, db.raw("?", [IdentityAuthMethod.Univeral])).andOn( - `${TableName.IdentityAccessToken}.identityUAClientSecretId`, - `${TableName.IdentityUaClientSecret}.id` - ); - }) - .leftJoin(TableName.IdentityUniversalAuth, (qb) => { - qb.on(`${TableName.Identity}.authMethod`, db.raw("?", [IdentityAuthMethod.Univeral])).andOn( - `${TableName.IdentityUaClientSecret}.identityUAId`, - `${TableName.IdentityUniversalAuth}.id` - ); - }) - .leftJoin(TableName.IdentityGcpAuth, (qb) => { - qb.on(`${TableName.Identity}.authMethod`, db.raw("?", 
[IdentityAuthMethod.GCP_AUTH])).andOn( - `${TableName.Identity}.id`, - `${TableName.IdentityGcpAuth}.identityId` - ); - }) - .leftJoin(TableName.IdentityAwsAuth, (qb) => { - qb.on(`${TableName.Identity}.authMethod`, db.raw("?", [IdentityAuthMethod.AWS_AUTH])).andOn( - `${TableName.Identity}.id`, - `${TableName.IdentityAwsAuth}.identityId` - ); - }) - .leftJoin(TableName.IdentityAzureAuth, (qb) => { - qb.on(`${TableName.Identity}.authMethod`, db.raw("?", [IdentityAuthMethod.AZURE_AUTH])).andOn( - `${TableName.Identity}.id`, - `${TableName.IdentityAzureAuth}.identityId` - ); - }) - .leftJoin(TableName.IdentityKubernetesAuth, (qb) => { - qb.on(`${TableName.Identity}.authMethod`, db.raw("?", [IdentityAuthMethod.KUBERNETES_AUTH])).andOn( - `${TableName.Identity}.id`, - `${TableName.IdentityKubernetesAuth}.identityId` - ); - }) + .leftJoin( + TableName.IdentityUaClientSecret, + `${TableName.IdentityAccessToken}.identityUAClientSecretId`, + `${TableName.IdentityUaClientSecret}.id` + ) + .leftJoin( + TableName.IdentityUniversalAuth, + `${TableName.IdentityUaClientSecret}.identityUAId`, + `${TableName.IdentityUniversalAuth}.id` + ) + .leftJoin(TableName.IdentityGcpAuth, `${TableName.Identity}.id`, `${TableName.IdentityGcpAuth}.identityId`) + .leftJoin(TableName.IdentityAwsAuth, `${TableName.Identity}.id`, `${TableName.IdentityAwsAuth}.identityId`) + .leftJoin(TableName.IdentityAzureAuth, `${TableName.Identity}.id`, `${TableName.IdentityAzureAuth}.identityId`) + .leftJoin( + TableName.IdentityKubernetesAuth, + `${TableName.Identity}.id`, + `${TableName.IdentityKubernetesAuth}.identityId` + ) + .leftJoin(TableName.IdentityOidcAuth, `${TableName.Identity}.id`, `${TableName.IdentityOidcAuth}.identityId`) + .leftJoin(TableName.IdentityTokenAuth, `${TableName.Identity}.id`, `${TableName.IdentityTokenAuth}.identityId`) + .select(selectAllTableCols(TableName.IdentityAccessToken)) .select( db.ref("accessTokenTrustedIps").withSchema(TableName.IdentityUniversalAuth).as("accessTokenTrustedIpsUa"), @@ -58,6 +45,8 @@ export const identityAccessTokenDALFactory = (db: TDbClient) => { db.ref("accessTokenTrustedIps").withSchema(TableName.IdentityAwsAuth).as("accessTokenTrustedIpsAws"), db.ref("accessTokenTrustedIps").withSchema(TableName.IdentityAzureAuth).as("accessTokenTrustedIpsAzure"), db.ref("accessTokenTrustedIps").withSchema(TableName.IdentityKubernetesAuth).as("accessTokenTrustedIpsK8s"), + db.ref("accessTokenTrustedIps").withSchema(TableName.IdentityOidcAuth).as("accessTokenTrustedIpsOidc"), + db.ref("accessTokenTrustedIps").withSchema(TableName.IdentityTokenAuth).as("accessTokenTrustedIpsToken"), db.ref("name").withSchema(TableName.Identity) ) .first(); @@ -66,12 +55,13 @@ export const identityAccessTokenDALFactory = (db: TDbClient) => { return { ...doc, - accessTokenTrustedIps: - doc.accessTokenTrustedIpsUa || - doc.accessTokenTrustedIpsGcp || - doc.accessTokenTrustedIpsAws || - doc.accessTokenTrustedIpsAzure || - doc.accessTokenTrustedIpsK8s + trustedIpsUniversalAuth: doc.accessTokenTrustedIpsUa, + trustedIpsGcpAuth: doc.accessTokenTrustedIpsGcp, + trustedIpsAwsAuth: doc.accessTokenTrustedIpsAws, + trustedIpsAzureAuth: doc.accessTokenTrustedIpsAzure, + trustedIpsKubernetesAuth: doc.accessTokenTrustedIpsK8s, + trustedIpsOidcAuth: doc.accessTokenTrustedIpsOidc, + trustedIpsAccessTokenAuth: doc.accessTokenTrustedIpsToken }; } catch (error) { throw new DatabaseError({ error, name: "IdAccessTokenFindOne" }); @@ -79,6 +69,10 @@ export const identityAccessTokenDALFactory = (db: TDbClient) => { }; const 
removeExpiredTokens = async (tx?: Knex) => { + logger.info(`${QueueName.DailyResourceCleanUp}: remove expired access token started`); + + const MAX_TTL = 315_360_000; // Maximum TTL value in seconds (10 years) + try { const docs = (tx || db)(TableName.IdentityAccessToken) .where({ @@ -101,7 +95,8 @@ export const identityAccessTokenDALFactory = (db: TDbClient) => { .whereNotNull("accessTokenLastRenewedAt") // accessTokenLastRenewedAt + convert_integer_to_seconds(accessTokenTTL) < present_date .andWhereRaw( - `"${TableName.IdentityAccessToken}"."accessTokenLastRenewedAt" + make_interval(secs => "${TableName.IdentityAccessToken}"."accessTokenTTL") < NOW()` + `"${TableName.IdentityAccessToken}"."accessTokenLastRenewedAt" + make_interval(secs => LEAST("${TableName.IdentityAccessToken}"."accessTokenTTL", ?)) < NOW()`, + [MAX_TTL] ); }) .orWhere((qb3) => { @@ -109,13 +104,15 @@ export const identityAccessTokenDALFactory = (db: TDbClient) => { .whereNull("accessTokenLastRenewedAt") // created + convert_integer_to_seconds(accessTokenTTL) < present_date .andWhereRaw( - `"${TableName.IdentityAccessToken}"."createdAt" + make_interval(secs => "${TableName.IdentityAccessToken}"."accessTokenTTL") < NOW()` + `"${TableName.IdentityAccessToken}"."createdAt" + make_interval(secs => LEAST("${TableName.IdentityAccessToken}"."accessTokenTTL", ?)) < NOW()`, + [MAX_TTL] ); }); }); }) .delete(); - return await docs; + await docs; + logger.info(`${QueueName.DailyResourceCleanUp}: remove expired access token completed`); } catch (error) { throw new DatabaseError({ error, name: "IdentityAccessTokenPrune" }); } diff --git a/backend/src/services/identity-access-token/identity-access-token-service.ts b/backend/src/services/identity-access-token/identity-access-token-service.ts index 3e7fe31a6f..a59d1e9594 100644 --- a/backend/src/services/identity-access-token/identity-access-token-service.ts +++ b/backend/src/services/identity-access-token/identity-access-token-service.ts @@ -1,10 +1,11 @@ import jwt, { JwtPayload } from "jsonwebtoken"; -import { TableName, TIdentityAccessTokens } from "@app/db/schemas"; +import { IdentityAuthMethod, TableName, TIdentityAccessTokens } from "@app/db/schemas"; import { getConfig } from "@app/lib/config/env"; import { BadRequestError, UnauthorizedError } from "@app/lib/errors"; import { checkIPAgainstBlocklist, TIp } from "@app/lib/ip"; +import { TAccessTokenQueueServiceFactory } from "../access-token-queue/access-token-queue"; import { AuthTokenType } from "../auth/auth-type"; import { TIdentityOrgDALFactory } from "../identity/identity-org-dal"; import { TIdentityAccessTokenDALFactory } from "./identity-access-token-dal"; @@ -13,19 +14,24 @@ import { TIdentityAccessTokenJwtPayload, TRenewAccessTokenDTO } from "./identity type TIdentityAccessTokenServiceFactoryDep = { identityAccessTokenDAL: TIdentityAccessTokenDALFactory; identityOrgMembershipDAL: TIdentityOrgDALFactory; + accessTokenQueue: Pick< + TAccessTokenQueueServiceFactory, + "updateIdentityAccessTokenStatus" | "getIdentityTokenDetailsInCache" + >; }; export type TIdentityAccessTokenServiceFactory = ReturnType; export const identityAccessTokenServiceFactory = ({ identityAccessTokenDAL, - identityOrgMembershipDAL + identityOrgMembershipDAL, + accessTokenQueue }: TIdentityAccessTokenServiceFactoryDep) => { const validateAccessTokenExp = async (identityAccessToken: TIdentityAccessTokens) => { const { id: tokenId, - accessTokenTTL, accessTokenNumUses, + accessTokenTTL, accessTokenNumUsesLimit, accessTokenLastRenewedAt, createdAt: 
accessTokenCreatedAt @@ -33,7 +39,7 @@ export const identityAccessTokenServiceFactory = ({ if (accessTokenNumUsesLimit > 0 && accessTokenNumUses > 0 && accessTokenNumUses >= accessTokenNumUsesLimit) { await identityAccessTokenDAL.deleteById(tokenId); - throw new BadRequestError({ + throw new UnauthorizedError({ message: "Unable to renew because access token number of uses limit reached" }); } @@ -75,15 +81,22 @@ export const identityAccessTokenServiceFactory = ({ const decodedToken = jwt.verify(accessToken, appCfg.AUTH_SECRET) as JwtPayload & { identityAccessTokenId: string; }; - if (decodedToken.authTokenType !== AuthTokenType.IDENTITY_ACCESS_TOKEN) throw new UnauthorizedError(); + if (decodedToken.authTokenType !== AuthTokenType.IDENTITY_ACCESS_TOKEN) { + throw new BadRequestError({ message: "Only identity access tokens can be renewed" }); + } const identityAccessToken = await identityAccessTokenDAL.findOne({ [`${TableName.IdentityAccessToken}.id` as "id"]: decodedToken.identityAccessTokenId, isAccessTokenRevoked: false }); - if (!identityAccessToken) throw new UnauthorizedError(); + if (!identityAccessToken) throw new UnauthorizedError({ message: "No identity access token found" }); - await validateAccessTokenExp(identityAccessToken); + let { accessTokenNumUses } = identityAccessToken; + const tokenStatusInCache = await accessTokenQueue.getIdentityTokenDetailsInCache(identityAccessToken.id); + if (tokenStatusInCache) { + accessTokenNumUses = tokenStatusInCache.numberOfUses; + } + await validateAccessTokenExp({ ...identityAccessToken, accessTokenNumUses }); const { accessTokenMaxTTL, createdAt: accessTokenCreatedAt, accessTokenTTL } = identityAccessToken; @@ -123,15 +136,20 @@ export const identityAccessTokenServiceFactory = ({ const decodedToken = jwt.verify(accessToken, appCfg.AUTH_SECRET) as JwtPayload & { identityAccessTokenId: string; }; - if (decodedToken.authTokenType !== AuthTokenType.IDENTITY_ACCESS_TOKEN) throw new UnauthorizedError(); + if (decodedToken.authTokenType !== AuthTokenType.IDENTITY_ACCESS_TOKEN) { + throw new UnauthorizedError({ message: "Only identity access tokens can be revoked" }); + } const identityAccessToken = await identityAccessTokenDAL.findOne({ [`${TableName.IdentityAccessToken}.id` as "id"]: decodedToken.identityAccessTokenId, isAccessTokenRevoked: false }); - if (!identityAccessToken) throw new UnauthorizedError(); + if (!identityAccessToken) throw new UnauthorizedError({ message: "No identity access token found" }); + + const revokedToken = await identityAccessTokenDAL.updateById(identityAccessToken.id, { + isAccessTokenRevoked: true + }); - const revokedToken = await identityAccessTokenDAL.deleteById(identityAccessToken.id); return { revokedToken }; }; @@ -140,12 +158,28 @@ export const identityAccessTokenServiceFactory = ({ [`${TableName.IdentityAccessToken}.id` as "id"]: token.identityAccessTokenId, isAccessTokenRevoked: false }); - if (!identityAccessToken) throw new UnauthorizedError(); + if (!identityAccessToken) throw new UnauthorizedError({ message: "No identity access token found" }); + if (identityAccessToken.isAccessTokenRevoked) + throw new UnauthorizedError({ + message: "Failed to authorize revoked access token, access token is revoked" + }); - if (ipAddress && identityAccessToken) { + const trustedIpsMap: Record = { + [IdentityAuthMethod.UNIVERSAL_AUTH]: identityAccessToken.trustedIpsUniversalAuth, + [IdentityAuthMethod.GCP_AUTH]: identityAccessToken.trustedIpsGcpAuth, + [IdentityAuthMethod.AWS_AUTH]: 
identityAccessToken.trustedIpsAwsAuth, + [IdentityAuthMethod.AZURE_AUTH]: identityAccessToken.trustedIpsAzureAuth, + [IdentityAuthMethod.KUBERNETES_AUTH]: identityAccessToken.trustedIpsKubernetesAuth, + [IdentityAuthMethod.OIDC_AUTH]: identityAccessToken.trustedIpsOidcAuth, + [IdentityAuthMethod.TOKEN_AUTH]: identityAccessToken.trustedIpsAccessTokenAuth + }; + + const trustedIps = trustedIpsMap[identityAccessToken.authMethod as IdentityAuthMethod]; + + if (ipAddress) { checkIPAgainstBlocklist({ ipAddress, - trustedIps: identityAccessToken?.accessTokenTrustedIps as TIp[] + trustedIps: trustedIps as TIp[] }); } @@ -154,17 +188,17 @@ export const identityAccessTokenServiceFactory = ({ }); if (!identityOrgMembership) { - throw new UnauthorizedError({ message: "Identity does not belong to any organization" }); + throw new BadRequestError({ message: "Identity does not belong to any organization" }); } - await validateAccessTokenExp(identityAccessToken); + let { accessTokenNumUses } = identityAccessToken; + const tokenStatusInCache = await accessTokenQueue.getIdentityTokenDetailsInCache(identityAccessToken.id); + if (tokenStatusInCache) { + accessTokenNumUses = tokenStatusInCache.numberOfUses; + } + await validateAccessTokenExp({ ...identityAccessToken, accessTokenNumUses }); - await identityAccessTokenDAL.updateById(identityAccessToken.id, { - accessTokenLastUsedAt: new Date(), - $incr: { - accessTokenNumUses: 1 - } - }); + await accessTokenQueue.updateIdentityAccessTokenStatus(identityAccessToken.id, Number(accessTokenNumUses) + 1); return { ...identityAccessToken, orgId: identityOrgMembership.orgId }; }; diff --git a/backend/src/services/identity-aws-auth/identity-aws-auth-service.ts b/backend/src/services/identity-aws-auth/identity-aws-auth-service.ts index a589449093..6295446bdd 100644 --- a/backend/src/services/identity-aws-auth/identity-aws-auth-service.ts +++ b/backend/src/services/identity-aws-auth/identity-aws-auth-service.ts @@ -7,12 +7,12 @@ import { IdentityAuthMethod } from "@app/db/schemas"; import { TLicenseServiceFactory } from "@app/ee/services/license/license-service"; import { OrgPermissionActions, OrgPermissionSubjects } from "@app/ee/services/permission/org-permission"; import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service"; +import { isAtLeastAsPrivileged } from "@app/lib/casl"; import { getConfig } from "@app/lib/config/env"; -import { BadRequestError, UnauthorizedError } from "@app/lib/errors"; +import { BadRequestError, ForbiddenRequestError, NotFoundError, UnauthorizedError } from "@app/lib/errors"; import { extractIPDetails, isValidIpOrCidr } from "@app/lib/ip"; -import { AuthTokenType } from "../auth/auth-type"; -import { TIdentityDALFactory } from "../identity/identity-dal"; +import { ActorType, AuthTokenType } from "../auth/auth-type"; import { TIdentityOrgDALFactory } from "../identity/identity-org-dal"; import { TIdentityAccessTokenDALFactory } from "../identity-access-token/identity-access-token-dal"; import { TIdentityAccessTokenJwtPayload } from "../identity-access-token/identity-access-token-types"; @@ -24,14 +24,14 @@ import { TGetAwsAuthDTO, TGetCallerIdentityResponse, TLoginAwsAuthDTO, + TRevokeAwsAuthDTO, TUpdateAwsAuthDTO } from "./identity-aws-auth-types"; type TIdentityAwsAuthServiceFactoryDep = { identityAccessTokenDAL: Pick; - identityAwsAuthDAL: Pick; + identityAwsAuthDAL: Pick; identityOrgMembershipDAL: Pick; - identityDAL: Pick; licenseService: Pick; permissionService: Pick; }; @@ -42,13 +42,14 @@ export const 
identityAwsAuthServiceFactory = ({ identityAccessTokenDAL, identityAwsAuthDAL, identityOrgMembershipDAL, - identityDAL, licenseService, permissionService }: TIdentityAwsAuthServiceFactoryDep) => { const login = async ({ identityId, iamHttpRequestMethod, iamRequestBody, iamRequestHeaders }: TLoginAwsAuthDTO) => { const identityAwsAuth = await identityAwsAuthDAL.findOne({ identityId }); - if (!identityAwsAuth) throw new UnauthorizedError(); + if (!identityAwsAuth) { + throw new NotFoundError({ message: "AWS auth method not found for identity, did you configure AWS auth?" }); + } const identityMembershipOrg = await identityOrgMembershipDAL.findOne({ identityId: identityAwsAuth.identityId }); @@ -63,7 +64,7 @@ export const identityAwsAuthServiceFactory = ({ } }: { data: TGetCallerIdentityResponse } = await axios({ method: iamHttpRequestMethod, - url: identityAwsAuth.stsEndpoint, + url: headers?.Host ? `https://${headers.Host}` : identityAwsAuth.stsEndpoint, headers, data: body }); @@ -76,7 +77,10 @@ export const identityAwsAuthServiceFactory = ({ .map((accountId) => accountId.trim()) .some((accountId) => accountId === Account); - if (!isAccountAllowed) throw new UnauthorizedError(); + if (!isAccountAllowed) + throw new UnauthorizedError({ + message: "Access denied: AWS account ID not allowed." + }); } if (identityAwsAuth.allowedPrincipalArns) { @@ -92,7 +96,10 @@ export const identityAwsAuthServiceFactory = ({ return regex.test(extractPrincipalArn(Arn)); }); - if (!isArnAllowed) throw new UnauthorizedError(); + if (!isArnAllowed) + throw new UnauthorizedError({ + message: "Access denied: AWS principal ARN not allowed." + }); } const identityAccessToken = await identityAwsAuthDAL.transaction(async (tx) => { @@ -103,7 +110,8 @@ export const identityAwsAuthServiceFactory = ({ accessTokenTTL: identityAwsAuth.accessTokenTTL, accessTokenMaxTTL: identityAwsAuth.accessTokenMaxTTL, accessTokenNumUses: 0, - accessTokenNumUsesLimit: identityAwsAuth.accessTokenNumUsesLimit + accessTokenNumUsesLimit: identityAwsAuth.accessTokenNumUsesLimit, + authMethod: IdentityAuthMethod.AWS_AUTH }, tx ); @@ -144,11 +152,13 @@ export const identityAwsAuthServiceFactory = ({ actorOrgId }: TAttachAwsAuthDTO) => { const identityMembershipOrg = await identityOrgMembershipDAL.findOne({ identityId }); - if (!identityMembershipOrg) throw new BadRequestError({ message: "Failed to find identity" }); - if (identityMembershipOrg.identity.authMethod) + if (!identityMembershipOrg) throw new NotFoundError({ message: `Failed to find identity with ID ${identityId}` }); + + if (identityMembershipOrg.identity.authMethods.includes(IdentityAuthMethod.AWS_AUTH)) { throw new BadRequestError({ message: "Failed to add AWS Auth to already configured identity" }); + } if (accessTokenMaxTTL > 0 && accessTokenTTL > accessTokenMaxTTL) { throw new BadRequestError({ message: "Access token TTL cannot be greater than max TTL" }); @@ -196,13 +206,6 @@ export const identityAwsAuthServiceFactory = ({ }, tx ); - await identityDAL.updateById( - identityMembershipOrg.identityId, - { - authMethod: IdentityAuthMethod.AWS_AUTH - }, - tx - ); return doc; }); return { ...identityAwsAuth, orgId: identityMembershipOrg.orgId }; @@ -223,11 +226,13 @@ export const identityAwsAuthServiceFactory = ({ actorOrgId }: TUpdateAwsAuthDTO) => { const identityMembershipOrg = await identityOrgMembershipDAL.findOne({ identityId }); - if (!identityMembershipOrg) throw new BadRequestError({ message: "Failed to find identity" }); - if (identityMembershipOrg.identity?.authMethod !== 
IdentityAuthMethod.AWS_AUTH) - throw new BadRequestError({ - message: "Failed to update AWS Auth" + if (!identityMembershipOrg) throw new NotFoundError({ message: `Failed to find identity with ID ${identityId}` }); + + if (!identityMembershipOrg.identity.authMethods.includes(IdentityAuthMethod.AWS_AUTH)) { + throw new NotFoundError({ + message: "The identity does not have AWS Auth attached" }); + } const identityAwsAuth = await identityAwsAuthDAL.findOne({ identityId }); @@ -282,11 +287,13 @@ export const identityAwsAuthServiceFactory = ({ const getAwsAuth = async ({ identityId, actorId, actor, actorAuthMethod, actorOrgId }: TGetAwsAuthDTO) => { const identityMembershipOrg = await identityOrgMembershipDAL.findOne({ identityId }); - if (!identityMembershipOrg) throw new BadRequestError({ message: "Failed to find identity" }); - if (identityMembershipOrg.identity?.authMethod !== IdentityAuthMethod.AWS_AUTH) + if (!identityMembershipOrg) throw new NotFoundError({ message: `Failed to find identity with ID ${identityId}` }); + + if (!identityMembershipOrg.identity.authMethods.includes(IdentityAuthMethod.AWS_AUTH)) { throw new BadRequestError({ message: "The identity does not have AWS Auth attached" }); + } const awsIdentityAuth = await identityAwsAuthDAL.findOne({ identityId }); @@ -301,10 +308,54 @@ export const identityAwsAuthServiceFactory = ({ return { ...awsIdentityAuth, orgId: identityMembershipOrg.orgId }; }; + const revokeIdentityAwsAuth = async ({ + identityId, + actorId, + actor, + actorAuthMethod, + actorOrgId + }: TRevokeAwsAuthDTO) => { + const identityMembershipOrg = await identityOrgMembershipDAL.findOne({ identityId }); + if (!identityMembershipOrg) throw new NotFoundError({ message: `Failed to find identity with ID ${identityId}` }); + if (!identityMembershipOrg.identity.authMethods.includes(IdentityAuthMethod.AWS_AUTH)) { + throw new BadRequestError({ + message: "The identity does not have aws auth" + }); + } + const { permission } = await permissionService.getOrgPermission( + actor, + actorId, + identityMembershipOrg.orgId, + actorAuthMethod, + actorOrgId + ); + ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Edit, OrgPermissionSubjects.Identity); + + const { permission: rolePermission } = await permissionService.getOrgPermission( + ActorType.IDENTITY, + identityMembershipOrg.identityId, + identityMembershipOrg.orgId, + actorAuthMethod, + actorOrgId + ); + + if (!isAtLeastAsPrivileged(permission, rolePermission)) + throw new ForbiddenRequestError({ + message: "Failed to revoke aws auth of identity with more privileged role" + }); + + const revokedIdentityAwsAuth = await identityAwsAuthDAL.transaction(async (tx) => { + const deletedAwsAuth = await identityAwsAuthDAL.delete({ identityId }, tx); + return { ...deletedAwsAuth?.[0], orgId: identityMembershipOrg.orgId }; + }); + return revokedIdentityAwsAuth; + }; + return { login, attachAwsAuth, updateAwsAuth, - getAwsAuth + getAwsAuth, + revokeIdentityAwsAuth }; }; diff --git a/backend/src/services/identity-aws-auth/identity-aws-auth-types.ts b/backend/src/services/identity-aws-auth/identity-aws-auth-types.ts index e45783ae1d..c24186ee0e 100644 --- a/backend/src/services/identity-aws-auth/identity-aws-auth-types.ts +++ b/backend/src/services/identity-aws-auth/identity-aws-auth-types.ts @@ -52,3 +52,7 @@ export type TGetCallerIdentityResponse = { ResponseMetadata: { RequestId: string }; }; }; + +export type TRevokeAwsAuthDTO = { + identityId: string; +} & Omit; diff --git 
a/backend/src/services/identity-azure-auth/identity-azure-auth-fns.ts b/backend/src/services/identity-azure-auth/identity-azure-auth-fns.ts index ad9e6f12d4..741d7e63c9 100644 --- a/backend/src/services/identity-azure-auth/identity-azure-auth-fns.ts +++ b/backend/src/services/identity-azure-auth/identity-azure-auth-fns.ts @@ -17,16 +17,24 @@ export const validateAzureIdentity = async ({ const jwksUri = `https://login.microsoftonline.com/${tenantId}/discovery/keys`; const decodedJwt = jwt.decode(azureJwt, { complete: true }) as TDecodedAzureAuthJwt; + const { kid } = decodedJwt.header; const { data }: { data: TAzureJwksUriResponse } = await axios.get(jwksUri); const signingKeys = data.keys; const signingKey = signingKeys.find((key) => key.kid === kid); - if (!signingKey) throw new UnauthorizedError(); + if (!signingKey) throw new UnauthorizedError({ message: "Invalid signing key" }); const publicKey = `-----BEGIN CERTIFICATE-----\n${signingKey.x5c[0]}\n-----END CERTIFICATE-----`; + // Case: This can happen when the user uses a custom resource (such as https://management.azure.com&client_id=value). + // In this case, the audience in the decoded JWT will not have a trailing slash, but the resource will. + if (!decodedJwt.payload.aud.endsWith("/") && resource.endsWith("/")) { + // eslint-disable-next-line no-param-reassign + resource = resource.slice(0, -1); + } + return jwt.verify(azureJwt, publicKey, { audience: resource, issuer: `https://sts.windows.net/${tenantId}/` diff --git a/backend/src/services/identity-azure-auth/identity-azure-auth-service.ts b/backend/src/services/identity-azure-auth/identity-azure-auth-service.ts index fa439bdc00..cc61df65f8 100644 --- a/backend/src/services/identity-azure-auth/identity-azure-auth-service.ts +++ b/backend/src/services/identity-azure-auth/identity-azure-auth-service.ts @@ -5,12 +5,12 @@ import { IdentityAuthMethod } from "@app/db/schemas"; import { TLicenseServiceFactory } from "@app/ee/services/license/license-service"; import { OrgPermissionActions, OrgPermissionSubjects } from "@app/ee/services/permission/org-permission"; import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service"; +import { isAtLeastAsPrivileged } from "@app/lib/casl"; import { getConfig } from "@app/lib/config/env"; -import { BadRequestError, UnauthorizedError } from "@app/lib/errors"; +import { BadRequestError, ForbiddenRequestError, NotFoundError, UnauthorizedError } from "@app/lib/errors"; import { extractIPDetails, isValidIpOrCidr } from "@app/lib/ip"; -import { AuthTokenType } from "../auth/auth-type"; -import { TIdentityDALFactory } from "../identity/identity-dal"; +import { ActorType, AuthTokenType } from "../auth/auth-type"; import { TIdentityOrgDALFactory } from "../identity/identity-org-dal"; import { TIdentityAccessTokenDALFactory } from "../identity-access-token/identity-access-token-dal"; import { TIdentityAccessTokenJwtPayload } from "../identity-access-token/identity-access-token-types"; @@ -20,14 +20,17 @@ import { TAttachAzureAuthDTO, TGetAzureAuthDTO, TLoginAzureAuthDTO, + TRevokeAzureAuthDTO, TUpdateAzureAuthDTO } from "./identity-azure-auth-types"; type TIdentityAzureAuthServiceFactoryDep = { - identityAzureAuthDAL: Pick; + identityAzureAuthDAL: Pick< + TIdentityAzureAuthDALFactory, + "findOne" | "transaction" | "create" | "updateById" | "delete" + >; identityOrgMembershipDAL: Pick; identityAccessTokenDAL: Pick; - identityDAL: Pick; permissionService: Pick; licenseService: Pick; }; @@ -38,16 +41,17 @@ export const 
diff --git a/backend/src/services/identity-azure-auth/identity-azure-auth-service.ts b/backend/src/services/identity-azure-auth/identity-azure-auth-service.ts
index fa439bdc00..cc61df65f8 100644
--- a/backend/src/services/identity-azure-auth/identity-azure-auth-service.ts
+++ b/backend/src/services/identity-azure-auth/identity-azure-auth-service.ts
@@ -5,12 +5,12 @@ import { IdentityAuthMethod } from "@app/db/schemas";
 import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
 import { OrgPermissionActions, OrgPermissionSubjects } from "@app/ee/services/permission/org-permission";
 import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
+import { isAtLeastAsPrivileged } from "@app/lib/casl";
 import { getConfig } from "@app/lib/config/env";
-import { BadRequestError, UnauthorizedError } from "@app/lib/errors";
+import { BadRequestError, ForbiddenRequestError, NotFoundError, UnauthorizedError } from "@app/lib/errors";
 import { extractIPDetails, isValidIpOrCidr } from "@app/lib/ip";
 
-import { AuthTokenType } from "../auth/auth-type";
-import { TIdentityDALFactory } from "../identity/identity-dal";
+import { ActorType, AuthTokenType } from "../auth/auth-type";
 import { TIdentityOrgDALFactory } from "../identity/identity-org-dal";
 import { TIdentityAccessTokenDALFactory } from "../identity-access-token/identity-access-token-dal";
 import { TIdentityAccessTokenJwtPayload } from "../identity-access-token/identity-access-token-types";
@@ -20,14 +20,17 @@ import {
   TAttachAzureAuthDTO,
   TGetAzureAuthDTO,
   TLoginAzureAuthDTO,
+  TRevokeAzureAuthDTO,
   TUpdateAzureAuthDTO
 } from "./identity-azure-auth-types";
 
 type TIdentityAzureAuthServiceFactoryDep = {
-  identityAzureAuthDAL: Pick<TIdentityAzureAuthDALFactory, "findOne" | "transaction" | "create" | "updateById">;
+  identityAzureAuthDAL: Pick<
+    TIdentityAzureAuthDALFactory,
+    "findOne" | "transaction" | "create" | "updateById" | "delete"
+  >;
   identityOrgMembershipDAL: Pick<TIdentityOrgDALFactory, "findOne">;
   identityAccessTokenDAL: Pick<TIdentityAccessTokenDALFactory, "create">;
-  identityDAL: Pick<TIdentityDALFactory, "updateById">;
   permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
   licenseService: Pick<TLicenseServiceFactory, "getPlan">;
 };
@@ -38,16 +41,17 @@ export const identityAzureAuthServiceFactory = ({
   identityAzureAuthDAL,
   identityOrgMembershipDAL,
   identityAccessTokenDAL,
-  identityDAL,
   permissionService,
   licenseService
 }: TIdentityAzureAuthServiceFactoryDep) => {
   const login = async ({ identityId, jwt: azureJwt }: TLoginAzureAuthDTO) => {
     const identityAzureAuth = await identityAzureAuthDAL.findOne({ identityId });
-    if (!identityAzureAuth) throw new UnauthorizedError();
+    if (!identityAzureAuth) {
+      throw new NotFoundError({ message: "Azure auth method not found for identity, did you configure Azure Auth?" });
+    }
 
     const identityMembershipOrg = await identityOrgMembershipDAL.findOne({ identityId: identityAzureAuth.identityId });
-    if (!identityMembershipOrg) throw new UnauthorizedError();
+    if (!identityMembershipOrg) throw new UnauthorizedError({ message: "Identity not attached to an organization" });
 
     const azureIdentity = await validateAzureIdentity({
       tenantId: identityAzureAuth.tenantId,
@@ -55,7 +59,8 @@ export const identityAzureAuthServiceFactory = ({
       jwt: azureJwt
     });
 
-    if (azureIdentity.tid !== identityAzureAuth.tenantId) throw new UnauthorizedError();
+    if (azureIdentity.tid !== identityAzureAuth.tenantId)
+      throw new UnauthorizedError({ message: "Tenant ID mismatch" });
 
     if (identityAzureAuth.allowedServicePrincipalIds) {
       // validate if the service principal id is in the list of allowed service principal ids
@@ -65,7 +70,7 @@ export const identityAzureAuthServiceFactory = ({
         .map((servicePrincipalId) => servicePrincipalId.trim())
         .some((servicePrincipalId) => servicePrincipalId === azureIdentity.oid);
 
-      if (!isServicePrincipalAllowed) throw new UnauthorizedError();
+      if (!isServicePrincipalAllowed) throw new UnauthorizedError({ message: "Service principal not allowed" });
     }
 
     const identityAccessToken = await identityAzureAuthDAL.transaction(async (tx) => {
@@ -76,7 +81,8 @@ export const identityAzureAuthServiceFactory = ({
           accessTokenTTL: identityAzureAuth.accessTokenTTL,
           accessTokenMaxTTL: identityAzureAuth.accessTokenMaxTTL,
           accessTokenNumUses: 0,
-          accessTokenNumUsesLimit: identityAzureAuth.accessTokenNumUsesLimit
+          accessTokenNumUsesLimit: identityAzureAuth.accessTokenNumUsesLimit,
+          authMethod: IdentityAuthMethod.AZURE_AUTH
         },
         tx
       );
@@ -117,12 +123,13 @@ export const identityAzureAuthServiceFactory = ({
     actorOrgId
   }: TAttachAzureAuthDTO) => {
     const identityMembershipOrg = await identityOrgMembershipDAL.findOne({ identityId });
-    if (!identityMembershipOrg) throw new BadRequestError({ message: "Failed to find identity" });
-    if (identityMembershipOrg.identity.authMethod)
+    if (!identityMembershipOrg) throw new NotFoundError({ message: `Failed to find identity with ID ${identityId}` });
+
+    if (identityMembershipOrg.identity.authMethods.includes(IdentityAuthMethod.AZURE_AUTH)) {
       throw new BadRequestError({
         message: "Failed to add Azure Auth to already configured identity"
       });
-
+    }
     if (accessTokenMaxTTL > 0 && accessTokenTTL > accessTokenMaxTTL) {
       throw new BadRequestError({ message: "Access token TTL cannot be greater than max TTL" });
     }
@@ -168,13 +175,7 @@ export const identityAzureAuthServiceFactory = ({
         },
         tx
       );
-      await identityDAL.updateById(
-        identityMembershipOrg.identityId,
-        {
-          authMethod: IdentityAuthMethod.AZURE_AUTH
-        },
-        tx
-      );
+
       return doc;
     });
     return { ...identityAzureAuth, orgId: identityMembershipOrg.orgId };
@@ -195,11 +196,12 @@ export const identityAzureAuthServiceFactory = ({
     actorOrgId
   }: TUpdateAzureAuthDTO) => {
     const identityMembershipOrg = await identityOrgMembershipDAL.findOne({ identityId });
-    if (!identityMembershipOrg) throw new BadRequestError({ message: "Failed to find identity" });
-    if (identityMembershipOrg.identity?.authMethod !== IdentityAuthMethod.AZURE_AUTH)
+    if (!identityMembershipOrg) throw new NotFoundError({ message: `Failed to find identity with ID ${identityId}` });
+    if (!identityMembershipOrg.identity.authMethods.includes(IdentityAuthMethod.AZURE_AUTH)) {
       throw new BadRequestError({
         message: "Failed to update Azure Auth"
       });
+    }
 
     const identityGcpAuth = await identityAzureAuthDAL.findOne({ identityId });
@@ -257,11 +259,12 @@ export const identityAzureAuthServiceFactory = ({
   const getAzureAuth = async ({ identityId, actorId, actor, actorAuthMethod, actorOrgId }: TGetAzureAuthDTO) => {
     const identityMembershipOrg = await identityOrgMembershipDAL.findOne({ identityId });
-    if (!identityMembershipOrg) throw new BadRequestError({ message: "Failed to find identity" });
-    if (identityMembershipOrg.identity?.authMethod !== IdentityAuthMethod.AZURE_AUTH)
+    if (!identityMembershipOrg) throw new NotFoundError({ message: `Failed to find identity with ID ${identityId}` });
+    if (!identityMembershipOrg.identity.authMethods.includes(IdentityAuthMethod.AZURE_AUTH)) {
       throw new BadRequestError({
         message: "The identity does not have Azure Auth attached"
       });
+    }
 
     const identityAzureAuth = await identityAzureAuthDAL.findOne({ identityId });
@@ -277,10 +280,53 @@ export const identityAzureAuthServiceFactory = ({
     return { ...identityAzureAuth, orgId: identityMembershipOrg.orgId };
   };
 
+  const revokeIdentityAzureAuth = async ({
+    identityId,
+    actorId,
+    actor,
+    actorAuthMethod,
+    actorOrgId
+  }: TRevokeAzureAuthDTO) => {
+    const identityMembershipOrg = await identityOrgMembershipDAL.findOne({ identityId });
+    if (!identityMembershipOrg) throw new NotFoundError({ message: `Failed to find identity with ID ${identityId}` });
+    if (!identityMembershipOrg.identity.authMethods.includes(IdentityAuthMethod.AZURE_AUTH)) {
+      throw new BadRequestError({
+        message: "The identity does not have Azure Auth attached"
+      });
+    }
+    const { permission } = await permissionService.getOrgPermission(
+      actor,
+      actorId,
+      identityMembershipOrg.orgId,
+      actorAuthMethod,
+      actorOrgId
+    );
+    ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Edit, OrgPermissionSubjects.Identity);
+
+    const { permission: rolePermission } = await permissionService.getOrgPermission(
+      ActorType.IDENTITY,
+      identityMembershipOrg.identityId,
+      identityMembershipOrg.orgId,
+      actorAuthMethod,
+      actorOrgId
+    );
+    if (!isAtLeastAsPrivileged(permission, rolePermission))
+      throw new ForbiddenRequestError({
+        message: "Failed to revoke Azure Auth of identity with more privileged role"
+      });
+
+    const revokedIdentityAzureAuth = await identityAzureAuthDAL.transaction(async (tx) => {
+      const deletedAzureAuth = await identityAzureAuthDAL.delete({ identityId }, tx);
+      return { ...deletedAzureAuth?.[0], orgId: identityMembershipOrg.orgId };
+    });
+    return revokedIdentityAzureAuth;
+  };
+
   return {
     login,
     attachAzureAuth,
     updateAzureAuth,
-    getAzureAuth
+    getAzureAuth,
+    revokeIdentityAzureAuth
   };
 };
diff --git a/backend/src/services/identity-azure-auth/identity-azure-auth-types.ts b/backend/src/services/identity-azure-auth/identity-azure-auth-types.ts
index 65459003c0..ec03451dbc 100644
--- a/backend/src/services/identity-azure-auth/identity-azure-auth-types.ts
+++ b/backend/src/services/identity-azure-auth/identity-azure-auth-types.ts
@@ -118,3 +118,7 @@ export type TDecodedAzureAuthJwt = {
     [key: string]: string;
   };
 };
+
+export type TRevokeAzureAuthDTO = {
+  identityId: string;
+} & Omit<TProjectPermission, "projectId">;
diff --git a/backend/src/services/identity-gcp-auth/identity-gcp-auth-fns.ts b/backend/src/services/identity-gcp-auth/identity-gcp-auth-fns.ts
index e1afceadaf..05567d1905 100644
--- a/backend/src/services/identity-gcp-auth/identity-gcp-auth-fns.ts
+++ b/backend/src/services/identity-gcp-auth/identity-gcp-auth-fns.ts
@@ -65,6 +65,6 @@ export const validateIamIdentity = async ({
     algorithms: ["RS256"]
   });
 
-  if (aud !== identityId) throw new UnauthorizedError();
+  if (aud !== identityId) throw new UnauthorizedError({ message: "Invalid audience in GCP IAM Token" });
   return { email: sub };
 };
diff --git a/backend/src/services/identity-gcp-auth/identity-gcp-auth-service.ts b/backend/src/services/identity-gcp-auth/identity-gcp-auth-service.ts
index 5f829cb335..a2a395f635 100644
--- a/backend/src/services/identity-gcp-auth/identity-gcp-auth-service.ts
+++ b/backend/src/services/identity-gcp-auth/identity-gcp-auth-service.ts
@@ -5,12 +5,12 @@ import { IdentityAuthMethod } from "@app/db/schemas";
 import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
 import { OrgPermissionActions, OrgPermissionSubjects } from "@app/ee/services/permission/org-permission";
 import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
+import { isAtLeastAsPrivileged } from "@app/lib/casl";
 import { getConfig } from "@app/lib/config/env";
-import { BadRequestError, UnauthorizedError } from "@app/lib/errors";
+import { BadRequestError, ForbiddenRequestError, NotFoundError, UnauthorizedError } from "@app/lib/errors";
 import { extractIPDetails, isValidIpOrCidr } from "@app/lib/ip";
 
-import { AuthTokenType } from "../auth/auth-type";
-import { TIdentityDALFactory } from "../identity/identity-dal";
+import { ActorType, AuthTokenType } from "../auth/auth-type";
 import { TIdentityOrgDALFactory } from "../identity/identity-org-dal";
 import { TIdentityAccessTokenDALFactory } from "../identity-access-token/identity-access-token-dal";
 import { TIdentityAccessTokenJwtPayload } from "../identity-access-token/identity-access-token-types";
@@ -21,14 +21,14 @@ import {
   TAttachGcpAuthDTO,
   TGcpIdentityDetails,
   TGetGcpAuthDTO,
   TLoginGcpAuthDTO,
+  TRevokeGcpAuthDTO,
   TUpdateGcpAuthDTO
 } from "./identity-gcp-auth-types";
 
 type TIdentityGcpAuthServiceFactoryDep = {
-  identityGcpAuthDAL: Pick<TIdentityGcpAuthDALFactory, "findOne" | "transaction" | "create" | "updateById">;
+  identityGcpAuthDAL: Pick<TIdentityGcpAuthDALFactory, "findOne" | "transaction" | "create" | "updateById" | "delete">;
   identityOrgMembershipDAL: Pick<TIdentityOrgDALFactory, "findOne">;
   identityAccessTokenDAL: Pick<TIdentityAccessTokenDALFactory, "create">;
-  identityDAL: Pick<TIdentityDALFactory, "updateById">;
   permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
   licenseService: Pick<TLicenseServiceFactory, "getPlan">;
 };
@@ -39,16 +39,19 @@ export const identityGcpAuthServiceFactory = ({
   identityGcpAuthDAL,
   identityOrgMembershipDAL,
   identityAccessTokenDAL,
-  identityDAL,
   permissionService,
   licenseService
 }: TIdentityGcpAuthServiceFactoryDep) => {
   const login = async ({ identityId, jwt: gcpJwt }: TLoginGcpAuthDTO) => {
     const identityGcpAuth = await identityGcpAuthDAL.findOne({ identityId });
-    if (!identityGcpAuth) throw new UnauthorizedError();
+    if (!identityGcpAuth) {
+      throw new NotFoundError({ message: "GCP auth method not found for identity, did you configure GCP auth?" });
+    }
 
     const identityMembershipOrg = await identityOrgMembershipDAL.findOne({ identityId: identityGcpAuth.identityId });
-    if (!identityMembershipOrg) throw new UnauthorizedError();
+    if (!identityMembershipOrg) {
+      throw new UnauthorizedError({ message: "Identity does not belong to any organization" });
+    }
 
     let gcpIdentityDetails: TGcpIdentityDetails;
     switch (identityGcpAuth.type) {
@@ -79,7 +82,10 @@ export const identityGcpAuthServiceFactory = ({
         .map((serviceAccount) => serviceAccount.trim())
         .some((serviceAccount) => serviceAccount === gcpIdentityDetails.email);
 
-      if (!isServiceAccountAllowed) throw new UnauthorizedError();
+      if (!isServiceAccountAllowed)
+        throw new UnauthorizedError({
+          message: "Access denied: GCP service account not allowed."
+        });
     }
 
     if (identityGcpAuth.type === "gce" && identityGcpAuth.allowedProjects && gcpIdentityDetails.computeEngineDetails) {
@@ -90,7 +96,10 @@ export const identityGcpAuthServiceFactory = ({
         .map((project) => project.trim())
         .some((project) => project === gcpIdentityDetails.computeEngineDetails?.project_id);
 
-      if (!isProjectAllowed) throw new UnauthorizedError();
+      if (!isProjectAllowed)
+        throw new UnauthorizedError({
+          message: "Access denied: GCP project not allowed."
+        });
     }
 
     if (identityGcpAuth.type === "gce" && identityGcpAuth.allowedZones && gcpIdentityDetails.computeEngineDetails) {
@@ -99,7 +108,10 @@ export const identityGcpAuthServiceFactory = ({
         .map((zone) => zone.trim())
         .some((zone) => zone === gcpIdentityDetails.computeEngineDetails?.zone);
 
-      if (!isZoneAllowed) throw new UnauthorizedError();
+      if (!isZoneAllowed)
+        throw new UnauthorizedError({
+          message: "Access denied: GCP zone not allowed."
+        });
     }
 
     const identityAccessToken = await identityGcpAuthDAL.transaction(async (tx) => {
@@ -110,7 +122,8 @@ export const identityGcpAuthServiceFactory = ({
           accessTokenTTL: identityGcpAuth.accessTokenTTL,
           accessTokenMaxTTL: identityGcpAuth.accessTokenMaxTTL,
           accessTokenNumUses: 0,
-          accessTokenNumUsesLimit: identityGcpAuth.accessTokenNumUsesLimit
+          accessTokenNumUsesLimit: identityGcpAuth.accessTokenNumUsesLimit,
+          authMethod: IdentityAuthMethod.GCP_AUTH
         },
         tx
       );
@@ -152,11 +165,13 @@ export const identityGcpAuthServiceFactory = ({
     actorOrgId
   }: TAttachGcpAuthDTO) => {
     const identityMembershipOrg = await identityOrgMembershipDAL.findOne({ identityId });
-    if (!identityMembershipOrg) throw new BadRequestError({ message: "Failed to find identity" });
-    if (identityMembershipOrg.identity.authMethod)
+    if (!identityMembershipOrg) throw new NotFoundError({ message: `Failed to find identity with ID ${identityId}` });
+
+    if (identityMembershipOrg.identity.authMethods.includes(IdentityAuthMethod.GCP_AUTH)) {
       throw new BadRequestError({
         message: "Failed to add GCP Auth to already configured identity"
      });
+    }
 
     if (accessTokenMaxTTL > 0 && accessTokenTTL > accessTokenMaxTTL) {
       throw new BadRequestError({ message: "Access token TTL cannot be greater than max TTL" });
@@ -204,13 +219,6 @@ export const identityGcpAuthServiceFactory = ({
         },
         tx
       );
-      await identityDAL.updateById(
-        identityMembershipOrg.identityId,
-        {
-          authMethod: IdentityAuthMethod.GCP_AUTH
-        },
-        tx
-      );
       return doc;
     });
     return { ...identityGcpAuth, orgId: identityMembershipOrg.orgId };
@@ -232,11 +240,13 @@ export const identityGcpAuthServiceFactory = ({
     actorOrgId
   }: TUpdateGcpAuthDTO) => {
     const identityMembershipOrg = await identityOrgMembershipDAL.findOne({ identityId });
-    if (!identityMembershipOrg) throw new BadRequestError({ message: "Failed to find identity" });
-    if (identityMembershipOrg.identity?.authMethod !== IdentityAuthMethod.GCP_AUTH)
+    if (!identityMembershipOrg) throw new NotFoundError({ message: `Failed to find identity with ID ${identityId}` });
+
+    if (!identityMembershipOrg.identity.authMethods.includes(IdentityAuthMethod.GCP_AUTH)) {
       throw new BadRequestError({
         message: "Failed to update GCP Auth"
       });
+    }
 
     const identityGcpAuth = await identityGcpAuthDAL.findOne({ identityId });
@@ -295,11 +305,13 @@ export const identityGcpAuthServiceFactory = ({
   const getGcpAuth = async ({ identityId, actorId, actor, actorAuthMethod, actorOrgId }: TGetGcpAuthDTO) => {
     const identityMembershipOrg = await identityOrgMembershipDAL.findOne({ identityId });
-    if (!identityMembershipOrg) throw new BadRequestError({ message: "Failed to find identity" });
-    if (identityMembershipOrg.identity?.authMethod !== IdentityAuthMethod.GCP_AUTH)
+    if (!identityMembershipOrg) throw new NotFoundError({ message: `Failed to find identity with ID ${identityId}` });
+
+    if (!identityMembershipOrg.identity.authMethods.includes(IdentityAuthMethod.GCP_AUTH)) {
       throw new BadRequestError({
         message: "The identity does not have GCP Auth attached"
       });
+    }
 
     const identityGcpAuth = await identityGcpAuthDAL.findOne({ identityId });
@@ -315,10 +327,54 @@ export const identityGcpAuthServiceFactory = ({
     return { ...identityGcpAuth, orgId: identityMembershipOrg.orgId };
   };
 
+  const revokeIdentityGcpAuth = async ({
+    identityId,
+    actorId,
+    actor,
+    actorAuthMethod,
+    actorOrgId
+  }: TRevokeGcpAuthDTO) => {
+    const identityMembershipOrg = await identityOrgMembershipDAL.findOne({ identityId });
+    if (!identityMembershipOrg) throw new NotFoundError({ message: `Failed to find identity with ID ${identityId}` });
+
+    if (!identityMembershipOrg.identity.authMethods.includes(IdentityAuthMethod.GCP_AUTH)) {
+      throw new BadRequestError({
+        message: "The identity does not have GCP Auth attached"
+      });
+    }
+    const { permission } = await permissionService.getOrgPermission(
+      actor,
+      actorId,
+      identityMembershipOrg.orgId,
+      actorAuthMethod,
+      actorOrgId
+    );
+    ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Edit, OrgPermissionSubjects.Identity);
+
+    const { permission: rolePermission } = await permissionService.getOrgPermission(
+      ActorType.IDENTITY,
+      identityMembershipOrg.identityId,
+      identityMembershipOrg.orgId,
+      actorAuthMethod,
+      actorOrgId
+    );
+    if (!isAtLeastAsPrivileged(permission, rolePermission))
+      throw new ForbiddenRequestError({
+        message: "Failed to revoke GCP Auth of identity with more privileged role"
+      });
+
+    const revokedIdentityGcpAuth = await identityGcpAuthDAL.transaction(async (tx) => {
+      const deletedGcpAuth = await identityGcpAuthDAL.delete({ identityId }, tx);
+      return { ...deletedGcpAuth?.[0], orgId: identityMembershipOrg.orgId };
+    });
+    return revokedIdentityGcpAuth;
+  };
+
   return {
     login,
     attachGcpAuth,
     updateGcpAuth,
-    getGcpAuth
+    getGcpAuth,
+    revokeIdentityGcpAuth
   };
 };
diff --git a/backend/src/services/identity-gcp-auth/identity-gcp-auth-types.ts b/backend/src/services/identity-gcp-auth/identity-gcp-auth-types.ts
index 60ab36b58a..45e64b24b6 100644
--- a/backend/src/services/identity-gcp-auth/identity-gcp-auth-types.ts
+++ b/backend/src/services/identity-gcp-auth/identity-gcp-auth-types.ts
@@ -76,3 +76,7 @@ export type TDecodedGcpIamAuthJwt = {
     [key: string]: string;
   };
 };
+
+export type TRevokeGcpAuthDTO = {
+  identityId: string;
+} & Omit<TProjectPermission, "projectId">;
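+
+// Illustrative sketch (not part of this change): every revoke* method added in this PR
+// applies the same two-step guard before deleting an auth method:
+//
+//   ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Edit, OrgPermissionSubjects.Identity);
+//   if (!isAtLeastAsPrivileged(permission, rolePermission)) throw new ForbiddenRequestError({ ... });
+//
+// i.e. the actor needs org-level Edit on identities and a permission set at least as
+// privileged as the target identity's, so a less-privileged actor cannot strip auth
+// from a more-privileged machine identity.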
diff --git a/backend/src/services/identity-kubernetes-auth/identity-kubernetes-auth-service.ts b/backend/src/services/identity-kubernetes-auth/identity-kubernetes-auth-service.ts
index 8ee8c36bdd..a99ae7c185 100644
--- a/backend/src/services/identity-kubernetes-auth/identity-kubernetes-auth-service.ts
+++ b/backend/src/services/identity-kubernetes-auth/identity-kubernetes-auth-service.ts
@@ -1,5 +1,5 @@
 import { ForbiddenError } from "@casl/ability";
-import axios from "axios";
+import axios, { AxiosError } from "axios";
 import https from "https";
 import jwt from "jsonwebtoken";
 
@@ -7,6 +7,7 @@ import { IdentityAuthMethod, SecretKeyEncoding, TIdentityKubernetesAuthsUpdate } from "@app/db/schemas";
 import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
 import { OrgPermissionActions, OrgPermissionSubjects } from "@app/ee/services/permission/org-permission";
 import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
+import { isAtLeastAsPrivileged } from "@app/lib/casl";
 import { getConfig } from "@app/lib/config/env";
 import {
   decryptSymmetric,
@@ -16,12 +17,11 @@ import {
   infisicalSymmetricDecrypt,
   infisicalSymmetricEncypt
 } from "@app/lib/crypto/encryption";
-import { BadRequestError, UnauthorizedError } from "@app/lib/errors";
+import { BadRequestError, ForbiddenRequestError, NotFoundError, UnauthorizedError } from "@app/lib/errors";
 import { extractIPDetails, isValidIpOrCidr } from "@app/lib/ip";
 import { TOrgBotDALFactory } from "@app/services/org/org-bot-dal";
 
-import { AuthTokenType } from "../auth/auth-type";
-import { TIdentityDALFactory } from "../identity/identity-dal";
+import { ActorType, AuthTokenType } from "../auth/auth-type";
 import { TIdentityOrgDALFactory } from "../identity/identity-org-dal";
 import { TIdentityAccessTokenDALFactory } from "../identity-access-token/identity-access-token-dal";
 import { TIdentityAccessTokenJwtPayload } from "../identity-access-token/identity-access-token-types";
@@ -32,17 +32,17 @@ import {
   TAttachKubernetesAuthDTO,
   TCreateTokenReviewResponse,
   TGetKubernetesAuthDTO,
   TLoginKubernetesAuthDTO,
+  TRevokeKubernetesAuthDTO,
   TUpdateKubernetesAuthDTO
 } from "./identity-kubernetes-auth-types";
 
 type TIdentityKubernetesAuthServiceFactoryDep = {
   identityKubernetesAuthDAL: Pick<
     TIdentityKubernetesAuthDALFactory,
-    "create" | "findOne" | "transaction" | "updateById"
+    "create" | "findOne" | "transaction" | "updateById" | "delete"
   >;
   identityAccessTokenDAL: Pick<TIdentityAccessTokenDALFactory, "create">;
   identityOrgMembershipDAL: Pick<TIdentityOrgDALFactory, "findOne">;
-  identityDAL: Pick<TIdentityDALFactory, "updateById">;
   orgBotDAL: Pick<TOrgBotDALFactory, "findOne" | "transaction" | "create">;
   permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
   licenseService: Pick<TLicenseServiceFactory, "getPlan">;
@@ -54,22 +54,34 @@ export const identityKubernetesAuthServiceFactory = ({
   identityKubernetesAuthDAL,
   identityOrgMembershipDAL,
   identityAccessTokenDAL,
-  identityDAL,
   orgBotDAL,
   permissionService,
   licenseService
 }: TIdentityKubernetesAuthServiceFactoryDep) => {
   const login = async ({ identityId, jwt: serviceAccountJwt }: TLoginKubernetesAuthDTO) => {
     const identityKubernetesAuth = await identityKubernetesAuthDAL.findOne({ identityId });
-    if (!identityKubernetesAuth) throw new UnauthorizedError();
+    if (!identityKubernetesAuth) {
+      throw new NotFoundError({
+        message: "Kubernetes auth method not found for identity, did you configure Kubernetes auth?"
+      });
+    }
 
     const identityMembershipOrg = await identityOrgMembershipDAL.findOne({
       identityId: identityKubernetesAuth.identityId
     });
-    if (!identityMembershipOrg) throw new BadRequestError({ message: "Failed to find identity" });
+    if (!identityMembershipOrg) {
+      throw new NotFoundError({
+        message: `Identity organization membership for identity with ID '${identityKubernetesAuth.identityId}' not found`
+      });
+    }
 
     const orgBot = await orgBotDAL.findOne({ orgId: identityMembershipOrg.orgId });
-    if (!orgBot) throw new BadRequestError({ message: "Org bot not found", name: "OrgBotNotFound" });
+    if (!orgBot) {
+      throw new NotFoundError({
+        message: `Organization bot not found for organization with ID ${identityMembershipOrg.orgId}`,
+        name: "OrgBotNotFound"
+      });
+    }
 
     const key = infisicalSymmetricDecrypt({
       ciphertext: orgBot.encryptedSymmetricKey,
@@ -101,31 +113,54 @@ export const identityKubernetesAuthServiceFactory = ({
       });
     }
 
-    const { data }: { data: TCreateTokenReviewResponse } = await axios.post(
-      `${identityKubernetesAuth.kubernetesHost}/apis/authentication.k8s.io/v1/tokenreviews`,
-      {
-        apiVersion: "authentication.k8s.io/v1",
-        kind: "TokenReview",
-        spec: {
-          token: serviceAccountJwt
-        }
-      },
-      {
-        headers: {
-          "Content-Type": "application/json",
-          Authorization: `Bearer ${tokenReviewerJwt}`
-        },
-        httpsAgent: new https.Agent({
-          ca: caCert,
-          rejectUnauthorized: !!caCert
-        })
-      }
-    );
-
-    if ("error" in data.status) throw new UnauthorizedError({ message: data.status.error });
+    const { data } = await axios
+      .post<TCreateTokenReviewResponse>(
+        `${identityKubernetesAuth.kubernetesHost}/apis/authentication.k8s.io/v1/tokenreviews`,
+        {
+          apiVersion: "authentication.k8s.io/v1",
+          kind: "TokenReview",
+          spec: {
+            token: serviceAccountJwt
+          }
+        },
+        {
+          headers: {
+            "Content-Type": "application/json",
+            Authorization: `Bearer ${tokenReviewerJwt}`
+          },
+
+          // if a CA cert is configured, reject untrusted certificates
+          httpsAgent: new https.Agent({
+            ca: caCert,
+            rejectUnauthorized: !!caCert
+          })
+        }
+      )
+      .catch((err) => {
+        if (err instanceof AxiosError) {
+          if (err.response) {
+            const { message } = err?.response?.data as unknown as { message?: string };
+
+            if (message) {
+              throw new UnauthorizedError({
+                message,
+                name: "KubernetesTokenReviewRequestError"
+              });
+            }
+          }
+        }
+        throw err;
+      });
+
+    if ("error" in data.status)
+      throw new UnauthorizedError({ message: data.status.error, name: "KubernetesTokenReviewError" });
 
     // check the response to determine if the token is valid
-    if (!(data.status && data.status.authenticated)) throw new UnauthorizedError();
+    if (!(data.status && data.status.authenticated))
+      throw new UnauthorizedError({
+        message: "Kubernetes token not authenticated",
+        name: "KubernetesTokenReviewError"
+      });
 
     const { namespace: targetNamespace, name: targetName } = extractK8sUsername(data.status.user.username);
 
@@ -137,7 +172,10 @@ export const identityKubernetesAuthServiceFactory = ({
         .map((namespace) => namespace.trim())
         .some((namespace) => namespace === targetNamespace);
 
-      if (!isNamespaceAllowed) throw new UnauthorizedError();
+      if (!isNamespaceAllowed)
+        throw new UnauthorizedError({
+          message: "Access denied: K8s namespace not allowed."
+        });
     }
 
     if (identityKubernetesAuth.allowedNames) {
@@ -148,7 +186,10 @@ export const identityKubernetesAuthServiceFactory = ({
         .map((name) => name.trim())
         .some((name) => name === targetName);
 
-      if (!isNameAllowed) throw new UnauthorizedError();
+      if (!isNameAllowed)
+        throw new UnauthorizedError({
+          message: "Access denied: K8s name not allowed."
+        });
     }
 
     if (identityKubernetesAuth.allowedAudience) {
@@ -157,7 +198,10 @@ export const identityKubernetesAuthServiceFactory = ({
         (audience) => audience === identityKubernetesAuth.allowedAudience
       );
 
-      if (!isAudienceAllowed) throw new UnauthorizedError();
+      if (!isAudienceAllowed)
+        throw new UnauthorizedError({
+          message: "Access denied: K8s audience not allowed."
+        });
     }
 
     const identityAccessToken = await identityKubernetesAuthDAL.transaction(async (tx) => {
@@ -168,7 +212,8 @@ export const identityKubernetesAuthServiceFactory = ({
           accessTokenTTL: identityKubernetesAuth.accessTokenTTL,
           accessTokenMaxTTL: identityKubernetesAuth.accessTokenMaxTTL,
           accessTokenNumUses: 0,
-          accessTokenNumUsesLimit: identityKubernetesAuth.accessTokenNumUsesLimit
+          accessTokenNumUsesLimit: identityKubernetesAuth.accessTokenNumUsesLimit,
+          authMethod: IdentityAuthMethod.KUBERNETES_AUTH
         },
         tx
       );
@@ -212,11 +257,13 @@ export const identityKubernetesAuthServiceFactory = ({
     actorOrgId
   }: TAttachKubernetesAuthDTO) => {
     const identityMembershipOrg = await identityOrgMembershipDAL.findOne({ identityId });
-    if (!identityMembershipOrg) throw new BadRequestError({ message: "Failed to find identity" });
-    if (identityMembershipOrg.identity.authMethod)
+    if (!identityMembershipOrg) throw new NotFoundError({ message: `Failed to find identity with ID ${identityId}` });
+
+    if (identityMembershipOrg.identity.authMethods.includes(IdentityAuthMethod.KUBERNETES_AUTH)) {
       throw new BadRequestError({
         message: "Failed to add Kubernetes Auth to already configured identity"
       });
+    }
 
     if (accessTokenMaxTTL > 0 && accessTokenTTL > accessTokenMaxTTL) {
       throw new BadRequestError({ message: "Access token TTL cannot be greater than max TTL" });
@@ -325,13 +372,6 @@ export const identityKubernetesAuthServiceFactory = ({
         },
         tx
       );
-      await identityDAL.updateById(
-        identityMembershipOrg.identityId,
-        {
-          authMethod: IdentityAuthMethod.KUBERNETES_AUTH
-        },
-        tx
-      );
       return doc;
     });
 
@@ -356,11 +396,13 @@ export const identityKubernetesAuthServiceFactory = ({
     actorOrgId
   }: TUpdateKubernetesAuthDTO) => {
     const identityMembershipOrg = await identityOrgMembershipDAL.findOne({ identityId });
-    if (!identityMembershipOrg) throw new BadRequestError({ message: "Failed to find identity" });
-    if (identityMembershipOrg.identity?.authMethod !== IdentityAuthMethod.KUBERNETES_AUTH)
+    if (!identityMembershipOrg) throw new NotFoundError({ message: `Failed to find identity with ID ${identityId}` });
+
+    if (!identityMembershipOrg.identity.authMethods.includes(IdentityAuthMethod.KUBERNETES_AUTH)) {
       throw new BadRequestError({
         message: "Failed to update Kubernetes Auth"
       });
+    }
 
     const identityKubernetesAuth = await identityKubernetesAuthDAL.findOne({ identityId });
@@ -413,8 +455,12 @@ export const identityKubernetesAuthServiceFactory = ({
     };
 
     const orgBot = await orgBotDAL.findOne({ orgId: identityMembershipOrg.orgId });
-    if (!orgBot) throw new BadRequestError({ message: "Org bot not found", name: "OrgBotNotFound" });
-
+    if (!orgBot) {
+      throw new NotFoundError({
+        message: `Organization bot not found for organization with ID ${identityMembershipOrg.orgId}`,
+        name: "OrgBotNotFound"
+      });
+    }
     const key = infisicalSymmetricDecrypt({
       ciphertext: orgBot.encryptedSymmetricKey,
       iv: orgBot.symmetricKeyIV,
@@ -442,7 +488,34 @@ export const identityKubernetesAuthServiceFactory = ({
 
     const updatedKubernetesAuth = await identityKubernetesAuthDAL.updateById(identityKubernetesAuth.id, updateQuery);
 
-    return { ...updatedKubernetesAuth, orgId: identityMembershipOrg.orgId };
+    const updatedCACert =
+      updatedKubernetesAuth.encryptedCaCert && updatedKubernetesAuth.caCertIV && updatedKubernetesAuth.caCertTag
+        ? decryptSymmetric({
+            ciphertext: updatedKubernetesAuth.encryptedCaCert,
+            iv: updatedKubernetesAuth.caCertIV,
+            tag: updatedKubernetesAuth.caCertTag,
+            key
+          })
+        : "";
+
+    const updatedTokenReviewerJwt =
+      updatedKubernetesAuth.encryptedTokenReviewerJwt &&
+      updatedKubernetesAuth.tokenReviewerJwtIV &&
+      updatedKubernetesAuth.tokenReviewerJwtTag
+        ? decryptSymmetric({
+            ciphertext: updatedKubernetesAuth.encryptedTokenReviewerJwt,
+            iv: updatedKubernetesAuth.tokenReviewerJwtIV,
+            tag: updatedKubernetesAuth.tokenReviewerJwtTag,
+            key
+          })
+        : "";
+
+    return {
+      ...updatedKubernetesAuth,
+      orgId: identityMembershipOrg.orgId,
+      caCert: updatedCACert,
+      tokenReviewerJwt: updatedTokenReviewerJwt
+    };
   };
 
   const getKubernetesAuth = async ({
@@ -453,12 +526,13 @@ export const identityKubernetesAuthServiceFactory = ({
     actorOrgId
   }: TGetKubernetesAuthDTO) => {
     const identityMembershipOrg = await identityOrgMembershipDAL.findOne({ identityId });
-    if (!identityMembershipOrg) throw new BadRequestError({ message: "Failed to find identity" });
-    if (identityMembershipOrg.identity?.authMethod !== IdentityAuthMethod.KUBERNETES_AUTH)
+    if (!identityMembershipOrg) throw new NotFoundError({ message: `Failed to find identity with ID ${identityId}` });
+
+    if (!identityMembershipOrg.identity.authMethods.includes(IdentityAuthMethod.KUBERNETES_AUTH)) {
       throw new BadRequestError({
         message: "The identity does not have Kubernetes Auth attached"
       });
-
+    }
     const identityKubernetesAuth = await identityKubernetesAuthDAL.findOne({ identityId });
 
     const { permission } = await permissionService.getOrgPermission(
@@ -471,7 +545,11 @@ export const identityKubernetesAuthServiceFactory = ({
     ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Read, OrgPermissionSubjects.Identity);
 
     const orgBot = await orgBotDAL.findOne({ orgId: identityMembershipOrg.orgId });
-    if (!orgBot) throw new BadRequestError({ message: "Org bot not found", name: "OrgBotNotFound" });
+    if (!orgBot)
+      throw new NotFoundError({
+        message: `Organization bot not found for organization with ID ${identityMembershipOrg.orgId}`,
+        name: "OrgBotNotFound"
+      });
 
     const key = infisicalSymmetricDecrypt({
       ciphertext: orgBot.encryptedSymmetricKey,
@@ -506,10 +584,54 @@ export const identityKubernetesAuthServiceFactory = ({
     return { ...identityKubernetesAuth, caCert, tokenReviewerJwt, orgId: identityMembershipOrg.orgId };
   };
 
+  const revokeIdentityKubernetesAuth = async ({
+    identityId,
+    actorId,
+    actor,
+    actorAuthMethod,
+    actorOrgId
+  }: TRevokeKubernetesAuthDTO) => {
+    const identityMembershipOrg = await identityOrgMembershipDAL.findOne({ identityId });
+    if (!identityMembershipOrg) throw new NotFoundError({ message: `Failed to find identity with ID ${identityId}` });
+
+    if (!identityMembershipOrg.identity.authMethods.includes(IdentityAuthMethod.KUBERNETES_AUTH)) {
+      throw new BadRequestError({
+        message: "The identity does not have Kubernetes Auth attached"
+      });
+    }
+    const { permission } = await permissionService.getOrgPermission(
+      actor,
+      actorId,
+      identityMembershipOrg.orgId,
+      actorAuthMethod,
+      actorOrgId
+    );
+    ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Edit, OrgPermissionSubjects.Identity);
+
+    const { permission: rolePermission } = await permissionService.getOrgPermission(
+      ActorType.IDENTITY,
+      identityMembershipOrg.identityId,
+      identityMembershipOrg.orgId,
+      actorAuthMethod,
+      actorOrgId
+    );
+    if (!isAtLeastAsPrivileged(permission, rolePermission))
+      throw new ForbiddenRequestError({
+        message: "Failed to revoke Kubernetes Auth of identity with more privileged role"
+      });
+
+    const revokedIdentityKubernetesAuth = await identityKubernetesAuthDAL.transaction(async (tx) => {
+      const deletedKubernetesAuth = await identityKubernetesAuthDAL.delete({ identityId }, tx);
+      return { ...deletedKubernetesAuth?.[0], orgId: identityMembershipOrg.orgId };
+    });
+    return revokedIdentityKubernetesAuth;
+  };
+
   return {
     login,
     attachKubernetesAuth,
     updateKubernetesAuth,
-    getKubernetesAuth
+    getKubernetesAuth,
+    revokeIdentityKubernetesAuth
   };
 };
diff --git a/backend/src/services/identity-kubernetes-auth/identity-kubernetes-auth-types.ts b/backend/src/services/identity-kubernetes-auth/identity-kubernetes-auth-types.ts
index dbb42dce89..f1cde2be9a 100644
--- a/backend/src/services/identity-kubernetes-auth/identity-kubernetes-auth-types.ts
+++ b/backend/src/services/identity-kubernetes-auth/identity-kubernetes-auth-types.ts
@@ -59,3 +59,7 @@ export type TCreateTokenReviewResponse = {
   };
   status: TCreateTokenReviewSuccessResponse | TCreateTokenReviewErrorResponse;
 };
+
+export type TRevokeKubernetesAuthDTO = {
+  identityId: string;
+} & Omit<TProjectPermission, "projectId">;
diff --git a/backend/src/services/identity-oidc-auth/identity-oidc-auth-dal.ts b/backend/src/services/identity-oidc-auth/identity-oidc-auth-dal.ts
new file mode 100644
index 0000000000..1d8ab3c137
--- /dev/null
+++ b/backend/src/services/identity-oidc-auth/identity-oidc-auth-dal.ts
@@ -0,0 +1,10 @@
+import { TDbClient } from "@app/db";
+import { TableName } from "@app/db/schemas";
+import { ormify } from "@app/lib/knex";
+
+export type TIdentityOidcAuthDALFactory = ReturnType<typeof identityOidcAuthDALFactory>;
+
+export const identityOidcAuthDALFactory = (db: TDbClient) => {
+  const oidcAuthOrm = ormify(db, TableName.IdentityOidcAuth);
+  return oidcAuthOrm;
+};
diff --git a/backend/src/services/identity-oidc-auth/identity-oidc-auth-fns.ts b/backend/src/services/identity-oidc-auth/identity-oidc-auth-fns.ts
new file mode 100644
index 0000000000..c6d65d836a
--- /dev/null
+++ b/backend/src/services/identity-oidc-auth/identity-oidc-auth-fns.ts
@@ -0,0 +1,4 @@
+import picomatch from "picomatch";
+
+export const doesFieldValueMatchOidcPolicy = (fieldValue: string, policyValue: string) =>
+  policyValue === fieldValue || picomatch.isMatch(fieldValue, policyValue);
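+
+// Illustrative usage (values assumed): a policy value is either an exact string or a
+// picomatch glob, so both of the following pass:
+//
+//   doesFieldValueMatchOidcPolicy("ci-runner@acme.iam.gserviceaccount.com", "ci-runner@acme.iam.gserviceaccount.com"); // exact
+//   doesFieldValueMatchOidcPolicy("ci-runner@acme.iam.gserviceaccount.com", "ci-*@acme.iam.gserviceaccount.com");      // glob
+//
+// while a non-matching value such as "deploy-bot@acme.iam.gserviceaccount.com" against
+// "ci-*@acme.iam.gserviceaccount.com" is rejected.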
diff --git a/backend/src/services/identity-oidc-auth/identity-oidc-auth-service.ts b/backend/src/services/identity-oidc-auth/identity-oidc-auth-service.ts
new file mode 100644
index 0000000000..02440ebe7e
--- /dev/null
+++ b/backend/src/services/identity-oidc-auth/identity-oidc-auth-service.ts
@@ -0,0 +1,555 @@
+import { ForbiddenError } from "@casl/ability";
+import axios from "axios";
+import https from "https";
+import jwt from "jsonwebtoken";
+import { JwksClient } from "jwks-rsa";
+
+import { IdentityAuthMethod, SecretKeyEncoding, TIdentityOidcAuthsUpdate } from "@app/db/schemas";
+import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
+import { OrgPermissionActions, OrgPermissionSubjects } from "@app/ee/services/permission/org-permission";
+import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
+import { isAtLeastAsPrivileged } from "@app/lib/casl";
+import { getConfig } from "@app/lib/config/env";
+import { generateAsymmetricKeyPair } from "@app/lib/crypto";
+import {
+  decryptSymmetric,
+  encryptSymmetric,
+  generateSymmetricKey,
+  infisicalSymmetricDecrypt,
+  infisicalSymmetricEncypt
+} from "@app/lib/crypto/encryption";
+import { BadRequestError, ForbiddenRequestError, NotFoundError, UnauthorizedError } from "@app/lib/errors";
+import { extractIPDetails, isValidIpOrCidr } from "@app/lib/ip";
+
+import { ActorType, AuthTokenType } from "../auth/auth-type";
+import { TIdentityOrgDALFactory } from "../identity/identity-org-dal";
+import { TIdentityAccessTokenDALFactory } from "../identity-access-token/identity-access-token-dal";
+import { TIdentityAccessTokenJwtPayload } from "../identity-access-token/identity-access-token-types";
+import { TOrgBotDALFactory } from "../org/org-bot-dal";
+import { TIdentityOidcAuthDALFactory } from "./identity-oidc-auth-dal";
+import { doesFieldValueMatchOidcPolicy } from "./identity-oidc-auth-fns";
+import {
+  TAttachOidcAuthDTO,
+  TGetOidcAuthDTO,
+  TLoginOidcAuthDTO,
+  TRevokeOidcAuthDTO,
+  TUpdateOidcAuthDTO
+} from "./identity-oidc-auth-types";
+
+type TIdentityOidcAuthServiceFactoryDep = {
+  identityOidcAuthDAL: TIdentityOidcAuthDALFactory;
+  identityOrgMembershipDAL: Pick<TIdentityOrgDALFactory, "findOne">;
+  identityAccessTokenDAL: Pick<TIdentityAccessTokenDALFactory, "create">;
+  permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
+  licenseService: Pick<TLicenseServiceFactory, "getPlan">;
+  orgBotDAL: Pick<TOrgBotDALFactory, "findOne" | "transaction" | "create">;
+};
+
+export type TIdentityOidcAuthServiceFactory = ReturnType<typeof identityOidcAuthServiceFactory>;
+
+export const identityOidcAuthServiceFactory = ({
+  identityOidcAuthDAL,
+  identityOrgMembershipDAL,
+  permissionService,
+  licenseService,
+  identityAccessTokenDAL,
+  orgBotDAL
+}: TIdentityOidcAuthServiceFactoryDep) => {
+  const login = async ({ identityId, jwt: oidcJwt }: TLoginOidcAuthDTO) => {
+    const identityOidcAuth = await identityOidcAuthDAL.findOne({ identityId });
+    if (!identityOidcAuth) {
+      throw new NotFoundError({ message: "OIDC auth method not found for identity, did you configure OIDC auth?" });
+    }
+
+    const identityMembershipOrg = await identityOrgMembershipDAL.findOne({
+      identityId: identityOidcAuth.identityId
+    });
+    if (!identityMembershipOrg) {
+      throw new NotFoundError({
+        message: `Identity organization membership for identity with ID '${identityOidcAuth.identityId}' not found`
+      });
+    }
+
+    const orgBot = await orgBotDAL.findOne({ orgId: identityMembershipOrg.orgId });
+    if (!orgBot) {
+      throw new NotFoundError({
+        message: `Organization bot not found for organization with ID '${identityMembershipOrg.orgId}'`,
+        name: "OrgBotNotFound"
+      });
+    }
+
+    const key = infisicalSymmetricDecrypt({
+      ciphertext: orgBot.encryptedSymmetricKey,
+      iv: orgBot.symmetricKeyIV,
+      tag: orgBot.symmetricKeyTag,
+      keyEncoding: orgBot.symmetricKeyKeyEncoding as SecretKeyEncoding
+    });
+
+    const { encryptedCaCert, caCertIV, caCertTag } = identityOidcAuth;
+
+    let caCert = "";
+    if (encryptedCaCert && caCertIV && caCertTag) {
+      caCert = decryptSymmetric({
+        ciphertext: encryptedCaCert,
+        iv: caCertIV,
+        tag: caCertTag,
+        key
+      });
+    }
+
+    const requestAgent = new https.Agent({ ca: caCert, rejectUnauthorized: !!caCert });
+    const { data: discoveryDoc } = await axios.get<{ jwks_uri: string }>(
+      `${identityOidcAuth.oidcDiscoveryUrl}/.well-known/openid-configuration`,
+      {
+        httpsAgent: requestAgent
+      }
+    );
+    const jwksUri = discoveryDoc.jwks_uri;
+
+    const decodedToken = jwt.decode(oidcJwt, { complete: true });
+    if (!decodedToken) {
+      throw new UnauthorizedError({
+        message: "Invalid JWT"
+      });
+    }
+
+    const client = new JwksClient({
+      jwksUri,
+      requestAgent
+    });
+
+    const { kid } = decodedToken.header;
+    const oidcSigningKey = await client.getSigningKey(kid);
+
+    let tokenData: Record<string, string>;
+    try {
+      tokenData = jwt.verify(oidcJwt, oidcSigningKey.getPublicKey(), {
+        issuer: identityOidcAuth.boundIssuer
+      }) as Record<string, string>;
+    } catch (error) {
+      if (error instanceof jwt.JsonWebTokenError) {
+        throw new UnauthorizedError({
+          message: `Access denied: ${error.message}`
+        });
+      }
+
+      throw error;
+    }
+
+    if (identityOidcAuth.boundSubject) {
+      if (!doesFieldValueMatchOidcPolicy(tokenData.sub, identityOidcAuth.boundSubject)) {
+        throw new ForbiddenRequestError({
+          message: "Access denied: OIDC subject not allowed."
+        });
+      }
+    }
+
+    if (identityOidcAuth.boundAudiences) {
+      if (
+        !identityOidcAuth.boundAudiences
+          .split(", ")
+          .some((policyValue) => doesFieldValueMatchOidcPolicy(tokenData.aud, policyValue))
+      ) {
+        throw new UnauthorizedError({
+          message: "Access denied: OIDC audience not allowed."
+        });
+      }
+    }
+
+    if (identityOidcAuth.boundClaims) {
+      Object.keys(identityOidcAuth.boundClaims).forEach((claimKey) => {
+        const claimValue = (identityOidcAuth.boundClaims as Record<string, string>)[claimKey];
+        // handle both single and multi-valued claims
+        if (
+          !claimValue.split(", ").some((claimEntry) => doesFieldValueMatchOidcPolicy(tokenData[claimKey], claimEntry))
+        ) {
+          throw new UnauthorizedError({
+            message: "Access denied: OIDC claim not allowed."
+          });
+        }
+      });
+    }
+
+    const identityAccessToken = await identityOidcAuthDAL.transaction(async (tx) => {
+      const newToken = await identityAccessTokenDAL.create(
+        {
+          identityId: identityOidcAuth.identityId,
+          isAccessTokenRevoked: false,
+          accessTokenTTL: identityOidcAuth.accessTokenTTL,
+          accessTokenMaxTTL: identityOidcAuth.accessTokenMaxTTL,
+          accessTokenNumUses: 0,
+          accessTokenNumUsesLimit: identityOidcAuth.accessTokenNumUsesLimit,
+          authMethod: IdentityAuthMethod.OIDC_AUTH
+        },
+        tx
+      );
+      return newToken;
+    });
+
+    const appCfg = getConfig();
+    const accessToken = jwt.sign(
+      {
+        identityId: identityOidcAuth.identityId,
+        identityAccessTokenId: identityAccessToken.id,
+        authTokenType: AuthTokenType.IDENTITY_ACCESS_TOKEN
+      } as TIdentityAccessTokenJwtPayload,
+      appCfg.AUTH_SECRET,
+      {
+        expiresIn:
+          Number(identityAccessToken.accessTokenMaxTTL) === 0
+            ? undefined
+            : Number(identityAccessToken.accessTokenMaxTTL)
+      }
+    );
+
+    return { accessToken, identityOidcAuth, identityAccessToken, identityMembershipOrg };
+  };
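+
+  // Illustrative flow (route path assumed from existing auth routes, not shown in this diff):
+  // a workload first obtains an OIDC JWT from its provider, then exchanges it:
+  //
+  //   POST /api/v1/auth/oidc-auth/login  { identityId, jwt }
+  //   -> { accessToken, ... }
+  //
+  // The returned accessToken is then sent as a bearer token and is bounded by the
+  // accessTokenTTL / accessTokenMaxTTL / accessTokenNumUsesLimit values above.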
+
+  const attachOidcAuth = async ({
+    identityId,
+    oidcDiscoveryUrl,
+    caCert,
+    boundIssuer,
+    boundAudiences,
+    boundClaims,
+    boundSubject,
+    accessTokenTTL,
+    accessTokenMaxTTL,
+    accessTokenNumUsesLimit,
+    accessTokenTrustedIps,
+    actorId,
+    actorAuthMethod,
+    actor,
+    actorOrgId
+  }: TAttachOidcAuthDTO) => {
+    const identityMembershipOrg = await identityOrgMembershipDAL.findOne({ identityId });
+    if (!identityMembershipOrg) {
+      throw new NotFoundError({ message: `Failed to find identity with ID ${identityId}` });
+    }
+    if (identityMembershipOrg.identity.authMethods.includes(IdentityAuthMethod.OIDC_AUTH)) {
+      throw new BadRequestError({
+        message: "Failed to add OIDC Auth to already configured identity"
+      });
+    }
+
+    if (accessTokenMaxTTL > 0 && accessTokenTTL > accessTokenMaxTTL) {
+      throw new BadRequestError({ message: "Access token TTL cannot be greater than max TTL" });
+    }
+
+    const { permission } = await permissionService.getOrgPermission(
+      actor,
+      actorId,
+      identityMembershipOrg.orgId,
+      actorAuthMethod,
+      actorOrgId
+    );
+
+    ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Create, OrgPermissionSubjects.Identity);
+
+    const plan = await licenseService.getPlan(identityMembershipOrg.orgId);
+    const reformattedAccessTokenTrustedIps = accessTokenTrustedIps.map((accessTokenTrustedIp) => {
+      if (
+        !plan.ipAllowlisting &&
+        accessTokenTrustedIp.ipAddress !== "0.0.0.0/0" &&
+        accessTokenTrustedIp.ipAddress !== "::/0"
+      )
+        throw new BadRequestError({
+          message:
+            "Failed to add IP access range to access token due to plan restriction. Upgrade plan to add IP access range."
+        });
+      if (!isValidIpOrCidr(accessTokenTrustedIp.ipAddress))
+        throw new BadRequestError({
+          message: "The IP is not a valid IPv4, IPv6, or CIDR block"
+        });
+      return extractIPDetails(accessTokenTrustedIp.ipAddress);
+    });
+
+    const orgBot = await orgBotDAL.transaction(async (tx) => {
+      const doc = await orgBotDAL.findOne({ orgId: identityMembershipOrg.orgId }, tx);
+      if (doc) return doc;
+
+      const { privateKey, publicKey } = generateAsymmetricKeyPair();
+      const key = generateSymmetricKey();
+      const {
+        ciphertext: encryptedPrivateKey,
+        iv: privateKeyIV,
+        tag: privateKeyTag,
+        encoding: privateKeyKeyEncoding,
+        algorithm: privateKeyAlgorithm
+      } = infisicalSymmetricEncypt(privateKey);
+      const {
+        ciphertext: encryptedSymmetricKey,
+        iv: symmetricKeyIV,
+        tag: symmetricKeyTag,
+        encoding: symmetricKeyKeyEncoding,
+        algorithm: symmetricKeyAlgorithm
+      } = infisicalSymmetricEncypt(key);
+
+      return orgBotDAL.create(
+        {
+          name: "Infisical org bot",
+          publicKey,
+          privateKeyIV,
+          encryptedPrivateKey,
+          symmetricKeyIV,
+          symmetricKeyTag,
+          encryptedSymmetricKey,
+          symmetricKeyAlgorithm,
+          orgId: identityMembershipOrg.orgId,
+          privateKeyTag,
+          privateKeyAlgorithm,
+          privateKeyKeyEncoding,
+          symmetricKeyKeyEncoding
+        },
+        tx
+      );
+    });
+
+    const key = infisicalSymmetricDecrypt({
+      ciphertext: orgBot.encryptedSymmetricKey,
+      iv: orgBot.symmetricKeyIV,
+      tag: orgBot.symmetricKeyTag,
+      keyEncoding: orgBot.symmetricKeyKeyEncoding as SecretKeyEncoding
+    });
+
+    const { ciphertext: encryptedCaCert, iv: caCertIV, tag: caCertTag } = encryptSymmetric(caCert, key);
+
+    const identityOidcAuth = await identityOidcAuthDAL.transaction(async (tx) => {
+      const doc = await identityOidcAuthDAL.create(
+        {
+          identityId: identityMembershipOrg.identityId,
+          oidcDiscoveryUrl,
+          encryptedCaCert,
+          caCertIV,
+          caCertTag,
+          boundIssuer,
+          boundAudiences,
+          boundClaims,
+          boundSubject,
+          accessTokenMaxTTL,
+          accessTokenTTL,
+          accessTokenNumUsesLimit,
+          accessTokenTrustedIps: JSON.stringify(reformattedAccessTokenTrustedIps)
+        },
+        tx
+      );
+      return doc;
+    });
+    return { ...identityOidcAuth, orgId: identityMembershipOrg.orgId, caCert };
+  };
+
+  const updateOidcAuth = async ({
+    identityId,
+    oidcDiscoveryUrl,
+    caCert,
+    boundIssuer,
+    boundAudiences,
+    boundClaims,
+    boundSubject,
+    accessTokenTTL,
+    accessTokenMaxTTL,
+    accessTokenNumUsesLimit,
+    accessTokenTrustedIps,
+    actorId,
+    actorAuthMethod,
+    actor,
+    actorOrgId
+  }: TUpdateOidcAuthDTO) => {
+    const identityMembershipOrg = await identityOrgMembershipDAL.findOne({ identityId });
+    if (!identityMembershipOrg) throw new NotFoundError({ message: `Failed to find identity with ID ${identityId}` });
+
+    if (!identityMembershipOrg.identity.authMethods.includes(IdentityAuthMethod.OIDC_AUTH)) {
+      throw new BadRequestError({
+        message: "Failed to update OIDC Auth"
+      });
+    }
+
+    const identityOidcAuth = await identityOidcAuthDAL.findOne({ identityId });
+
+    if (
+      (accessTokenMaxTTL || identityOidcAuth.accessTokenMaxTTL) > 0 &&
+      (accessTokenTTL || identityOidcAuth.accessTokenTTL) > (accessTokenMaxTTL || identityOidcAuth.accessTokenMaxTTL)
+    ) {
+      throw new BadRequestError({ message: "Access token TTL cannot be greater than max TTL" });
+    }
+
+    const { permission } = await permissionService.getOrgPermission(
+      actor,
+      actorId,
+      identityMembershipOrg.orgId,
+      actorAuthMethod,
+      actorOrgId
+    );
+
+    ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Edit, OrgPermissionSubjects.Identity);
+
+    const plan = await licenseService.getPlan(identityMembershipOrg.orgId);
+    const reformattedAccessTokenTrustedIps = accessTokenTrustedIps?.map((accessTokenTrustedIp) => {
+      if (
+        !plan.ipAllowlisting &&
+        accessTokenTrustedIp.ipAddress !== "0.0.0.0/0" &&
+        accessTokenTrustedIp.ipAddress !== "::/0"
+      )
+        throw new BadRequestError({
+          message:
+            "Failed to add IP access range to access token due to plan restriction. Upgrade plan to add IP access range."
+        });
+      if (!isValidIpOrCidr(accessTokenTrustedIp.ipAddress))
+        throw new BadRequestError({
+          message: "The IP is not a valid IPv4, IPv6, or CIDR block"
+        });
+      return extractIPDetails(accessTokenTrustedIp.ipAddress);
+    });
+
+    const updateQuery: TIdentityOidcAuthsUpdate = {
+      oidcDiscoveryUrl,
+      boundIssuer,
+      boundAudiences,
+      boundClaims,
+      boundSubject,
+      accessTokenMaxTTL,
+      accessTokenTTL,
+      accessTokenNumUsesLimit,
+      accessTokenTrustedIps: reformattedAccessTokenTrustedIps
+        ? JSON.stringify(reformattedAccessTokenTrustedIps)
+        : undefined
+    };
+
+    const orgBot = await orgBotDAL.findOne({ orgId: identityMembershipOrg.orgId });
+    if (!orgBot) {
+      throw new NotFoundError({
+        message: `Organization bot not found for organization with ID '${identityMembershipOrg.orgId}'`,
+        name: "OrgBotNotFound"
+      });
+    }
+
+    const key = infisicalSymmetricDecrypt({
+      ciphertext: orgBot.encryptedSymmetricKey,
+      iv: orgBot.symmetricKeyIV,
+      tag: orgBot.symmetricKeyTag,
+      keyEncoding: orgBot.symmetricKeyKeyEncoding as SecretKeyEncoding
+    });
+
+    if (caCert !== undefined) {
+      const { ciphertext: encryptedCACert, iv: caCertIV, tag: caCertTag } = encryptSymmetric(caCert, key);
+      updateQuery.encryptedCaCert = encryptedCACert;
+      updateQuery.caCertIV = caCertIV;
+      updateQuery.caCertTag = caCertTag;
+    }
+
+    const updatedOidcAuth = await identityOidcAuthDAL.updateById(identityOidcAuth.id, updateQuery);
+    const updatedCACert =
+      updatedOidcAuth.encryptedCaCert && updatedOidcAuth.caCertIV && updatedOidcAuth.caCertTag
+        ? decryptSymmetric({
+            ciphertext: updatedOidcAuth.encryptedCaCert,
+            iv: updatedOidcAuth.caCertIV,
+            tag: updatedOidcAuth.caCertTag,
+            key
+          })
+        : "";
+
+    return {
+      ...updatedOidcAuth,
+      orgId: identityMembershipOrg.orgId,
+      caCert: updatedCACert
+    };
+  };
+
+  const getOidcAuth = async ({ identityId, actorId, actor, actorAuthMethod, actorOrgId }: TGetOidcAuthDTO) => {
+    const identityMembershipOrg = await identityOrgMembershipDAL.findOne({ identityId });
+    if (!identityMembershipOrg) throw new NotFoundError({ message: `Failed to find identity with ID ${identityId}` });
+
+    if (!identityMembershipOrg.identity.authMethods.includes(IdentityAuthMethod.OIDC_AUTH)) {
+      throw new BadRequestError({
+        message: "The identity does not have OIDC Auth attached"
+      });
+    }
+
+    const { permission } = await permissionService.getOrgPermission(
+      actor,
+      actorId,
+      identityMembershipOrg.orgId,
+      actorAuthMethod,
+      actorOrgId
+    );
+    ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Read, OrgPermissionSubjects.Identity);
+
+    const identityOidcAuth = await identityOidcAuthDAL.findOne({ identityId });
+
+    const orgBot = await orgBotDAL.findOne({ orgId: identityMembershipOrg.orgId });
+    if (!orgBot) {
+      throw new NotFoundError({
+        message: `Organization bot not found for organization with ID ${identityMembershipOrg.orgId}`,
+        name: "OrgBotNotFound"
+      });
+    }
+
+    const key = infisicalSymmetricDecrypt({
+      ciphertext: orgBot.encryptedSymmetricKey,
+      iv: orgBot.symmetricKeyIV,
+      tag: orgBot.symmetricKeyTag,
+      keyEncoding: orgBot.symmetricKeyKeyEncoding as SecretKeyEncoding
+    });
+
+    const caCert = decryptSymmetric({
+      ciphertext: identityOidcAuth.encryptedCaCert,
+      iv: identityOidcAuth.caCertIV,
+      tag: identityOidcAuth.caCertTag,
+      key
+    });
+
+    return { ...identityOidcAuth, orgId: identityMembershipOrg.orgId, caCert };
+  };
+
+  const revokeOidcAuth = async ({ identityId, actorId, actor, actorAuthMethod, actorOrgId }: TRevokeOidcAuthDTO) => {
+    const identityMembershipOrg = await identityOrgMembershipDAL.findOne({ identityId });
+    if (!identityMembershipOrg) {
+      throw new NotFoundError({ message: `Failed to find identity with ID ${identityId}` });
+    }
+
+    if (!identityMembershipOrg.identity.authMethods.includes(IdentityAuthMethod.OIDC_AUTH)) {
+      throw new BadRequestError({
+        message: "The identity does not have OIDC Auth attached"
+      });
+    }
+
+    const { permission } = await permissionService.getOrgPermission(
+      actor,
+      actorId,
+      identityMembershipOrg.orgId,
+      actorAuthMethod,
+      actorOrgId
+    );
+
+    ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Edit, OrgPermissionSubjects.Identity);
+
+    const { permission: rolePermission } = await permissionService.getOrgPermission(
+      ActorType.IDENTITY,
+      identityMembershipOrg.identityId,
+      identityMembershipOrg.orgId,
+      actorAuthMethod,
+      actorOrgId
+    );
+
+    if (!isAtLeastAsPrivileged(permission, rolePermission)) {
+      throw new ForbiddenRequestError({
+        message: "Failed to revoke OIDC Auth of identity with more privileged role"
+      });
+    }
+
+    const revokedIdentityOidcAuth = await identityOidcAuthDAL.transaction(async (tx) => {
+      const deletedOidcAuth = await identityOidcAuthDAL.delete({ identityId }, tx);
+      return { ...deletedOidcAuth?.[0], orgId: identityMembershipOrg.orgId };
+    });
+
+    return revokedIdentityOidcAuth;
+  };
+
+  return {
+    attachOidcAuth,
+    updateOidcAuth,
+    getOidcAuth,
+    revokeOidcAuth,
+    login
+  };
+};
diff --git a/backend/src/services/identity-oidc-auth/identity-oidc-auth-types.ts b/backend/src/services/identity-oidc-auth/identity-oidc-auth-types.ts
new file mode 100644
index 0000000000..761f68aa74
--- /dev/null
+++ b/backend/src/services/identity-oidc-auth/identity-oidc-auth-types.ts
@@ -0,0 +1,42 @@
+import { TProjectPermission } from "@app/lib/types";
+
+export type TAttachOidcAuthDTO = {
+  identityId: string;
+  oidcDiscoveryUrl: string;
+  caCert: string;
+  boundIssuer: string;
+  boundAudiences: string;
+  boundClaims: Record<string, string>;
+  boundSubject: string;
+  accessTokenTTL: number;
+  accessTokenMaxTTL: number;
+  accessTokenNumUsesLimit: number;
+  accessTokenTrustedIps: { ipAddress: string }[];
+} & Omit<TProjectPermission, "projectId">;
+
+export type TUpdateOidcAuthDTO = {
+  identityId: string;
+  oidcDiscoveryUrl?: string;
+  caCert?: string;
+  boundIssuer?: string;
+  boundAudiences?: string;
+  boundClaims?: Record<string, string>;
+  boundSubject?: string;
+  accessTokenTTL?: number;
+  accessTokenMaxTTL?: number;
+  accessTokenNumUsesLimit?: number;
+  accessTokenTrustedIps?: { ipAddress: string }[];
+} & Omit<TProjectPermission, "projectId">;
+
+export type TGetOidcAuthDTO = {
+  identityId: string;
+} & Omit<TProjectPermission, "projectId">;
+
+export type TLoginOidcAuthDTO = {
+  identityId: string;
+  jwt: string;
+};
+
+export type TRevokeOidcAuthDTO = {
+  identityId: string;
+} & Omit<TProjectPermission, "projectId">;
diff --git a/backend/src/services/identity-oidc-auth/identity-oidc-auth-validators.ts b/backend/src/services/identity-oidc-auth/identity-oidc-auth-validators.ts
new file mode 100644
index 0000000000..f20702604f
--- /dev/null
+++ b/backend/src/services/identity-oidc-auth/identity-oidc-auth-validators.ts
@@ -0,0 +1,25 @@
+import { z } from "zod";
+
+export const validateOidcAuthAudiencesField = z
+  .string()
+  .trim()
+  .default("")
+  .transform((data) => {
+    if (data === "") return "";
+    return data
+      .split(",")
+      .map((id) => id.trim())
+      .join(", ");
+  });
+
+export const validateOidcBoundClaimsField = z.record(z.string()).transform((data) => {
+  const formattedClaims: Record<string, string> = {};
+  Object.keys(data).forEach((key) => {
+    formattedClaims[key] = data[key]
+      .split(",")
+      .map((id) => id.trim())
+      .join(", ");
+  });
+
+  return formattedClaims;
+});
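+
+// Illustrative behavior (inputs assumed): both validators normalize comma lists to a
+// canonical ", " separator, which the login path's .split(", ") checks rely on:
+//
+//   validateOidcAuthAudiencesField.parse("aud1, aud2 ,aud3"); // => "aud1, aud2, aud3"
+//   validateOidcBoundClaimsField.parse({ groups: "dev,ops" }); // => { groups: "dev, ops" }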
diff --git a/backend/src/services/identity-project/identity-project-dal.ts b/backend/src/services/identity-project/identity-project-dal.ts
index c1cfe79cc6..fd8eaa15d0 100644
--- a/backend/src/services/identity-project/identity-project-dal.ts
+++ b/backend/src/services/identity-project/identity-project-dal.ts
@@ -1,23 +1,234 @@
 import { Knex } from "knex";
 
 import { TDbClient } from "@app/db";
-import { TableName } from "@app/db/schemas";
+import {
+  TableName,
+  TIdentities,
+  TIdentityAwsAuths,
+  TIdentityAzureAuths,
+  TIdentityGcpAuths,
+  TIdentityKubernetesAuths,
+  TIdentityOidcAuths,
+  TIdentityTokenAuths,
+  TIdentityUniversalAuths
+} from "@app/db/schemas";
 import { DatabaseError } from "@app/lib/errors";
-import { ormify, sqlNestRelationships } from "@app/lib/knex";
+import { ormify, selectAllTableCols, sqlNestRelationships } from "@app/lib/knex";
+import { OrderByDirection } from "@app/lib/types";
+import { ProjectIdentityOrderBy, TListProjectIdentityDTO } from "@app/services/identity-project/identity-project-types";
+
+import { buildAuthMethods } from "../identity/identity-fns";
 
 export type TIdentityProjectDALFactory = ReturnType<typeof identityProjectDALFactory>;
 
 export const identityProjectDALFactory = (db: TDbClient) => {
   const identityProjectOrm = ormify(db, TableName.IdentityProjectMembership);
 
-  const findByProjectId = async (projectId: string, filter: { identityId?: string } = {}, tx?: Knex) => {
+  const findByIdentityId = async (identityId: string, tx?: Knex) => {
     try {
-      const docs = await (tx || db)(TableName.IdentityProjectMembership)
-        .where(`${TableName.IdentityProjectMembership}.projectId`, projectId)
+      const docs = await (tx || db.replicaNode())(TableName.IdentityProjectMembership)
+        .where(`${TableName.IdentityProjectMembership}.identityId`, identityId)
+        .join(TableName.Project, `${TableName.IdentityProjectMembership}.projectId`, `${TableName.Project}.id`)
         .join(TableName.Identity, `${TableName.IdentityProjectMembership}.identityId`, `${TableName.Identity}.id`)
+        .join(
+          TableName.IdentityProjectMembershipRole,
+          `${TableName.IdentityProjectMembershipRole}.projectMembershipId`,
+          `${TableName.IdentityProjectMembership}.id`
+        )
+        .leftJoin(
+          TableName.ProjectRoles,
+          `${TableName.IdentityProjectMembershipRole}.customRoleId`,
+          `${TableName.ProjectRoles}.id`
+        )
+        .leftJoin(
+          TableName.IdentityProjectAdditionalPrivilege,
+          `${TableName.IdentityProjectMembership}.id`,
+          `${TableName.IdentityProjectAdditionalPrivilege}.projectMembershipId`
+        )
+
+        .leftJoin(
+          TableName.IdentityUniversalAuth,
+          `${TableName.IdentityProjectMembership}.identityId`,
+          `${TableName.IdentityUniversalAuth}.identityId`
+        )
+        .leftJoin(
+          TableName.IdentityGcpAuth,
+          `${TableName.IdentityProjectMembership}.identityId`,
+          `${TableName.IdentityGcpAuth}.identityId`
+        )
+        .leftJoin(
+          TableName.IdentityAwsAuth,
+          `${TableName.IdentityProjectMembership}.identityId`,
+          `${TableName.IdentityAwsAuth}.identityId`
+        )
+        .leftJoin(
+          TableName.IdentityKubernetesAuth,
+          `${TableName.IdentityProjectMembership}.identityId`,
+          `${TableName.IdentityKubernetesAuth}.identityId`
+        )
+        .leftJoin(
+          TableName.IdentityOidcAuth,
+          `${TableName.IdentityProjectMembership}.identityId`,
+          `${TableName.IdentityOidcAuth}.identityId`
+        )
+        .leftJoin(
+          TableName.IdentityAzureAuth,
+          `${TableName.IdentityProjectMembership}.identityId`,
+          `${TableName.IdentityAzureAuth}.identityId`
+        )
+        .leftJoin(
+          TableName.IdentityTokenAuth,
+          `${TableName.IdentityProjectMembership}.identityId`,
+          `${TableName.IdentityTokenAuth}.identityId`
+        )
+
+        .select(
+          db.ref("id").withSchema(TableName.IdentityProjectMembership),
+          db.ref("createdAt").withSchema(TableName.IdentityProjectMembership),
+          db.ref("updatedAt").withSchema(TableName.IdentityProjectMembership),
+
+          db.ref("id").as("identityId").withSchema(TableName.Identity),
+          db.ref("name").as("identityName").withSchema(TableName.Identity),
+          db.ref("id").withSchema(TableName.IdentityProjectMembership),
+          db.ref("role").withSchema(TableName.IdentityProjectMembershipRole),
+          db.ref("id").withSchema(TableName.IdentityProjectMembershipRole).as("membershipRoleId"),
+          db.ref("customRoleId").withSchema(TableName.IdentityProjectMembershipRole),
+          db.ref("name").withSchema(TableName.ProjectRoles).as("customRoleName"),
+          db.ref("slug").withSchema(TableName.ProjectRoles).as("customRoleSlug"),
+          db.ref("temporaryMode").withSchema(TableName.IdentityProjectMembershipRole),
+          db.ref("isTemporary").withSchema(TableName.IdentityProjectMembershipRole),
+          db.ref("temporaryRange").withSchema(TableName.IdentityProjectMembershipRole),
+          db.ref("temporaryAccessStartTime").withSchema(TableName.IdentityProjectMembershipRole),
+          db.ref("temporaryAccessEndTime").withSchema(TableName.IdentityProjectMembershipRole),
+          db.ref("projectId").withSchema(TableName.IdentityProjectMembership),
+          db.ref("name").as("projectName").withSchema(TableName.Project),
+          db.ref("id").as("uaId").withSchema(TableName.IdentityUniversalAuth),
+          db.ref("id").as("gcpId").withSchema(TableName.IdentityGcpAuth),
+          db.ref("id").as("awsId").withSchema(TableName.IdentityAwsAuth),
+          db.ref("id").as("kubernetesId").withSchema(TableName.IdentityKubernetesAuth),
+          db.ref("id").as("oidcId").withSchema(TableName.IdentityOidcAuth),
+          db.ref("id").as("azureId").withSchema(TableName.IdentityAzureAuth),
+          db.ref("id").as("tokenId").withSchema(TableName.IdentityTokenAuth)
+        );
+
+      const members = sqlNestRelationships({
+        data: docs,
+        parentMapper: ({
+          identityName,
+          uaId,
+          awsId,
+          gcpId,
+          kubernetesId,
+          oidcId,
+          azureId,
+          tokenId,
+          id,
+          createdAt,
+          updatedAt,
+          projectId,
+          projectName
+        }) => ({
+          id,
+          identityId,
+          createdAt,
+          updatedAt,
+          identity: {
+            id: identityId,
+            name: identityName,
+            authMethods: buildAuthMethods({
+              uaId,
+              awsId,
+              gcpId,
+              kubernetesId,
+              oidcId,
+              azureId,
+              tokenId
+            })
+          },
+          project: {
+            id: projectId,
+            name: projectName
+          }
+        }),
+        key: "id",
+        childrenMapper: [
+          {
+            label: "roles" as const,
+            key: "membershipRoleId",
+            mapper: ({
+              role,
+              customRoleId,
+              customRoleName,
+              customRoleSlug,
+              membershipRoleId,
+              temporaryRange,
+              temporaryMode,
+              temporaryAccessEndTime,
+              temporaryAccessStartTime,
+              isTemporary
+            }) => ({
+              id: membershipRoleId,
+              role,
+              customRoleId,
+              customRoleName,
+              customRoleSlug,
+              temporaryRange,
+              temporaryMode,
+              temporaryAccessEndTime,
+              temporaryAccessStartTime,
+              isTemporary
+            })
+          }
+        ]
+      });
+      return members;
+    } catch (error) {
+      throw new DatabaseError({ error, name: "FindByIdentityId" });
+    }
+  };
+
+  const findByProjectId = async (
+    projectId: string,
+    filter: { identityId?: string } & Pick<
+      TListProjectIdentityDTO,
+      "limit" | "offset" | "search" | "orderBy" | "orderDirection"
+    > = {},
+    tx?: Knex
+  ) => {
+    try {
+      // TODO: scott - optimize, there's redundancy here with project membership and the below query
+      const fetchIdentitySubquery = (tx || db.replicaNode())(TableName.Identity)
+        .where((qb) => {
+          if (filter.search) {
+            void qb.whereILike(`${TableName.Identity}.name`, `%${filter.search}%`);
+          }
+        })
+        .join(
+          TableName.IdentityProjectMembership,
+          `${TableName.IdentityProjectMembership}.identityId`,
+          `${TableName.Identity}.id`
+        )
+        .where(`${TableName.IdentityProjectMembership}.projectId`, projectId)
+        .orderBy(
+          `${TableName.Identity}.${filter.orderBy ?? ProjectIdentityOrderBy.Name}`,
+          filter.orderDirection ?? OrderByDirection.ASC
+        )
+        .select(selectAllTableCols(TableName.Identity))
+        .as(TableName.Identity); // required for subqueries
+
+      if (filter.limit) {
+        void fetchIdentitySubquery.offset(filter.offset ?? 0).limit(filter.limit);
+      }
+
+      const query = (tx || db.replicaNode())(TableName.IdentityProjectMembership)
+        .where(`${TableName.IdentityProjectMembership}.projectId`, projectId)
+        .join(TableName.Project, `${TableName.IdentityProjectMembership}.projectId`, `${TableName.Project}.id`)
+        .join(fetchIdentitySubquery, (bd) => {
+          bd.on(`${TableName.IdentityProjectMembership}.identityId`, `${TableName.Identity}.id`);
+        })
         .where((qb) => {
           if (filter.identityId) {
-            void qb.where("identityId", filter.identityId);
+            void qb.where(`${TableName.IdentityProjectMembership}.identityId`, filter.identityId);
           }
         })
         .join(
@@ -35,6 +246,43 @@ export const identityProjectDALFactory = (db: TDbClient) => {
           `${TableName.IdentityProjectMembership}.id`,
           `${TableName.IdentityProjectAdditionalPrivilege}.projectMembershipId`
         )
+
+        .leftJoin(
+          TableName.IdentityUniversalAuth,
+          `${TableName.Identity}.id`,
+          `${TableName.IdentityUniversalAuth}.identityId`
+        )
+        .leftJoin(
+          TableName.IdentityGcpAuth,
+          `${TableName.Identity}.id`,
+          `${TableName.IdentityGcpAuth}.identityId`
+        )
+        .leftJoin(
+          TableName.IdentityAwsAuth,
+          `${TableName.Identity}.id`,
+          `${TableName.IdentityAwsAuth}.identityId`
+        )
+        .leftJoin(
+          TableName.IdentityKubernetesAuth,
+          `${TableName.Identity}.id`,
+          `${TableName.IdentityKubernetesAuth}.identityId`
+        )
+        .leftJoin(
+          TableName.IdentityOidcAuth,
+          `${TableName.Identity}.id`,
+          `${TableName.IdentityOidcAuth}.identityId`
+        )
+        .leftJoin(
+          TableName.IdentityAzureAuth,
+          `${TableName.Identity}.id`,
+          `${TableName.IdentityAzureAuth}.identityId`
+        )
+        .leftJoin(
+          TableName.IdentityTokenAuth,
+          `${TableName.Identity}.id`,
+          `${TableName.IdentityTokenAuth}.identityId`
+        )
+
         .select(
           db.ref("id").withSchema(TableName.IdentityProjectMembership),
           db.ref("createdAt").withSchema(TableName.IdentityProjectMembership),
@@ -52,12 +300,47 @@ export const identityProjectDALFactory = (db: TDbClient) => {
           db.ref("isTemporary").withSchema(TableName.IdentityProjectMembershipRole),
           db.ref("temporaryRange").withSchema(TableName.IdentityProjectMembershipRole),
           db.ref("temporaryAccessStartTime").withSchema(TableName.IdentityProjectMembershipRole),
-          db.ref("temporaryAccessEndTime").withSchema(TableName.IdentityProjectMembershipRole)
+          db.ref("temporaryAccessEndTime").withSchema(TableName.IdentityProjectMembershipRole),
+          db.ref("name").as("projectName").withSchema(TableName.Project),
+          db.ref("id").as("uaId").withSchema(TableName.IdentityUniversalAuth),
+          db.ref("id").as("gcpId").withSchema(TableName.IdentityGcpAuth),
+          db.ref("id").as("awsId").withSchema(TableName.IdentityAwsAuth),
+          db.ref("id").as("kubernetesId").withSchema(TableName.IdentityKubernetesAuth),
+          db.ref("id").as("oidcId").withSchema(TableName.IdentityOidcAuth),
+          db.ref("id").as("azureId").withSchema(TableName.IdentityAzureAuth),
+          db.ref("id").as("tokenId").withSchema(TableName.IdentityTokenAuth)
         );
 
+      // TODO: scott - joins seem to reorder identities so need to order again, for the sake of urgency will optimize at a later point
+      if (filter.orderBy) {
+        switch (filter.orderBy) {
+          case "name":
+            void query.orderBy(`${TableName.Identity}.${filter.orderBy}`, filter.orderDirection);
+            break;
+          default:
+          // do nothing
+        }
+      }
+
+      const docs = await query;
+
       const members = sqlNestRelationships({
         data: docs,
-        parentMapper: ({ identityId, identityName, identityAuthMethod, id, createdAt, updatedAt }) => ({
+        parentMapper: ({
+          identityId,
+          identityName,
+          uaId,
+          awsId,
+          gcpId,
+          kubernetesId,
+          oidcId,
+          azureId,
+          tokenId,
+          id,
+          createdAt,
+
updatedAt, + projectName + }) => ({ id, identityId, createdAt, @@ -65,7 +348,19 @@ export const identityProjectDALFactory = (db: TDbClient) => { identity: { id: identityId, name: identityName, - authMethod: identityAuthMethod + authMethods: buildAuthMethods({ + uaId, + awsId, + gcpId, + kubernetesId, + oidcId, + azureId, + tokenId + }) + }, + project: { + id: projectId, + name: projectName } }), key: "id", @@ -105,5 +400,37 @@ export const identityProjectDALFactory = (db: TDbClient) => { } }; - return { ...identityProjectOrm, findByProjectId }; + const getCountByProjectId = async ( + projectId: string, + filter: { identityId?: string } & Pick<TListProjectIdentityDTO, "search"> = {}, + tx?: Knex + ) => { + try { + const identities = await (tx || db.replicaNode())(TableName.IdentityProjectMembership) + .where(`${TableName.IdentityProjectMembership}.projectId`, projectId) + .join(TableName.Project, `${TableName.IdentityProjectMembership}.projectId`, `${TableName.Project}.id`) + .join(TableName.Identity, `${TableName.IdentityProjectMembership}.identityId`, `${TableName.Identity}.id`) + .where((qb) => { + if (filter.identityId) { + void qb.where("identityId", filter.identityId); + } + + if (filter.search) { + void qb.whereILike(`${TableName.Identity}.name`, `%${filter.search}%`); + } + }) + .count(); + + return Number(identities[0].count); + } catch (error) { + throw new DatabaseError({ error, name: "GetCountByProjectId" }); + } + }; + + return { + ...identityProjectOrm, + findByIdentityId, + findByProjectId, + getCountByProjectId + }; }; diff --git a/backend/src/services/identity-project/identity-project-service.ts b/backend/src/services/identity-project/identity-project-service.ts index 10f2b34601..a49b15c1bd 100644 --- a/backend/src/services/identity-project/identity-project-service.ts +++ b/backend/src/services/identity-project/identity-project-service.ts @@ -5,7 +5,7 @@ import { ProjectMembershipRole } from "@app/db/schemas"; import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service"; import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission"; import { isAtLeastAsPrivileged } from "@app/lib/casl"; -import { BadRequestError, ForbiddenRequestError } from "@app/lib/errors"; +import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors"; import { groupBy } from "@app/lib/fn"; import { ActorType } from "../auth/auth-type"; @@ -66,7 +66,7 @@ export const identityProjectServiceFactory = ({ const existingIdentity = await identityProjectDAL.findOne({ identityId, projectId }); if (existingIdentity) throw new BadRequestError({ - message: `Identity with id ${identityId} already exists in project with id ${projectId}` + message: `Identity with ID ${identityId} already exists in project with ID ${projectId}` }); const project = await projectDAL.findById(projectId); @@ -75,8 +75,8 @@ orgId: project.orgId }); if (!identityOrgMembership) - throw new BadRequestError({ - message: `Failed to find identity with id ${identityId}` + throw new NotFoundError({ + message: `Failed to find identity with ID ${identityId}` }); for await (const { role: requestedRoleChange } of roles) { @@ -103,7 +103,8 @@ $in: { slug: customInputRoles.map(({ role }) => role) } }) : []; - if (customRoles.length !== customInputRoles.length) throw new BadRequestError({ message: "Custom role not found" }); + if (customRoles.length !== customInputRoles.length) + throw new
NotFoundError({ message: "One or more custom project roles not found" }); const customRolesGroupBySlug = groupBy(customRoles, ({ slug }) => slug); const projectIdentity = await identityProjectDAL.transaction(async (tx) => { @@ -164,8 +165,8 @@ const projectIdentity = await identityProjectDAL.findOne({ identityId, projectId }); if (!projectIdentity) - throw new BadRequestError({ - message: `Identity with id ${identityId} doesn't exists in project with id ${projectId}` + throw new NotFoundError({ + message: `Identity with ID ${identityId} doesn't exist in project with ID ${projectId}` }); for await (const { role: requestedRoleChange } of roles) { @@ -174,9 +175,7 @@ projectId ); - const hasRequiredPriviledges = isAtLeastAsPrivileged(permission, rolePermission); - - if (!hasRequiredPriviledges) { + if (!isAtLeastAsPrivileged(permission, rolePermission)) { throw new ForbiddenRequestError({ message: "Failed to change to a more privileged role" }); } } @@ -192,7 +191,8 @@ $in: { slug: customInputRoles.map(({ role }) => role) } }) : []; - if (customRoles.length !== customInputRoles.length) throw new BadRequestError({ message: "Custom role not found" }); + if (customRoles.length !== customInputRoles.length) + throw new NotFoundError({ message: "One or more custom project roles not found" }); const customRolesGroupBySlug = groupBy(customRoles, ({ slug }) => slug); @@ -237,8 +237,9 @@ projectId }: TDeleteProjectIdentityDTO) => { const identityProjectMembership = await identityProjectDAL.findOne({ identityId, projectId }); - if (!identityProjectMembership) - throw new BadRequestError({ message: `Failed to find identity with id ${identityId}` }); + if (!identityProjectMembership) { + throw new NotFoundError({ message: `Failed to find identity with ID ${identityId}` }); + } const { permission } = await permissionService.getProjectPermission( actor, @@ -255,8 +256,7 @@ actorAuthMethod, actorOrgId ); - const hasRequiredPriviledges = isAtLeastAsPrivileged(permission, identityRolePermission); - if (!hasRequiredPriviledges) + if (!isAtLeastAsPrivileged(permission, identityRolePermission)) throw new ForbiddenRequestError({ message: "Failed to delete more privileged identity" }); const [deletedIdentity] = await identityProjectDAL.delete({ identityId, projectId }); @@ -268,7 +268,12 @@ actor, actorId, actorAuthMethod, - actorOrgId + actorOrgId, + limit, + offset, + orderBy, + orderDirection, + search }: TListProjectIdentityDTO) => { const { permission } = await permissionService.getProjectPermission( actor, @@ -279,8 +284,17 @@ ); ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Identity); - const identityMemberships = await identityProjectDAL.findByProjectId(projectId); - return identityMemberships; + const identityMemberships = await identityProjectDAL.findByProjectId(projectId, { + limit, + offset, + orderBy, + orderDirection, + search + }); + + const totalCount = await identityProjectDAL.getCountByProjectId(projectId, { search }); + + return { identityMemberships, totalCount }; }; const getProjectIdentityByIdentityId = async ({ @@ -301,7 +315,10 @@
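For context, a hedged sketch of how a caller would consume the new paginated listing (identifiers and values here are illustrative, not part of the diff):

    // Second page of a project's identities, 20 per page, filtered by name.
    const { identityMemberships, totalCount } = await identityProjectService.listProjectIdentities({
      projectId,
      actor,
      actorId,
      actorAuthMethod,
      actorOrgId,
      limit: 20,
      offset: 20,
      search: "ci-",
      orderBy: ProjectIdentityOrderBy.Name,
      orderDirection: OrderByDirection.ASC
    });
    // totalCount honors the same search filter, so page count is Math.ceil(totalCount / limit).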
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Identity); const [identityMembership] = await identityProjectDAL.findByProjectId(projectId, { identityId }); - if (!identityMembership) throw new BadRequestError({ message: `Membership not found for identity ${identityId}` }); + if (!identityMembership) + throw new NotFoundError({ + message: `Project membership for identity with ID '${identityId}' in project with ID '${projectId}' not found` + }); return identityMembership; }; diff --git a/backend/src/services/identity-project/identity-project-types.ts b/backend/src/services/identity-project/identity-project-types.ts index 43c671e50e..607fd48237 100644 --- a/backend/src/services/identity-project/identity-project-types.ts +++ b/backend/src/services/identity-project/identity-project-types.ts @@ -1,4 +1,4 @@ -import { TProjectPermission } from "@app/lib/types"; +import { OrderByDirection, TProjectPermission } from "@app/lib/types"; import { ProjectUserMembershipTemporaryMode } from "../project-membership/project-membership-types"; @@ -40,8 +40,18 @@ export type TDeleteProjectIdentityDTO = { identityId: string; } & TProjectPermission; -export type TListProjectIdentityDTO = TProjectPermission; +export type TListProjectIdentityDTO = { + limit?: number; + offset?: number; + orderBy?: ProjectIdentityOrderBy; + orderDirection?: OrderByDirection; + search?: string; +} & TProjectPermission; export type TGetProjectIdentityByIdentityIdDTO = { identityId: string; } & TProjectPermission; + +export enum ProjectIdentityOrderBy { + Name = "name" +} diff --git a/backend/src/services/identity-token-auth/identity-token-auth-dal.ts b/backend/src/services/identity-token-auth/identity-token-auth-dal.ts new file mode 100644 index 0000000000..64f5c9e7a2 --- /dev/null +++ b/backend/src/services/identity-token-auth/identity-token-auth-dal.ts @@ -0,0 +1,10 @@ +import { TDbClient } from "@app/db"; +import { TableName } from "@app/db/schemas"; +import { ormify } from "@app/lib/knex"; + +export type TIdentityTokenAuthDALFactory = ReturnType<typeof identityTokenAuthDALFactory>; + +export const identityTokenAuthDALFactory = (db: TDbClient) => { + const tokenAuthOrm = ormify(db, TableName.IdentityTokenAuth); + return tokenAuthOrm; +}; diff --git a/backend/src/services/identity-token-auth/identity-token-auth-service.ts b/backend/src/services/identity-token-auth/identity-token-auth-service.ts new file mode 100644 index 0000000000..39f2f65891 --- /dev/null +++ b/backend/src/services/identity-token-auth/identity-token-auth-service.ts @@ -0,0 +1,495 @@ +import { ForbiddenError } from "@casl/ability"; +import jwt from "jsonwebtoken"; + +import { IdentityAuthMethod, TableName } from "@app/db/schemas"; +import { TLicenseServiceFactory } from "@app/ee/services/license/license-service"; +import { OrgPermissionActions, OrgPermissionSubjects } from "@app/ee/services/permission/org-permission"; +import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service"; +import { isAtLeastAsPrivileged } from "@app/lib/casl"; +import { getConfig } from "@app/lib/config/env"; +import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors"; +import { extractIPDetails, isValidIpOrCidr } from "@app/lib/ip"; + +import { ActorType, AuthTokenType } from "../auth/auth-type"; +import { TIdentityOrgDALFactory } from "../identity/identity-org-dal"; +import { TIdentityAccessTokenDALFactory } from "../identity-access-token/identity-access-token-dal"; +import { TIdentityAccessTokenJwtPayload } from
"../identity-access-token/identity-access-token-types"; +import { TIdentityTokenAuthDALFactory } from "./identity-token-auth-dal"; +import { + TAttachTokenAuthDTO, + TCreateTokenAuthTokenDTO, + TGetTokenAuthDTO, + TGetTokenAuthTokensDTO, + TRevokeTokenAuthDTO, + TRevokeTokenAuthTokenDTO, + TUpdateTokenAuthDTO, + TUpdateTokenAuthTokenDTO +} from "./identity-token-auth-types"; + +type TIdentityTokenAuthServiceFactoryDep = { + identityTokenAuthDAL: Pick< + TIdentityTokenAuthDALFactory, + "transaction" | "create" | "findOne" | "updateById" | "delete" + >; + identityOrgMembershipDAL: Pick; + identityAccessTokenDAL: Pick< + TIdentityAccessTokenDALFactory, + "create" | "find" | "update" | "findById" | "findOne" | "updateById" | "delete" + >; + permissionService: Pick; + licenseService: Pick; +}; + +export type TIdentityTokenAuthServiceFactory = ReturnType; + +export const identityTokenAuthServiceFactory = ({ + identityTokenAuthDAL, + // identityDAL, + identityOrgMembershipDAL, + identityAccessTokenDAL, + permissionService, + licenseService +}: TIdentityTokenAuthServiceFactoryDep) => { + const attachTokenAuth = async ({ + identityId, + accessTokenTTL, + accessTokenMaxTTL, + accessTokenNumUsesLimit, + accessTokenTrustedIps, + actorId, + actorAuthMethod, + actor, + actorOrgId + }: TAttachTokenAuthDTO) => { + const identityMembershipOrg = await identityOrgMembershipDAL.findOne({ identityId }); + if (!identityMembershipOrg) throw new NotFoundError({ message: `Failed to find identity with ID ${identityId}` }); + + if (identityMembershipOrg.identity.authMethods.includes(IdentityAuthMethod.TOKEN_AUTH)) { + throw new BadRequestError({ + message: "Failed to add Token Auth to already configured identity" + }); + } + + if (accessTokenMaxTTL > 0 && accessTokenTTL > accessTokenMaxTTL) { + throw new BadRequestError({ message: "Access token TTL cannot be greater than max TTL" }); + } + + const { permission } = await permissionService.getOrgPermission( + actor, + actorId, + identityMembershipOrg.orgId, + actorAuthMethod, + actorOrgId + ); + ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Create, OrgPermissionSubjects.Identity); + + const plan = await licenseService.getPlan(identityMembershipOrg.orgId); + const reformattedAccessTokenTrustedIps = accessTokenTrustedIps.map((accessTokenTrustedIp) => { + if ( + !plan.ipAllowlisting && + accessTokenTrustedIp.ipAddress !== "0.0.0.0/0" && + accessTokenTrustedIp.ipAddress !== "::/0" + ) + throw new BadRequestError({ + message: + "Failed to add IP access range to access token due to plan restriction. Upgrade plan to add IP access range." 
+ }); + if (!isValidIpOrCidr(accessTokenTrustedIp.ipAddress)) + throw new BadRequestError({ + message: "The IP is not a valid IPv4, IPv6, or CIDR block" + }); + return extractIPDetails(accessTokenTrustedIp.ipAddress); + }); + + const identityTokenAuth = await identityTokenAuthDAL.transaction(async (tx) => { + const doc = await identityTokenAuthDAL.create( + { + identityId: identityMembershipOrg.identityId, + accessTokenMaxTTL, + accessTokenTTL, + accessTokenNumUsesLimit, + accessTokenTrustedIps: JSON.stringify(reformattedAccessTokenTrustedIps) + }, + tx + ); + return doc; + }); + return { ...identityTokenAuth, orgId: identityMembershipOrg.orgId }; + }; + + const updateTokenAuth = async ({ + identityId, + accessTokenTTL, + accessTokenMaxTTL, + accessTokenNumUsesLimit, + accessTokenTrustedIps, + actorId, + actorAuthMethod, + actor, + actorOrgId + }: TUpdateTokenAuthDTO) => { + const identityMembershipOrg = await identityOrgMembershipDAL.findOne({ identityId }); + if (!identityMembershipOrg) throw new NotFoundError({ message: `Failed to find identity with ID ${identityId}` }); + + if (!identityMembershipOrg.identity.authMethods.includes(IdentityAuthMethod.TOKEN_AUTH)) { + throw new BadRequestError({ + message: "The identity does not have Token Auth" + }); + } + + const identityTokenAuth = await identityTokenAuthDAL.findOne({ identityId }); + + if ( + (accessTokenMaxTTL || identityTokenAuth.accessTokenMaxTTL) > 0 && + (accessTokenTTL || identityTokenAuth.accessTokenTTL) > + (accessTokenMaxTTL || identityTokenAuth.accessTokenMaxTTL) + ) { + throw new BadRequestError({ message: "Access token TTL cannot be greater than max TTL" }); + } + + const { permission } = await permissionService.getOrgPermission( + actor, + actorId, + identityMembershipOrg.orgId, + actorAuthMethod, + actorOrgId + ); + ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Edit, OrgPermissionSubjects.Identity); + + const plan = await licenseService.getPlan(identityMembershipOrg.orgId); + const reformattedAccessTokenTrustedIps = accessTokenTrustedIps?.map((accessTokenTrustedIp) => { + if ( + !plan.ipAllowlisting && + accessTokenTrustedIp.ipAddress !== "0.0.0.0/0" && + accessTokenTrustedIp.ipAddress !== "::/0" + ) + throw new BadRequestError({ + message: + "Failed to add IP access range to access token due to plan restriction. Upgrade plan to add IP access range." + }); + if (!isValidIpOrCidr(accessTokenTrustedIp.ipAddress)) + throw new BadRequestError({ + message: "The IP is not a valid IPv4, IPv6, or CIDR block" + }); + return extractIPDetails(accessTokenTrustedIp.ipAddress); + }); + + const updatedTokenAuth = await identityTokenAuthDAL.updateById(identityTokenAuth.id, { + accessTokenMaxTTL, + accessTokenTTL, + accessTokenNumUsesLimit, + accessTokenTrustedIps: reformattedAccessTokenTrustedIps + ?
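Restating the update-path TTL guard above as a sketch (variable values are hypothetical): a field omitted on update falls back to the stored row, and each side of the comparison must use its own field's fallback.

    const effectiveTTL = accessTokenTTL || identityTokenAuth.accessTokenTTL;
    const effectiveMaxTTL = accessTokenMaxTTL || identityTokenAuth.accessTokenMaxTTL;
    if (effectiveMaxTTL > 0 && effectiveTTL > effectiveMaxTTL) {
      throw new BadRequestError({ message: "Access token TTL cannot be greater than max TTL" });
    }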
JSON.stringify(reformattedAccessTokenTrustedIps) + : undefined + }); + + return { + ...updatedTokenAuth, + orgId: identityMembershipOrg.orgId + }; + }; + + const getTokenAuth = async ({ identityId, actorId, actor, actorAuthMethod, actorOrgId }: TGetTokenAuthDTO) => { + const identityMembershipOrg = await identityOrgMembershipDAL.findOne({ identityId }); + if (!identityMembershipOrg) throw new NotFoundError({ message: `Failed to find identity with ID ${identityId}` }); + + if (!identityMembershipOrg.identity.authMethods.includes(IdentityAuthMethod.TOKEN_AUTH)) { + throw new BadRequestError({ + message: "The identity does not have Token Auth attached" + }); + } + + const identityTokenAuth = await identityTokenAuthDAL.findOne({ identityId }); + + const { permission } = await permissionService.getOrgPermission( + actor, + actorId, + identityMembershipOrg.orgId, + actorAuthMethod, + actorOrgId + ); + ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Read, OrgPermissionSubjects.Identity); + + return { ...identityTokenAuth, orgId: identityMembershipOrg.orgId }; + }; + + const revokeIdentityTokenAuth = async ({ + identityId, + actorId, + actor, + actorAuthMethod, + actorOrgId + }: TRevokeTokenAuthDTO) => { + const identityMembershipOrg = await identityOrgMembershipDAL.findOne({ identityId }); + if (!identityMembershipOrg) throw new NotFoundError({ message: `Failed to find identity with ID ${identityId}` }); + + if (!identityMembershipOrg.identity.authMethods.includes(IdentityAuthMethod.TOKEN_AUTH)) { + throw new BadRequestError({ + message: "The identity does not have Token Auth" + }); + } + const { permission } = await permissionService.getOrgPermission( + actor, + actorId, + identityMembershipOrg.orgId, + actorAuthMethod, + actorOrgId + ); + ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Edit, OrgPermissionSubjects.Identity); + + const { permission: rolePermission } = await permissionService.getOrgPermission( + ActorType.IDENTITY, + identityMembershipOrg.identityId, + identityMembershipOrg.orgId, + actorAuthMethod, + actorOrgId + ); + + if (!isAtLeastAsPrivileged(permission, rolePermission)) { + throw new ForbiddenRequestError({ + message: "Failed to revoke Token Auth of identity with more privileged role" + }); + } + + const revokedIdentityTokenAuth = await identityTokenAuthDAL.transaction(async (tx) => { + const deletedTokenAuth = await identityTokenAuthDAL.delete({ identityId }, tx); + await identityAccessTokenDAL.delete( + { + identityId, + authMethod: IdentityAuthMethod.TOKEN_AUTH + }, + tx + ); + + return { ...deletedTokenAuth?.[0], orgId: identityMembershipOrg.orgId }; + }); + return revokedIdentityTokenAuth; + }; + + const createTokenAuthToken = async ({ + identityId, + actorId, + actor, + actorAuthMethod, + actorOrgId, + name + }: TCreateTokenAuthTokenDTO) => { + const identityMembershipOrg = await identityOrgMembershipDAL.findOne({ identityId }); + if (!identityMembershipOrg) throw new NotFoundError({ message: `Failed to find identity with ID ${identityId}` }); + + if (!identityMembershipOrg.identity.authMethods.includes(IdentityAuthMethod.TOKEN_AUTH)) { + throw new BadRequestError({ + message: "The identity does not have Token Auth" + }); + } + const { permission } = await permissionService.getOrgPermission( + actor, + actorId, + identityMembershipOrg.orgId, + actorAuthMethod, + actorOrgId + ); + ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Edit, OrgPermissionSubjects.Identity); + + const { permission: rolePermission } = await
permissionService.getOrgPermission( + ActorType.IDENTITY, + identityMembershipOrg.identityId, + identityMembershipOrg.orgId, + actorAuthMethod, + actorOrgId + ); + const hasPrivilege = isAtLeastAsPrivileged(permission, rolePermission); + if (!hasPrivilege) + throw new ForbiddenRequestError({ + message: "Failed to create token for identity with more privileged role" + }); + + const identityTokenAuth = await identityTokenAuthDAL.findOne({ identityId }); + + const identityAccessToken = await identityTokenAuthDAL.transaction(async (tx) => { + const newToken = await identityAccessTokenDAL.create( + { + identityId: identityTokenAuth.identityId, + isAccessTokenRevoked: false, + accessTokenTTL: identityTokenAuth.accessTokenTTL, + accessTokenMaxTTL: identityTokenAuth.accessTokenMaxTTL, + accessTokenNumUses: 0, + accessTokenNumUsesLimit: identityTokenAuth.accessTokenNumUsesLimit, + name, + authMethod: IdentityAuthMethod.TOKEN_AUTH + }, + tx + ); + return newToken; + }); + + const appCfg = getConfig(); + const accessToken = jwt.sign( + { + identityId: identityTokenAuth.identityId, + identityAccessTokenId: identityAccessToken.id, + authTokenType: AuthTokenType.IDENTITY_ACCESS_TOKEN + } as TIdentityAccessTokenJwtPayload, + appCfg.AUTH_SECRET, + { + expiresIn: + Number(identityAccessToken.accessTokenMaxTTL) === 0 + ? undefined + : Number(identityAccessToken.accessTokenMaxTTL) + } + ); + + return { accessToken, identityTokenAuth, identityAccessToken, identityMembershipOrg }; + }; + + const getTokenAuthTokens = async ({ + identityId, + offset = 0, + limit = 20, + actorId, + actor, + actorAuthMethod, + actorOrgId + }: TGetTokenAuthTokensDTO) => { + const identityMembershipOrg = await identityOrgMembershipDAL.findOne({ identityId }); + if (!identityMembershipOrg) throw new NotFoundError({ message: `Failed to find identity with ID ${identityId}` }); + + if (!identityMembershipOrg.identity.authMethods.includes(IdentityAuthMethod.TOKEN_AUTH)) { + throw new BadRequestError({ + message: "The identity does not have Token Auth" + }); + } + const { permission } = await permissionService.getOrgPermission( + actor, + actorId, + identityMembershipOrg.orgId, + actorAuthMethod, + actorOrgId + ); + ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Read, OrgPermissionSubjects.Identity); + + const tokens = await identityAccessTokenDAL.find( + { + identityId, + authMethod: IdentityAuthMethod.TOKEN_AUTH + }, + { offset, limit, sort: [["updatedAt", "desc"]] } + ); + + return { tokens, identityMembershipOrg }; + }; + + const updateTokenAuthToken = async ({ + tokenId, + name, + actorId, + actor, + actorAuthMethod, + actorOrgId + }: TUpdateTokenAuthTokenDTO) => { + const foundToken = await identityAccessTokenDAL.findOne({ + id: tokenId, + authMethod: IdentityAuthMethod.TOKEN_AUTH + }); + if (!foundToken) throw new NotFoundError({ message: `Token with ID ${tokenId} not found` }); + + const identityMembershipOrg = await identityOrgMembershipDAL.findOne({ identityId: foundToken.identityId }); + if (!identityMembershipOrg) { + throw new NotFoundError({ message: `Failed to find identity with ID ${foundToken.identityId}` }); + } + if (!identityMembershipOrg.identity.authMethods.includes(IdentityAuthMethod.TOKEN_AUTH)) { + throw new BadRequestError({ + message: "The identity does not have Token Auth" + }); + } + const { permission } = await permissionService.getOrgPermission( + actor, + actorId, + identityMembershipOrg.orgId, + actorAuthMethod, + actorOrgId + ); +
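The sign() call above encodes a "zero means non-expiring" convention; a minimal standalone sketch of the same rule (secret and TTL values are hypothetical):

    import jwt from "jsonwebtoken";

    const accessTokenMaxTTL = 0; // hypothetical: 0 disables expiry
    const token = jwt.sign(
      { authTokenType: "identityAccessToken" }, // hypothetical payload
      "auth-secret",                            // hypothetical secret
      { expiresIn: accessTokenMaxTTL === 0 ? undefined : accessTokenMaxTTL }
    );
    // With expiresIn left undefined, no exp claim is set and the token
    // only stops working through explicit revocation.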
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Edit, OrgPermissionSubjects.Identity); + + const { permission: rolePermission } = await permissionService.getOrgPermission( + ActorType.IDENTITY, + identityMembershipOrg.identityId, + identityMembershipOrg.orgId, + actorAuthMethod, + actorOrgId + ); + const hasPrivilege = isAtLeastAsPrivileged(permission, rolePermission); + if (!hasPrivilege) + throw new ForbiddenRequestError({ + message: "Failed to update token for identity with more privileged role" + }); + + const [token] = await identityAccessTokenDAL.update( + { + authMethod: IdentityAuthMethod.TOKEN_AUTH, + identityId: foundToken.identityId, + id: tokenId + }, + { + name + } + ); + + return { token, identityMembershipOrg }; + }; + + const revokeTokenAuthToken = async ({ + tokenId, + actorId, + actor, + actorAuthMethod, + actorOrgId + }: TRevokeTokenAuthTokenDTO) => { + const identityAccessToken = await identityAccessTokenDAL.findOne({ + [`${TableName.IdentityAccessToken}.id` as "id"]: tokenId, + isAccessTokenRevoked: false, + authMethod: IdentityAuthMethod.TOKEN_AUTH + }); + if (!identityAccessToken) + throw new NotFoundError({ + message: `Token with ID ${tokenId} not found or already revoked` + }); + + const identityOrgMembership = await identityOrgMembershipDAL.findOne({ + identityId: identityAccessToken.identityId + }); + + if (!identityOrgMembership) { + throw new NotFoundError({ message: `Failed to find identity with ID ${identityAccessToken.identityId}` }); + } + + const { permission } = await permissionService.getOrgPermission( + actor, + actorId, + identityOrgMembership.orgId, + actorAuthMethod, + actorOrgId + ); + ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Edit, OrgPermissionSubjects.Identity); + + const [revokedToken] = await identityAccessTokenDAL.update( + { + id: identityAccessToken.id, + authMethod: IdentityAuthMethod.TOKEN_AUTH + }, + { + isAccessTokenRevoked: true + } + ); + + return { revokedToken }; + }; + + return { + attachTokenAuth, + updateTokenAuth, + getTokenAuth, + revokeIdentityTokenAuth, + createTokenAuthToken, + getTokenAuthTokens, + updateTokenAuthToken, + revokeTokenAuthToken + }; +}; diff --git a/backend/src/services/identity-token-auth/identity-token-auth-types.ts b/backend/src/services/identity-token-auth/identity-token-auth-types.ts new file mode 100644 index 0000000000..12c689728b --- /dev/null +++ b/backend/src/services/identity-token-auth/identity-token-auth-types.ts @@ -0,0 +1,45 @@ +import { TProjectPermission } from "@app/lib/types"; + +export type TAttachTokenAuthDTO = { + identityId: string; + accessTokenTTL: number; + accessTokenMaxTTL: number; + accessTokenNumUsesLimit: number; + accessTokenTrustedIps: { ipAddress: string }[]; +} & Omit<TProjectPermission, "projectId">; + +export type TUpdateTokenAuthDTO = { + identityId: string; + accessTokenTTL?: number; + accessTokenMaxTTL?: number; + accessTokenNumUsesLimit?: number; + accessTokenTrustedIps?: { ipAddress: string }[]; +} & Omit<TProjectPermission, "projectId">; + +export type TGetTokenAuthDTO = { + identityId: string; +} & Omit<TProjectPermission, "projectId">; + +export type TRevokeTokenAuthDTO = { + identityId: string; +} & Omit<TProjectPermission, "projectId">; + +export type TCreateTokenAuthTokenDTO = { + identityId: string; + name?: string; +} & Omit<TProjectPermission, "projectId">; + +export type TGetTokenAuthTokensDTO = { + identityId: string; + offset: number; + limit: number; +} & Omit<TProjectPermission, "projectId">; + +export type TUpdateTokenAuthTokenDTO = { + tokenId: string; + name?: string; +} & Omit<TProjectPermission, "projectId">; + +export type TRevokeTokenAuthTokenDTO = { + tokenId: string; +} & Omit<TProjectPermission, "projectId">; diff --git
a/backend/src/services/identity-ua/identity-ua-client-secret-dal.ts b/backend/src/services/identity-ua/identity-ua-client-secret-dal.ts index 6310da97a2..b9a4c678a1 100644 --- a/backend/src/services/identity-ua/identity-ua-client-secret-dal.ts +++ b/backend/src/services/identity-ua/identity-ua-client-secret-dal.ts @@ -4,6 +4,8 @@ import { TDbClient } from "@app/db"; import { TableName } from "@app/db/schemas"; import { DatabaseError } from "@app/lib/errors"; import { ormify } from "@app/lib/knex"; +import { logger } from "@app/lib/logger"; +import { QueueName } from "@app/queue"; export type TIdentityUaClientSecretDALFactory = ReturnType<typeof identityUaClientSecretDALFactory>; @@ -23,5 +25,61 @@ export const identityUaClientSecretDALFactory = (db: TDbClient) => { } }; - return { ...uaClientSecretOrm, incrementUsage }; + const removeExpiredClientSecrets = async (tx?: Knex) => { + const BATCH_SIZE = 10000; + const MAX_RETRY_ON_FAILURE = 3; + const MAX_TTL = 315_360_000; // Maximum TTL value in seconds (10 years) + + let deletedClientSecret: { id: string }[] = []; + let numberOfRetryOnFailure = 0; + let isRetrying = false; + + logger.info(`${QueueName.DailyResourceCleanUp}: remove expired universal auth client secret started`); + do { + try { + const findExpiredClientSecretQuery = (tx || db)(TableName.IdentityUaClientSecret) + .where({ + isClientSecretRevoked: true + }) + .orWhere((qb) => { + void qb + .where("clientSecretNumUsesLimit", ">", 0) + .andWhere( + "clientSecretNumUses", + ">=", + db.ref("clientSecretNumUsesLimit").withSchema(TableName.IdentityUaClientSecret) + ); + }) + .orWhere((qb) => { + void qb + .where("clientSecretTTL", ">", 0) + .andWhereRaw( + `"${TableName.IdentityUaClientSecret}"."createdAt" + make_interval(secs => LEAST("${TableName.IdentityUaClientSecret}"."clientSecretTTL", ?)) < NOW()`, + [MAX_TTL] + ); + }) + .select("id") + .limit(BATCH_SIZE); + + // eslint-disable-next-line no-await-in-loop + deletedClientSecret = await (tx || db)(TableName.IdentityUaClientSecret) + .whereIn("id", findExpiredClientSecretQuery) + .del() + .returning("id"); + numberOfRetryOnFailure = 0; // reset + } catch (error) { + numberOfRetryOnFailure += 1; + logger.error(error, "Failed to delete client secret on pruning"); + } finally { + // eslint-disable-next-line no-await-in-loop + await new Promise((resolve) => { + setTimeout(resolve, 10); // time to breathe for db + }); + } + isRetrying = numberOfRetryOnFailure > 0; + } while (deletedClientSecret.length > 0 || (isRetrying && numberOfRetryOnFailure < MAX_RETRY_ON_FAILURE)); + logger.info(`${QueueName.DailyResourceCleanUp}: remove expired universal auth client secret completed`); + }; + + return { ...uaClientSecretOrm, incrementUsage, removeExpiredClientSecrets }; }; diff --git a/backend/src/services/identity-ua/identity-ua-service.ts b/backend/src/services/identity-ua/identity-ua-service.ts index 5e940871b5..b456c16477 100644 --- a/backend/src/services/identity-ua/identity-ua-service.ts +++ b/backend/src/services/identity-ua/identity-ua-service.ts @@ -10,11 +10,10 @@ import { OrgPermissionActions, OrgPermissionSubjects } from "@app/ee/services/pe import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service"; import { isAtLeastAsPrivileged } from "@app/lib/casl"; import { getConfig } from "@app/lib/config/env"; -import { BadRequestError, ForbiddenRequestError, UnauthorizedError } from "@app/lib/errors"; +import { BadRequestError, ForbiddenRequestError, NotFoundError, UnauthorizedError } from "@app/lib/errors"; import { checkIPAgainstBlocklist,
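The pruning routine above follows a batched-delete shape worth noting; in miniature (table and subquery names are hypothetical):

    // Delete in bounded batches until a batch comes back empty;
    // retry a bounded number of times when a batch fails.
    let deleted: { id: string }[] = [];
    let failures = 0;
    do {
      try {
        deleted = await db(table).whereIn("id", expiredIdsSubquery).del().returning("id");
        failures = 0; // a successful batch resets the retry budget
      } catch {
        failures += 1;
      }
    } while (deleted.length > 0 || (failures > 0 && failures < MAX_RETRY_ON_FAILURE));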
extractIPDetails, isValidIpOrCidr, TIp } from "@app/lib/ip"; import { ActorType, AuthTokenType } from "../auth/auth-type"; -import { TIdentityDALFactory } from "../identity/identity-dal"; import { TIdentityOrgDALFactory } from "../identity/identity-org-dal"; import { TIdentityAccessTokenDALFactory } from "../identity-access-token/identity-access-token-dal"; import { TIdentityAccessTokenJwtPayload } from "../identity-access-token/identity-access-token-types"; @@ -25,7 +24,9 @@ import { TCreateUaClientSecretDTO, TGetUaClientSecretsDTO, TGetUaDTO, + TGetUniversalAuthClientSecretByIdDTO, TRevokeUaClientSecretDTO, + TRevokeUaDTO, TUpdateUaDTO } from "./identity-ua-types"; @@ -34,7 +35,6 @@ type TIdentityUaServiceFactoryDep = { identityUaClientSecretDAL: TIdentityUaClientSecretDALFactory; identityAccessTokenDAL: TIdentityAccessTokenDALFactory; identityOrgMembershipDAL: TIdentityOrgDALFactory; - identityDAL: Pick<TIdentityDALFactory, "updateById">; permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">; licenseService: Pick<TLicenseServiceFactory, "getPlan">; }; @@ -46,13 +46,16 @@ export const identityUaServiceFactory = ({ identityUaClientSecretDAL, identityAccessTokenDAL, identityOrgMembershipDAL, - identityDAL, permissionService, licenseService }: TIdentityUaServiceFactoryDep) => { const login = async (clientId: string, clientSecret: string, ip: string) => { const identityUa = await identityUaDAL.findOne({ clientId }); - if (!identityUa) throw new UnauthorizedError({ message: "Invalid credentials" }); + if (!identityUa) { + throw new NotFoundError({ + message: "No identity with specified client ID was found" + }); + } const identityMembershipOrg = await identityOrgMembershipDAL.findOne({ identityId: identityUa.identityId }); @@ -83,7 +86,7 @@ }); throw new UnauthorizedError({ - message: "Failed to authenticate identity credentials due to expired client secret" + message: "Access denied due to expired client secret" }); } } @@ -95,7 +98,7 @@ isClientSecretRevoked: true }); throw new UnauthorizedError({ - message: "Failed to authenticate identity credentials due to client secret number of uses limit reached" + message: "Access denied due to client secret usage limit reached" }); } @@ -109,7 +112,8 @@ accessTokenTTL: identityUa.accessTokenTTL, accessTokenMaxTTL: identityUa.accessTokenMaxTTL, accessTokenNumUses: 0, - accessTokenNumUsesLimit: identityUa.accessTokenNumUsesLimit + accessTokenNumUsesLimit: identityUa.accessTokenNumUsesLimit, + authMethod: IdentityAuthMethod.UNIVERSAL_AUTH }, tx ); @@ -136,7 +140,7 @@ return { accessToken, identityUa, validClientSecretInfo, identityAccessToken, identityMembershipOrg }; }; - const attachUa = async ({ + const attachUniversalAuth = async ({ accessTokenMaxTTL, identityId, accessTokenNumUsesLimit, @@ -149,11 +153,13 @@ actorOrgId }: TAttachUaDTO) => { const identityMembershipOrg = await identityOrgMembershipDAL.findOne({ identityId }); - if (!identityMembershipOrg) throw new BadRequestError({ message: "Failed to find identity" }); - if (identityMembershipOrg.identity.authMethod) + if (!identityMembershipOrg) throw new NotFoundError({ message: `Failed to find identity with ID ${identityId}` }); + + if (identityMembershipOrg.identity.authMethods.includes(IdentityAuthMethod.UNIVERSAL_AUTH)) { throw new BadRequestError({ message: "Failed to add universal auth to already configured identity" }); + } if (accessTokenMaxTTL > 0 && accessTokenTTL >
accessTokenMaxTTL) { throw new BadRequestError({ message: "Access token TTL cannot be greater than max TTL" }); @@ -215,19 +221,12 @@ export const identityUaServiceFactory = ({ }, tx ); - await identityDAL.updateById( - identityMembershipOrg.identityId, - { - authMethod: IdentityAuthMethod.Univeral - }, - tx - ); return doc; }); return { ...identityUa, orgId: identityMembershipOrg.orgId }; }; - const updateUa = async ({ + const updateUniversalAuth = async ({ accessTokenMaxTTL, identityId, accessTokenNumUsesLimit, @@ -240,11 +239,13 @@ export const identityUaServiceFactory = ({ actorOrgId }: TUpdateUaDTO) => { const identityMembershipOrg = await identityOrgMembershipDAL.findOne({ identityId }); - if (!identityMembershipOrg) throw new BadRequestError({ message: "Failed to find identity" }); - if (identityMembershipOrg.identity?.authMethod !== IdentityAuthMethod.Univeral) + if (!identityMembershipOrg) throw new NotFoundError({ message: `Failed to find identity with ID ${identityId}` }); + + if (!identityMembershipOrg.identity.authMethods.includes(IdentityAuthMethod.UNIVERSAL_AUTH)) { throw new BadRequestError({ - message: "Failed to updated universal auth" + message: "The identity does not have universal auth" }); + } const uaIdentityAuth = await identityUaDAL.findOne({ identityId }); @@ -312,13 +313,15 @@ export const identityUaServiceFactory = ({ return { ...updatedUaAuth, orgId: identityMembershipOrg.orgId }; }; - const getIdentityUa = async ({ identityId, actorId, actor, actorAuthMethod, actorOrgId }: TGetUaDTO) => { + const getIdentityUniversalAuth = async ({ identityId, actorId, actor, actorAuthMethod, actorOrgId }: TGetUaDTO) => { const identityMembershipOrg = await identityOrgMembershipDAL.findOne({ identityId }); - if (!identityMembershipOrg) throw new BadRequestError({ message: "Failed to find identity" }); - if (identityMembershipOrg.identity?.authMethod !== IdentityAuthMethod.Univeral) + if (!identityMembershipOrg) throw new NotFoundError({ message: `Failed to find identity with ID ${identityId}` }); + + if (!identityMembershipOrg.identity.authMethods.includes(IdentityAuthMethod.UNIVERSAL_AUTH)) { throw new BadRequestError({ message: "The identity does not have universal auth" }); + } const uaIdentityAuth = await identityUaDAL.findOne({ identityId }); @@ -333,7 +336,50 @@ export const identityUaServiceFactory = ({ return { ...uaIdentityAuth, orgId: identityMembershipOrg.orgId }; }; - const createUaClientSecret = async ({ + const revokeIdentityUniversalAuth = async ({ + identityId, + actorId, + actor, + actorAuthMethod, + actorOrgId + }: TRevokeUaDTO) => { + const identityMembershipOrg = await identityOrgMembershipDAL.findOne({ identityId }); + if (!identityMembershipOrg) throw new NotFoundError({ message: `Failed to find identity with ID ${identityId}` }); + + if (!identityMembershipOrg.identity.authMethods.includes(IdentityAuthMethod.UNIVERSAL_AUTH)) { + throw new BadRequestError({ + message: "The identity does not have universal auth" + }); + } + const { permission } = await permissionService.getOrgPermission( + actor, + actorId, + identityMembershipOrg.orgId, + actorAuthMethod, + actorOrgId + ); + ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Edit, OrgPermissionSubjects.Identity); + + const { permission: rolePermission } = await permissionService.getOrgPermission( + ActorType.IDENTITY, + identityMembershipOrg.identityId, + identityMembershipOrg.orgId, + actorAuthMethod, + actorOrgId + ); + if (!isAtLeastAsPrivileged(permission, rolePermission)) + throw new 
ForbiddenRequestError({ + message: "Failed to revoke universal auth of identity with more privileged role" + }); + + const revokedIdentityUniversalAuth = await identityUaDAL.transaction(async (tx) => { + const deletedUniversalAuth = await identityUaDAL.delete({ identityId }, tx); + return { ...deletedUniversalAuth?.[0], orgId: identityMembershipOrg.orgId }; + }); + return revokedIdentityUniversalAuth; + }; + + const createUniversalAuthClientSecret = async ({ actor, actorId, actorOrgId, @@ -344,11 +390,14 @@ export const identityUaServiceFactory = ({ numUsesLimit }: TCreateUaClientSecretDTO) => { const identityMembershipOrg = await identityOrgMembershipDAL.findOne({ identityId }); - if (!identityMembershipOrg) throw new BadRequestError({ message: "Failed to find identity" }); - if (identityMembershipOrg.identity?.authMethod !== IdentityAuthMethod.Univeral) + if (!identityMembershipOrg) throw new NotFoundError({ message: `Failed to find identity with ID ${identityId}` }); + + if (!identityMembershipOrg.identity.authMethods.includes(IdentityAuthMethod.UNIVERSAL_AUTH)) { throw new BadRequestError({ message: "The identity does not have universal auth" }); + } + const { permission } = await permissionService.getOrgPermission( actor, actorId, @@ -374,12 +423,11 @@ export const identityUaServiceFactory = ({ const appCfg = getConfig(); const clientSecret = crypto.randomBytes(32).toString("hex"); const clientSecretHash = await bcrypt.hash(clientSecret, appCfg.SALT_ROUNDS); - const identityUniversalAuth = await identityUaDAL.findOne({ - identityId - }); + + const identityUaAuth = await identityUaDAL.findOne({ identityId: identityMembershipOrg.identityId }); const identityUaClientSecret = await identityUaClientSecretDAL.create({ - identityUAId: identityUniversalAuth.id, + identityUAId: identityUaAuth.id, description, clientSecretPrefix: clientSecret.slice(0, 4), clientSecretHash, @@ -391,12 +439,11 @@ export const identityUaServiceFactory = ({ return { clientSecret, clientSecretData: identityUaClientSecret, - uaAuth: identityUniversalAuth, orgId: identityMembershipOrg.orgId }; }; - const getUaClientSecrets = async ({ + const getUniversalAuthClientSecrets = async ({ actor, actorId, actorOrgId, @@ -404,11 +451,13 @@ export const identityUaServiceFactory = ({ identityId }: TGetUaClientSecretsDTO) => { const identityMembershipOrg = await identityOrgMembershipDAL.findOne({ identityId }); - if (!identityMembershipOrg) throw new BadRequestError({ message: "Failed to find identity" }); - if (identityMembershipOrg.identity?.authMethod !== IdentityAuthMethod.Univeral) + if (!identityMembershipOrg) throw new NotFoundError({ message: `Failed to find identity with ID ${identityId}` }); + + if (!identityMembershipOrg.identity.authMethods.includes(IdentityAuthMethod.UNIVERSAL_AUTH)) { throw new BadRequestError({ message: "The identity does not have universal auth" }); + } const { permission } = await permissionService.getOrgPermission( actor, actorId, @@ -425,8 +474,8 @@ export const identityUaServiceFactory = ({ actorAuthMethod, actorOrgId ); - const hasPriviledge = isAtLeastAsPrivileged(permission, rolePermission); - if (!hasPriviledge) + + if (!isAtLeastAsPrivileged(permission, rolePermission)) throw new ForbiddenRequestError({ message: "Failed to add identity to project with more privileged role" }); @@ -442,7 +491,49 @@ export const identityUaServiceFactory = ({ return { clientSecrets, orgId: identityMembershipOrg.orgId }; }; - const revokeUaClientSecret = async ({ + const getUniversalAuthClientSecretById = 
async ({ + identityId, + actorId, + actor, + actorOrgId, + actorAuthMethod, + clientSecretId + }: TGetUniversalAuthClientSecretByIdDTO) => { + const identityMembershipOrg = await identityOrgMembershipDAL.findOne({ identityId }); + if (!identityMembershipOrg) throw new NotFoundError({ message: `Failed to find identity with ID ${identityId}` }); + + if (!identityMembershipOrg.identity.authMethods.includes(IdentityAuthMethod.UNIVERSAL_AUTH)) { + throw new BadRequestError({ + message: "The identity does not have universal auth" + }); + } + + const { permission } = await permissionService.getOrgPermission( + actor, + actorId, + identityMembershipOrg.orgId, + actorAuthMethod, + actorOrgId + ); + ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Read, OrgPermissionSubjects.Identity); + + const { permission: rolePermission } = await permissionService.getOrgPermission( + ActorType.IDENTITY, + identityMembershipOrg.identityId, + identityMembershipOrg.orgId, + actorAuthMethod, + actorOrgId + ); + if (!isAtLeastAsPrivileged(permission, rolePermission)) + throw new ForbiddenRequestError({ + message: "Failed to read identity client secret of project with more privileged role" + }); + + const clientSecret = await identityUaClientSecretDAL.findById(clientSecretId); + return { ...clientSecret, identityId, orgId: identityMembershipOrg.orgId }; + }; + + const revokeUniversalAuthClientSecret = async ({ identityId, actorId, actor, @@ -451,11 +542,14 @@ clientSecretId }: TRevokeUaClientSecretDTO) => { const identityMembershipOrg = await identityOrgMembershipDAL.findOne({ identityId }); - if (!identityMembershipOrg) throw new BadRequestError({ message: "Failed to find identity" }); - if (identityMembershipOrg.identity?.authMethod !== IdentityAuthMethod.Univeral) + if (!identityMembershipOrg) throw new NotFoundError({ message: `Failed to find identity with ID ${identityId}` }); + + if (!identityMembershipOrg.identity.authMethods.includes(IdentityAuthMethod.UNIVERSAL_AUTH)) { throw new BadRequestError({ message: "The identity does not have universal auth" }); + } + const { permission } = await permissionService.getOrgPermission( actor, actorId, @@ -472,10 +566,10 @@ actorAuthMethod, actorOrgId ); - const hasPriviledge = isAtLeastAsPrivileged(permission, rolePermission); - if (!hasPriviledge) + + if (!isAtLeastAsPrivileged(permission, rolePermission)) throw new ForbiddenRequestError({ - message: "Failed to add identity to project with more privileged role" + message: "Failed to revoke identity client secret with more privileged role" }); const clientSecret = await identityUaClientSecretDAL.updateById(clientSecretId, { @@ -486,11 +580,13 @@ return { login, - attachUa, - updateUa, - getIdentityUa, - createUaClientSecret, - getUaClientSecrets, - revokeUaClientSecret + attachUniversalAuth, + updateUniversalAuth, + getIdentityUniversalAuth, + revokeIdentityUniversalAuth, + createUniversalAuthClientSecret, + getUniversalAuthClientSecrets, + revokeUniversalAuthClientSecret, + getUniversalAuthClientSecretById }; }; diff --git a/backend/src/services/identity-ua/identity-ua-types.ts b/backend/src/services/identity-ua/identity-ua-types.ts index 2cc4762a81..2045c21432 100644 --- a/backend/src/services/identity-ua/identity-ua-types.ts +++ b/backend/src/services/identity-ua/identity-ua-types.ts @@ -22,6 +22,10 @@ export type TGetUaDTO = { identityId: string; } & Omit<TProjectPermission, "projectId">; +export type
TRevokeUaDTO = { + identityId: string; +} & Omit<TProjectPermission, "projectId">; + export type TCreateUaClientSecretDTO = { identityId: string; description: string; @@ -37,3 +41,8 @@ export type TRevokeUaClientSecretDTO = { identityId: string; clientSecretId: string; } & Omit<TProjectPermission, "projectId">; + +export type TGetUniversalAuthClientSecretByIdDTO = { + identityId: string; + clientSecretId: string; +} & Omit<TProjectPermission, "projectId">; diff --git a/backend/src/services/identity/identity-fns.ts b/backend/src/services/identity/identity-fns.ts new file mode 100644 index 0000000000..49cf4d119f --- /dev/null +++ b/backend/src/services/identity/identity-fns.ts @@ -0,0 +1,29 @@ +import { IdentityAuthMethod } from "@app/db/schemas"; + +export const buildAuthMethods = ({ + uaId, + gcpId, + awsId, + kubernetesId, + oidcId, + azureId, + tokenId +}: { + uaId?: string; + gcpId?: string; + awsId?: string; + kubernetesId?: string; + oidcId?: string; + azureId?: string; + tokenId?: string; +}) => { + return [ + ...[uaId ? IdentityAuthMethod.UNIVERSAL_AUTH : null], + ...[gcpId ? IdentityAuthMethod.GCP_AUTH : null], + ...[awsId ? IdentityAuthMethod.AWS_AUTH : null], + ...[kubernetesId ? IdentityAuthMethod.KUBERNETES_AUTH : null], + ...[oidcId ? IdentityAuthMethod.OIDC_AUTH : null], + ...[azureId ? IdentityAuthMethod.AZURE_AUTH : null], + ...[tokenId ? IdentityAuthMethod.TOKEN_AUTH : null] + ].filter((authMethod) => authMethod) as IdentityAuthMethod[]; +}; diff --git a/backend/src/services/identity/identity-metadata-dal.ts b/backend/src/services/identity/identity-metadata-dal.ts new file mode 100644 index 0000000000..535420cf34 --- /dev/null +++ b/backend/src/services/identity/identity-metadata-dal.ts @@ -0,0 +1,10 @@ +import { TDbClient } from "@app/db"; +import { TableName } from "@app/db/schemas"; +import { ormify } from "@app/lib/knex"; + +export type TIdentityMetadataDALFactory = ReturnType<typeof identityMetadataDALFactory>; + +export const identityMetadataDALFactory = (db: TDbClient) => { + const orm = ormify(db, TableName.IdentityMetadata); + return orm; +}; diff --git a/backend/src/services/identity/identity-org-dal.ts b/backend/src/services/identity/identity-org-dal.ts index 95d742f334..bbdf96a2b1 100644 --- a/backend/src/services/identity/identity-org-dal.ts +++ b/backend/src/services/identity/identity-org-dal.ts @@ -1,9 +1,24 @@ import { Knex } from "knex"; import { TDbClient } from "@app/db"; -import { TableName, TIdentityOrgMemberships } from "@app/db/schemas"; +import { + TableName, + TIdentityAwsAuths, + TIdentityAzureAuths, + TIdentityGcpAuths, + TIdentityKubernetesAuths, + TIdentityOidcAuths, + TIdentityOrgMemberships, + TIdentityTokenAuths, + TIdentityUniversalAuths, + TOrgRoles +} from "@app/db/schemas"; import { DatabaseError } from "@app/lib/errors"; -import { ormify, selectAllTableCols } from "@app/lib/knex"; +import { ormify, selectAllTableCols, sqlNestRelationships } from "@app/lib/knex"; +import { OrderByDirection } from "@app/lib/types"; +import { OrgIdentityOrderBy, TListOrgIdentitiesByOrgIdDTO } from "@app/services/identity/identity-types"; + +import { buildAuthMethods } from "./identity-fns"; export type TIdentityOrgDALFactory = ReturnType<typeof identityOrgDALFactory>; @@ -12,28 +27,181 @@ export const identityOrgDALFactory = (db: TDbClient) => { const findOne = async (filter: Partial<TIdentityOrgMemberships>, tx?: Knex) => { try { - const [data] = await (tx || db)(TableName.IdentityOrgMembership) - .where(filter) + const [data] = await (tx || db.replicaNode())(TableName.IdentityOrgMembership) + .where((queryBuilder) => { + Object.entries(filter).forEach(([key, value]) => { + void
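A quick illustration of buildAuthMethods above (row IDs hypothetical): each left-joined auth table that produced a non-null id contributes its method, so an identity's configured auth methods fall out of the join results directly.

    buildAuthMethods({ uaId: "ua-row-id", tokenId: "token-row-id" });
    // => [IdentityAuthMethod.UNIVERSAL_AUTH, IdentityAuthMethod.TOKEN_AUTH]

    buildAuthMethods({});
    // => []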
queryBuilder.where(`${TableName.IdentityOrgMembership}.${key}`, value); + }); + }) .join(TableName.Identity, `${TableName.IdentityOrgMembership}.identityId`, `${TableName.Identity}.id`) - .select(selectAllTableCols(TableName.IdentityOrgMembership)) - .select(db.ref("name").withSchema(TableName.Identity)) - .select(db.ref("authMethod").withSchema(TableName.Identity)); + + .leftJoin( + TableName.IdentityUniversalAuth, + `${TableName.IdentityOrgMembership}.identityId`, + `${TableName.IdentityUniversalAuth}.identityId` + ) + .leftJoin( + TableName.IdentityGcpAuth, + `${TableName.IdentityOrgMembership}.identityId`, + `${TableName.IdentityGcpAuth}.identityId` + ) + .leftJoin( + TableName.IdentityAwsAuth, + `${TableName.IdentityOrgMembership}.identityId`, + `${TableName.IdentityAwsAuth}.identityId` + ) + .leftJoin( + TableName.IdentityKubernetesAuth, + `${TableName.IdentityOrgMembership}.identityId`, + `${TableName.IdentityKubernetesAuth}.identityId` + ) + .leftJoin( + TableName.IdentityOidcAuth, + `${TableName.IdentityOrgMembership}.identityId`, + `${TableName.IdentityOidcAuth}.identityId` + ) + .leftJoin( + TableName.IdentityAzureAuth, + `${TableName.IdentityOrgMembership}.identityId`, + `${TableName.IdentityAzureAuth}.identityId` + ) + .leftJoin( + TableName.IdentityTokenAuth, + `${TableName.IdentityOrgMembership}.identityId`, + `${TableName.IdentityTokenAuth}.identityId` + ) + + .select( + selectAllTableCols(TableName.IdentityOrgMembership), + + db.ref("id").as("uaId").withSchema(TableName.IdentityUniversalAuth), + db.ref("id").as("gcpId").withSchema(TableName.IdentityGcpAuth), + db.ref("id").as("awsId").withSchema(TableName.IdentityAwsAuth), + db.ref("id").as("kubernetesId").withSchema(TableName.IdentityKubernetesAuth), + db.ref("id").as("oidcId").withSchema(TableName.IdentityOidcAuth), + db.ref("id").as("azureId").withSchema(TableName.IdentityAzureAuth), + db.ref("id").as("tokenId").withSchema(TableName.IdentityTokenAuth), + + db.ref("name").withSchema(TableName.Identity) + ); + if (data) { - const { name, authMethod } = data; - return { ...data, identity: { id: data.identityId, name, authMethod } }; + const { name } = data; + return { + ...data, + identity: { + id: data.identityId, + name, + authMethods: buildAuthMethods(data) + } + }; } } catch (error) { throw new DatabaseError({ error, name: "FindOne" }); } }; - const findByOrgId = async (orgId: string, tx?: Knex) => { + const find = async ( + { + limit, + offset = 0, + orderBy = OrgIdentityOrderBy.Name, + orderDirection = OrderByDirection.ASC, + search, + ...filter + }: Partial<TIdentityOrgMemberships> & + Pick<TListOrgIdentitiesByOrgIdDTO, "limit" | "offset" | "orderBy" | "orderDirection" | "search">, + tx?: Knex + ) => { try { - const docs = await (tx || db)(TableName.IdentityOrgMembership) - .where(`${TableName.IdentityOrgMembership}.orgId`, orgId) - .join(TableName.Identity, `${TableName.IdentityOrgMembership}.identityId`, `${TableName.Identity}.id`) - .leftJoin(TableName.OrgRoles, `${TableName.IdentityOrgMembership}.roleId`, `${TableName.OrgRoles}.id`) - .select(selectAllTableCols(TableName.IdentityOrgMembership)) + const paginatedIdentity = (tx || db.replicaNode())(TableName.Identity) + .join( + TableName.IdentityOrgMembership, + `${TableName.IdentityOrgMembership}.identityId`, + `${TableName.Identity}.id` + ) + .orderBy(`${TableName.Identity}.${orderBy}`, orderDirection) + .select( + selectAllTableCols(TableName.IdentityOrgMembership), + db.ref("name").withSchema(TableName.Identity).as("identityName") + ) + .where(filter) + .as("paginatedIdentity"); + + if (search?.length) { + void paginatedIdentity.whereILike(`${TableName.Identity}.name`,
`%${search}%`); + } + + if (limit) { + void paginatedIdentity.offset(offset).limit(limit); + } + + // akhilmhdh: refer this for pagination with multiple left queries + type TSubquery = Awaited; + const query = (tx || db.replicaNode()) + .from(paginatedIdentity) + .leftJoin(TableName.OrgRoles, `paginatedIdentity.roleId`, `${TableName.OrgRoles}.id`) + + .leftJoin(TableName.IdentityMetadata, (queryBuilder) => { + void queryBuilder + .on(`paginatedIdentity.identityId`, `${TableName.IdentityMetadata}.identityId`) + .andOn(`paginatedIdentity.orgId`, `${TableName.IdentityMetadata}.orgId`); + }) + + .leftJoin( + TableName.IdentityUniversalAuth, + "paginatedIdentity.identityId", + `${TableName.IdentityUniversalAuth}.identityId` + ) + .leftJoin( + TableName.IdentityGcpAuth, + "paginatedIdentity.identityId", + `${TableName.IdentityGcpAuth}.identityId` + ) + .leftJoin( + TableName.IdentityAwsAuth, + "paginatedIdentity.identityId", + `${TableName.IdentityAwsAuth}.identityId` + ) + .leftJoin( + TableName.IdentityKubernetesAuth, + "paginatedIdentity.identityId", + `${TableName.IdentityKubernetesAuth}.identityId` + ) + .leftJoin( + TableName.IdentityOidcAuth, + "paginatedIdentity.identityId", + `${TableName.IdentityOidcAuth}.identityId` + ) + .leftJoin( + TableName.IdentityAzureAuth, + "paginatedIdentity.identityId", + `${TableName.IdentityAzureAuth}.identityId` + ) + .leftJoin( + TableName.IdentityTokenAuth, + "paginatedIdentity.identityId", + `${TableName.IdentityTokenAuth}.identityId` + ) + + .select( + db.ref("id").withSchema("paginatedIdentity"), + db.ref("role").withSchema("paginatedIdentity"), + db.ref("roleId").withSchema("paginatedIdentity"), + db.ref("orgId").withSchema("paginatedIdentity"), + db.ref("createdAt").withSchema("paginatedIdentity"), + db.ref("updatedAt").withSchema("paginatedIdentity"), + db.ref("identityId").withSchema("paginatedIdentity").as("identityId"), + db.ref("identityName").withSchema("paginatedIdentity"), + + db.ref("id").as("uaId").withSchema(TableName.IdentityUniversalAuth), + db.ref("id").as("gcpId").withSchema(TableName.IdentityGcpAuth), + db.ref("id").as("awsId").withSchema(TableName.IdentityAwsAuth), + db.ref("id").as("kubernetesId").withSchema(TableName.IdentityKubernetesAuth), + db.ref("id").as("oidcId").withSchema(TableName.IdentityOidcAuth), + db.ref("id").as("azureId").withSchema(TableName.IdentityAzureAuth), + db.ref("id").as("tokenId").withSchema(TableName.IdentityTokenAuth) + ) // cr stands for custom role .select(db.ref("id").as("crId").withSchema(TableName.OrgRoles)) .select(db.ref("name").as("crName").withSchema(TableName.OrgRoles)) @@ -41,11 +209,20 @@ export const identityOrgDALFactory = (db: TDbClient) => { .select(db.ref("description").as("crDescription").withSchema(TableName.OrgRoles)) .select(db.ref("permissions").as("crPermission").withSchema(TableName.OrgRoles)) .select(db.ref("permissions").as("crPermission").withSchema(TableName.OrgRoles)) - .select(db.ref("id").as("identityId").withSchema(TableName.Identity)) - .select(db.ref("name").as("identityName").withSchema(TableName.Identity)) - .select(db.ref("authMethod").as("identityAuthMethod").withSchema(TableName.Identity)); - return docs.map( - ({ + .select( + db.ref("id").withSchema(TableName.IdentityMetadata).as("metadataId"), + db.ref("key").withSchema(TableName.IdentityMetadata).as("metadataKey"), + db.ref("value").withSchema(TableName.IdentityMetadata).as("metadataValue") + ); + if (orderBy === OrgIdentityOrderBy.Name) { + void query.orderBy("identityName", orderDirection); + } + + const docs 
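
The `paginatedIdentity` subquery is the key trick here, and the inline `akhilmhdh` note points at it: LIMIT/OFFSET are applied to the one-row-per-identity membership query first, and the one-to-many joins (metadata, auth methods) are layered on top, so pagination counts identities rather than joined rows. A stripped-down sketch of the same pattern with Knex, using hypothetical `members`/`tags` tables:

import { Knex } from "knex";

const findPage = async (db: Knex, limit: number, offset: number) => {
  // 1. Paginate the one-row-per-entity base query first...
  const page = db("members")
    .orderBy("members.name", "asc")
    .limit(limit)
    .offset(offset)
    .select("members.*")
    .as("page");

  // 2. ...then left-join the one-to-many relation against the page, so LIMIT
  // has been applied to members, not to (member x tag) rows.
  return db.from(page).leftJoin("tags", "tags.memberId", "page.id").select("page.*", "tags.name as tagName");
};
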
= await query; + const formattedDocs = sqlNestRelationships({ + data: docs, + key: "id", + parentMapper: ({ crId, crDescription, crSlug, @@ -53,17 +230,29 @@ export const identityOrgDALFactory = (db: TDbClient) => { crName, identityId, identityName, - identityAuthMethod, - ...el + role, + roleId, + id, + orgId, + uaId, + awsId, + gcpId, + kubernetesId, + oidcId, + azureId, + tokenId, + createdAt, + updatedAt }) => ({ - ...el, + role, + roleId, identityId, - identity: { - id: identityId, - name: identityName, - authMethod: identityAuthMethod - }, - customRole: el.roleId + id, + + orgId, + createdAt, + updatedAt, + customRole: roleId ? { id: crId, name: crName, @@ -71,13 +260,61 @@ export const identityOrgDALFactory = (db: TDbClient) => { permissions: crPermission, description: crDescription } - : undefined - }) - ); + : undefined, + identity: { + id: identityId, + name: identityName, + authMethods: buildAuthMethods({ + uaId, + awsId, + gcpId, + kubernetesId, + oidcId, + azureId, + tokenId + }) + } + }), + childrenMapper: [ + { + key: "metadataId", + label: "metadata" as const, + mapper: ({ metadataKey, metadataValue, metadataId }) => ({ + id: metadataId, + key: metadataKey, + value: metadataValue + }) + } + ] + }); + + return formattedDocs; } catch (error) { throw new DatabaseError({ error, name: "FindByOrgId" }); } }; - return { ...identityOrgOrm, findOne, findByOrgId }; + const countAllOrgIdentities = async ( + { search, ...filter }: Partial & Pick, + tx?: Knex + ) => { + try { + const query = (tx || db.replicaNode())(TableName.IdentityOrgMembership) + .where(filter) + .join(TableName.Identity, `${TableName.IdentityOrgMembership}.identityId`, `${TableName.Identity}.id`) + .count(); + + if (search?.length) { + void query.whereILike(`${TableName.Identity}.name`, `%${search}%`); + } + + const identities = await query; + + return Number(identities[0].count); + } catch (error) { + throw new DatabaseError({ error, name: "countAllOrgIdentities" }); + } + }; + + return { ...identityOrgOrm, find, findOne, countAllOrgIdentities }; }; diff --git a/backend/src/services/identity/identity-service.ts b/backend/src/services/identity/identity-service.ts index 2863bf23eb..fffcbacc21 100644 --- a/backend/src/services/identity/identity-service.ts +++ b/backend/src/services/identity/identity-service.ts @@ -1,29 +1,44 @@ import { ForbiddenError } from "@casl/ability"; -import { OrgMembershipRole, TOrgRoles } from "@app/db/schemas"; +import { OrgMembershipRole, TableName, TOrgRoles } from "@app/db/schemas"; +import { TLicenseServiceFactory } from "@app/ee/services/license/license-service"; import { OrgPermissionActions, OrgPermissionSubjects } from "@app/ee/services/permission/org-permission"; import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service"; import { isAtLeastAsPrivileged } from "@app/lib/casl"; -import { BadRequestError, ForbiddenRequestError } from "@app/lib/errors"; -import { TOrgPermission } from "@app/lib/types"; +import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors"; +import { TIdentityProjectDALFactory } from "@app/services/identity-project/identity-project-dal"; import { ActorType } from "../auth/auth-type"; import { TIdentityDALFactory } from "./identity-dal"; +import { TIdentityMetadataDALFactory } from "./identity-metadata-dal"; import { TIdentityOrgDALFactory } from "./identity-org-dal"; -import { TCreateIdentityDTO, TDeleteIdentityDTO, TUpdateIdentityDTO } from "./identity-types"; +import { + TCreateIdentityDTO, + 
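
`sqlNestRelationships` (from `@app/lib/knex`) is what folds the duplicated join rows back into one document per membership: rows sharing the same `key` are merged by `parentMapper`, and each `childrenMapper` collects its columns into an array. Roughly, with the mappers used here and made-up values:

// Two raw rows for the same membership, one per metadata entry:
const rows = [
  { id: "m1", identityId: "i1", identityName: "ci-bot", metadataId: "md1", metadataKey: "team", metadataValue: "platform" },
  { id: "m1", identityId: "i1", identityName: "ci-bot", metadataId: "md2", metadataKey: "env", metadataValue: "prod" }
];

// After sqlNestRelationships({ data: rows, key: "id", ... }) the result is roughly:
const nested = {
  id: "m1",
  identity: { id: "i1", name: "ci-bot", authMethods: [] },
  metadata: [
    { id: "md1", key: "team", value: "platform" },
    { id: "md2", key: "env", value: "prod" }
  ]
};
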
TDeleteIdentityDTO,
+  TGetIdentityByIdDTO,
+  TListOrgIdentitiesByOrgIdDTO,
+  TListProjectIdentitiesByIdentityIdDTO,
+  TUpdateIdentityDTO
+} from "./identity-types";
 
 type TIdentityServiceFactoryDep = {
   identityDAL: TIdentityDALFactory;
+  identityMetadataDAL: TIdentityMetadataDALFactory;
   identityOrgMembershipDAL: TIdentityOrgDALFactory;
+  identityProjectDAL: Pick<TIdentityProjectDALFactory, "findByIdentityId">;
   permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
+  licenseService: Pick<TLicenseServiceFactory, "getPlan" | "updateSubscriptionOrgMemberCount">;
 };
 
 export type TIdentityServiceFactory = ReturnType<typeof identityServiceFactory>;
 
 export const identityServiceFactory = ({
   identityDAL,
+  identityMetadataDAL,
   identityOrgMembershipDAL,
-  permissionService
+  identityProjectDAL,
+  permissionService,
+  licenseService
 }: TIdentityServiceFactoryDep) => {
   const createIdentity = async ({
     name,
@@ -32,7 +47,8 @@ export const identityServiceFactory = ({
     orgId,
     actorId,
     actorAuthMethod,
-    actorOrgId
+    actorOrgId,
+    metadata
   }: TCreateIdentityDTO) => {
     const { permission } = await permissionService.getOrgPermission(actor, actorId, orgId, actorAuthMethod, actorOrgId);
     ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Create, OrgPermissionSubjects.Identity);
@@ -43,7 +59,17 @@ export const identityServiceFactory = ({
     );
     const isCustomRole = Boolean(customRole);
     const hasRequiredPriviledges = isAtLeastAsPrivileged(permission, rolePermission);
-    if (!hasRequiredPriviledges) throw new BadRequestError({ message: "Failed to create a more privileged identity" });
+    if (!hasRequiredPriviledges)
+      throw new ForbiddenRequestError({ message: "Failed to create a more privileged identity" });
+
+    const plan = await licenseService.getPlan(orgId);
+
+    if (plan?.slug !== "enterprise" && plan?.identityLimit && plan.identitiesUsed >= plan.identityLimit) {
+      // non-enterprise plan with an identity cap: reject once usage meets or exceeds the cap
+      throw new BadRequestError({
+        message: "Failed to create identity due to identity limit reached. Upgrade plan to create more identities."
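
Read in isolation, the guard admits three ways through: an enterprise plan, a plan with no `identityLimit`, or usage still under the limit. A self-contained sketch of the same check (field names taken from the diff; the plan type is otherwise assumed):

type TPlan = { slug: string; identityLimit?: number | null; identitiesUsed: number };

// Throws only when a non-enterprise plan has a cap and the cap is already met.
const assertIdentityCapacity = (plan: TPlan) => {
  if (plan.slug !== "enterprise" && plan.identityLimit && plan.identitiesUsed >= plan.identityLimit) {
    throw new Error("Identity limit reached. Upgrade plan to create more identities.");
  }
};

assertIdentityCapacity({ slug: "pro", identityLimit: 5, identitiesUsed: 3 }); // ok
// assertIdentityCapacity({ slug: "pro", identityLimit: 5, identitiesUsed: 5 }); // throws
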
+ }); + } const identity = await identityDAL.transaction(async (tx) => { const newIdentity = await identityDAL.create({ name }, tx); @@ -56,8 +82,20 @@ export const identityServiceFactory = ({ }, tx ); - return newIdentity; + if (metadata && metadata.length) { + await identityMetadataDAL.insertMany( + metadata.map(({ key, value }) => ({ + identityId: newIdentity.id, + orgId, + key, + value + })), + tx + ); + } + return { ...newIdentity, authMethods: [] }; }); + await licenseService.updateSubscriptionOrgMemberCount(orgId); return identity; }; @@ -69,10 +107,11 @@ export const identityServiceFactory = ({ actor, actorId, actorAuthMethod, - actorOrgId + actorOrgId, + metadata }: TUpdateIdentityDTO) => { const identityOrgMembership = await identityOrgMembershipDAL.findOne({ identityId: id }); - if (!identityOrgMembership) throw new BadRequestError({ message: `Failed to find identity with id ${id}` }); + if (!identityOrgMembership) throw new NotFoundError({ message: `Failed to find identity with id ${id}` }); const { permission } = await permissionService.getOrgPermission( actor, @@ -104,31 +143,63 @@ export const identityServiceFactory = ({ const isCustomRole = Boolean(customOrgRole); const hasRequiredNewRolePermission = isAtLeastAsPrivileged(permission, rolePermission); if (!hasRequiredNewRolePermission) - throw new BadRequestError({ message: "Failed to create a more privileged identity" }); + throw new ForbiddenRequestError({ message: "Failed to create a more privileged identity" }); if (isCustomRole) customRole = customOrgRole; } const identity = await identityDAL.transaction(async (tx) => { const newIdentity = name ? await identityDAL.updateById(id, { name }, tx) : await identityDAL.findById(id, tx); if (role) { - await identityOrgMembershipDAL.update( - { identityId: id }, + await identityOrgMembershipDAL.updateById( + identityOrgMembership.id, { role: customRole ? 
OrgMembershipRole.Custom : role, - roleId: customRole?.id + roleId: customRole?.id || null }, tx ); } + if (metadata) { + await identityMetadataDAL.delete({ orgId: identityOrgMembership.orgId, identityId: id }, tx); + if (metadata.length) { + await identityMetadataDAL.insertMany( + metadata.map(({ key, value }) => ({ + identityId: newIdentity.id, + orgId: identityOrgMembership.orgId, + key, + value + })), + tx + ); + } + } return newIdentity; }); return { ...identity, orgId: identityOrgMembership.orgId }; }; + const getIdentityById = async ({ id, actor, actorId, actorOrgId, actorAuthMethod }: TGetIdentityByIdDTO) => { + const doc = await identityOrgMembershipDAL.find({ + [`${TableName.IdentityOrgMembership}.identityId` as "identityId"]: id + }); + const identity = doc[0]; + if (!identity) throw new NotFoundError({ message: `Failed to find identity with id ${id}` }); + + const { permission } = await permissionService.getOrgPermission( + actor, + actorId, + identity.orgId, + actorAuthMethod, + actorOrgId + ); + ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Read, OrgPermissionSubjects.Identity); + return identity; + }; + const deleteIdentity = async ({ actorId, actor, actorOrgId, actorAuthMethod, id }: TDeleteIdentityDTO) => { const identityOrgMembership = await identityOrgMembershipDAL.findOne({ identityId: id }); - if (!identityOrgMembership) throw new BadRequestError({ message: `Failed to find identity with id ${id}` }); + if (!identityOrgMembership) throw new NotFoundError({ message: `Failed to find identity with id ${id}` }); const { permission } = await permissionService.getOrgPermission( actor, @@ -150,14 +221,64 @@ export const identityServiceFactory = ({ throw new ForbiddenRequestError({ message: "Failed to delete more privileged identity" }); const deletedIdentity = await identityDAL.deleteById(id); + + await licenseService.updateSubscriptionOrgMemberCount(identityOrgMembership.orgId); + return { ...deletedIdentity, orgId: identityOrgMembership.orgId }; }; - const listOrgIdentities = async ({ orgId, actor, actorId, actorAuthMethod, actorOrgId }: TOrgPermission) => { + const listOrgIdentities = async ({ + orgId, + actor, + actorId, + actorAuthMethod, + actorOrgId, + limit, + offset, + orderBy, + orderDirection, + search + }: TListOrgIdentitiesByOrgIdDTO) => { const { permission } = await permissionService.getOrgPermission(actor, actorId, orgId, actorAuthMethod, actorOrgId); ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Read, OrgPermissionSubjects.Identity); - const identityMemberships = await identityOrgMembershipDAL.findByOrgId(orgId); + const identityMemberships = await identityOrgMembershipDAL.find({ + [`${TableName.IdentityOrgMembership}.orgId` as "orgId"]: orgId, + limit, + offset, + orderBy, + orderDirection, + search + }); + + const totalCount = await identityOrgMembershipDAL.countAllOrgIdentities({ + [`${TableName.IdentityOrgMembership}.orgId` as "orgId"]: orgId, + search + }); + + return { identityMemberships, totalCount }; + }; + + const listProjectIdentitiesByIdentityId = async ({ + identityId, + actor, + actorId, + actorAuthMethod, + actorOrgId + }: TListProjectIdentitiesByIdentityIdDTO) => { + const identityOrgMembership = await identityOrgMembershipDAL.findOne({ identityId }); + if (!identityOrgMembership) throw new NotFoundError({ message: `Failed to find identity with id ${identityId}` }); + + const { permission } = await permissionService.getOrgPermission( + actor, + actorId, + identityOrgMembership.orgId, + 
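
Note the update semantics above: `metadata` is replace-all. Leaving it `undefined` keeps existing entries, `[]` deletes them all, and a non-empty array deletes then re-inserts. A short sketch of what each call would do (`actorCtx` is an assumed bundle of the actor fields):

// actorCtx = { actor, actorId, actorAuthMethod, actorOrgId }, assumed caller context.
await updateIdentity({ id, name: "renamed", ...actorCtx }); // metadata untouched

await updateIdentity({ id, metadata: [], ...actorCtx }); // all metadata deleted

await updateIdentity({
  id,
  metadata: [
    { key: "team", value: "platform" },
    { key: "env", value: "prod" }
  ],
  ...actorCtx
}); // previous entries replaced by exactly these two
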
actorAuthMethod, + actorOrgId + ); + ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Read, OrgPermissionSubjects.Identity); + + const identityMemberships = await identityProjectDAL.findByIdentityId(identityId); return identityMemberships; }; @@ -165,6 +286,8 @@ export const identityServiceFactory = ({ createIdentity, updateIdentity, deleteIdentity, - listOrgIdentities + listOrgIdentities, + getIdentityById, + listProjectIdentitiesByIdentityId }; }; diff --git a/backend/src/services/identity/identity-types.ts b/backend/src/services/identity/identity-types.ts index 10b943667c..ceaf3ecfc3 100644 --- a/backend/src/services/identity/identity-types.ts +++ b/backend/src/services/identity/identity-types.ts @@ -1,23 +1,46 @@ import { IPType } from "@app/lib/ip"; -import { TOrgPermission } from "@app/lib/types"; +import { OrderByDirection, TOrgPermission } from "@app/lib/types"; export type TCreateIdentityDTO = { role: string; name: string; + metadata?: { key: string; value: string }[]; } & TOrgPermission; export type TUpdateIdentityDTO = { id: string; role?: string; name?: string; + metadata?: { key: string; value: string }[]; } & Omit; export type TDeleteIdentityDTO = { id: string; } & Omit; +export type TGetIdentityByIdDTO = { + id: string; +} & Omit; + export interface TIdentityTrustedIp { ipAddress: string; type: IPType; prefix: number; } + +export type TListProjectIdentitiesByIdentityIdDTO = { + identityId: string; +} & Omit; + +export type TListOrgIdentitiesByOrgIdDTO = { + limit?: number; + offset?: number; + orderBy?: OrgIdentityOrderBy; + orderDirection?: OrderByDirection; + search?: string; +} & TOrgPermission; + +export enum OrgIdentityOrderBy { + Name = "name" + // Role = "role" +} diff --git a/backend/src/services/integration-auth/integration-app-list.ts b/backend/src/services/integration-auth/integration-app-list.ts index 9cb0d822cd..8fe1231bb0 100644 --- a/backend/src/services/integration-auth/integration-app-list.ts +++ b/backend/src/services/integration-auth/integration-app-list.ts @@ -1,9 +1,13 @@ /* eslint-disable no-await-in-loop */ +import { createAppAuth } from "@octokit/auth-app"; import { Octokit } from "@octokit/rest"; +import { TIntegrationAuths } from "@app/db/schemas"; +import { getConfig } from "@app/lib/config/env"; import { request } from "@app/lib/config/request"; -import { BadRequestError } from "@app/lib/errors"; +import { NotFoundError } from "@app/lib/errors"; +import { IntegrationAuthMetadataSchema, TIntegrationAuthMetadata } from "./integration-auth-schema"; import { Integrations, IntegrationUrls } from "./integration-list"; // akhilmhdh: check this part later. 
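
With the new DTO fields above, the org identity list becomes paginated, sortable, and searchable, and the service returns a total alongside the page. A hypothetical call site (the service handle and actor context are assumed):

import { OrderByDirection } from "@app/lib/types";
import { OrgIdentityOrderBy } from "./identity-types";

const { identityMemberships, totalCount } = await identityService.listOrgIdentities({
  orgId,
  limit: 20,
  offset: 40, // page 3 at 20 per page
  orderBy: OrgIdentityOrderBy.Name,
  orderDirection: OrderByDirection.ASC,
  search: "ci-", // applied as ILIKE '%ci-%' on the identity name
  ...actorCtx
});
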
Copied from old base @@ -230,7 +234,13 @@ const getAppsNetlify = async ({ accessToken }: { accessToken: string }) => { /** * Return list of repositories for Github integration */ -const getAppsGithub = async ({ accessToken }: { accessToken: string }) => { +const getAppsGithub = async ({ + accessToken, + authMetadata +}: { + accessToken: string; + authMetadata?: TIntegrationAuthMetadata; +}) => { interface GitHubApp { id: string; name: string; @@ -242,37 +252,35 @@ const getAppsGithub = async ({ accessToken }: { accessToken: string }) => { }; } - const octokit = new Octokit({ - auth: accessToken - }); - - const getAllRepos = async () => { - let repos: GitHubApp[] = []; - let page = 1; - const perPage = 100; - let hasMore = true; - - while (hasMore) { - const response = await octokit.request( - "GET /user/repos{?visibility,affiliation,type,sort,direction,per_page,page,since,before}", - { - per_page: perPage, - page - } - ); - - if ((response.data as GitHubApp[]).length > 0) { - repos = repos.concat(response.data as GitHubApp[]); - page += 1; - } else { - hasMore = false; + if (authMetadata?.installationId) { + const appCfg = getConfig(); + const octokit = new Octokit({ + authStrategy: createAppAuth, + auth: { + appId: appCfg.CLIENT_APP_ID_GITHUB_APP, + privateKey: appCfg.CLIENT_PRIVATE_KEY_GITHUB_APP, + installationId: authMetadata.installationId } - } + }); - return repos; - }; + // eslint-disable-next-line @typescript-eslint/no-unnecessary-type-assertion + const repos = await octokit.paginate("GET /installation/repositories", { + per_page: 100 + }); - const repos = await getAllRepos(); + return repos.map((a) => ({ + appId: String(a.id), + name: a.name, + owner: a.owner.login + })); + } + + // eslint-disable-next-line @typescript-eslint/no-unnecessary-type-assertion + const repos = (await new Octokit({ + auth: accessToken + }).paginate("GET /user/repos{?visibility,affiliation,type,sort,direction,per_page,page,since,before}", { + per_page: 100 + })) as GitHubApp[]; const apps = repos .filter((a: GitHubApp) => a.permissions.admin === true) @@ -460,21 +468,51 @@ const getAppsFlyio = async ({ accessToken }: { accessToken: string }) => { */ const getAppsCircleCI = async ({ accessToken }: { accessToken: string }) => { const res = ( - await request.get<{ reponame: string }[]>(`${IntegrationUrls.CIRCLECI_API_URL}/v1.1/projects`, { - headers: { - "Circle-Token": accessToken, - "Accept-Encoding": "application/json" + await request.get<{ reponame: string; username: string; vcs_url: string }[]>( + `${IntegrationUrls.CIRCLECI_API_URL}/v1.1/projects`, + { + headers: { + "Circle-Token": accessToken, + "Accept-Encoding": "application/json" + } } - }) + ) ).data; - const apps = res?.map((a) => ({ - name: a?.reponame + const apps = res.map((a) => ({ + owner: a.username, // username maps to unique organization name in CircleCI + name: a.reponame, // reponame maps to project name within an organization in CircleCI + appId: a.vcs_url.split("/").pop() // vcs_url maps to the project id in CircleCI })); return apps; }; +/** + * Return list of projects for Databricks integration + */ +const getAppsDatabricks = async ({ url, accessToken }: { url?: string | null; accessToken: string }) => { + const databricksApiUrl = `${url}/api`; + + const res = await request.get<{ scopes: { name: string; backend_type: string }[] }>( + `${databricksApiUrl}/2.0/secrets/scopes/list`, + { + headers: { + Authorization: `Bearer ${accessToken}`, + "Accept-Encoding": "application/json" + } + } + ); + + const scopes = + 
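
When the auth row carries an `installationId`, the GitHub calls in `getAppsGithub` authenticate as a GitHub App installation rather than with a user OAuth token. The minimal client construction, mirroring what this patch does (the installation id value is illustrative):

import { createAppAuth } from "@octokit/auth-app";
import { Octokit } from "@octokit/rest";

const installationId = "12345678"; // stored in integrationAuth.metadata by this patch

const octokit = new Octokit({
  authStrategy: createAppAuth,
  auth: {
    appId: process.env.CLIENT_APP_ID_GITHUB_APP, // app-level credentials from env
    privateKey: process.env.CLIENT_PRIVATE_KEY_GITHUB_APP,
    installationId // scopes minted tokens to one org/user installation
  }
});

// paginate() follows the Link headers; the endpoint returns only repositories
// this installation was granted access to.
const repos = await octokit.paginate("GET /installation/repositories", { per_page: 100 });
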
res.data?.scopes?.map((a) => ({ + name: a.name, // name maps to unique scope name in Databricks + backend_type: a.backend_type + })) ?? []; + + return scopes; +}; + const getAppsTravisCI = async ({ accessToken }: { accessToken: string }) => { const res = ( await request.get<{ id: string; slug: string }[]>(`${IntegrationUrls.TRAVISCI_API_URL}/repos`, { @@ -1030,18 +1068,41 @@ const getAppsCloud66 = async ({ accessToken }: { accessToken: string }) => { return apps; }; +const getAppsAzureDevOps = async ({ accessToken, orgName }: { accessToken: string; orgName: string }) => { + const res = ( + await request.get<{ count: number; value: Record[] }>( + `${IntegrationUrls.AZURE_DEVOPS_API_URL}/${orgName}/_apis/projects?api-version=7.2-preview.2`, + { + headers: { + Authorization: `Basic ${accessToken}` + } + } + ) + ).data; + const apps = res.value.map((a) => ({ + name: a.name, + appId: a.id + })); + + return apps; +}; + export const getApps = async ({ integration, + integrationAuth, accessToken, accessId, teamId, + azureDevOpsOrgName, workspaceSlug, url }: { integration: string; accessToken: string; accessId?: string; + integrationAuth: TIntegrationAuths; teamId?: string | null; + azureDevOpsOrgName?: string | null; workspaceSlug?: string; url?: string | null; }): Promise => { @@ -1052,6 +1113,8 @@ export const getApps = async ({ }); case Integrations.AZURE_KEY_VAULT: return []; + case Integrations.AZURE_APP_CONFIGURATION: + return []; case Integrations.AWS_PARAMETER_STORE: return []; case Integrations.AWS_SECRET_MANAGER: @@ -1073,7 +1136,8 @@ export const getApps = async ({ case Integrations.GITHUB: return getAppsGithub({ - accessToken + accessToken, + authMetadata: IntegrationAuthMetadataSchema.parse(integrationAuth.metadata || {}) }); case Integrations.GITLAB: @@ -1103,6 +1167,12 @@ export const getApps = async ({ accessToken }); + case Integrations.DATABRICKS: + return getAppsDatabricks({ + url, + accessToken + }); + case Integrations.LARAVELFORGE: return getAppsLaravelForge({ accessToken, @@ -1184,7 +1254,13 @@ export const getApps = async ({ accessToken }); + case Integrations.AZURE_DEVOPS: + return getAppsAzureDevOps({ + accessToken, + orgName: azureDevOpsOrgName as string + }); + default: - throw new BadRequestError({ message: "integration not found" }); + throw new NotFoundError({ message: `Integration '${integration}' not found` }); } }; diff --git a/backend/src/services/integration-auth/integration-auth-dal.ts b/backend/src/services/integration-auth/integration-auth-dal.ts index d32cd1579e..7a56afcbb3 100644 --- a/backend/src/services/integration-auth/integration-auth-dal.ts +++ b/backend/src/services/integration-auth/integration-auth-dal.ts @@ -3,7 +3,7 @@ import { Knex } from "knex"; import { TDbClient } from "@app/db"; import { TableName, TIntegrationAuths, TIntegrationAuthsUpdate } from "@app/db/schemas"; import { BadRequestError, DatabaseError } from "@app/lib/errors"; -import { ormify } from "@app/lib/knex"; +import { ormify, selectAllTableCols } from "@app/lib/knex"; export type TIntegrationAuthDALFactory = ReturnType; @@ -28,8 +28,23 @@ export const integrationAuthDALFactory = (db: TDbClient) => { } }; + const getByOrg = async (orgId: string, tx?: Knex) => { + try { + const integrationAuths = await (tx || db)(TableName.IntegrationAuth) + .join(TableName.Project, `${TableName.Project}.id`, `${TableName.IntegrationAuth}.projectId`) + .join(TableName.Organization, `${TableName.Organization}.id`, `${TableName.Project}.orgId`) + .where(`${TableName.Organization}.id`, "=", orgId) + 
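
The Databricks "app list" above is really the workspace's secret-scope list, with `url` being the per-workspace base URL. The equivalent raw request, endpoint taken from the diff; workspace URL and token are illustrative:

import { request } from "@app/lib/config/request";

const workspaceUrl = "https://adb-1234567890123456.7.azuredatabricks.net"; // hypothetical workspace
const accessToken = "dapi-example-token"; // illustrative PAT

const { data } = await request.get<{ scopes: { name: string; backend_type: string }[] }>(
  `${workspaceUrl}/api/2.0/secrets/scopes/list`,
  { headers: { Authorization: `Bearer ${accessToken}`, "Accept-Encoding": "application/json" } }
);
// data.scopes => [{ name: "ci-scope", backend_type: "DATABRICKS" }, ...]
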
.select(selectAllTableCols(TableName.IntegrationAuth)); + + return integrationAuths; + } catch (error) { + throw new DatabaseError({ error, name: "get by org" }); + } + }; + return { ...integrationAuthOrm, - bulkUpdate + bulkUpdate, + getByOrg }; }; diff --git a/backend/src/services/integration-auth/integration-auth-schema.ts b/backend/src/services/integration-auth/integration-auth-schema.ts new file mode 100644 index 0000000000..94a68cc72c --- /dev/null +++ b/backend/src/services/integration-auth/integration-auth-schema.ts @@ -0,0 +1,7 @@ +import { z } from "zod"; + +export const IntegrationAuthMetadataSchema = z.object({ + installationId: z.string().optional() +}); + +export type TIntegrationAuthMetadata = z.infer; diff --git a/backend/src/services/integration-auth/integration-auth-service.ts b/backend/src/services/integration-auth/integration-auth-service.ts index 02091d88cb..636f634a3e 100644 --- a/backend/src/services/integration-auth/integration-auth-service.ts +++ b/backend/src/services/integration-auth/integration-auth-service.ts @@ -1,30 +1,37 @@ import { ForbiddenError } from "@casl/ability"; +import { createAppAuth } from "@octokit/auth-app"; import { Octokit } from "@octokit/rest"; import AWS from "aws-sdk"; import { SecretEncryptionAlgo, SecretKeyEncoding, TIntegrationAuths, TIntegrationAuthsInsert } from "@app/db/schemas"; import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service"; import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission"; +import { getConfig } from "@app/lib/config/env"; import { request } from "@app/lib/config/request"; import { decryptSymmetric128BitHexKeyUTF8, encryptSymmetric128BitHexKeyUTF8 } from "@app/lib/crypto"; -import { BadRequestError } from "@app/lib/errors"; -import { TProjectPermission } from "@app/lib/types"; +import { BadRequestError, NotFoundError } from "@app/lib/errors"; +import { TGenericPermission, TProjectPermission } from "@app/lib/types"; import { TIntegrationDALFactory } from "../integration/integration-dal"; -import { TProjectBotDALFactory } from "../project-bot/project-bot-dal"; +import { TKmsServiceFactory } from "../kms/kms-service"; +import { KmsDataKey } from "../kms/kms-types"; import { TProjectBotServiceFactory } from "../project-bot/project-bot-service"; import { getApps } from "./integration-app-list"; import { TIntegrationAuthDALFactory } from "./integration-auth-dal"; +import { IntegrationAuthMetadataSchema, TIntegrationAuthMetadata } from "./integration-auth-schema"; import { + TBitbucketEnvironment, TBitbucketWorkspace, TChecklyGroups, TDeleteIntegrationAuthByIdDTO, TDeleteIntegrationAuthsDTO, + TDuplicateGithubIntegrationAuthDTO, TGetIntegrationAuthDTO, TGetIntegrationAuthTeamCityBuildConfigDTO, THerokuPipelineCoupling, TIntegrationAuthAppsDTO, TIntegrationAuthAwsKmsKeyDTO, + TIntegrationAuthBitbucketEnvironmentsDTO, TIntegrationAuthBitbucketWorkspaceDTO, TIntegrationAuthChecklyGroupsDTO, TIntegrationAuthGithubEnvsDTO, @@ -53,8 +60,8 @@ type TIntegrationAuthServiceFactoryDep = { integrationAuthDAL: TIntegrationAuthDALFactory; integrationDAL: Pick; projectBotService: Pick; - projectBotDAL: Pick; permissionService: Pick; + kmsService: Pick; }; export type TIntegrationAuthServiceFactory = ReturnType; @@ -63,8 +70,8 @@ export const integrationAuthServiceFactory = ({ permissionService, integrationAuthDAL, integrationDAL, - projectBotDAL, - projectBotService + projectBotService, + kmsService }: TIntegrationAuthServiceFactoryDep) => { const 
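
`IntegrationAuthMetadataSchema` is deliberately loose: `metadata` may be absent, `{}`, or carry an `installationId`, and the patch branches on the parse result wherever it needs to tell GitHub-App auth apart from plain OAuth. For instance:

import { IntegrationAuthMetadataSchema } from "./integration-auth-schema";

const authMetadata = IntegrationAuthMetadataSchema.parse(integrationAuth.metadata || {});

if (authMetadata.installationId) {
  // GitHub App: mint installation tokens on demand; nothing stored to decrypt.
} else {
  // OAuth: decrypt the stored access/refresh tokens as before.
}
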
listIntegrationAuthByProjectId = async ({ actorId, @@ -85,9 +92,27 @@ export const integrationAuthServiceFactory = ({ return authorizations; }; + const listOrgIntegrationAuth = async ({ actorId, actor, actorOrgId, actorAuthMethod }: TGenericPermission) => { + const authorizations = await integrationAuthDAL.getByOrg(actorOrgId as string); + + return Promise.all( + authorizations.filter(async (auth) => { + const { permission } = await permissionService.getProjectPermission( + actor, + actorId, + auth.projectId, + actorAuthMethod, + actorOrgId + ); + + return permission.can(ProjectPermissionActions.Read, ProjectPermissionSub.Integrations); + }) + ); + }; + const getIntegrationAuth = async ({ actor, id, actorId, actorAuthMethod, actorOrgId }: TGetIntegrationAuthDTO) => { const integrationAuth = await integrationAuthDAL.findById(id); - if (!integrationAuth) throw new BadRequestError({ message: "Failed to find integration" }); + if (!integrationAuth) throw new NotFoundError({ message: `Integration auth with ID '${id}' not found` }); const { permission } = await permissionService.getProjectPermission( actor, @@ -108,7 +133,8 @@ export const integrationAuthServiceFactory = ({ actorAuthMethod, integration, url, - code + code, + installationId }: TOauthExchangeDTO) => { if (!Object.values(Integrations).includes(integration as Integrations)) throw new BadRequestError({ message: "Invalid integration" }); @@ -122,10 +148,7 @@ export const integrationAuthServiceFactory = ({ ); ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Create, ProjectPermissionSub.Integrations); - const bot = await projectBotDAL.findOne({ isActive: true, projectId }); - if (!bot) throw new BadRequestError({ message: "Bot must be enabled for oauth2 code token exchange" }); - - const tokenExchange = await exchangeCode({ integration, code, url }); + const tokenExchange = await exchangeCode({ integration, code, url, installationId }); const updateDoc: TIntegrationAuthsInsert = { projectId, integration, @@ -143,27 +166,68 @@ export const integrationAuthServiceFactory = ({ updateDoc.metadata = { authMethod: "oauth2" }; + } else if (integration === Integrations.GITHUB && installationId) { + updateDoc.metadata = { + installationId, + installationName: tokenExchange.installationName, + authMethod: "app" + }; } - const key = await projectBotService.getBotKey(projectId); - if (tokenExchange.refreshToken) { - const refreshEncToken = encryptSymmetric128BitHexKeyUTF8(tokenExchange.refreshToken, key); - updateDoc.refreshIV = refreshEncToken.iv; - updateDoc.refreshTag = refreshEncToken.tag; - updateDoc.refreshCiphertext = refreshEncToken.ciphertext; + if (installationId && integration === Integrations.GITHUB) { + return integrationAuthDAL.create(updateDoc); } - if (tokenExchange.accessToken) { - const accessEncToken = encryptSymmetric128BitHexKeyUTF8(tokenExchange.accessToken, key); - updateDoc.accessIV = accessEncToken.iv; - updateDoc.accessTag = accessEncToken.tag; - updateDoc.accessCiphertext = accessEncToken.ciphertext; + + const { shouldUseSecretV2Bridge, botKey } = await projectBotService.getBotKey(projectId); + if (shouldUseSecretV2Bridge) { + const { encryptor: secretManagerEncryptor } = await kmsService.createCipherPairWithDataKey({ + type: KmsDataKey.SecretManager, + projectId + }); + if (tokenExchange.refreshToken) { + const refreshEncToken = secretManagerEncryptor({ + plainText: Buffer.from(tokenExchange.refreshToken) + }).cipherTextBlob; + updateDoc.encryptedRefresh = refreshEncToken; + } + if 
(tokenExchange.accessToken) { + const accessToken = secretManagerEncryptor({ + plainText: Buffer.from(tokenExchange.accessToken) + }).cipherTextBlob; + updateDoc.encryptedAccess = accessToken; + } + } else { + if (!botKey) throw new NotFoundError({ message: `Project bot key for project with ID '${projectId}' not found` }); + if (tokenExchange.refreshToken) { + const refreshEncToken = encryptSymmetric128BitHexKeyUTF8(tokenExchange.refreshToken, botKey); + updateDoc.refreshIV = refreshEncToken.iv; + updateDoc.refreshTag = refreshEncToken.tag; + updateDoc.refreshCiphertext = refreshEncToken.ciphertext; + } + if (tokenExchange.accessToken) { + const accessEncToken = encryptSymmetric128BitHexKeyUTF8(tokenExchange.accessToken, botKey); + updateDoc.accessIV = accessEncToken.iv; + updateDoc.accessTag = accessEncToken.tag; + updateDoc.accessCiphertext = accessEncToken.ciphertext; + } } + return integrationAuthDAL.transaction(async (tx) => { - const doc = await integrationAuthDAL.findOne({ projectId, integration }, tx); - if (!doc) { + const integrationAuths = await integrationAuthDAL.find({ projectId, integration }, { tx }); + let existingIntegrationAuth: TIntegrationAuths | undefined; + + // we need to ensure that the integration auth that we use for Github is actually Oauth + if (integration === Integrations.GITHUB) { + existingIntegrationAuth = integrationAuths.find((integAuth) => !integAuth.metadata); + } else { + [existingIntegrationAuth] = integrationAuths; + } + + if (!existingIntegrationAuth) { return integrationAuthDAL.create(updateDoc, tx); } - return integrationAuthDAL.updateById(doc.id, updateDoc, tx); + + return integrationAuthDAL.updateById(existingIntegrationAuth.id, updateDoc, tx); }); }; @@ -178,7 +242,8 @@ export const integrationAuthServiceFactory = ({ actorAuthMethod, accessId, namespace, - accessToken + accessToken, + awsAssumeIamRoleArn }: TSaveIntegrationAccessTokenDTO) => { if (!Object.values(Integrations).includes(integration as Integrations)) throw new BadRequestError({ message: "Invalid integration" }); @@ -192,9 +257,6 @@ export const integrationAuthServiceFactory = ({ ); ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Create, ProjectPermissionSub.Integrations); - const bot = await projectBotDAL.findOne({ isActive: true, projectId }); - if (!bot) throw new BadRequestError({ message: "Bot must be enabled for oauth2 code token exchange" }); - const updateDoc: TIntegrationAuthsInsert = { projectId, namespace, @@ -211,95 +273,218 @@ export const integrationAuthServiceFactory = ({ : {}) }; - const key = await projectBotService.getBotKey(projectId); - if (refreshToken) { - const tokenDetails = await exchangeRefresh( - integration, - refreshToken, - url, - updateDoc.metadata as Record - ); - const refreshEncToken = encryptSymmetric128BitHexKeyUTF8(tokenDetails.refreshToken, key); - updateDoc.refreshIV = refreshEncToken.iv; - updateDoc.refreshTag = refreshEncToken.tag; - updateDoc.refreshCiphertext = refreshEncToken.ciphertext; - const accessEncToken = encryptSymmetric128BitHexKeyUTF8(tokenDetails.accessToken, key); - updateDoc.accessIV = accessEncToken.iv; - updateDoc.accessTag = accessEncToken.tag; - updateDoc.accessCiphertext = accessEncToken.ciphertext; - updateDoc.accessExpiresAt = tokenDetails.accessExpiresAt; - } + const { shouldUseSecretV2Bridge, botKey } = await projectBotService.getBotKey(projectId); + if (shouldUseSecretV2Bridge) { + const { encryptor: secretManagerEncryptor } = await kmsService.createCipherPairWithDataKey({ + type: 
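
The `shouldUseSecretV2Bridge` branch swaps the legacy bot-key symmetric encryption for project-scoped KMS data keys: `createCipherPairWithDataKey` hands back an encryptor/decryptor pair bound to the project's secret-manager key. A reduced sketch of the round trip, with names from the diff and the service handle assumed:

import { KmsDataKey } from "../kms/kms-types";

const { encryptor, decryptor } = await kmsService.createCipherPairWithDataKey({
  type: KmsDataKey.SecretManager,
  projectId
});

// Encrypt: plaintext Buffer in, opaque blob out (stored as encryptedAccess / encryptedRefresh).
const { cipherTextBlob } = encryptor({ plainText: Buffer.from("gho_exampleToken") });

// Decrypt: blob back to the original Buffer.
const token = decryptor({ cipherTextBlob }).toString(); // "gho_exampleToken"
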
KmsDataKey.SecretManager, + projectId + }); + if (refreshToken) { + const tokenDetails = await exchangeRefresh( + integration, + refreshToken, + url, + updateDoc.metadata as Record + ); + const refreshEncToken = secretManagerEncryptor({ + plainText: Buffer.from(tokenDetails.refreshToken) + }).cipherTextBlob; + updateDoc.encryptedRefresh = refreshEncToken; - if (!refreshToken && (accessId || accessToken)) { - if (accessToken) { - const accessEncToken = encryptSymmetric128BitHexKeyUTF8(accessToken, key); + const accessEncToken = secretManagerEncryptor({ + plainText: Buffer.from(tokenDetails.accessToken) + }).cipherTextBlob; + updateDoc.encryptedAccess = accessEncToken; + updateDoc.accessExpiresAt = tokenDetails.accessExpiresAt; + } + + if (!refreshToken && (accessId || accessToken || awsAssumeIamRoleArn)) { + if (accessToken) { + const accessEncToken = secretManagerEncryptor({ + plainText: Buffer.from(accessToken) + }).cipherTextBlob; + updateDoc.encryptedAccess = accessEncToken; + } + if (accessId) { + const accessEncToken = secretManagerEncryptor({ + plainText: Buffer.from(accessId) + }).cipherTextBlob; + updateDoc.encryptedAccessId = accessEncToken; + } + if (awsAssumeIamRoleArn) { + const awsAssumeIamRoleArnEncrypted = secretManagerEncryptor({ + plainText: Buffer.from(awsAssumeIamRoleArn) + }).cipherTextBlob; + updateDoc.encryptedAwsAssumeIamRoleArn = awsAssumeIamRoleArnEncrypted; + } + } + } else { + if (!botKey) throw new NotFoundError({ message: `Project bot key for project with ID '${projectId}' not found` }); + if (refreshToken) { + const tokenDetails = await exchangeRefresh( + integration, + refreshToken, + url, + updateDoc.metadata as Record + ); + const refreshEncToken = encryptSymmetric128BitHexKeyUTF8(tokenDetails.refreshToken, botKey); + updateDoc.refreshIV = refreshEncToken.iv; + updateDoc.refreshTag = refreshEncToken.tag; + updateDoc.refreshCiphertext = refreshEncToken.ciphertext; + const accessEncToken = encryptSymmetric128BitHexKeyUTF8(tokenDetails.accessToken, botKey); updateDoc.accessIV = accessEncToken.iv; updateDoc.accessTag = accessEncToken.tag; updateDoc.accessCiphertext = accessEncToken.ciphertext; + + updateDoc.accessExpiresAt = tokenDetails.accessExpiresAt; } - if (accessId) { - const accessEncToken = encryptSymmetric128BitHexKeyUTF8(accessId, key); - updateDoc.accessIdIV = accessEncToken.iv; - updateDoc.accessIdTag = accessEncToken.tag; - updateDoc.accessIdCiphertext = accessEncToken.ciphertext; + + if (!refreshToken && (accessId || accessToken || awsAssumeIamRoleArn)) { + if (accessToken) { + const accessEncToken = encryptSymmetric128BitHexKeyUTF8(accessToken, botKey); + updateDoc.accessIV = accessEncToken.iv; + updateDoc.accessTag = accessEncToken.tag; + updateDoc.accessCiphertext = accessEncToken.ciphertext; + } + if (accessId) { + const accessEncToken = encryptSymmetric128BitHexKeyUTF8(accessId, botKey); + updateDoc.accessIdIV = accessEncToken.iv; + updateDoc.accessIdTag = accessEncToken.tag; + updateDoc.accessIdCiphertext = accessEncToken.ciphertext; + } + if (awsAssumeIamRoleArn) { + const awsAssumeIamRoleArnEnc = encryptSymmetric128BitHexKeyUTF8(awsAssumeIamRoleArn, botKey); + updateDoc.awsAssumeIamRoleArnCipherText = awsAssumeIamRoleArnEnc.ciphertext; + updateDoc.awsAssumeIamRoleArnIV = awsAssumeIamRoleArnEnc.iv; + updateDoc.awsAssumeIamRoleArnTag = awsAssumeIamRoleArnEnc.tag; + } } } return integrationAuthDAL.create(updateDoc); }; // helper function - const getIntegrationAccessToken = async (integrationAuth: TIntegrationAuths, botKey: string) => { + 
const getIntegrationAccessToken = async ( + integrationAuth: TIntegrationAuths, + shouldUseSecretV2Bridge: boolean, + botKey?: string + ) => { let accessToken: string | undefined; let accessId: string | undefined; - if (integrationAuth.accessTag && integrationAuth.accessIV && integrationAuth.accessCiphertext) { - accessToken = decryptSymmetric128BitHexKeyUTF8({ - ciphertext: integrationAuth.accessCiphertext, - iv: integrationAuth.accessIV, - tag: integrationAuth.accessTag, - key: botKey - }); + // this means its not access token based + if ( + (integrationAuth.integration === Integrations.AWS_SECRET_MANAGER || + integrationAuth.integration === Integrations.AWS_PARAMETER_STORE) && + (shouldUseSecretV2Bridge + ? integrationAuth.encryptedAwsAssumeIamRoleArn + : integrationAuth.awsAssumeIamRoleArnCipherText) + ) { + return { accessToken: "", accessId: "" }; + } + if ( + integrationAuth.integration === Integrations.GITHUB && + IntegrationAuthMetadataSchema.parse(integrationAuth.metadata || {}).installationId + ) { + return { accessToken: "", accessId: "" }; } - if (integrationAuth.refreshCiphertext && integrationAuth.refreshIV && integrationAuth.refreshTag) { - const refreshToken = decryptSymmetric128BitHexKeyUTF8({ - key: botKey, - ciphertext: integrationAuth.refreshCiphertext, - iv: integrationAuth.refreshIV, - tag: integrationAuth.refreshTag - }); + if (shouldUseSecretV2Bridge) { + const { decryptor: secretManagerDecryptor, encryptor: secretManagerEncryptor } = + await kmsService.createCipherPairWithDataKey({ + type: KmsDataKey.SecretManager, + projectId: integrationAuth.projectId + }); + if (integrationAuth.encryptedAccess) { + accessToken = secretManagerDecryptor({ cipherTextBlob: integrationAuth.encryptedAccess }).toString(); + } - if (integrationAuth.accessExpiresAt && integrationAuth.accessExpiresAt < new Date()) { - // refer above it contains same logic except not saving - const tokenDetails = await exchangeRefresh( - integrationAuth.integration, - refreshToken, - integrationAuth?.url, - integrationAuth.metadata as Record - ); - const refreshEncToken = encryptSymmetric128BitHexKeyUTF8(tokenDetails.refreshToken, botKey); - const accessEncToken = encryptSymmetric128BitHexKeyUTF8(tokenDetails.accessToken, botKey); - accessToken = tokenDetails.accessToken; - await integrationAuthDAL.updateById(integrationAuth.id, { - refreshIV: refreshEncToken.iv, - refreshTag: refreshEncToken.tag, - refreshCiphertext: refreshEncToken.ciphertext, - accessExpiresAt: tokenDetails.accessExpiresAt, - accessIV: accessEncToken.iv, - accessTag: accessEncToken.tag, - accessCiphertext: accessEncToken.ciphertext + if (integrationAuth.encryptedRefresh) { + const refreshToken = secretManagerDecryptor({ cipherTextBlob: integrationAuth.encryptedRefresh }).toString(); + + if (integrationAuth.accessExpiresAt && integrationAuth.accessExpiresAt < new Date()) { + // refer above it contains same logic except not saving + const tokenDetails = await exchangeRefresh( + integrationAuth.integration, + refreshToken, + integrationAuth?.url, + integrationAuth.metadata as Record + ); + const encryptedRefresh = secretManagerEncryptor({ + plainText: Buffer.from(tokenDetails.refreshToken) + }).cipherTextBlob; + const encryptedAccess = secretManagerEncryptor({ + plainText: Buffer.from(tokenDetails.accessToken) + }).cipherTextBlob; + accessToken = tokenDetails.accessToken; + await integrationAuthDAL.updateById(integrationAuth.id, { + accessExpiresAt: tokenDetails.accessExpiresAt, + encryptedRefresh, + encryptedAccess + }); + } + } + if 
(!accessToken) throw new BadRequestError({ message: "Missing access token" }); + + if (integrationAuth.encryptedAccessId) { + accessId = secretManagerDecryptor({ + cipherTextBlob: integrationAuth.encryptedAccessId + }).toString(); + } + + // the old bot key is else + } else { + if (!botKey) throw new NotFoundError({ message: "Project bot key not found" }); + if (integrationAuth.accessTag && integrationAuth.accessIV && integrationAuth.accessCiphertext) { + accessToken = decryptSymmetric128BitHexKeyUTF8({ + ciphertext: integrationAuth.accessCiphertext, + iv: integrationAuth.accessIV, + tag: integrationAuth.accessTag, + key: botKey + }); + } + + if (integrationAuth.refreshCiphertext && integrationAuth.refreshIV && integrationAuth.refreshTag) { + const refreshToken = decryptSymmetric128BitHexKeyUTF8({ + key: botKey, + ciphertext: integrationAuth.refreshCiphertext, + iv: integrationAuth.refreshIV, + tag: integrationAuth.refreshTag + }); + + if (integrationAuth.accessExpiresAt && integrationAuth.accessExpiresAt < new Date()) { + // refer above it contains same logic except not saving + const tokenDetails = await exchangeRefresh( + integrationAuth.integration, + refreshToken, + integrationAuth?.url, + integrationAuth.metadata as Record + ); + const refreshEncToken = encryptSymmetric128BitHexKeyUTF8(tokenDetails.refreshToken, botKey); + const accessEncToken = encryptSymmetric128BitHexKeyUTF8(tokenDetails.accessToken, botKey); + accessToken = tokenDetails.accessToken; + await integrationAuthDAL.updateById(integrationAuth.id, { + refreshIV: refreshEncToken.iv, + refreshTag: refreshEncToken.tag, + refreshCiphertext: refreshEncToken.ciphertext, + accessExpiresAt: tokenDetails.accessExpiresAt, + accessIV: accessEncToken.iv, + accessTag: accessEncToken.tag, + accessCiphertext: accessEncToken.ciphertext + }); + } + } + if (!accessToken) throw new BadRequestError({ message: "Missing access token" }); + + if (integrationAuth.accessIdTag && integrationAuth.accessIdIV && integrationAuth.accessIdCiphertext) { + accessId = decryptSymmetric128BitHexKeyUTF8({ + key: botKey, + ciphertext: integrationAuth.accessIdCiphertext, + iv: integrationAuth.accessIdIV, + tag: integrationAuth.accessIdTag }); } } - if (!accessToken) throw new BadRequestError({ message: "Missing access token" }); - if (integrationAuth.accessIdTag && integrationAuth.accessIdIV && integrationAuth.accessIdCiphertext) { - accessId = decryptSymmetric128BitHexKeyUTF8({ - key: botKey, - ciphertext: integrationAuth.accessIdCiphertext, - iv: integrationAuth.accessIdIV, - tag: integrationAuth.accessIdTag - }); - } return { accessId, accessToken }; }; @@ -309,11 +494,12 @@ export const integrationAuthServiceFactory = ({ actorOrgId, actorAuthMethod, teamId, + azureDevOpsOrgName, id, workspaceSlug }: TIntegrationAuthAppsDTO) => { const integrationAuth = await integrationAuthDAL.findById(id); - if (!integrationAuth) throw new BadRequestError({ message: "Failed to find integration" }); + if (!integrationAuth) throw new NotFoundError({ message: `Integration auth with ID '${id}' not found` }); const { permission } = await permissionService.getProjectPermission( actor, @@ -324,13 +510,15 @@ export const integrationAuthServiceFactory = ({ ); ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Integrations); - const botKey = await projectBotService.getBotKey(integrationAuth.projectId); - const { accessToken, accessId } = await getIntegrationAccessToken(integrationAuth, botKey); + const { botKey, 
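
Both branches of `getIntegrationAccessToken` repeat the same refresh dance, as the inline "refer above" comment admits: if `accessExpiresAt` has passed, exchange the stored refresh token, persist the re-encrypted pair, and hand back the fresh access token. The pattern in isolation (a condensation under assumed types; `exchangeRefresh` is the same helper the service already imports, with the signature its call sites show):

const getFreshAccessToken = async (auth: {
  integration: string;
  url?: string;
  accessToken: string;
  refreshToken?: string;
  accessExpiresAt?: Date;
}) => {
  const isExpired = auth.accessExpiresAt && auth.accessExpiresAt < new Date();
  if (!isExpired || !auth.refreshToken) return auth.accessToken;

  const tokenDetails = await exchangeRefresh(auth.integration, auth.refreshToken, auth.url, {});
  // ...re-encrypt tokenDetails.refreshToken / accessToken and persist accessExpiresAt here...
  return tokenDetails.accessToken;
};
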
shouldUseSecretV2Bridge } = await projectBotService.getBotKey(integrationAuth.projectId); + const { accessToken, accessId } = await getIntegrationAccessToken(integrationAuth, shouldUseSecretV2Bridge, botKey); const apps = await getApps({ integration: integrationAuth.integration, + integrationAuth, accessToken, accessId, teamId, + azureDevOpsOrgName, workspaceSlug, url: integrationAuth.url }); @@ -345,7 +533,7 @@ export const integrationAuthServiceFactory = ({ id }: TIntegrationAuthTeamsDTO) => { const integrationAuth = await integrationAuthDAL.findById(id); - if (!integrationAuth) throw new BadRequestError({ message: "Failed to find integration" }); + if (!integrationAuth) throw new NotFoundError({ message: `Integration auth with ID '${id}' not found` }); const { permission } = await permissionService.getProjectPermission( actor, @@ -356,8 +544,8 @@ export const integrationAuthServiceFactory = ({ ); ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Integrations); - const botKey = await projectBotService.getBotKey(integrationAuth.projectId); - const { accessToken } = await getIntegrationAccessToken(integrationAuth, botKey); + const { shouldUseSecretV2Bridge, botKey } = await projectBotService.getBotKey(integrationAuth.projectId); + const { accessToken } = await getIntegrationAccessToken(integrationAuth, shouldUseSecretV2Bridge, botKey); const teams = await getTeams({ integration: integrationAuth.integration, accessToken, @@ -375,7 +563,7 @@ export const integrationAuthServiceFactory = ({ actorOrgId }: TIntegrationAuthVercelBranchesDTO) => { const integrationAuth = await integrationAuthDAL.findById(id); - if (!integrationAuth) throw new BadRequestError({ message: "Failed to find integration" }); + if (!integrationAuth) throw new NotFoundError({ message: `Integration auth with ID '${id}' not found` }); const { permission } = await permissionService.getProjectPermission( actor, @@ -385,8 +573,8 @@ export const integrationAuthServiceFactory = ({ actorOrgId ); ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Integrations); - const botKey = await projectBotService.getBotKey(integrationAuth.projectId); - const { accessToken } = await getIntegrationAccessToken(integrationAuth, botKey); + const { shouldUseSecretV2Bridge, botKey } = await projectBotService.getBotKey(integrationAuth.projectId); + const { accessToken } = await getIntegrationAccessToken(integrationAuth, shouldUseSecretV2Bridge, botKey); if (appId) { const { data } = await request.get( @@ -416,7 +604,7 @@ export const integrationAuthServiceFactory = ({ accountId }: TIntegrationAuthChecklyGroupsDTO) => { const integrationAuth = await integrationAuthDAL.findById(id); - if (!integrationAuth) throw new BadRequestError({ message: "Failed to find integration" }); + if (!integrationAuth) throw new NotFoundError({ message: `Integration auth with ID '${id}' not found` }); const { permission } = await permissionService.getProjectPermission( actor, @@ -426,8 +614,8 @@ export const integrationAuthServiceFactory = ({ actorOrgId ); ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Integrations); - const botKey = await projectBotService.getBotKey(integrationAuth.projectId); - const { accessToken } = await getIntegrationAccessToken(integrationAuth, botKey); + const { shouldUseSecretV2Bridge, botKey } = await projectBotService.getBotKey(integrationAuth.projectId); + const { accessToken } = await 
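
The installation branch of `getGithubOrgs` derives the org list from repositories, so the `Set`-based filter is doing first-occurrence dedup on `orgId`. Equivalently, with a hypothetical `repoOwners` array:

const seen = new Set<string>();
const orgs = repoOwners.filter((org) => {
  if (seen.has(org.orgId)) return false; // later duplicates dropped
  seen.add(org.orgId);
  return true; // first owner seen per orgId kept
});
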
getIntegrationAccessToken(integrationAuth, shouldUseSecretV2Bridge, botKey); if (accountId) { const { data } = await request.get(`${IntegrationUrls.CHECKLY_API_URL}/v1/check-groups`, { headers: { @@ -442,8 +630,9 @@ export const integrationAuthServiceFactory = ({ }; const getGithubOrgs = async ({ actorId, actor, actorOrgId, actorAuthMethod, id }: TIntegrationAuthGithubOrgsDTO) => { + const appCfg = getConfig(); const integrationAuth = await integrationAuthDAL.findById(id); - if (!integrationAuth) throw new BadRequestError({ message: "Failed to find integration" }); + if (!integrationAuth) throw new NotFoundError({ message: `Integration auth with ID '${id}' not found` }); const { permission } = await permissionService.getProjectPermission( actor, @@ -453,10 +642,45 @@ export const integrationAuthServiceFactory = ({ actorOrgId ); ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Integrations); - const botKey = await projectBotService.getBotKey(integrationAuth.projectId); - const { accessToken } = await getIntegrationAccessToken(integrationAuth, botKey); + const { shouldUseSecretV2Bridge, botKey } = await projectBotService.getBotKey(integrationAuth.projectId); - const octokit = new Octokit({ + let octokit: Octokit; + const { installationId } = (integrationAuth.metadata as TIntegrationAuthMetadata) || {}; + if (installationId) { + octokit = new Octokit({ + authStrategy: createAppAuth, + auth: { + appId: appCfg.CLIENT_APP_ID_GITHUB_APP, + privateKey: appCfg.CLIENT_PRIVATE_KEY_GITHUB_APP, + installationId + } + }); + + // eslint-disable-next-line @typescript-eslint/no-unnecessary-type-assertion + const repos = await octokit.paginate("GET /installation/repositories", { + per_page: 100 + }); + + const orgSet: Set = new Set(); + + return repos + .filter((repo) => repo.owner.type === "Organization") + .map((repo) => ({ + name: repo.owner.login, + orgId: String(repo.owner.id) + })) + .filter((org) => { + const isOrgProcessed = orgSet.has(org.orgId); + if (!isOrgProcessed) { + orgSet.add(org.orgId); + } + + return !isOrgProcessed; + }); + } + + const { accessToken } = await getIntegrationAccessToken(integrationAuth, shouldUseSecretV2Bridge, botKey); + octokit = new Octokit({ auth: accessToken }); @@ -465,7 +689,9 @@ export const integrationAuthServiceFactory = ({ "X-GitHub-Api-Version": "2022-11-28" } }); - if (!data) return []; + if (!data) { + return []; + } return data.map(({ login: name, id: orgId }) => ({ name, orgId: String(orgId) })); }; @@ -480,7 +706,7 @@ export const integrationAuthServiceFactory = ({ repoName }: TIntegrationAuthGithubEnvsDTO) => { const integrationAuth = await integrationAuthDAL.findById(id); - if (!integrationAuth) throw new BadRequestError({ message: "Failed to find integration" }); + if (!integrationAuth) throw new NotFoundError({ message: `Integration auth with ID '${id}' not found` }); const { permission } = await permissionService.getProjectPermission( actor, @@ -490,12 +716,27 @@ export const integrationAuthServiceFactory = ({ actorOrgId ); ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Integrations); - const botKey = await projectBotService.getBotKey(integrationAuth.projectId); - const { accessToken } = await getIntegrationAccessToken(integrationAuth, botKey); + const { shouldUseSecretV2Bridge, botKey } = await projectBotService.getBotKey(integrationAuth.projectId); + const { accessToken } = await getIntegrationAccessToken(integrationAuth, shouldUseSecretV2Bridge, 
botKey); - const octokit = new Octokit({ - auth: accessToken - }); + let octokit: Octokit; + const appCfg = getConfig(); + + const authMetadata = IntegrationAuthMetadataSchema.parse(integrationAuth.metadata || {}); + if (authMetadata.installationId) { + octokit = new Octokit({ + authStrategy: createAppAuth, + auth: { + appId: appCfg.CLIENT_APP_ID_GITHUB_APP, + privateKey: appCfg.CLIENT_PRIVATE_KEY_GITHUB_APP, + installationId: authMetadata.installationId + } + }); + } else { + octokit = new Octokit({ + auth: accessToken + }); + } const { data: { environments } @@ -512,7 +753,7 @@ export const integrationAuthServiceFactory = ({ const getQoveryOrgs = async ({ actorId, actor, actorOrgId, actorAuthMethod, id }: TIntegrationAuthQoveryOrgsDTO) => { const integrationAuth = await integrationAuthDAL.findById(id); - if (!integrationAuth) throw new BadRequestError({ message: "Failed to find integration" }); + if (!integrationAuth) throw new NotFoundError({ message: `Integration auth with ID '${id}' not found` }); const { permission } = await permissionService.getProjectPermission( actor, @@ -522,8 +763,8 @@ export const integrationAuthServiceFactory = ({ actorOrgId ); ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Integrations); - const botKey = await projectBotService.getBotKey(integrationAuth.projectId); - const { accessToken } = await getIntegrationAccessToken(integrationAuth, botKey); + const { shouldUseSecretV2Bridge, botKey } = await projectBotService.getBotKey(integrationAuth.projectId); + const { accessToken } = await getIntegrationAccessToken(integrationAuth, shouldUseSecretV2Bridge, botKey); const { data } = await request.get<{ results: Array<{ id: string; name: string }> }>( `${IntegrationUrls.QOVERY_API_URL}/organization`, { @@ -546,7 +787,7 @@ export const integrationAuthServiceFactory = ({ region }: TIntegrationAuthAwsKmsKeyDTO) => { const integrationAuth = await integrationAuthDAL.findById(id); - if (!integrationAuth) throw new BadRequestError({ message: "Failed to find integration" }); + if (!integrationAuth) throw new NotFoundError({ message: `Integration auth with ID '${id}' not found` }); const { permission } = await permissionService.getProjectPermission( actor, @@ -556,17 +797,17 @@ export const integrationAuthServiceFactory = ({ actorOrgId ); ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Integrations); - const botKey = await projectBotService.getBotKey(integrationAuth.projectId); - const { accessId, accessToken } = await getIntegrationAccessToken(integrationAuth, botKey); + const { shouldUseSecretV2Bridge, botKey } = await projectBotService.getBotKey(integrationAuth.projectId); + const { accessId, accessToken } = await getIntegrationAccessToken(integrationAuth, shouldUseSecretV2Bridge, botKey); - AWS.config.update({ + const kms = new AWS.KMS({ region, credentials: { accessKeyId: String(accessId), secretAccessKey: accessToken } }); - const kms = new AWS.KMS(); + const aliases = await kms.listAliases({}).promise(); const keyAliases = aliases.Aliases!.filter((alias) => { @@ -604,7 +845,7 @@ export const integrationAuthServiceFactory = ({ orgId }: TIntegrationAuthQoveryProjectDTO) => { const integrationAuth = await integrationAuthDAL.findById(id); - if (!integrationAuth) throw new BadRequestError({ message: "Failed to find integration" }); + if (!integrationAuth) throw new NotFoundError({ message: `Integration auth with ID '${id}' not found` }); const { permission } = await 
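
Worth flagging on the KMS-keys change just above: the old code mutated the process-wide `AWS.config` before constructing the client, which can leak one request's region and credentials into clients created by concurrent requests; passing them to the `AWS.KMS` constructor keeps each request self-contained. With aws-sdk v2 (credential values illustrative):

import AWS from "aws-sdk";

const accessKeyId = "AKIA-example"; // illustrative
const secretAccessKey = "example-secret"; // illustrative

// Per-client configuration; the global AWS.config is left untouched.
const kms = new AWS.KMS({
  region: "us-east-1",
  credentials: { accessKeyId, secretAccessKey }
});

const aliases = await kms.listAliases({}).promise();
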
permissionService.getProjectPermission( actor, @@ -614,8 +855,8 @@ export const integrationAuthServiceFactory = ({ actorOrgId ); ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Integrations); - const botKey = await projectBotService.getBotKey(integrationAuth.projectId); - const { accessToken } = await getIntegrationAccessToken(integrationAuth, botKey); + const { shouldUseSecretV2Bridge, botKey } = await projectBotService.getBotKey(integrationAuth.projectId); + const { accessToken } = await getIntegrationAccessToken(integrationAuth, shouldUseSecretV2Bridge, botKey); if (orgId) { const { data } = await request.get<{ results: Array<{ id: string; name: string }> }>( `${IntegrationUrls.QOVERY_API_URL}/organization/${orgId}/project`, @@ -640,7 +881,7 @@ export const integrationAuthServiceFactory = ({ actorOrgId }: TIntegrationAuthQoveryEnvironmentsDTO) => { const integrationAuth = await integrationAuthDAL.findById(id); - if (!integrationAuth) throw new BadRequestError({ message: "Failed to find integration" }); + if (!integrationAuth) throw new NotFoundError({ message: `Integration auth with ID '${id}' not found` }); const { permission } = await permissionService.getProjectPermission( actor, @@ -650,8 +891,8 @@ export const integrationAuthServiceFactory = ({ actorOrgId ); ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Integrations); - const botKey = await projectBotService.getBotKey(integrationAuth.projectId); - const { accessToken } = await getIntegrationAccessToken(integrationAuth, botKey); + const { shouldUseSecretV2Bridge, botKey } = await projectBotService.getBotKey(integrationAuth.projectId); + const { accessToken } = await getIntegrationAccessToken(integrationAuth, shouldUseSecretV2Bridge, botKey); if (projectId && projectId !== "none") { // TODO: fix const { data } = await request.get<{ results: { id: string; name: string }[] }>( @@ -681,7 +922,7 @@ export const integrationAuthServiceFactory = ({ environmentId }: TIntegrationAuthQoveryScopesDTO) => { const integrationAuth = await integrationAuthDAL.findById(id); - if (!integrationAuth) throw new BadRequestError({ message: "Failed to find integration" }); + if (!integrationAuth) throw new NotFoundError({ message: `Integration auth with ID '${id}' not found` }); const { permission } = await permissionService.getProjectPermission( actor, @@ -691,8 +932,8 @@ export const integrationAuthServiceFactory = ({ actorOrgId ); ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Integrations); - const botKey = await projectBotService.getBotKey(integrationAuth.projectId); - const { accessToken } = await getIntegrationAccessToken(integrationAuth, botKey); + const { shouldUseSecretV2Bridge, botKey } = await projectBotService.getBotKey(integrationAuth.projectId); + const { accessToken } = await getIntegrationAccessToken(integrationAuth, shouldUseSecretV2Bridge, botKey); if (environmentId) { const { data } = await request.get<{ results: { id: string; name: string }[] }>( `${IntegrationUrls.QOVERY_API_URL}/environment/${environmentId}/application`, @@ -721,7 +962,7 @@ export const integrationAuthServiceFactory = ({ environmentId }: TIntegrationAuthQoveryScopesDTO) => { const integrationAuth = await integrationAuthDAL.findById(id); - if (!integrationAuth) throw new BadRequestError({ message: "Failed to find integration" }); + if (!integrationAuth) throw new NotFoundError({ message: `Integration auth with ID 
'${id}' not found` }); const { permission } = await permissionService.getProjectPermission( actor, @@ -731,8 +972,8 @@ export const integrationAuthServiceFactory = ({ actorOrgId ); ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Integrations); - const botKey = await projectBotService.getBotKey(integrationAuth.projectId); - const { accessToken } = await getIntegrationAccessToken(integrationAuth, botKey); + const { shouldUseSecretV2Bridge, botKey } = await projectBotService.getBotKey(integrationAuth.projectId); + const { accessToken } = await getIntegrationAccessToken(integrationAuth, shouldUseSecretV2Bridge, botKey); if (environmentId) { const { data } = await request.get<{ results: { id: string; name: string }[] }>( `${IntegrationUrls.QOVERY_API_URL}/environment/${environmentId}/container`, @@ -761,7 +1002,7 @@ export const integrationAuthServiceFactory = ({ environmentId }: TIntegrationAuthQoveryScopesDTO) => { const integrationAuth = await integrationAuthDAL.findById(id); - if (!integrationAuth) throw new BadRequestError({ message: "Failed to find integration" }); + if (!integrationAuth) throw new NotFoundError({ message: `Integration auth with ID '${id}' not found` }); const { permission } = await permissionService.getProjectPermission( actor, @@ -771,8 +1012,8 @@ export const integrationAuthServiceFactory = ({ actorOrgId ); ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Integrations); - const botKey = await projectBotService.getBotKey(integrationAuth.projectId); - const { accessToken } = await getIntegrationAccessToken(integrationAuth, botKey); + const { shouldUseSecretV2Bridge, botKey } = await projectBotService.getBotKey(integrationAuth.projectId); + const { accessToken } = await getIntegrationAccessToken(integrationAuth, shouldUseSecretV2Bridge, botKey); if (environmentId) { const { data } = await request.get<{ results: { id: string; name: string }[] }>( `${IntegrationUrls.QOVERY_API_URL}/environment/${environmentId}/job`, @@ -800,7 +1041,7 @@ export const integrationAuthServiceFactory = ({ actorOrgId }: TIntegrationAuthHerokuPipelinesDTO) => { const integrationAuth = await integrationAuthDAL.findById(id); - if (!integrationAuth) throw new BadRequestError({ message: "Failed to find integration" }); + if (!integrationAuth) throw new NotFoundError({ message: `Integration auth with ID '${id}' not found` }); const { permission } = await permissionService.getProjectPermission( actor, @@ -810,8 +1051,8 @@ export const integrationAuthServiceFactory = ({ actorOrgId ); ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Integrations); - const botKey = await projectBotService.getBotKey(integrationAuth.projectId); - const { accessToken } = await getIntegrationAccessToken(integrationAuth, botKey); + const { shouldUseSecretV2Bridge, botKey } = await projectBotService.getBotKey(integrationAuth.projectId); + const { accessToken } = await getIntegrationAccessToken(integrationAuth, shouldUseSecretV2Bridge, botKey); const { data } = await request.get( `${IntegrationUrls.HEROKU_API_URL}/pipeline-couplings`, @@ -840,7 +1081,7 @@ export const integrationAuthServiceFactory = ({ appId }: TIntegrationAuthRailwayEnvDTO) => { const integrationAuth = await integrationAuthDAL.findById(id); - if (!integrationAuth) throw new BadRequestError({ message: "Failed to find integration" }); + if (!integrationAuth) throw new NotFoundError({ message: `Integration auth with ID 
'${id}' not found` }); const { permission } = await permissionService.getProjectPermission( actor, @@ -850,8 +1091,8 @@ export const integrationAuthServiceFactory = ({ actorOrgId ); ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Integrations); - const botKey = await projectBotService.getBotKey(integrationAuth.projectId); - const { accessToken } = await getIntegrationAccessToken(integrationAuth, botKey); + const { shouldUseSecretV2Bridge, botKey } = await projectBotService.getBotKey(integrationAuth.projectId); + const { accessToken } = await getIntegrationAccessToken(integrationAuth, shouldUseSecretV2Bridge, botKey); if (appId) { const query = ` query GetEnvironments($projectId: String!, $after: String, $before: String, $first: Int, $isEphemeral: Boolean, $last: Int) { @@ -908,7 +1149,7 @@ export const integrationAuthServiceFactory = ({ appId }: TIntegrationAuthRailwayServicesDTO) => { const integrationAuth = await integrationAuthDAL.findById(id); - if (!integrationAuth) throw new BadRequestError({ message: "Failed to find integration" }); + if (!integrationAuth) throw new NotFoundError({ message: `Integration auth with ID '${id}' not found` }); const { permission } = await permissionService.getProjectPermission( actor, @@ -918,8 +1159,8 @@ export const integrationAuthServiceFactory = ({ actorOrgId ); ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Integrations); - const botKey = await projectBotService.getBotKey(integrationAuth.projectId); - const { accessToken } = await getIntegrationAccessToken(integrationAuth, botKey); + const { shouldUseSecretV2Bridge, botKey } = await projectBotService.getBotKey(integrationAuth.projectId); + const { accessToken } = await getIntegrationAccessToken(integrationAuth, shouldUseSecretV2Bridge, botKey); if (appId && appId !== "") { const query = ` @@ -982,7 +1223,7 @@ export const integrationAuthServiceFactory = ({ id }: TIntegrationAuthBitbucketWorkspaceDTO) => { const integrationAuth = await integrationAuthDAL.findById(id); - if (!integrationAuth) throw new BadRequestError({ message: "Failed to find integration" }); + if (!integrationAuth) throw new NotFoundError({ message: `Integration auth with ID '${id}' not found` }); const { permission } = await permissionService.getProjectPermission( actor, @@ -992,8 +1233,8 @@ export const integrationAuthServiceFactory = ({ actorOrgId ); ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Integrations); - const botKey = await projectBotService.getBotKey(integrationAuth.projectId); - const { accessToken } = await getIntegrationAccessToken(integrationAuth, botKey); + const { shouldUseSecretV2Bridge, botKey } = await projectBotService.getBotKey(integrationAuth.projectId); + const { accessToken } = await getIntegrationAccessToken(integrationAuth, shouldUseSecretV2Bridge, botKey); const workspaces: TBitbucketWorkspace[] = []; let hasNextPage = true; let workspaceUrl = `${IntegrationUrls.BITBUCKET_API_URL}/2.0/workspaces`; @@ -1022,16 +1263,17 @@ export const integrationAuthServiceFactory = ({ return workspaces; }; - const getNorthFlankSecretGroups = async ({ - id, - actor, + const getBitbucketEnvironments = async ({ + workspaceSlug, + repoSlug, actorId, + actor, actorOrgId, actorAuthMethod, - appId - }: TIntegrationAuthNorthflankSecretGroupDTO) => { + id + }: TIntegrationAuthBitbucketEnvironmentsDTO) => { const integrationAuth = await integrationAuthDAL.findById(id); - if 
(!integrationAuth) throw new BadRequestError({ message: "Failed to find integration" }); + if (!integrationAuth) throw new NotFoundError({ message: `Integration auth with ID '${id}' not found` }); const { permission } = await permissionService.getProjectPermission( actor, @@ -1041,8 +1283,56 @@ export const integrationAuthServiceFactory = ({ actorOrgId ); ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Integrations); - const botKey = await projectBotService.getBotKey(integrationAuth.projectId); - const { accessToken } = await getIntegrationAccessToken(integrationAuth, botKey); + const { shouldUseSecretV2Bridge, botKey } = await projectBotService.getBotKey(integrationAuth.projectId); + const { accessToken } = await getIntegrationAccessToken(integrationAuth, shouldUseSecretV2Bridge, botKey); + const environments: TBitbucketEnvironment[] = []; + let hasNextPage = true; + + let environmentsUrl = `${IntegrationUrls.BITBUCKET_API_URL}/2.0/repositories/${workspaceSlug}/${repoSlug}/environments`; + + while (hasNextPage) { + // eslint-disable-next-line + const { data }: { data: { values: TBitbucketEnvironment[]; next: string } } = await request.get(environmentsUrl, { + headers: { + Authorization: `Bearer ${accessToken}`, + "Accept-Encoding": "application/json" + } + }); + + if (data?.values.length > 0) { + environments.push(...data.values); + } + + if (data.next) { + environmentsUrl = data.next; + } else { + hasNextPage = false; + } + } + return environments; + }; + + const getNorthFlankSecretGroups = async ({ + id, + actor, + actorId, + actorOrgId, + actorAuthMethod, + appId + }: TIntegrationAuthNorthflankSecretGroupDTO) => { + const integrationAuth = await integrationAuthDAL.findById(id); + if (!integrationAuth) throw new NotFoundError({ message: `Integration auth with ID '${id}' not found` }); + + const { permission } = await permissionService.getProjectPermission( + actor, + actorId, + integrationAuth.projectId, + actorAuthMethod, + actorOrgId + ); + ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Integrations); + const { shouldUseSecretV2Bridge, botKey } = await projectBotService.getBotKey(integrationAuth.projectId); + const { accessToken } = await getIntegrationAccessToken(integrationAuth, shouldUseSecretV2Bridge, botKey); const secretGroups: { name: string; groupId: string }[] = []; if (appId) { @@ -1099,7 +1389,7 @@ export const integrationAuthServiceFactory = ({ actor }: TGetIntegrationAuthTeamCityBuildConfigDTO) => { const integrationAuth = await integrationAuthDAL.findById(id); - if (!integrationAuth) throw new BadRequestError({ message: "Failed to find integration" }); + if (!integrationAuth) throw new NotFoundError({ message: `Integration auth with ID '${id}' not found` }); const { permission } = await permissionService.getProjectPermission( actor, @@ -1109,8 +1399,8 @@ export const integrationAuthServiceFactory = ({ actorOrgId ); ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Integrations); - const botKey = await projectBotService.getBotKey(integrationAuth.projectId); - const { accessToken } = await getIntegrationAccessToken(integrationAuth, botKey); + const { shouldUseSecretV2Bridge, botKey } = await projectBotService.getBotKey(integrationAuth.projectId); + const { accessToken } = await getIntegrationAccessToken(integrationAuth, shouldUseSecretV2Bridge, botKey); if (appId) { const { data: { buildType }
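The environments fetch above follows Bitbucket's standard paginated envelope, where each page returns `values` plus an absolute `next` URL. The same cursor pattern, extracted as a stand-alone sketch (axios used directly for illustration; the service code goes through the shared `request` client):

```ts
import axios from "axios";

// Generic next-link pagination: keep requesting until a page arrives without a
// `next` URL. Bitbucket returns absolute URLs, so each `next` can be fetched as-is.
const fetchAllPages = async <T>(startUrl: string, accessToken: string): Promise<T[]> => {
  const results: T[] = [];
  let url: string | undefined = startUrl;
  while (url) {
    const { data } = await axios.get<{ values: T[]; next?: string }>(url, {
      headers: { Authorization: `Bearer ${accessToken}` }
    });
    results.push(...(data.values ?? []));
    url = data.next; // undefined on the last page, which ends the loop
  }
  return results;
};
```

@@ -1161,7 +1451,7 @@ export 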
const integrationAuthServiceFactory = ({ actorOrgId }: TDeleteIntegrationAuthByIdDTO) => { const integrationAuth = await integrationAuthDAL.findById(id); - if (!integrationAuth) throw new BadRequestError({ message: "Failed to find integration" }); + if (!integrationAuth) throw new NotFoundError({ message: `Integration auth with ID '${id}' not found` }); const { permission } = await permissionService.getProjectPermission( actor, @@ -1174,7 +1464,7 @@ export const integrationAuthServiceFactory = ({ const delIntegrationAuth = await integrationAuthDAL.transaction(async (tx) => { const doc = await integrationAuthDAL.deleteById(integrationAuth.id, tx); - if (!doc) throw new BadRequestError({ message: "Faled to find integration" }); + if (!doc) throw new NotFoundError({ message: `Integration auth with ID '${integrationAuth.id}' not found` }); await integrationDAL.delete({ integrationAuthId: doc.id }, tx); return doc; }); @@ -1182,8 +1472,58 @@ export const integrationAuthServiceFactory = ({ return delIntegrationAuth; }; + // At the moment, we only use this for Github App integration as it's a special case + const duplicateIntegrationAuth = async ({ + id, + actorId, + actor, + actorAuthMethod, + actorOrgId, + projectId + }: TDuplicateGithubIntegrationAuthDTO) => { + const integrationAuth = await integrationAuthDAL.findById(id); + if (!integrationAuth) { + throw new NotFoundError({ message: `Integration auth with ID '${id}' not found` }); + } + + const { permission: sourcePermission } = await permissionService.getProjectPermission( + actor, + actorId, + integrationAuth.projectId, + actorAuthMethod, + actorOrgId + ); + + ForbiddenError.from(sourcePermission).throwUnlessCan( + ProjectPermissionActions.Create, + ProjectPermissionSub.Integrations + ); + + const { permission: targetPermission } = await permissionService.getProjectPermission( + actor, + actorId, + projectId, + actorAuthMethod, + actorOrgId + ); + + ForbiddenError.from(targetPermission).throwUnlessCan( + ProjectPermissionActions.Create, + ProjectPermissionSub.Integrations + ); + + const newIntegrationAuth: Omit<TIntegrationAuths, "id"> & { id?: string } = { + ...integrationAuth, + id: undefined, + projectId + }; + + return integrationAuthDAL.create(newIntegrationAuth); + }; + return { listIntegrationAuthByProjectId, + listOrgIntegrationAuth, getIntegrationOptions, getIntegrationAuth, oauthExchange, @@ -1210,6 +1550,8 @@ export const integrationAuthServiceFactory = ({ getNorthFlankSecretGroups, getTeamcityBuildConfigs, getBitbucketWorkspaces, - getIntegrationAccessToken + getBitbucketEnvironments, + getIntegrationAccessToken, + duplicateIntegrationAuth }; }; diff --git a/backend/src/services/integration-auth/integration-auth-types.ts b/backend/src/services/integration-auth/integration-auth-types.ts index 0a816035cc..0b92c29f1a 100644 --- a/backend/src/services/integration-auth/integration-auth-types.ts +++ b/backend/src/services/integration-auth/integration-auth-types.ts @@ -1,3 +1,4 @@ +import { TIntegrations } from "@app/db/schemas"; import { TProjectPermission } from "@app/lib/types"; export type TGetIntegrationAuthDTO = { @@ -8,6 +9,7 @@ export type TOauthExchangeDTO = { integration: string; code: string; url?: string; + installationId?: string; } & TProjectPermission; export type TSaveIntegrationAccessTokenDTO = { @@ -17,6 +19,7 @@ url?: string; namespace?: string; refreshToken?: string; + awsAssumeIamRoleArn?: string; } & TProjectPermission; export type TDeleteIntegrationAuthsDTO = TProjectPermission & { @@ 
-27,6 +30,7 @@ export type TDeleteIntegrationAuthsDTO = TProjectPermission & { export type TIntegrationAuthAppsDTO = { id: string; teamId?: string; + azureDevOpsOrgName?: string; workspaceSlug?: string; } & Omit<TProjectPermission, "projectId">; @@ -95,6 +99,12 @@ export type TIntegrationAuthBitbucketWorkspaceDTO = { id: string; } & Omit<TProjectPermission, "projectId">; +export type TIntegrationAuthBitbucketEnvironmentsDTO = { + workspaceSlug: string; + repoSlug: string; + id: string; +} & Omit<TProjectPermission, "projectId">; + export type TIntegrationAuthNorthflankSecretGroupDTO = { id: string; appId: string; @@ -104,6 +114,10 @@ export type TDeleteIntegrationAuthByIdDTO = { id: string; } & Omit<TProjectPermission, "projectId">; +export type TDuplicateGithubIntegrationAuthDTO = { + id: string; +} & TProjectPermission; + export type TGetIntegrationAuthTeamCityBuildConfigDTO = { id: string; appId: string; @@ -140,6 +154,13 @@ export type TBitbucketWorkspace = { updated_on: string; }; +export type TBitbucketEnvironment = { + type: string; + uuid: string; + name: string; + slug: string; +}; + export type TNorthflankSecretGroup = { id: string; name: string; @@ -162,3 +183,13 @@ export type TTeamCityBuildConfig = { href: string; webUrl: string; }; + +export type TIntegrationsWithEnvironment = TIntegrations & { + environment?: + | { + id?: string | null | undefined; + name?: string | null | undefined; + } + | null + | undefined; +}; diff --git a/backend/src/services/integration-auth/integration-delete-secret.ts b/backend/src/services/integration-auth/integration-delete-secret.ts new file mode 100644 index 0000000000..fdefd0e621 --- /dev/null +++ b/backend/src/services/integration-auth/integration-delete-secret.ts @@ -0,0 +1,382 @@ +/* eslint-disable @typescript-eslint/no-unsafe-member-access */ +import { createAppAuth } from "@octokit/auth-app"; +import { retry } from "@octokit/plugin-retry"; +import { Octokit } from "@octokit/rest"; + +import { TIntegrationAuths, TIntegrations } from "@app/db/schemas"; +import { getConfig } from "@app/lib/config/env"; +import { decryptSymmetric128BitHexKeyUTF8 } from "@app/lib/crypto"; +import { BadRequestError, NotFoundError } from "@app/lib/errors"; +import { logger } from "@app/lib/logger"; + +import { IntegrationMetadataSchema } from "../integration/integration-schema"; +import { TKmsServiceFactory } from "../kms/kms-service"; +import { KmsDataKey } from "../kms/kms-types"; +import { TProjectBotServiceFactory } from "../project-bot/project-bot-service"; +import { TSecretDALFactory } from "../secret/secret-dal"; +import { TSecretFolderDALFactory } from "../secret-folder/secret-folder-dal"; +import { TSecretImportDALFactory } from "../secret-import/secret-import-dal"; +import { fnSecretsV2FromImports } from "../secret-import/secret-import-fns"; +import { TSecretV2BridgeDALFactory } from "../secret-v2-bridge/secret-v2-bridge-dal"; +import { IntegrationAuthMetadataSchema, TIntegrationAuthMetadata } from "./integration-auth-schema"; +import { TIntegrationAuthServiceFactory } from "./integration-auth-service"; +import { Integrations } from "./integration-list"; + +const MAX_SYNC_SECRET_DEPTH = 5; + +/** + * Return the secrets in a given [folderId] including secrets from + * nested imported folders recursively. 
+ */ +const getIntegrationSecretsV2 = async ( + dto: { + projectId: string; + environment: string; + folderId: string; + depth: number; + secretPath: string; + decryptor: (value: Buffer | null | undefined) => string; + }, + secretV2BridgeDAL: Pick, + folderDAL: Pick, + secretImportDAL: Pick +) => { + const content: Record<string, boolean> = {}; + if (dto.depth > MAX_SYNC_SECRET_DEPTH) { + logger.info( + `getIntegrationSecrets: secret depth exceeded for [projectId=${dto.projectId}] [folderId=${dto.folderId}] [depth=${dto.depth}]` + ); + return content; + } + + // process secrets in current folder + const secrets = await secretV2BridgeDAL.findByFolderId(dto.folderId); + + secrets.forEach((secret) => { + const secretKey = secret.key; + content[secretKey] = true; + }); + + // check if current folder has any imports from other folders + const secretImports = await secretImportDAL.find({ folderId: dto.folderId, isReplication: false }); + + // if no imports then return secrets in the current folder + if (!secretImports.length) return content; + const importedSecrets = await fnSecretsV2FromImports({ + decryptor: dto.decryptor, + folderDAL, + secretDAL: secretV2BridgeDAL, + secretImportDAL, + secretImports, + hasSecretAccess: () => true + }); + + for (let i = importedSecrets.length - 1; i >= 0; i -= 1) { + for (let j = 0; j < importedSecrets[i].secrets.length; j += 1) { + const importedSecret = importedSecrets[i].secrets[j]; + if (!content[importedSecret.key]) { + content[importedSecret.key] = true; + } + } + } + return content; +}; + +/** + * Return the secrets in a given [folderId] including secrets from + * nested imported folders recursively. + */ +const getIntegrationSecretsV1 = async ( + dto: { + projectId: string; + environment: string; + folderId: string; + key: string; + depth: number; + }, + secretDAL: Pick, + folderDAL: Pick, + secretImportDAL: Pick +) => { + let content: Record<string, boolean> = {}; + if (dto.depth > MAX_SYNC_SECRET_DEPTH) { + logger.info( + `getIntegrationSecrets: secret depth exceeded for [projectId=${dto.projectId}] [folderId=${dto.folderId}] [depth=${dto.depth}]` + ); + return content; + } + + // process secrets in current folder + const secrets = await secretDAL.findByFolderId(dto.folderId); + secrets.forEach((secret) => { + const secretKey = decryptSymmetric128BitHexKeyUTF8({ + ciphertext: secret.secretKeyCiphertext, + iv: secret.secretKeyIV, + tag: secret.secretKeyTag, + key: dto.key + }); + + content[secretKey] = true; + }); + + // check if current folder has any imports from other folders + const secretImport = await secretImportDAL.find({ folderId: dto.folderId, isReplication: false }); + + // if no imports then return secrets in the current folder + if (!secretImport) return content; + + const importedFolders = await folderDAL.findByManySecretPath( + secretImport.map(({ importEnv, importPath }) => ({ + envId: importEnv.id, + secretPath: importPath + })) + ); + + for await (const folder of importedFolders) { + if (folder) { + // get secrets contained in each imported folder by recursively calling + // this function against the imported folder + const importedSecrets = await getIntegrationSecretsV1( + { + environment: dto.environment, + projectId: dto.projectId, + folderId: folder.id, + key: dto.key, + depth: dto.depth + 1 + }, + secretDAL, + folderDAL, + secretImportDAL + ); + + // add the imported secrets to the current folder secrets + content = { ...importedSecrets, ...content }; + } + } + + return content; +};
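Both helpers cap recursion at MAX_SYNC_SECRET_DEPTH rather than tracking visited folders, which is what keeps cyclic imports (folder A importing B, B importing A) from looping forever. A toy sketch of the guard with hypothetical folder and key names:

```ts
// Illustrative only: a cycle A -> B -> A terminates because depth, not a
// visited set, bounds the traversal.
const imports: Record<string, string[]> = { A: ["B"], B: ["A"] };
const keysByFolder: Record<string, string[]> = { A: ["DB_URL"], B: ["API_KEY"] };

const collectKeys = (folderId: string, depth = 1, maxDepth = 5): Record<string, boolean> => {
  const content: Record<string, boolean> = {};
  if (depth > maxDepth) return content; // the guard that breaks the cycle
  for (const key of keysByFolder[folderId] ?? []) content[key] = true;
  for (const imported of imports[folderId] ?? []) {
    // keys already present locally win over imported ones, mirroring
    // content = { ...importedSecrets, ...content } above
    const importedKeys = collectKeys(imported, depth + 1, maxDepth);
    for (const key of Object.keys(importedKeys)) {
      if (!(key in content)) content[key] = true;
    }
  }
  return content;
};

// collectKeys("A") -> { DB_URL: true, API_KEY: true }, with no infinite loop
```

+ +export const deleteGithubSecrets = async ({ + integration, + authMetadata, + 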
secrets, + accessToken +}: { + integration: Omit; + authMetadata: TIntegrationAuthMetadata; + secrets: Record; + accessToken: string; +}) => { + interface GitHubSecret { + name: string; + created_at: string; + updated_at: string; + visibility?: "all" | "private" | "selected"; + selected_repositories_url?: string | undefined; + } + + const OctokitWithRetry = Octokit.plugin(retry); + let octokit: Octokit; + const appCfg = getConfig(); + + if (authMetadata.installationId) { + octokit = new OctokitWithRetry({ + authStrategy: createAppAuth, + auth: { + appId: appCfg.CLIENT_APP_ID_GITHUB_APP, + privateKey: appCfg.CLIENT_PRIVATE_KEY_GITHUB_APP, + installationId: authMetadata.installationId + } + }); + } else { + octokit = new OctokitWithRetry({ + auth: accessToken + }); + } + + enum GithubScope { + Repo = "github-repo", + Org = "github-org", + Env = "github-env" + } + + let encryptedGithubSecrets: GitHubSecret[]; + + switch (integration.scope) { + case GithubScope.Org: { + encryptedGithubSecrets = ( + await octokit.request("GET /orgs/{org}/actions/secrets", { + org: integration.owner as string + }) + ).data.secrets; + break; + } + case GithubScope.Env: { + // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment + encryptedGithubSecrets = ( + await octokit.request("GET /repositories/{repository_id}/environments/{environment_name}/secrets", { + repository_id: Number(integration.appId), + environment_name: integration.targetEnvironmentId as string + }) + ).data.secrets; + break; + } + default: { + encryptedGithubSecrets = ( + await octokit.request("GET /repos/{owner}/{repo}/actions/secrets", { + owner: integration.owner as string, + repo: integration.app as string + }) + ).data.secrets; + break; + } + } + + for await (const encryptedSecret of encryptedGithubSecrets) { + if (encryptedSecret.name in secrets) { + switch (integration.scope) { + case GithubScope.Org: { + await octokit.request("DELETE /orgs/{org}/actions/secrets/{secret_name}", { + org: integration.owner as string, + secret_name: encryptedSecret.name + }); + break; + } + case GithubScope.Env: { + await octokit.request( + "DELETE /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}", + { + repository_id: Number(integration.appId), + environment_name: integration.targetEnvironmentId as string, + secret_name: encryptedSecret.name + } + ); + break; + } + default: { + await octokit.request("DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}", { + owner: integration.owner as string, + repo: integration.app as string, + secret_name: encryptedSecret.name + }); + break; + } + } + + // small delay to prevent hitting API rate limits + await new Promise((resolve) => { + setTimeout(resolve, 50); + }); + } + } +}; + +export const deleteIntegrationSecrets = async ({ + integration, + integrationAuth, + integrationAuthService, + projectBotService, + secretV2BridgeDAL, + folderDAL, + secretDAL, + secretImportDAL, + kmsService +}: { + integration: Omit & { + projectId: string; + environment: { + id: string; + name: string; + slug: string; + }; + secretPath: string; + }; + integrationAuth: TIntegrationAuths; + integrationAuthService: Pick; + projectBotService: Pick; + secretV2BridgeDAL: Pick; + folderDAL: Pick; + secretImportDAL: Pick; + secretDAL: Pick; + kmsService: Pick; +}) => { + const { shouldUseSecretV2Bridge, botKey } = await projectBotService.getBotKey(integration.projectId); + const { decryptor: secretManagerDecryptor } = await kmsService.createCipherPairWithDataKey({ + type: 
KmsDataKey.SecretManager, + projectId: integration.projectId + }); + + const folder = await folderDAL.findBySecretPath( + integration.projectId, + integration.environment.slug, + integration.secretPath + ); + + if (!folder) { + throw new NotFoundError({ + message: `Folder with path '${integration.secretPath}' not found in environment with slug '${integration.environment.slug}'` + }); + } + + const { accessToken } = await integrationAuthService.getIntegrationAccessToken( + integrationAuth, + shouldUseSecretV2Bridge, + botKey + ); + + const secrets = shouldUseSecretV2Bridge + ? await getIntegrationSecretsV2( + { + environment: integration.environment.id, + secretPath: integration.secretPath, + projectId: integration.projectId, + folderId: folder.id, + depth: 1, + decryptor: (value) => (value ? secretManagerDecryptor({ cipherTextBlob: value }).toString() : "") + }, + secretV2BridgeDAL, + folderDAL, + secretImportDAL + ) + : await getIntegrationSecretsV1( + { + environment: integration.environment.id, + projectId: integration.projectId, + folderId: folder.id, + key: botKey as string, + depth: 1 + }, + secretDAL, + folderDAL, + secretImportDAL + ); + + const suffixedSecrets: typeof secrets = {}; + const metadata = IntegrationMetadataSchema.parse(integration.metadata); + + if (metadata) { + Object.keys(secrets).forEach((key) => { + const prefix = metadata?.secretPrefix || ""; + const suffix = metadata?.secretSuffix || ""; + const newKey = prefix + key + suffix; + suffixedSecrets[newKey] = secrets[key]; + }); + }
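The remapping above re-derives the affixed key names that were used at sync time, so the delete pass looks up the right upstream keys. A compact sketch of the transformation, assuming the `secretPrefix`/`secretSuffix` metadata fields shown:

```ts
const applyAffixes = (
  secrets: Record<string, boolean>,
  metadata: { secretPrefix?: string; secretSuffix?: string }
): Record<string, boolean> => {
  const out: Record<string, boolean> = {};
  for (const key of Object.keys(secrets)) {
    // prefix + key + suffix, with both affixes defaulting to ""
    out[`${metadata.secretPrefix ?? ""}${key}${metadata.secretSuffix ?? ""}`] = secrets[key];
  }
  return out;
};

// applyAffixes({ DB_URL: true }, { secretPrefix: "PROD_" }) -> { PROD_DB_URL: true }
```

+ + switch (integration.integration) { + case Integrations.GITHUB: { + await deleteGithubSecrets({ + integration, + authMetadata: IntegrationAuthMetadataSchema.parse(integrationAuth.metadata || {}), + accessToken, + secrets: Object.keys(suffixedSecrets).length !== 0 ? 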
suffixedSecrets : secrets + }); + break; + } + default: + throw new BadRequestError({ + message: "Invalid integration" + }); + } +}; diff --git a/backend/src/services/integration-auth/integration-list.ts b/backend/src/services/integration-auth/integration-list.ts index edc426327a..007fccd525 100644 --- a/backend/src/services/integration-auth/integration-list.ts +++ b/backend/src/services/integration-auth/integration-list.ts @@ -15,6 +15,7 @@ export enum Integrations { FLYIO = "flyio", LARAVELFORGE = "laravel-forge", CIRCLECI = "circleci", + DATABRICKS = "databricks", TRAVISCI = "travisci", TEAMCITY = "teamcity", SUPABASE = "supabase", @@ -31,7 +32,9 @@ export enum Integrations { CLOUD_66 = "cloud-66", NORTHFLANK = "northflank", HASURA_CLOUD = "hasura-cloud", - RUNDECK = "rundeck" + RUNDECK = "rundeck", + AZURE_DEVOPS = "azure-devops", + AZURE_APP_CONFIGURATION = "azure-app-configuration" } export enum IntegrationType { @@ -72,6 +75,7 @@ export enum IntegrationUrls { RAILWAY_API_URL = "https://backboard.railway.app/graphql/v2", FLYIO_API_URL = "https://api.fly.io/graphql", CIRCLECI_API_URL = "https://circleci.com/api", + DATABRICKS_API_URL = "https:/xxxx.com/api", TRAVISCI_API_URL = "https://api.travis-ci.com", SUPABASE_API_URL = "https://api.supabase.com", LARAVELFORGE_API_URL = "https://forge.laravel.com", @@ -88,11 +92,14 @@ export enum IntegrationUrls { CLOUD_66_API_URL = "https://app.cloud66.com/api", NORTHFLANK_API_URL = "https://api.northflank.com", HASURA_CLOUD_API_URL = "https://data.pro.hasura.io/v1/graphql", + AZURE_DEVOPS_API_URL = "https://dev.azure.com", GCP_SECRET_MANAGER_SERVICE_NAME = "secretmanager.googleapis.com", GCP_SECRET_MANAGER_URL = `https://${GCP_SECRET_MANAGER_SERVICE_NAME}`, GCP_SERVICE_USAGE_URL = "https://serviceusage.googleapis.com", - GCP_CLOUD_PLATFORM_SCOPE = "https://www.googleapis.com/auth/cloud-platform" + GCP_CLOUD_PLATFORM_SCOPE = "https://www.googleapis.com/auth/cloud-platform", + + GITHUB_USER_INSTALLATIONS = "https://api.github.com/user/installations" } export const getIntegrationOptions = async () => { @@ -134,6 +141,7 @@ export const getIntegrationOptions = async () => { isAvailable: true, type: "oauth", clientId: appCfg.CLIENT_ID_GITHUB, + clientSlug: appCfg.CLIENT_SLUG_GITHUB_APP, docsLink: "" }, { @@ -199,6 +207,15 @@ export const getIntegrationOptions = async () => { clientId: appCfg.CLIENT_ID_AZURE, docsLink: "" }, + { + name: "Azure App Configuration", + slug: "azure-app-configuration", + image: "Microsoft Azure.png", + isAvailable: true, + type: "oauth", + clientId: appCfg.CLIENT_ID_AZURE, + docsLink: "" + }, { name: "Circle CI", slug: "circleci", @@ -208,6 +225,15 @@ export const getIntegrationOptions = async () => { clientId: "", docsLink: "" }, + { + name: "Databricks", + slug: "databricks", + image: "Databricks.png", + isAvailable: true, + type: "pat", + clientId: "", + docsLink: "" + }, { name: "GitLab", slug: "gitlab", @@ -308,7 +334,7 @@ export const getIntegrationOptions = async () => { docsLink: "" }, { - name: "BitBucket", + name: "Bitbucket", slug: "bitbucket", image: "BitBucket.png", isAvailable: true, @@ -378,6 +404,15 @@ export const getIntegrationOptions = async () => { type: "pat", clientId: "", docsLink: "" + }, + { + name: "Azure DevOps", + slug: "azure-devops", + image: "Microsoft Azure.png", + isAvailable: true, + type: "pat", + clientId: "", + docsLink: "" } ]; diff --git a/backend/src/services/integration-auth/integration-sync-secret.ts b/backend/src/services/integration-auth/integration-sync-secret.ts index 
6351b4d824..197c1f9407 100644 --- a/backend/src/services/integration-auth/integration-sync-secret.ts +++ b/backend/src/services/integration-auth/integration-sync-secret.ts @@ -9,6 +9,7 @@ import { CreateSecretCommand, + DeleteSecretCommand, DescribeSecretCommand, GetSecretValueCommand, ResourceNotFoundException, @@ -17,14 +18,19 @@ import { UntagResourceCommand, UpdateSecretCommand } from "@aws-sdk/client-secrets-manager"; +import { AssumeRoleCommand, STSClient } from "@aws-sdk/client-sts"; +import { createAppAuth } from "@octokit/auth-app"; import { Octokit } from "@octokit/rest"; -import AWS from "aws-sdk"; +import AWS, { AWSError } from "aws-sdk"; import { AxiosError } from "axios"; +import { randomUUID } from "crypto"; +import https from "https"; import sodium from "libsodium-wrappers"; import isEqual from "lodash.isequal"; import { z } from "zod"; -import { SecretType, TIntegrationAuths, TIntegrations, TSecrets } from "@app/db/schemas"; +import { SecretType, TIntegrationAuths, TIntegrations } from "@app/db/schemas"; +import { getConfig } from "@app/lib/config/env"; import { request } from "@app/lib/config/request"; import { BadRequestError } from "@app/lib/errors"; import { logger } from "@app/lib/logger"; @@ -32,6 +38,8 @@ import { TCreateManySecretsRawFn, TUpdateManySecretsRawFn } from "@app/services/ import { TIntegrationDALFactory } from "../integration/integration-dal"; import { IntegrationMetadataSchema } from "../integration/integration-schema"; +import { IntegrationAuthMetadataSchema } from "./integration-auth-schema"; +import { TIntegrationsWithEnvironment } from "./integration-auth-types"; import { IntegrationInitialSyncBehavior, IntegrationMappingBehavior, @@ -203,6 +211,12 @@ const syncSecretsGCPSecretManager = async ({ } ); + if (!secrets[key].value) { + logger.warn( + `syncSecretsGcpsecretManager: create secret value in gcp where [key=${key}] and integration appId [appId=${integration.appId}]` + ); + } + await request.post( `${IntegrationUrls.GCP_SECRET_MANAGER_URL}/v1/projects/${integration.appId}/secrets/${key}:addVersion`, { @@ -233,6 +247,12 @@ const syncSecretsGCPSecretManager = async ({ } ); } else if (secrets[key].value !== res[key]) { + if (!secrets[key].value) { + logger.warn( + `syncSecretsGcpsecretManager: update secret value in gcp where [key=${key}] and integration appId [appId=${integration.appId}]` + ); + } + await request.post( `${IntegrationUrls.GCP_SECRET_MANAGER_URL}/v1/projects/${integration.appId}/secrets/${key}:addVersion`, { @@ -251,20 +271,207 @@ const syncSecretsGCPSecretManager = async ({ } }; +const syncSecretsAzureAppConfig = async ({ + integration, + secrets, + accessToken, + createManySecretsRawFn, + updateManySecretsRawFn, + integrationDAL +}: { + integration: TIntegrations & { + projectId: string; + environment: { + id: string; + name: string; + slug: string; + }; + secretPath: string; + }; + secrets: Record; + accessToken: string; + createManySecretsRawFn: (params: TCreateManySecretsRawFn) => Promise>; + updateManySecretsRawFn: (params: TUpdateManySecretsRawFn) => Promise>; + integrationDAL: Pick; +}) => { + interface AzureAppConfigKeyValue { + key: string; + value: string; + } + + const getCompleteAzureAppConfigValues = async (url: string) => { + let result: AzureAppConfigKeyValue[] = []; + while (url) { + const res = await request.get(url, { + headers: { + Authorization: `Bearer ${accessToken}` + }, + // we force IPV4 because docker setup fails with ipv6 + httpsAgent: new https.Agent({ + family: 4 + }) + }); + + result = 
result.concat(res.data.items); + url = res.data.nextLink; + } + + return result; + }; + + const metadata = IntegrationMetadataSchema.parse(integration.metadata); + const azureAppConfigSecrets = ( + await getCompleteAzureAppConfigValues( + `${integration.app}/kv?api-version=2023-11-01&key=${metadata.secretPrefix || ""}*` + ) + ).reduce( + (accum, entry) => { + accum[entry.key] = entry.value; + + return accum; + }, + {} as Record + ); + + const secretsToAdd: { [key: string]: string } = {}; + const secretsToUpdate: { [key: string]: string } = {}; + + Object.keys(azureAppConfigSecrets).forEach((key) => { + if (!integration.lastUsed) { + // first time using integration + // -> apply initial sync behavior + switch (metadata.initialSyncBehavior) { + case IntegrationInitialSyncBehavior.OVERWRITE_TARGET: { + if (!(key in secrets)) { + secrets[key] = null; + } + break; + } + case IntegrationInitialSyncBehavior.PREFER_TARGET: { + if (!(key in secrets)) { + secretsToAdd[key] = azureAppConfigSecrets[key]; + } else if (secrets[key]?.value !== azureAppConfigSecrets[key]) { + secretsToUpdate[key] = azureAppConfigSecrets[key]; + } + secrets[key] = { + value: azureAppConfigSecrets[key] + }; + break; + } + case IntegrationInitialSyncBehavior.PREFER_SOURCE: { + if (!(key in secrets)) { + secrets[key] = { + value: azureAppConfigSecrets[key] + }; + secretsToAdd[key] = azureAppConfigSecrets[key]; + } + break; + } + default: { + break; + } + } + } else if (!(key in secrets)) { + secrets[key] = null; + } + }); + + if (Object.keys(secretsToAdd).length) { + await createManySecretsRawFn({ + projectId: integration.projectId, + environment: integration.environment.slug, + path: integration.secretPath, + secrets: Object.keys(secretsToAdd).map((key) => ({ + secretName: key, + secretValue: secretsToAdd[key], + type: SecretType.Shared, + secretComment: "" + })) + }); + } + + if (Object.keys(secretsToUpdate).length) { + await updateManySecretsRawFn({ + projectId: integration.projectId, + environment: integration.environment.slug, + path: integration.secretPath, + secrets: Object.keys(secretsToUpdate).map((key) => ({ + secretName: key, + secretValue: secretsToUpdate[key], + type: SecretType.Shared, + secretComment: "" + })) + }); + } + + // create or update secrets on Azure App Config + for await (const key of Object.keys(secrets)) { + if (!(key in azureAppConfigSecrets) || secrets[key]?.value !== azureAppConfigSecrets[key]) { + await request.put( + `${integration.app}/kv/${key}?api-version=2023-11-01`, + { + value: secrets[key]?.value + }, + { + headers: { + Authorization: `Bearer ${accessToken}` + }, + // we force IPV4 because docker setup fails with ipv6 + httpsAgent: new https.Agent({ + family: 4 + }) + } + ); + } + } + + for await (const key of Object.keys(azureAppConfigSecrets)) { + if (!(key in secrets) || secrets[key] === null) { + // case: delete secret + await request.delete(`${integration.app}/kv/${key}?api-version=2023-11-01`, { + headers: { + Authorization: `Bearer ${accessToken}` + }, + // we force IPV4 because docker setup fails with ipv6 + httpsAgent: new https.Agent({ + family: 4 + }) + }); + } + } + + await integrationDAL.updateById(integration.id, { + lastUsed: new Date() + }); +}; + /** * Sync/push [secrets] to Azure Key Vault with vault URI [integration.app] */ const syncSecretsAzureKeyVault = async ({ integration, secrets, - accessToken + accessToken, + createManySecretsRawFn, + updateManySecretsRawFn }: { - integration: TIntegrations; + integration: TIntegrations & { + projectId: string; + environment: 
{ + id: string; + name: string; + slug: string; + }; + secretPath: string; + }; secrets: Record; accessToken: string; + createManySecretsRawFn: (params: TCreateManySecretsRawFn) => Promise>; + updateManySecretsRawFn: (params: TUpdateManySecretsRawFn) => Promise>; }) => { interface GetAzureKeyVaultSecret { id: string; // secret URI + value: string; attributes: { enabled: true; created: number; @@ -361,6 +568,83 @@ const syncSecretsAzureKeyVault = async ({ } }); + const secretsToAdd: { [key: string]: string } = {}; + const secretsToUpdate: { [key: string]: string } = {}; + const secretKeysToRemoveFromDelete = new Set(); + + const metadata = IntegrationMetadataSchema.parse(integration.metadata); + if (!integration.lastUsed) { + Object.keys(res).forEach((key) => { + // first time using integration + const underscoredKey = key.replace(/-/g, "_"); + + // -> apply initial sync behavior + switch (metadata.initialSyncBehavior) { + case IntegrationInitialSyncBehavior.PREFER_TARGET: { + if (!(underscoredKey in secrets)) { + secretsToAdd[underscoredKey] = res[key].value; + setSecrets.push({ + key, + value: res[key].value + }); + } else if (secrets[underscoredKey]?.value !== res[key].value) { + secretsToUpdate[underscoredKey] = res[key].value; + const toEditSecretIndex = setSecrets.findIndex((secret) => secret.key === key); + if (toEditSecretIndex >= 0) { + setSecrets[toEditSecretIndex].value = res[key].value; + } + } + + secretKeysToRemoveFromDelete.add(key); + + break; + } + case IntegrationInitialSyncBehavior.PREFER_SOURCE: { + if (!(underscoredKey in secrets)) { + secretsToAdd[underscoredKey] = res[key].value; + setSecrets.push({ + key, + value: res[key].value + }); + } + + secretKeysToRemoveFromDelete.add(key); + break; + } + default: + break; + } + }); + } + + if (Object.keys(secretsToUpdate).length) { + await updateManySecretsRawFn({ + projectId: integration.projectId, + environment: integration.environment.slug, + path: integration.secretPath, + secrets: Object.keys(secretsToUpdate).map((key) => ({ + secretName: key, + secretValue: secretsToUpdate[key], + type: SecretType.Shared, + secretComment: "" + })) + }); + } + + if (Object.keys(secretsToAdd).length) { + await createManySecretsRawFn({ + projectId: integration.projectId, + environment: integration.environment.slug, + path: integration.secretPath, + secrets: Object.keys(secretsToAdd).map((key) => ({ + secretName: key, + secretValue: secretsToAdd[key], + type: SecretType.Shared, + secretComment: "" + })) + }); + } + const setSecretAzureKeyVault = async ({ key, value, @@ -428,7 +712,7 @@ const syncSecretsAzureKeyVault = async ({ }); } - for await (const deleteSecret of deleteSecrets) { + for await (const deleteSecret of deleteSecrets.filter((secret) => !secretKeysToRemoveFromDelete.has(secret.key))) { const { key } = deleteSecret; await request.delete(`${integration.app}/secrets/${key}?api-version=7.3`, { headers: { @@ -445,20 +729,64 @@ const syncSecretsAWSParameterStore = async ({ integration, secrets, accessId, - accessToken + accessToken, + projectId, + awsAssumeRoleArn }: { - integration: TIntegrations; + integration: TIntegrations & { secretPath: string; environment: { slug: string } }; secrets: Record; accessId: string | null; accessToken: string; + awsAssumeRoleArn: string | null; + projectId?: string; }) => { - if (!accessId) return; + const appCfg = getConfig(); + let response: { isSynced: boolean; syncMessage: string } | null = null; + + if (!accessId && !awsAssumeRoleArn) { + throw new Error("AWS access ID/AWS Assume Role is 
required"); + } + + let accessKeyId = ""; + let secretAccessKey = ""; + let sessionToken; + if (awsAssumeRoleArn) { + const client = new STSClient({ + region: integration.region as string, + credentials: + appCfg.CLIENT_ID_AWS_INTEGRATION && appCfg.CLIENT_SECRET_AWS_INTEGRATION + ? { + accessKeyId: appCfg.CLIENT_ID_AWS_INTEGRATION, + secretAccessKey: appCfg.CLIENT_SECRET_AWS_INTEGRATION + } + : undefined + }); + const command = new AssumeRoleCommand({ + RoleArn: awsAssumeRoleArn, + RoleSessionName: `infisical-parameter-store-${randomUUID()}`, + DurationSeconds: 900, // 15mins + ExternalId: projectId + }); + const assumeRes = await client.send(command); + + if (!assumeRes.Credentials?.AccessKeyId || !assumeRes.Credentials?.SecretAccessKey) { + throw new Error("Failed to assume role"); + } + + accessKeyId = assumeRes.Credentials?.AccessKeyId; + secretAccessKey = assumeRes.Credentials?.SecretAccessKey; + sessionToken = assumeRes.Credentials?.SessionToken; + } else { + accessKeyId = accessId as string; + secretAccessKey = accessToken; + } const config = new AWS.Config({ region: integration.region as string, credentials: { - accessKeyId: accessId, - secretAccessKey: accessToken + accessKeyId, + secretAccessKey, + sessionToken } }); @@ -468,9 +796,11 @@ const syncSecretsAWSParameterStore = async ({ }); ssm.config.update(config); - const metadata = z.record(z.any()).parse(integration.metadata || {}); - const awsParameterStoreSecretsObj: Record = {}; - + const metadata = IntegrationMetadataSchema.parse(integration.metadata); + const awsParameterStoreSecretsObj: Record = {}; + logger.info( + `getIntegrationSecrets: integration sync triggered for ssm with [projectId=${projectId}] [environment=${integration.environment.slug}] [secretPath=${integration.secretPath}] [shouldDisableDelete=${metadata.shouldDisableDelete}]` + ); // now fetch all aws parameter store secrets let hasNext = true; let nextToken: string | undefined; @@ -497,6 +827,57 @@ const syncSecretsAWSParameterStore = async ({ nextToken = parameters.NextToken; } + let areParametersKmsKeysFetched = false; + + if (metadata.kmsKeyId) { + // we put this inside a try catch so that existing integrations without the ssm:DescribeParameters + // AWS permission will not break + try { + let hasNextDescribePage = true; + let describeNextToken: string | undefined; + + while (hasNextDescribePage) { + const parameters = await ssm + .describeParameters({ + MaxResults: 10, + NextToken: describeNextToken, + ParameterFilters: [ + { + Key: "Path", + Option: "OneLevel", + Values: [integration.path as string] + } + ] + }) + .promise(); + + if (parameters.Parameters) { + parameters.Parameters.forEach((parameter) => { + if (parameter.Name) { + const secKey = parameter.Name.substring((integration.path as string).length); + awsParameterStoreSecretsObj[secKey].KeyId = parameter.KeyId; + } + }); + } + areParametersKmsKeysFetched = true; + hasNextDescribePage = Boolean(parameters.NextToken); + describeNextToken = parameters.NextToken; + } + } catch (error) { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + if ((error as any).code === "AccessDeniedException") { + logger.error( + `AWS Parameter Store Error [integration=${integration.id}]: double check AWS account permissions (refer to the Infisical docs)` + ); + } + + response = { + isSynced: false, + syncMessage: (error as AWSError)?.message || "Error syncing with AWS Parameter Store" + }; + } + } + // Identify secrets to create // don't use Promise.all() and promise map here // it will cause rate 
limit @@ -506,32 +887,71 @@ const syncSecretsAWSParameterStore = async ({ // case: secret does not exist in AWS parameter store // -> create secret if (secrets[key].value) { + logger.info( + `getIntegrationSecrets: create secret in AWS SSM for [projectId=${projectId}] [environment=${integration.environment.slug}] [secretPath=${integration.secretPath}]` + ); await ssm .putParameter({ Name: `${integration.path}${key}`, Type: "SecureString", Value: secrets[key].value, ...(metadata.kmsKeyId && { KeyId: metadata.kmsKeyId }), - // Overwrite: true, - Tags: metadata.secretAWSTag - ? metadata.secretAWSTag.map((tag: { key: string; value: string }) => ({ - Key: tag.key, - Value: tag.value - })) - : [] + Overwrite: true }) .promise(); + if (metadata.secretAWSTag?.length) { + try { + await ssm + .addTagsToResource({ + ResourceType: "Parameter", + ResourceId: `${integration.path}${key}`, + Tags: metadata.secretAWSTag + ? metadata.secretAWSTag.map((tag: { key: string; value: string }) => ({ + Key: tag.key, + Value: tag.value + })) + : [] + }) + .promise(); + } catch (err) { + logger.error( + err, + `getIntegrationSecrets: create secret in AWS SSM for failed [projectId=${projectId}] [environment=${integration.environment.slug}] [secretPath=${integration.secretPath}]` + ); + // eslint-disable-next-line @typescript-eslint/no-explicit-any + if ((err as any).code === "AccessDeniedException") { + logger.error( + `AWS Parameter Store Error [integration=${integration.id}]: double check AWS account permissions (refer to the Infisical docs)` + ); + } + + response = { + isSynced: false, + syncMessage: (err as AWSError)?.message || "Error syncing with AWS Parameter Store" + }; + } + } } // case: secret exists in AWS parameter store } else { - // -> update secret - if (awsParameterStoreSecretsObj[key].Value !== secrets[key].value) { + logger.info( + `getIntegrationSecrets: update secret in AWS SSM for [projectId=${projectId}] [environment=${integration.environment.slug}] [secretPath=${integration.secretPath}]` + ); + + const shouldUpdateKms = + areParametersKmsKeysFetched && + Boolean(metadata.kmsKeyId) && + awsParameterStoreSecretsObj[key].KeyId !== metadata.kmsKeyId; + + // we ensure that the KMS key configured in the integration is applied for ALL parameters on AWS + if (secrets[key].value && (shouldUpdateKms || awsParameterStoreSecretsObj[key].Value !== secrets[key].value)) { await ssm .putParameter({ Name: `${integration.path}${key}`, Type: "SecureString", Value: secrets[key].value, - Overwrite: true + Overwrite: true, + ...(metadata.kmsKeyId && { KeyId: metadata.kmsKeyId }) }) .promise(); } @@ -551,12 +971,21 @@ const syncSecretsAWSParameterStore = async ({ }) .promise(); } catch (err) { + logger.error( + err, + `getIntegrationSecrets: update secret in AWS SSM for failed [projectId=${projectId}] [environment=${integration.environment.slug}] [secretPath=${integration.secretPath}]` + ); // eslint-disable-next-line @typescript-eslint/no-explicit-any if ((err as any).code === "AccessDeniedException") { logger.error( `AWS Parameter Store Error [integration=${integration.id}]: double check AWS account permissions (refer to the Infisical docs)` ); } + + response = { + isSynced: false, + syncMessage: (err as AWSError)?.message || "Error syncing with AWS Parameter Store" + }; } } } @@ -568,9 +997,18 @@ const syncSecretsAWSParameterStore = async ({ } if (!metadata.shouldDisableDelete) { + logger.info( + `getIntegrationSecrets: inside of shouldDisableDelete AWS SSM [projectId=${projectId}] 
[environment=${integration.environment.slug}] [secretPath=${integration.secretPath}] [step=1]` + ); for (const key in awsParameterStoreSecretsObj) { if (Object.hasOwn(awsParameterStoreSecretsObj, key)) { - if (!(key in secrets)) { + logger.info( + `getIntegrationSecrets: inside of shouldDisableDelete AWS SSM [projectId=${projectId}] [environment=${integration.environment.slug}] [secretPath=${integration.secretPath}] [step=2]` + ); + if (!(key in secrets) || !secrets[key].value) { + logger.info( + `getIntegrationSecrets: inside of shouldDisableDelete AWS SSM [projectId=${projectId}] [environment=${integration.environment.slug}] [secretPath=${integration.secretPath}] [step=3]` + ); // case: // -> delete secret await ssm @@ -578,6 +1016,9 @@ const syncSecretsAWSParameterStore = async ({ Name: awsParameterStoreSecretsObj[key].Name as string }) .promise(); + logger.info( + `getIntegrationSecrets: inside of shouldDisableDelete AWS SSM [projectId=${projectId}] [environment=${integration.environment.slug}] [secretPath=${integration.secretPath}] [step=4]` + ); } await new Promise((resolve) => { setTimeout(resolve, 50); @@ -585,6 +1026,8 @@ const syncSecretsAWSParameterStore = async ({ } } } + + return response; }; /** @@ -594,22 +1037,61 @@ const syncSecretsAWSSecretManager = async ({ integration, secrets, accessId, - accessToken + accessToken, + awsAssumeRoleArn, + projectId }: { integration: TIntegrations; secrets: Record; accessId: string | null; accessToken: string; + awsAssumeRoleArn: string | null; + projectId?: string; }) => { + const appCfg = getConfig(); const metadata = z.record(z.any()).parse(integration.metadata || {}); - if (!accessId) return; + if (!accessId && !awsAssumeRoleArn) { + throw new Error("AWS access ID/AWS Assume Role is required"); + } + + let accessKeyId = ""; + let secretAccessKey = ""; + let sessionToken; + if (awsAssumeRoleArn) { + const client = new STSClient({ + region: integration.region as string, + credentials: + appCfg.CLIENT_ID_AWS_INTEGRATION && appCfg.CLIENT_SECRET_AWS_INTEGRATION + ? { + accessKeyId: appCfg.CLIENT_ID_AWS_INTEGRATION, + secretAccessKey: appCfg.CLIENT_SECRET_AWS_INTEGRATION + } + : undefined + }); + const command = new AssumeRoleCommand({ + RoleArn: awsAssumeRoleArn, + RoleSessionName: `infisical-sm-${randomUUID()}`, + DurationSeconds: 900, // 15mins + ExternalId: projectId + }); + const response = await client.send(command); + if (!response.Credentials?.AccessKeyId || !response.Credentials?.SecretAccessKey) + throw new Error("Failed to assume role"); + accessKeyId = response.Credentials?.AccessKeyId; + secretAccessKey = response.Credentials?.SecretAccessKey; + sessionToken = response.Credentials?.SessionToken; + } else { + accessKeyId = accessId as string; + secretAccessKey = accessToken; + } const secretsManager = new SecretsManagerClient({ region: integration.region as string, credentials: { - accessKeyId: accessId, - secretAccessKey: accessToken + accessKeyId, + secretAccessKey, + sessionToken } }); @@ -634,12 +1116,21 @@ const syncSecretsAWSSecretManager = async ({ } if (!isEqual(secretToCompare, secretValue)) { - await secretsManager.send( - new UpdateSecretCommand({ - SecretId: secretId, - SecretString: typeof secretValue === "string" ? secretValue : JSON.stringify(secretValue) - }) - ); + if (secretValue) { + await secretsManager.send( + new UpdateSecretCommand({ + SecretId: secretId, + SecretString: typeof secretValue === "string" ? 
secretValue : JSON.stringify(secretValue) + }) + ); + // delete it + } else { + await secretsManager.send( + new DeleteSecretCommand({ + SecretId: secretId + }) + ); + } } const secretAWSTag = metadata.secretAWSTag as { key: string; value: string }[] | undefined; @@ -722,18 +1213,26 @@ const syncSecretsAWSSecretManager = async ({ } } } catch (err) { - // case when AWS manager can't find the specified secret + // case 1: when AWS manager can't find the specified secret if (err instanceof ResourceNotFoundException && secretsManager) { - await secretsManager.send( - new CreateSecretCommand({ - Name: secretId, - SecretString: typeof secretValue === "string" ? secretValue : JSON.stringify(secretValue), - ...(metadata.kmsKeyId && { KmsKeyId: metadata.kmsKeyId }), - Tags: metadata.secretAWSTag - ? metadata.secretAWSTag.map((tag: { key: string; value: string }) => ({ Key: tag.key, Value: tag.value })) - : [] - }) - ); + if (secretValue) { + await secretsManager.send( + new CreateSecretCommand({ + Name: secretId, + SecretString: typeof secretValue === "string" ? secretValue : JSON.stringify(secretValue), + ...(metadata.kmsKeyId && { KmsKeyId: metadata.kmsKeyId }), + Tags: metadata.secretAWSTag + ? metadata.secretAWSTag.map((tag: { key: string; value: string }) => ({ + Key: tag.key, + Value: tag.value + })) + : [] + }) + ); + } + // case 2: something unexpected went wrong, so we'll throw the error to reflect the error in the integration sync status + } else { + throw err; } } }; @@ -753,14 +1252,12 @@ const syncSecretsAWSSecretManager = async ({ const syncSecretsHeroku = async ({ createManySecretsRawFn, updateManySecretsRawFn, - integrationDAL, integration, secrets, accessToken }: { - createManySecretsRawFn: (params: TCreateManySecretsRawFn) => Promise>; - updateManySecretsRawFn: (params: TUpdateManySecretsRawFn) => Promise>; - integrationDAL: Pick; + createManySecretsRawFn: (params: TCreateManySecretsRawFn) => Promise>; + updateManySecretsRawFn: (params: TUpdateManySecretsRawFn) => Promise>; integration: TIntegrations & { projectId: string; environment: { @@ -862,10 +1359,6 @@ const syncSecretsHeroku = async ({ } } ); - - await integrationDAL.updateById(integration.id, { - lastUsed: new Date() - }); }; /** @@ -1265,11 +1758,13 @@ const syncSecretsNetlify = async ({ */ const syncSecretsGitHub = async ({ integration, + integrationAuth, secrets, accessToken, appendices }: { integration: TIntegrations; + integrationAuth: TIntegrationAuths; secrets: Record; accessToken: string; appendices?: { prefix: string; suffix: string }; @@ -1291,9 +1786,24 @@ const syncSecretsGitHub = async ({ selected_repositories_url?: string | undefined; } - const octokit = new Octokit({ - auth: accessToken - }); + const authMetadata = IntegrationAuthMetadataSchema.parse(integrationAuth.metadata || {}); + let octokit: Octokit; + const appCfg = getConfig(); + + if (authMetadata.installationId) { + octokit = new Octokit({ + authStrategy: createAppAuth, + auth: { + appId: appCfg.CLIENT_APP_ID_GITHUB_APP, + privateKey: appCfg.CLIENT_PRIVATE_KEY_GITHUB_APP, + installationId: authMetadata.installationId + } + }); + } else { + octokit = new Octokit({ + auth: accessToken + }); + } enum GithubScope { Repo = "github-repo", @@ -1421,7 +1931,11 @@ const syncSecretsGitHub = async ({ await octokit.request("PUT /orgs/{org}/actions/secrets/{secret_name}", { org: integration.owner as string, secret_name: key, - visibility: "all", + visibility: metadata.githubVisibility ?? 
"all", + ...(metadata.githubVisibility === "selected" && { + // we need to map the githubVisibilityRepoIds to numbers + selected_repository_ids: metadata.githubVisibilityRepoIds?.map(Number) ?? [] + }), encrypted_value: encryptedSecret, key_id: repoPublicKey.key_id }); @@ -1721,22 +2235,62 @@ const syncSecretsCircleCI = async ({ secrets: Record; accessToken: string; }) => { - const circleciOrganizationDetail = ( - await request.get(`${IntegrationUrls.CIRCLECI_API_URL}/v2/me/collaborations`, { + const getProjectSlug = async () => { + const requestConfig = { headers: { "Circle-Token": accessToken, "Accept-Encoding": "application/json" } - }) - ).data[0]; + }; - const { slug } = circleciOrganizationDetail; + try { + const projectDetails = ( + await request.get<{ slug: string }>( + `${IntegrationUrls.CIRCLECI_API_URL}/v2/project/${integration.appId}`, + requestConfig + ) + ).data; + + return projectDetails.slug; + } catch (err) { + if (err instanceof AxiosError) { + if (err.response?.data?.message !== "Not Found") { + throw new Error("Failed to get project slug from CircleCI during first attempt."); + } + } + } + + // For backwards compatibility with old CircleCI integrations where we don't keep track of the organization name, so we can't filter by organization + try { + const circleCiOrganization = ( + await request.get<{ slug: string; name: string }[]>( + `${IntegrationUrls.CIRCLECI_API_URL}/v2/me/collaborations`, + requestConfig + ) + ).data; + + // Case 1: This is a new integration where the organization name is stored under `integration.owner` + if (integration.owner) { + const org = circleCiOrganization.find((o) => o.name === integration.owner); + if (org) { + return `${org.slug}/${integration.app}`; + } + } + + // Case 2: This is an old integration where the organization name is not stored, so we have to assume the first organization is the correct one + return `${circleCiOrganization[0].slug}/${integration.app}`; + } catch (err) { + throw new Error("Failed to get project slug from CircleCI during second attempt."); + } + }; + + const projectSlug = await getProjectSlug(); // sync secrets to CircleCI await Promise.all( Object.keys(secrets).map(async (key) => request.post( - `${IntegrationUrls.CIRCLECI_API_URL}/v2/project/${slug}/${integration.app}/envvar`, + `${IntegrationUrls.CIRCLECI_API_URL}/v2/project/${projectSlug}/envvar`, { name: key, value: secrets[key].value @@ -1754,7 +2308,7 @@ const syncSecretsCircleCI = async ({ // get secrets from CircleCI const getSecretsRes = ( await request.get<{ items: { name: string }[] }>( - `${IntegrationUrls.CIRCLECI_API_URL}/v2/project/${slug}/${integration.app}/envvar`, + `${IntegrationUrls.CIRCLECI_API_URL}/v2/project/${projectSlug}/envvar`, { headers: { "Circle-Token": accessToken, @@ -1768,11 +2322,82 @@ const syncSecretsCircleCI = async ({ await Promise.all( getSecretsRes.map(async (sec) => { if (!(sec.name in secrets)) { - return request.delete( - `${IntegrationUrls.CIRCLECI_API_URL}/v2/project/${slug}/${integration.app}/envvar/${sec.name}`, + return request.delete(`${IntegrationUrls.CIRCLECI_API_URL}/v2/project/${projectSlug}/envvar/${sec.name}`, { + headers: { + "Circle-Token": accessToken, + "Content-Type": "application/json" + } + }); + } + }) + ); +}; + +/** + * Sync/push [secrets] to Databricks project + */ +const syncSecretsDatabricks = async ({ + integration, + integrationAuth, + secrets, + accessToken +}: { + integration: TIntegrations; + integrationAuth: TIntegrationAuths; + secrets: Record; + accessToken: string; +}) => { + const 
+ +/** + * Sync/push [secrets] to the Databricks secret scope [integration.app] + */ +const syncSecretsDatabricks = async ({ + integration, + integrationAuth, + secrets, + accessToken +}: { + integration: TIntegrations; + integrationAuth: TIntegrationAuths; + secrets: Record; + accessToken: string; +}) => { + const databricksApiUrl = `${integrationAuth.url}/api`; + + // sync secrets to Databricks + await Promise.all( + Object.keys(secrets).map(async (key) => + request.post( + `${databricksApiUrl}/2.0/secrets/put`, + { + scope: integration.app, + key, + string_value: secrets[key].value + }, + { + headers: { + Authorization: `Bearer ${accessToken}`, + "Accept-Encoding": "application/json" + } + } + ) + ) + ); + + // get secrets from Databricks + const getSecretsRes = ( + await request.get<{ secrets: { key: string; last_updated_timestamp: number }[] }>( + `${databricksApiUrl}/2.0/secrets/list`, + { + headers: { + Authorization: `Bearer ${accessToken}`, + "Content-Type": "application/json" + }, + params: { + scope: integration.app + } + } + ) + ).data.secrets; + + // delete secrets from Databricks + await Promise.all( + getSecretsRes.map(async (sec) => { + if (!(sec.key in secrets)) { + return request.post( + `${databricksApiUrl}/2.0/secrets/delete`, + { + scope: integration.app, + key: sec.key + }, { headers: { - "Circle-Token": accessToken, + Authorization: `Bearer ${accessToken}`, "Content-Type": "application/json" } }
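The Databricks sync above is a put-then-prune reconciliation against a single secret scope: every desired key is upserted through 2.0/secrets/put, then every remote key missing from the desired set is removed through 2.0/secrets/delete. A minimal, generic sketch of that pattern with hypothetical helpers:

// sketch: push all desired keys, then delete remote keys that are no longer desired
const reconcileScope = async (
  desired: Record<string, string>,
  remoteKeys: string[],
  put: (key: string, value: string) => Promise<void>,
  del: (key: string) => Promise<void>
) => {
  await Promise.all(Object.entries(desired).map(([key, value]) => put(key, value)));
  await Promise.all(remoteKeys.filter((key) => !(key in desired)).map((key) => del(key)));
};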
@@ -1872,6 +2497,116 @@ const syncSecretsTravisCI = async ({ } }; +/** + * Sync/push [secrets] to the Azure DevOps variable group named after [integration.environment] + */ +const syncSecretsAzureDevops = async ({ + integrationAuth, + integration, + secrets, + accessToken +}: { + integrationAuth: TIntegrationAuths; + integration: TIntegrationsWithEnvironment; + secrets: Record; + accessToken: string; +}) => { + if (!integration.appId || !integration.app) { + throw new Error("Azure DevOps: orgId and projectId are required"); + } + if (!integration.environment || !integration.environment.name) { + throw new Error("Azure DevOps: environment is required"); + } + const headers = { + Authorization: `Basic ${accessToken}` + }; + const azureDevopsApiUrl = integrationAuth.url ? `${integrationAuth.url}` : IntegrationUrls.AZURE_DEVOPS_API_URL; + + const getEnvGroupId = async (orgId: string, project: string, env: string) => { + let groupId; + const url = `${azureDevopsApiUrl}/${orgId}/${project}/_apis/distributedtask/variablegroups?api-version=7.2-preview.2`; + + const response = await request.get(url, { headers }); + for (const group of response.data.value) { + const groupName = group.name; + if (groupName === env) { + groupId = group.id; + return { groupId, groupName }; + } + } + return { groupId: "", groupName: "" }; + }; + + const { groupId, groupName } = await getEnvGroupId(integration.app, integration.appId, integration.environment.name); + + const variables: Record = {}; + for (const key of Object.keys(secrets)) { + variables[key] = { value: secrets[key].value, isSecret: true }; + } + + if (!groupId) { + // create new variable group if not present + const url = `${azureDevopsApiUrl}/${integration.app}/_apis/distributedtask/variablegroups?api-version=7.2-preview.2`; + const config = { + method: "POST", + url, + data: { + name: integration.environment.name, + description: integration.environment.name, + type: "Vsts", + owner: "Library", + variables, + variableGroupProjectReferences: [ + { + name: integration.environment.name, + projectReference: { + name: integration.appId + } + } + ] + }, + headers + }; + + const res = await request.post(url, config.data, { headers }); + if (res.status !== 200) { + throw new Error(`Azure DevOps: Failed to create variable group: ${res.statusText}`); + } + } else { + // sync variables for pre-existing variable group + const url = `${azureDevopsApiUrl}/${integration.app}/_apis/distributedtask/variablegroups/${groupId}?api-version=7.2-preview.2`; + const config = { + method: "PUT", + url, + data: { + name: groupName, + description: groupName, + type: "Vsts", + owner: "Library", + variables, + variableGroupProjectReferences: [ + { + name: groupName, + projectReference: { + name: integration.appId + } + } + ] + }, + headers + }; + const res = await request.put(url, config.data, { headers }); + if (res.status !== 200) { + throw new Error(`Azure DevOps: Failed to update variable group: ${res.statusText}`); + } + } +}; + /** * Sync/push [secrets] to GitLab repo with name [integration.app] */ @@ -2324,8 +3059,8 @@ const syncSecretsTerraformCloud = async ({ accessToken, integrationDAL }: { - createManySecretsRawFn: (params: TCreateManySecretsRawFn) => Promise>; - updateManySecretsRawFn: (params: TUpdateManySecretsRawFn) => Promise>; + createManySecretsRawFn: (params: TCreateManySecretsRawFn) => Promise>; + updateManySecretsRawFn: (params: TUpdateManySecretsRawFn) => Promise>; integration: TIntegrations & { projectId: string; environment: { @@ -2656,7 +3391,9 @@ const syncSecretsHashiCorpVault = async ({ accessId: string | null; accessToken: string; }) => { - if (!accessId) return; + if (!accessId) { + throw new Error("Access ID is required"); + } interface LoginAppRoleRes { auth: { @@ -2894,7 +3631,14 @@ const syncSecretsBitBucket = async ({ const res: { [key: string]: BitbucketVariable } = {}; let hasNextPage = true; - let variablesUrl = `${IntegrationUrls.BITBUCKET_API_URL}/2.0/repositories/${integration.targetEnvironmentId}/${integration.appId}/pipelines_config/variables`; + + const rootUrl = integration.targetServiceId ? 
// scope: deployment environment + `${IntegrationUrls.BITBUCKET_API_URL}/2.0/repositories/${integration.targetEnvironmentId}/${integration.appId}/deployments_config/environments/${integration.targetServiceId}/variables` + : // scope: repository + `${IntegrationUrls.BITBUCKET_API_URL}/2.0/repositories/${integration.targetEnvironmentId}/${integration.appId}/pipelines_config/variables`; + + let variablesUrl = rootUrl; while (hasNextPage) { const { data }: { data: VariablesResponse } = await request.get(variablesUrl, { @@ -2921,7 +3665,7 @@ const syncSecretsBitBucket = async ({ if (key in res) { // update existing secret await request.put( - `${variablesUrl}/${res[key].uuid}`, + `${rootUrl}/${res[key].uuid}`, { key, value: secrets[key].value, @@ -2937,7 +3681,7 @@ const syncSecretsBitBucket = async ({ } else { // create new secret await request.post( - variablesUrl, + rootUrl, { key, value: secrets[key].value, @@ -3466,10 +4210,12 @@ export const syncIntegrationSecrets = async ({ secrets, accessId, accessToken, - appendices + awsAssumeRoleArn, + appendices, + projectId }: { - createManySecretsRawFn: (params: TCreateManySecretsRawFn) => Promise>; - updateManySecretsRawFn: (params: TUpdateManySecretsRawFn) => Promise>; + createManySecretsRawFn: (params: TCreateManySecretsRawFn) => Promise>; + updateManySecretsRawFn: (params: TUpdateManySecretsRawFn) => Promise>; integrationDAL: Pick; integration: TIntegrations & { projectId: string; @@ -3483,9 +4229,13 @@ export const syncIntegrationSecrets = async ({ integrationAuth: TIntegrationAuths; secrets: Record; accessId: string | null; + awsAssumeRoleArn: string | null; accessToken: string; appendices?: { prefix: string; suffix: string }; + projectId?: string; }) => { + let response: { isSynced: boolean; syncMessage: string } | null = null; + switch (integration.integration) { case Integrations.GCP_SECRET_MANAGER: await syncSecretsGCPSecretManager({ @@ -3496,17 +4246,40 @@ export const syncIntegrationSecrets = async ({ break; case Integrations.AZURE_KEY_VAULT: await syncSecretsAzureKeyVault({ + integration, + secrets, + accessToken, + createManySecretsRawFn, + updateManySecretsRawFn + }); + break; + + case Integrations.AZURE_DEVOPS: + await syncSecretsAzureDevops({ + integrationAuth, integration, secrets, accessToken }); break; + case Integrations.AZURE_APP_CONFIGURATION: + await syncSecretsAzureAppConfig({ + integration, + integrationDAL, + secrets, + accessToken, + createManySecretsRawFn, + updateManySecretsRawFn + }); + break; case Integrations.AWS_PARAMETER_STORE: - await syncSecretsAWSParameterStore({ + response = await syncSecretsAWSParameterStore({ integration, secrets, accessId, - accessToken + accessToken, + awsAssumeRoleArn, + projectId }); break; case Integrations.AWS_SECRET_MANAGER: @@ -3514,14 +4287,15 @@ export const syncIntegrationSecrets = async ({ integration, secrets, accessId, - accessToken + accessToken, + awsAssumeRoleArn, + projectId }); break; case Integrations.HEROKU: await syncSecretsHeroku({ createManySecretsRawFn, updateManySecretsRawFn, - integrationDAL, integration, secrets, accessToken @@ -3546,6 +4320,7 @@ export const syncIntegrationSecrets = async ({ case Integrations.GITHUB: await syncSecretsGitHub({ integration, + integrationAuth, secrets, accessToken, appendices @@ -3587,6 +4362,14 @@ export const syncIntegrationSecrets = async ({ accessToken }); break; + case Integrations.DATABRICKS: + await syncSecretsDatabricks({ + integration, + integrationAuth, + secrets, + accessToken + }); + break; case 
Integrations.LARAVELFORGE: await syncSecretsLaravelForge({ integration, @@ -3727,4 +4510,6 @@ export const syncIntegrationSecrets = async ({ default: throw new BadRequestError({ message: "Invalid integration" }); } + + return response; }; diff --git a/backend/src/services/integration-auth/integration-token.ts b/backend/src/services/integration-auth/integration-token.ts index 0907bd0747..362b20a07d 100644 --- a/backend/src/services/integration-auth/integration-token.ts +++ b/backend/src/services/integration-auth/integration-token.ts @@ -2,7 +2,7 @@ import jwt from "jsonwebtoken"; import { getConfig } from "@app/lib/config/env"; import { request } from "@app/lib/config/request"; -import { BadRequestError } from "@app/lib/errors"; +import { BadRequestError, ForbiddenRequestError, InternalServerError, NotFoundError } from "@app/lib/errors"; import { Integrations, IntegrationUrls } from "./integration-list"; @@ -131,6 +131,35 @@ const exchangeCodeAzure = async ({ code }: { code: string }) => { }; }; +const exchangeCodeAzureAppConfig = async ({ code }: { code: string }) => { + const accessExpiresAt = new Date(); + const appCfg = getConfig(); + if (!appCfg.CLIENT_ID_AZURE || !appCfg.CLIENT_SECRET_AZURE) { + throw new BadRequestError({ message: "Missing client id and client secret" }); + } + const res = ( + await request.post( + IntegrationUrls.AZURE_TOKEN_URL, + new URLSearchParams({ + grant_type: "authorization_code", + code, + scope: "https://azconfig.io/.default openid offline_access", + client_id: appCfg.CLIENT_ID_AZURE, + client_secret: appCfg.CLIENT_SECRET_AZURE, + redirect_uri: `${appCfg.SITE_URL}/integrations/azure-app-configuration/oauth2/callback` + }) + ) + ).data; + + accessExpiresAt.setSeconds(accessExpiresAt.getSeconds() + res.expires_in); + + return { + accessToken: res.access_token, + refreshToken: res.refresh_token, + accessExpiresAt + }; +}; + const exchangeCodeHeroku = async ({ code }: { code: string }) => { const accessExpiresAt = new Date(); const appCfg = getConfig(); @@ -234,12 +263,73 @@ const exchangeCodeNetlify = async ({ code }: { code: string }) => { }; }; -const exchangeCodeGithub = async ({ code }: { code: string }) => { +const exchangeCodeGithub = async ({ code, installationId }: { code: string; installationId?: string }) => { const appCfg = getConfig(); - if (!appCfg.CLIENT_ID_GITHUB || !appCfg.CLIENT_SECRET_GITHUB) { - throw new BadRequestError({ message: "Missing client id and client secret" }); + + if (!installationId && (!appCfg.CLIENT_ID_GITHUB || !appCfg.CLIENT_SECRET_GITHUB)) { + throw new InternalServerError({ message: "Missing client id and client secret" }); } + if (installationId && (!appCfg.CLIENT_ID_GITHUB_APP || !appCfg.CLIENT_SECRET_GITHUB_APP)) { + throw new InternalServerError({ + message: "Missing Github app client ID and client secret" + }); + } + + if (installationId) { + // handle app installations + const oauthRes = ( + await request.get(IntegrationUrls.GITHUB_TOKEN_URL, { + params: { + client_id: appCfg.CLIENT_ID_GITHUB_APP, + client_secret: appCfg.CLIENT_SECRET_GITHUB_APP, + code, + redirect_uri: `${appCfg.SITE_URL}/integrations/github/oauth2/callback` + }, + headers: { + Accept: "application/json", + "Accept-Encoding": "application/json" + } + }) + ).data; + + // use access token to validate installation ID + const installationsRes = ( + await request.get<{ + installations: { + id: number; + account: { + login: string; + }; + }[]; + }>(IntegrationUrls.GITHUB_USER_INSTALLATIONS, { + headers: { + Accept: "application/json", + Authorization: 
`Bearer ${oauthRes.access_token}`, + "Accept-Encoding": "application/json" + } + }) + ).data; + + const matchingInstallation = installationsRes.installations.find( + (installation) => installation.id === +installationId + ); + + if (!matchingInstallation) { + throw new ForbiddenRequestError({ + message: "User has no access to the provided installation" + }); + } + + return { + accessToken: "", // for github app integrations, we only need the installationID from the metadata + refreshToken: null, + accessExpiresAt: null, + installationName: matchingInstallation.account.login + }; + } + + // handle oauth github integration const res = ( await request.get(IntegrationUrls.GITHUB_TOKEN_URL, { params: { @@ -346,6 +436,7 @@ type TExchangeReturn = { url?: string; teamId?: string; accountId?: string; + installationName?: string; }; /** @@ -355,11 +446,13 @@ type TExchangeReturn = { export const exchangeCode = async ({ integration, code, - url + url, + installationId }: { integration: string; code: string; url?: string; + installationId?: string; }): Promise => { switch (integration) { case Integrations.GCP_SECRET_MANAGER: @@ -370,6 +463,10 @@ export const exchangeCode = async ({ return exchangeCodeAzure({ code }); + case Integrations.AZURE_APP_CONFIGURATION: + return exchangeCodeAzureAppConfig({ + code + }); case Integrations.HEROKU: return exchangeCodeHeroku({ code @@ -384,7 +481,8 @@ export const exchangeCode = async ({ }); case Integrations.GITHUB: return exchangeCodeGithub({ - code + code, + installationId }); case Integrations.GITLAB: return exchangeCodeGitlab({ @@ -396,7 +494,7 @@ export const exchangeCode = async ({ code }); default: - throw new BadRequestError({ message: "Unknown integration" }); + throw new NotFoundError({ message: "Unknown integration" }); } }; @@ -681,6 +779,7 @@ export const exchangeRefresh = async ( accessExpiresAt: Date; }> => { switch (integration) { + case Integrations.AZURE_APP_CONFIGURATION: case Integrations.AZURE_KEY_VAULT: return exchangeRefreshAzure({ refreshToken diff --git a/backend/src/services/integration/integration-dal.ts b/backend/src/services/integration/integration-dal.ts index bada253c59..0c365eb065 100644 --- a/backend/src/services/integration/integration-dal.ts +++ b/backend/src/services/integration/integration-dal.ts @@ -22,7 +22,7 @@ export const integrationDALFactory = (db: TDbClient) => { const find = async (filter: Partial, tx?: Knex) => { try { - const docs = await integrationFindQuery(tx || db, filter); + const docs = await integrationFindQuery(tx || db.replicaNode(), filter); return docs.map(({ envId, envSlug, envName, ...el }) => ({ ...el, environment: { @@ -38,7 +38,7 @@ export const integrationDALFactory = (db: TDbClient) => { const findOne = async (filter: Partial, tx?: Knex) => { try { - const doc = await integrationFindQuery(tx || db, filter).first(); + const doc = await integrationFindQuery(tx || db.replicaNode(), filter).first(); if (!doc) return; const { envName: name, envSlug: slug, envId: id, ...el } = doc; @@ -50,7 +50,7 @@ export const integrationDALFactory = (db: TDbClient) => { const findById = async (id: string, tx?: Knex) => { try { - const doc = await integrationFindQuery(tx || db, { + const doc = await integrationFindQuery(tx || db.replicaNode(), { [`${TableName.Integration}.id` as "id"]: id }).first(); if (!doc) return; @@ -64,7 +64,7 @@ export const integrationDALFactory = (db: TDbClient) => { const findByProjectId = async (projectId: string, tx?: Knex) => { try { - const integrations = await (tx || 
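// note (added for clarity): the integration DAL reads around here switch from db to db.replicaNode(), which appears to route the query to a read replica when no transaction handle pins it to the primary, e.g. await (tx || db.replicaNode())(TableName.Integration)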
db)(TableName.Integration) + const integrations = await (tx || db.replicaNode())(TableName.Integration) .where(`${TableName.Environment}.projectId`, projectId) .join(TableName.Environment, `${TableName.Integration}.envId`, `${TableName.Environment}.id`) .select(db.ref("name").withSchema(TableName.Environment).as("envName")) @@ -90,7 +90,7 @@ export const integrationDALFactory = (db: TDbClient) => { // used for syncing secrets // this will populate integration auth also const findByProjectIdV2 = async (projectId: string, environment: string, tx?: Knex) => { - const docs = await (tx || db)(TableName.Integration) + const docs = await (tx || db.replicaNode())(TableName.Integration) .where(`${TableName.Environment}.projectId`, projectId) .where("isActive", true) .where(`${TableName.Environment}.slug`, environment) @@ -120,7 +120,14 @@ export const integrationDALFactory = (db: TDbClient) => { db.ref("accessExpiresAt").withSchema(TableName.IntegrationAuth).as("accessExpiresAtAu"), db.ref("metadata").withSchema(TableName.IntegrationAuth).as("metadataAu"), db.ref("algorithm").withSchema(TableName.IntegrationAuth).as("algorithmAu"), - db.ref("keyEncoding").withSchema(TableName.IntegrationAuth).as("keyEncodingAu") + db.ref("keyEncoding").withSchema(TableName.IntegrationAuth).as("keyEncodingAu"), + db.ref("awsAssumeIamRoleArnCipherText").withSchema(TableName.IntegrationAuth), + db.ref("awsAssumeIamRoleArnIV").withSchema(TableName.IntegrationAuth), + db.ref("awsAssumeIamRoleArnTag").withSchema(TableName.IntegrationAuth), + db.ref("encryptedRefresh").withSchema(TableName.IntegrationAuth), + db.ref("encryptedAccess").withSchema(TableName.IntegrationAuth), + db.ref("encryptedAccessId").withSchema(TableName.IntegrationAuth), + db.ref("encryptedAwsAssumeIamRoleArn").withSchema(TableName.IntegrationAuth) ); return docs.map( ({ @@ -146,6 +153,13 @@ export const integrationDALFactory = (db: TDbClient) => { algorithmAu: algorithm, keyEncodingAu: keyEncoding, accessExpiresAtAu: accessExpiresAt, + awsAssumeIamRoleArnIV, + awsAssumeIamRoleArnCipherText, + awsAssumeIamRoleArnTag, + encryptedAccess, + encryptedRefresh, + encryptedAccessId, + encryptedAwsAssumeIamRoleArn, ...el }) => ({ ...el, @@ -174,7 +188,14 @@ export const integrationDALFactory = (db: TDbClient) => { metadata, algorithm, keyEncoding, - accessExpiresAt + accessExpiresAt, + awsAssumeIamRoleArnIV, + awsAssumeIamRoleArnCipherText, + awsAssumeIamRoleArnTag, + encryptedAccess, + encryptedRefresh, + encryptedAccessId, + encryptedAwsAssumeIamRoleArn } }) ); diff --git a/backend/src/services/integration/integration-schema.ts b/backend/src/services/integration/integration-schema.ts index 1ea01e56a8..99f1d996f0 100644 --- a/backend/src/services/integration/integration-schema.ts +++ b/backend/src/services/integration/integration-schema.ts @@ -5,14 +5,18 @@ import { INTEGRATION } from "@app/lib/api-docs"; import { IntegrationMappingBehavior } from "../integration-auth/integration-list"; export const IntegrationMetadataSchema = z.object({ + initialSyncBehavior: z.string().optional().describe(INTEGRATION.CREATE.metadata.initialSyncBehavoir), + secretPrefix: z.string().optional().describe(INTEGRATION.CREATE.metadata.secretPrefix), secretSuffix: z.string().optional().describe(INTEGRATION.CREATE.metadata.secretSuffix), - initialSyncBehavior: z.string().optional().describe(INTEGRATION.CREATE.metadata.initialSyncBehavoir), + mappingBehavior: z .nativeEnum(IntegrationMappingBehavior) .optional() .describe(INTEGRATION.CREATE.metadata.mappingBehavior), + 
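// note (added for clarity, hedged): mappingBehavior above appears to control how Infisical secrets map onto destination entries (one-to-one vs many-to-one), per the IntegrationMappingBehavior enum imported at the top of this file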
shouldAutoRedeploy: z.boolean().optional().describe(INTEGRATION.CREATE.metadata.shouldAutoRedeploy), + secretGCPLabel: z .object({ labelName: z.string(), @@ -20,6 +24,7 @@ export const IntegrationMetadataSchema = z.object({ }) .optional() .describe(INTEGRATION.CREATE.metadata.secretGCPLabel), + secretAWSTag: z .array( z.object({ @@ -29,7 +34,15 @@ export const IntegrationMetadataSchema = z.object({ ) .optional() .describe(INTEGRATION.CREATE.metadata.secretAWSTag), + + githubVisibility: z + .union([z.literal("selected"), z.literal("private"), z.literal("all")]) + .optional() + .describe(INTEGRATION.CREATE.metadata.githubVisibility), + githubVisibilityRepoIds: z.array(z.string()).optional().describe(INTEGRATION.CREATE.metadata.githubVisibilityRepoIds), + kmsKeyId: z.string().optional().describe(INTEGRATION.CREATE.metadata.kmsKeyId), + shouldDisableDelete: z.boolean().optional().describe(INTEGRATION.CREATE.metadata.shouldDisableDelete), shouldEnableDelete: z.boolean().optional().describe(INTEGRATION.CREATE.metadata.shouldEnableDelete), shouldMaskSecrets: z.boolean().optional().describe(INTEGRATION.CREATE.metadata.shouldMaskSecrets), diff --git a/backend/src/services/integration/integration-service.ts b/backend/src/services/integration/integration-service.ts index da9cfc71fa..12f4c77dec 100644 --- a/backend/src/services/integration/integration-service.ts +++ b/backend/src/services/integration/integration-service.ts @@ -2,16 +2,24 @@ import { ForbiddenError, subject } from "@casl/ability"; import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service"; import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission"; -import { BadRequestError } from "@app/lib/errors"; +import { NotFoundError } from "@app/lib/errors"; import { TProjectPermission } from "@app/lib/types"; import { TIntegrationAuthDALFactory } from "../integration-auth/integration-auth-dal"; +import { TIntegrationAuthServiceFactory } from "../integration-auth/integration-auth-service"; +import { deleteIntegrationSecrets } from "../integration-auth/integration-delete-secret"; +import { TKmsServiceFactory } from "../kms/kms-service"; +import { TProjectBotServiceFactory } from "../project-bot/project-bot-service"; +import { TSecretDALFactory } from "../secret/secret-dal"; import { TSecretQueueFactory } from "../secret/secret-queue"; import { TSecretFolderDALFactory } from "../secret-folder/secret-folder-dal"; +import { TSecretImportDALFactory } from "../secret-import/secret-import-dal"; +import { TSecretV2BridgeDALFactory } from "../secret-v2-bridge/secret-v2-bridge-dal"; import { TIntegrationDALFactory } from "./integration-dal"; import { TCreateIntegrationDTO, TDeleteIntegrationDTO, + TGetIntegrationDTO, TSyncIntegrationDTO, TUpdateIntegrationDTO } from "./integration-types"; @@ -19,9 +27,15 @@ import { type TIntegrationServiceFactoryDep = { integrationDAL: TIntegrationDALFactory; integrationAuthDAL: TIntegrationAuthDALFactory; - folderDAL: Pick; + integrationAuthService: TIntegrationAuthServiceFactory; + folderDAL: Pick; permissionService: Pick; + projectBotService: TProjectBotServiceFactory; secretQueueService: Pick; + secretV2BridgeDAL: Pick; + secretImportDAL: Pick; + kmsService: Pick; + secretDAL: Pick; }; export type TIntegrationServiceFactory = ReturnType; @@ -31,7 +45,13 @@ export const integrationServiceFactory = ({ integrationAuthDAL, folderDAL, permissionService, - secretQueueService + secretQueueService, + integrationAuthService, + 
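// note (added for clarity): the DALs and services newly injected into this factory support cascading deletion of synced destination secrets when an integration is removed; see deleteIntegrationSecrets in deleteIntegration below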
projectBotService, + secretV2BridgeDAL, + secretImportDAL, + kmsService, + secretDAL }: TIntegrationServiceFactoryDep) => { const createIntegration = async ({ app, @@ -56,7 +76,8 @@ export const integrationServiceFactory = ({ targetEnvironmentId }: TCreateIntegrationDTO) => { const integrationAuth = await integrationAuthDAL.findById(integrationAuthId); - if (!integrationAuth) throw new BadRequestError({ message: "Integration auth not found" }); + if (!integrationAuth) + throw new NotFoundError({ message: `Integration auth with ID '${integrationAuthId}' not found` }); const { permission } = await permissionService.getProjectPermission( actor, @@ -69,11 +90,18 @@ export const integrationServiceFactory = ({ ForbiddenError.from(permission).throwUnlessCan( ProjectPermissionActions.Read, - subject(ProjectPermissionSub.Secrets, { environment: sourceEnvironment, secretPath }) + subject(ProjectPermissionSub.Secrets, { + environment: sourceEnvironment, + secretPath + }) ); const folder = await folderDAL.findBySecretPath(integrationAuth.projectId, sourceEnvironment, secretPath); - if (!folder) throw new BadRequestError({ message: "Folder path not found" }); + if (!folder) { + throw new NotFoundError({ + message: `Folder with path '${secretPath}' not found in environment with slug '${sourceEnvironment}'` + }); + } const integration = await integrationDAL.create({ envId: folder.envId, @@ -100,7 +128,13 @@ export const integrationServiceFactory = ({ secretPath, projectId: integrationAuth.projectId }); - return { integration, integrationAuth }; + return { + integration: { + ...integration, + environment: folder.environment + }, + integrationAuth + }; }; const updateIntegration = async ({ @@ -119,7 +153,7 @@ metadata }: TUpdateIntegrationDTO) => { const integration = await integrationDAL.findById(id); - if (!integration) throw new BadRequestError({ message: "Integration auth not found" }); + if (!integration) throw new NotFoundError({ message: `Integration with ID '${id}' not found` }); const { permission } = await permissionService.getProjectPermission( actor, @@ -130,13 +164,25 @@ ); ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Edit, ProjectPermissionSub.Integrations); - ForbiddenError.from(permission).throwUnlessCan( - ProjectPermissionActions.Read, - subject(ProjectPermissionSub.Secrets, { environment, secretPath }) - ); + const newEnvironment = environment || integration.environment.slug; + const newSecretPath = secretPath || integration.secretPath; - const folder = await folderDAL.findBySecretPath(integration.projectId, environment, secretPath); - if (!folder) throw new BadRequestError({ message: "Folder path not found" }); + if (environment || secretPath) { + ForbiddenError.from(permission).throwUnlessCan( + ProjectPermissionActions.Read, + subject(ProjectPermissionSub.Secrets, { + environment: newEnvironment, + secretPath: newSecretPath + }) + ); + } + + const folder = await folderDAL.findBySecretPath(integration.projectId, newEnvironment, newSecretPath); + if (!folder) { + throw new NotFoundError({ + message: `Folder with path '${newSecretPath}' not found in environment with slug '${newEnvironment}'` + }); + } const updatedIntegration = await integrationDAL.updateById(id, { envId: folder.envId, @@ -154,16 +200,53 @@ await secretQueueService.syncIntegrations({ environment: folder.environment.slug, - secretPath, + secretPath: newSecretPath, 
projectId: folder.projectId }); - return updatedIntegration; + return { + ...updatedIntegration, + environment: folder.environment + }; }; - const deleteIntegration = async ({ actorId, id, actor, actorAuthMethod, actorOrgId }: TDeleteIntegrationDTO) => { + const getIntegration = async ({ id, actor, actorAuthMethod, actorId, actorOrgId }: TGetIntegrationDTO) => { const integration = await integrationDAL.findById(id); - if (!integration) throw new BadRequestError({ message: "Integration auth not found" }); + + if (!integration) { + throw new NotFoundError({ + message: `Integration with ID '${id}' not found` + }); + } + + const { permission } = await permissionService.getProjectPermission( + actor, + actorId, + integration.projectId, + actorAuthMethod, + actorOrgId + ); + ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Integrations); + + return { ...integration, envId: integration.environment.id }; + }; + + const deleteIntegration = async ({ + actorId, + id, + actor, + actorAuthMethod, + actorOrgId, + shouldDeleteIntegrationSecrets + }: TDeleteIntegrationDTO) => { + const integration = await integrationDAL.findById(id); + if (!integration) throw new NotFoundError({ message: `Integration with ID '${id}' not found` }); const { permission } = await permissionService.getProjectPermission( actor, @@ -174,27 +257,23 @@ ); ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Delete, ProjectPermissionSub.Integrations); - const deletedIntegration = await integrationDAL.transaction(async (tx) => { - // delete integration - const deletedIntegrationResult = await integrationDAL.deleteById(id, tx); + const integrationAuth = await integrationAuthDAL.findById(integration.integrationAuthId); - // check if there are other integrations that share the same integration auth - const integrations = await integrationDAL.find( - { - integrationAuthId: integration.integrationAuthId - }, - tx - ); - - if (integrations.length === 0) { - // no other integration shares the same integration auth - // -> delete the integration auth - await integrationAuthDAL.deleteById(integration.integrationAuthId, tx); - } - - return deletedIntegrationResult; - }); + if (shouldDeleteIntegrationSecrets) { + await deleteIntegrationSecrets({ + integration, + integrationAuth, + projectBotService, + integrationAuthService, + secretV2BridgeDAL, + folderDAL, + secretImportDAL, + secretDAL, + kmsService + }); + } + const deletedIntegration = await integrationDAL.deleteById(id); return { ...integration, ...deletedIntegration }; }; @@ -221,7 +300,7 @@ const syncIntegration = async ({ id, actorId, actor, actorOrgId, actorAuthMethod }: TSyncIntegrationDTO) => { const integration = await integrationDAL.findById(id); if (!integration) { - throw new BadRequestError({ message: "Integration not found" }); + throw new NotFoundError({ message: `Integration with ID '${id}' not found` }); } const { permission } = await permissionService.getProjectPermission( @@ -234,6 +313,8 @@ ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Integrations); await secretQueueService.syncIntegrations({ + isManual: true, + actorId, environment: integration.environment.slug, secretPath: integration.secretPath, 
projectId: integration.projectId @@ -247,6 +328,7 @@ export const integrationServiceFactory = ({ updateIntegration, deleteIntegration, listIntegrationByProject, + getIntegration, syncIntegration }; }; diff --git a/backend/src/services/integration/integration-types.ts b/backend/src/services/integration/integration-types.ts index abbccbe90b..a27c4f6acb 100644 --- a/backend/src/services/integration/integration-types.ts +++ b/backend/src/services/integration/integration-types.ts @@ -27,6 +27,10 @@ export type TCreateIntegrationDTO = { key: string; value: string; }[]; + + githubVisibility?: string; + githubVisibilityRepoIds?: string[]; + kmsKeyId?: string; shouldDisableDelete?: boolean; shouldMaskSecrets?: boolean; @@ -35,15 +39,19 @@ export type TCreateIntegrationDTO = { }; } & Omit; +export type TGetIntegrationDTO = { + id: string; +} & Omit; + export type TUpdateIntegrationDTO = { id: string; app?: string; appId?: string; isActive?: boolean; - secretPath: string; - targetEnvironment: string; - owner: string; - environment: string; + secretPath?: string; + targetEnvironment?: string; + owner?: string; + environment?: string; metadata?: { secretPrefix?: string; secretSuffix?: string; @@ -63,6 +71,7 @@ export type TUpdateIntegrationDTO = { export type TDeleteIntegrationDTO = { id: string; + shouldDeleteIntegrationSecrets?: boolean; } & Omit; export type TSyncIntegrationDTO = { diff --git a/backend/src/services/kms/internal-kms-dal.ts b/backend/src/services/kms/internal-kms-dal.ts new file mode 100644 index 0000000000..f038fc3dbb --- /dev/null +++ b/backend/src/services/kms/internal-kms-dal.ts @@ -0,0 +1,10 @@ +import { TDbClient } from "@app/db"; +import { TableName } from "@app/db/schemas"; +import { ormify } from "@app/lib/knex"; + +export type TInternalKmsDALFactory = ReturnType; + +export const internalKmsDALFactory = (db: TDbClient) => { + const internalKmsOrm = ormify(db, TableName.InternalKms); + return internalKmsOrm; +}; diff --git a/backend/src/services/kms/kms-dal.ts b/backend/src/services/kms/kms-dal.ts deleted file mode 100644 index bee667e10a..0000000000 --- a/backend/src/services/kms/kms-dal.ts +++ /dev/null @@ -1,10 +0,0 @@ -import { TDbClient } from "@app/db"; -import { TableName } from "@app/db/schemas"; -import { ormify } from "@app/lib/knex"; - -export type TKmsDALFactory = ReturnType; - -export const kmsDALFactory = (db: TDbClient) => { - const kmsOrm = ormify(db, TableName.KmsKey); - return kmsOrm; -}; diff --git a/backend/src/services/kms/kms-fns.ts b/backend/src/services/kms/kms-fns.ts new file mode 100644 index 0000000000..06395272b2 --- /dev/null +++ b/backend/src/services/kms/kms-fns.ts @@ -0,0 +1,13 @@ +import { SymmetricEncryption } from "@app/lib/crypto/cipher"; + +export const KMS_ROOT_CONFIG_UUID = "00000000-0000-0000-0000-000000000000"; + +export const getByteLengthForAlgorithm = (encryptionAlgorithm: SymmetricEncryption) => { + switch (encryptionAlgorithm) { + case SymmetricEncryption.AES_GCM_128: + return 16; + case SymmetricEncryption.AES_GCM_256: + default: + return 32; + } +}; diff --git a/backend/src/services/kms/kms-key-dal.ts b/backend/src/services/kms/kms-key-dal.ts new file mode 100644 index 0000000000..e0246c0964 --- /dev/null +++ b/backend/src/services/kms/kms-key-dal.ts @@ -0,0 +1,122 @@ +import { Knex } from "knex"; + +import { TDbClient } from "@app/db"; +import { KmsKeysSchema, TableName, TInternalKms, TKmsKeys } from "@app/db/schemas"; +import { DatabaseError } from "@app/lib/errors"; +import { ormify, selectAllTableCols } from "@app/lib/knex"; 
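getByteLengthForAlgorithm above sizes the raw key material for the chosen cipher: AES-128-GCM takes a 16-byte key and AES-256-GCM a 32-byte key. A small usage sketch under that assumption:

import { randomBytes } from "crypto";
// sketch: generate key material sized for the chosen algorithm (16 or 32 bytes)
const generateKeyMaterial = (byteLength: 16 | 32) => randomBytes(byteLength);
// generateKeyMaterial(32) yields a Buffer suitable as an AES-256-GCM key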
+import { OrderByDirection } from "@app/lib/types"; +import { CmekOrderBy, TListCmeksByProjectIdDTO } from "@app/services/cmek/cmek-types"; + +export type TKmsKeyDALFactory = ReturnType; + +export const kmskeyDALFactory = (db: TDbClient) => { + const kmsOrm = ormify(db, TableName.KmsKey); + + // akhilmhdh: this function should never be called outside kms service + // why: because the encrypted key should never be shared with another service + const findByIdWithAssociatedKms = async (id: string, tx?: Knex) => { + try { + const result = await (tx || db.replicaNode())(TableName.KmsKey) + .where({ [`${TableName.KmsKey}.id` as "id"]: id }) + .join(TableName.Organization, `${TableName.KmsKey}.orgId`, `${TableName.Organization}.id`) + .leftJoin(TableName.InternalKms, `${TableName.KmsKey}.id`, `${TableName.InternalKms}.kmsKeyId`) + .leftJoin(TableName.ExternalKms, `${TableName.KmsKey}.id`, `${TableName.ExternalKms}.kmsKeyId`) + .first() + .select(selectAllTableCols(TableName.KmsKey)) + .select( + db.ref("id").withSchema(TableName.InternalKms).as("internalKmsId"), + db.ref("encryptedKey").withSchema(TableName.InternalKms).as("internalKmsEncryptedKey"), + db.ref("encryptionAlgorithm").withSchema(TableName.InternalKms).as("internalKmsEncryptionAlgorithm"), + db.ref("version").withSchema(TableName.InternalKms).as("internalKmsVersion"), + db.ref("id").withSchema(TableName.InternalKms).as("internalKmsId") + ) + .select( + db.ref("id").withSchema(TableName.ExternalKms).as("externalKmsId"), + db.ref("provider").withSchema(TableName.ExternalKms).as("externalKmsProvider"), + db.ref("encryptedProviderInputs").withSchema(TableName.ExternalKms).as("externalKmsEncryptedProviderInput"), + db.ref("status").withSchema(TableName.ExternalKms).as("externalKmsStatus"), + db.ref("statusDetails").withSchema(TableName.ExternalKms).as("externalKmsStatusDetails") + ) + .select( + db.ref("kmsDefaultKeyId").withSchema(TableName.Organization).as("orgKmsDefaultKeyId"), + db.ref("kmsEncryptedDataKey").withSchema(TableName.Organization).as("orgKmsEncryptedDataKey") + ); + + const data = { + ...KmsKeysSchema.parse(result), + isExternal: Boolean(result?.externalKmsId), + orgKms: { + id: result?.orgKmsDefaultKeyId, + encryptedDataKey: result?.orgKmsEncryptedDataKey + }, + externalKms: result?.externalKmsId + ? { + id: result.externalKmsId, + provider: result.externalKmsProvider, + encryptedProviderInput: result.externalKmsEncryptedProviderInput, + status: result.externalKmsStatus, + statusDetails: result.externalKmsStatusDetails + } + : undefined, + internalKms: result?.internalKmsId + ? 
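// note (added for clarity): a KMS key row joins to exactly one of InternalKms or ExternalKms, so only the matching sub-object below is populated and the other stays undefined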
{ + id: result.internalKmsId, + encryptedKey: result.internalKmsEncryptedKey, + encryptionAlgorithm: result.internalKmsEncryptionAlgorithm, + version: result.internalKmsVersion + } + : undefined + }; + return data; + } catch (error) { + throw new DatabaseError({ error, name: "Find by id" }); + } + }; + + const findKmsKeysByProjectId = async ( + { + projectId, + offset = 0, + limit, + orderBy = CmekOrderBy.Name, + orderDirection = OrderByDirection.ASC, + search + }: TListCmeksByProjectIdDTO, + tx?: Knex + ) => { + try { + const query = (tx || db.replicaNode())(TableName.KmsKey) + .where("projectId", projectId) + .where((qb) => { + if (search) { + void qb.whereILike("name", `%${search}%`); + } + }) + .join(TableName.InternalKms, `${TableName.InternalKms}.kmsKeyId`, `${TableName.KmsKey}.id`) + .select< + (TKmsKeys & + Pick & { + total_count: number; + })[] + >( + selectAllTableCols(TableName.KmsKey), + db.raw(`count(*) OVER() as total_count`), + db.ref("encryptionAlgorithm").withSchema(TableName.InternalKms), + db.ref("version").withSchema(TableName.InternalKms) + ) + .orderBy(orderBy, orderDirection); + + if (limit) { + void query.limit(limit).offset(offset); + } + + const data = await query; + + return { keys: data, totalCount: Number(data?.[0]?.total_count ?? 0) }; + } catch (error) { + throw new DatabaseError({ error, name: "Find kms keys by project id" }); + } + }; + + return { ...kmsOrm, findByIdWithAssociatedKms, findKmsKeysByProjectId }; +}; diff --git a/backend/src/services/kms/kms-service.ts b/backend/src/services/kms/kms-service.ts index 97d2b29d67..007d33e617 100644 --- a/backend/src/services/kms/kms-service.ts +++ b/backend/src/services/kms/kms-service.ts @@ -1,86 +1,859 @@ -import { TKeyStoreFactory } from "@app/keystore/keystore"; +import slugify from "@sindresorhus/slugify"; +import { Knex } from "knex"; +import { z } from "zod"; + +import { KmsKeysSchema, TKmsRootConfig } from "@app/db/schemas"; +import { AwsKmsProviderFactory } from "@app/ee/services/external-kms/providers/aws-kms"; +import { + ExternalKmsAwsSchema, + KmsProviders, + TExternalKmsProviderFns +} from "@app/ee/services/external-kms/providers/model"; +import { THsmServiceFactory } from "@app/ee/services/hsm/hsm-service"; +import { KeyStorePrefixes, TKeyStoreFactory } from "@app/keystore/keystore"; import { getConfig } from "@app/lib/config/env"; import { randomSecureBytes } from "@app/lib/crypto"; import { symmetricCipherService, SymmetricEncryption } from "@app/lib/crypto/cipher"; -import { BadRequestError } from "@app/lib/errors"; +import { generateHash } from "@app/lib/crypto/encryption"; +import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors"; import { logger } from "@app/lib/logger"; +import { alphaNumericNanoId } from "@app/lib/nanoid"; +import { getByteLengthForAlgorithm, KMS_ROOT_CONFIG_UUID } from "@app/services/kms/kms-fns"; -import { TKmsDALFactory } from "./kms-dal"; +import { TOrgDALFactory } from "../org/org-dal"; +import { TProjectDALFactory } from "../project/project-dal"; +import { TInternalKmsDALFactory } from "./internal-kms-dal"; +import { TKmsKeyDALFactory } from "./kms-key-dal"; import { TKmsRootConfigDALFactory } from "./kms-root-config-dal"; -import { TDecryptWithKmsDTO, TEncryptWithKmsDTO, TGenerateKMSDTO } from "./kms-types"; +import { + KmsDataKey, + KmsType, + RootKeyEncryptionStrategy, + TDecryptWithKeyDTO, + TDecryptWithKmsDTO, + TEncryptionWithKeyDTO, + TEncryptWithKmsDataKeyDTO, + TEncryptWithKmsDTO, + TGenerateKMSDTO, + 
TUpdateProjectSecretManagerKmsKeyDTO +} from "./kms-types"; type TKmsServiceFactoryDep = { - kmsDAL: TKmsDALFactory; - kmsRootConfigDAL: Pick; + kmsDAL: TKmsKeyDALFactory; + projectDAL: Pick; + orgDAL: Pick; + kmsRootConfigDAL: Pick; keyStore: Pick; + internalKmsDAL: Pick; + hsmService: THsmServiceFactory; }; export type TKmsServiceFactory = ReturnType; -const KMS_ROOT_CONFIG_UUID = "00000000-0000-0000-0000-000000000000"; - const KMS_ROOT_CREATION_WAIT_KEY = "wait_till_ready_kms_root_key"; const KMS_ROOT_CREATION_WAIT_TIME = 10; // akhilmhdh: Don't edit this value. This is measured for blob concatenation in kms const KMS_VERSION = "v01"; const KMS_VERSION_BLOB_LENGTH = 3; -export const kmsServiceFactory = ({ kmsDAL, kmsRootConfigDAL, keyStore }: TKmsServiceFactoryDep) => { +const KmsSanitizedSchema = KmsKeysSchema.extend({ isExternal: z.boolean() }); + +export const kmsServiceFactory = ({ + kmsDAL, + kmsRootConfigDAL, + keyStore, + internalKmsDAL, + orgDAL, + projectDAL, + hsmService +}: TKmsServiceFactoryDep) => { let ROOT_ENCRYPTION_KEY = Buffer.alloc(0); - // this is used symmetric encryption - const generateKmsKey = async ({ scopeId, scopeType, isReserved = true }: TGenerateKMSDTO) => { + /* + * Generate KMS Key + * This function is responsible for generating the infisical internal KMS for various entities, + * like the secret manager, cert manager, or the organization + */ + const generateKmsKey = async ({ + orgId, + isReserved = true, + tx, + name, + projectId, + encryptionAlgorithm = SymmetricEncryption.AES_GCM_256, + description + }: TGenerateKMSDTO) => { const cipher = symmetricCipherService(SymmetricEncryption.AES_GCM_256); - const kmsKeyMaterial = randomSecureBytes(32); - const encryptedKeyMaterial = cipher.encrypt(kmsKeyMaterial, ROOT_ENCRYPTION_KEY); - const { encryptedKey, ...doc } = await kmsDAL.create({ - version: 1, - encryptedKey: encryptedKeyMaterial, - encryptionAlgorithm: SymmetricEncryption.AES_GCM_256, - isReserved, - orgId: scopeType === "org" ? scopeId : undefined, - projectId: scopeType === "project" ? scopeId : undefined - }); + const kmsKeyMaterial = randomSecureBytes(getByteLengthForAlgorithm(encryptionAlgorithm)); + + const encryptedKeyMaterial = cipher.encrypt(kmsKeyMaterial, ROOT_ENCRYPTION_KEY); + const sanitizedName = name ? 
slugify(name) : slugify(alphaNumericNanoId(8).toLowerCase()); + const dbQuery = async (db: Knex) => { + const kmsDoc = await kmsDAL.create( + { + name: sanitizedName, + orgId, + isReserved, + projectId, + description + }, + db + ); + + await internalKmsDAL.create( + { + version: 1, + encryptedKey: encryptedKeyMaterial, + encryptionAlgorithm, + kmsKeyId: kmsDoc.id + }, + db + ); + return kmsDoc; + }; + if (tx) return dbQuery(tx); + const doc = await kmsDAL.transaction(async (tx2) => dbQuery(tx2)); return doc; }; - const encrypt = async ({ kmsId, plainText }: TEncryptWithKmsDTO) => { - const kmsDoc = await kmsDAL.findById(kmsId); - if (!kmsDoc) throw new BadRequestError({ message: "KMS ID not found" }); - // akhilmhdh: as more encryption are added do a check here on kmsDoc.encryptionAlgorithm - const cipher = symmetricCipherService(SymmetricEncryption.AES_GCM_256); - - const kmsKey = cipher.decrypt(kmsDoc.encryptedKey, ROOT_ENCRYPTION_KEY); - const encryptedPlainTextBlob = cipher.encrypt(plainText, kmsKey); - - // Buffer#1 encrypted text + Buffer#2 version number - const versionBlob = Buffer.from(KMS_VERSION, "utf8"); // length is 3 - const cipherTextBlob = Buffer.concat([encryptedPlainTextBlob, versionBlob]); - return { cipherTextBlob }; + const deleteInternalKms = async (kmsId: string, orgId: string, tx?: Knex) => { + const kms = await kmsDAL.findByIdWithAssociatedKms(kmsId, tx); + if (kms.isExternal) return; + if (kms.orgId !== orgId) throw new ForbiddenRequestError({ message: "KMS doesn't belong to organization" }); + return kmsDAL.deleteById(kmsId, tx); };
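Throughout this service, ciphertext blobs are versioned by appending the 3-byte marker "v01" (KMS_VERSION), and decryption strips it with subarray(0, -KMS_VERSION_BLOB_LENGTH). A minimal sketch of that framing, used by the encrypt/decrypt pair below:

// sketch: append / strip the trailing 3-byte version marker on a ciphertext blob
const KMS_VERSION = "v01"; // Buffer.byteLength("v01") === 3
const frame = (ciphertext: Buffer) => Buffer.concat([ciphertext, Buffer.from(KMS_VERSION, "utf8")]);
const unframe = (blob: Buffer) => blob.subarray(0, -KMS_VERSION.length);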
- const decrypt = async ({ cipherTextBlob: versionedCipherTextBlob, kmsId }: TDecryptWithKmsDTO) => { - const kmsDoc = await kmsDAL.findById(kmsId); - if (!kmsDoc) throw new BadRequestError({ message: "KMS ID not found" }); + /* + * Simple encryption service function to do all the encryption tasks in infisical. + * This can even later be exposed directly as an encryption API. + * The encrypted blob is self-describing: it carries the IV, the version marker, etc. + */ + const encryptWithInputKey = async ({ key }: Omit) => { // akhilmhdh: as more encryption algorithms are added do a check here on kmsDoc.encryptionAlgorithm const cipher = symmetricCipherService(SymmetricEncryption.AES_GCM_256); - const kmsKey = cipher.decrypt(kmsDoc.encryptedKey, ROOT_ENCRYPTION_KEY); - - const cipherTextBlob = versionedCipherTextBlob.subarray(0, -KMS_VERSION_BLOB_LENGTH); - const decryptedBlob = cipher.decrypt(cipherTextBlob, kmsKey); - return decryptedBlob; + return ({ plainText }: Pick) => { + const encryptedPlainTextBlob = cipher.encrypt(plainText, key); + // Buffer#1 encrypted text + Buffer#2 version number + const versionBlob = Buffer.from(KMS_VERSION, "utf8"); // length is 3 + const cipherTextBlob = Buffer.concat([encryptedPlainTextBlob, versionBlob]); + return { cipherTextBlob }; + }; }; - const startService = async () => { + /* + * Simple decryption service function to do all the decryption tasks in infisical. + * This can even later be exposed directly as a decryption API. + */ + const decryptWithInputKey = async ({ key }: Omit) => { + const cipher = symmetricCipherService(SymmetricEncryption.AES_GCM_256); + + return ({ cipherTextBlob: versionedCipherTextBlob }: Pick) => { + const cipherTextBlob = versionedCipherTextBlob.subarray(0, -KMS_VERSION_BLOB_LENGTH); + const decryptedBlob = cipher.decrypt(cipherTextBlob, key); + return decryptedBlob; + }; + }; + + /* + * Function to get (or lazily create) the default KMS key for an org + * We handle concurrency with redis locking and waitTillReady: + * first we check whether a KMS key is already assigned; otherwise one request acquires the lock and creates the key, + * while the remaining requests wait until creation finishes and then read the newly created key + * In real time this takes milliseconds + */ + const getOrgKmsKeyId = async (orgId: string, trx?: Knex) => { + let org = await orgDAL.findById(orgId, trx); + + if (!org) { + throw new NotFoundError({ message: `Organization with ID '${orgId}' not found` }); + } + + if (!org.kmsDefaultKeyId) { + const lock = await keyStore + .acquireLock([KeyStorePrefixes.KmsOrgKeyCreation, orgId], 3000, { retryCount: 3 }) + .catch(() => null); + + try { + if (!lock) { + await keyStore.waitTillReady({ + key: `${KeyStorePrefixes.WaitUntilReadyKmsOrgKeyCreation}${orgId}`, + keyCheckCb: (val) => val === "true", + waitingCb: () => logger.info("KMS. 
Waiting for org key to be created") + }); + + org = await orgDAL.findById(orgId, trx); + } else { + const keyId = await (trx || orgDAL).transaction(async (tx) => { + org = await orgDAL.findById(orgId, tx); + if (org.kmsDefaultKeyId) { + return org.kmsDefaultKeyId; + } + + const key = await generateKmsKey({ + isReserved: true, + orgId: org.id, + tx + }); + + await orgDAL.updateById( + org.id, + { + kmsDefaultKeyId: key.id + }, + tx + ); + + await keyStore.setItemWithExpiry(`${KeyStorePrefixes.WaitUntilReadyKmsOrgKeyCreation}${orgId}`, 10, "true"); + + return key.id; + }); + + return keyId; + } + } finally { + await lock?.release(); + } + } + + if (!org.kmsDefaultKeyId) { + throw new Error("Invalid organization KMS"); + } + + return org.kmsDefaultKeyId; + }; + + const encryptWithRootKey = () => { + const cipher = symmetricCipherService(SymmetricEncryption.AES_GCM_256); + + return (plainTextBuffer: Buffer) => { + const encryptedBuffer = cipher.encrypt(plainTextBuffer, ROOT_ENCRYPTION_KEY); + return encryptedBuffer; + }; + }; + + const decryptWithRootKey = () => { + const cipher = symmetricCipherService(SymmetricEncryption.AES_GCM_256); + + return (cipherTextBuffer: Buffer) => { + return cipher.decrypt(cipherTextBuffer, ROOT_ENCRYPTION_KEY); + }; + }; + + const decryptWithKmsKey = async ({ + kmsId, + depth = 0, + tx + }: Omit & { depth?: number; tx?: Knex }) => { + if (depth > 2) throw new BadRequestError({ message: "KMS depth max limit" }); + + const kmsDoc = await kmsDAL.findByIdWithAssociatedKms(kmsId, tx); + if (!kmsDoc) { + throw new NotFoundError({ message: `KMS with ID '${kmsId}' not found` }); + } + + if (kmsDoc.externalKms) { + let externalKms: TExternalKmsProviderFns; + + if (!kmsDoc.orgKms.id || !kmsDoc.orgKms.encryptedDataKey) { + throw new Error("Invalid organization KMS"); + } + + // The idea is external kms connection info is encrypted by an org default KMS + // This could be external kms(in future) but at the end of the day, the end KMS will be an infisical internal one + // we put a limit of depth to avoid too many cycles + const orgKmsDecryptor = await decryptWithKmsKey({ + kmsId: kmsDoc.orgKms.id, + depth: depth + 1, + tx + }); + + const orgKmsDataKey = await orgKmsDecryptor({ + cipherTextBlob: kmsDoc.orgKms.encryptedDataKey + }); + + const kmsDecryptor = await decryptWithInputKey({ + key: orgKmsDataKey + }); + + const decryptedProviderInputBlob = kmsDecryptor({ + cipherTextBlob: kmsDoc.externalKms.encryptedProviderInput + }); + + switch (kmsDoc.externalKms.provider) { + case KmsProviders.Aws: { + const decryptedProviderInput = await ExternalKmsAwsSchema.parseAsync( + JSON.parse(decryptedProviderInputBlob.toString("utf8")) + ); + + externalKms = await AwsKmsProviderFactory({ + inputs: decryptedProviderInput + }); + break; + } + default: + throw new Error("Invalid KMS provider."); + } + + return async ({ cipherTextBlob }: Pick) => { + const { data } = await externalKms.decrypt(cipherTextBlob); + + return data; + }; + } + + // internal KMS + const keyCipher = symmetricCipherService(SymmetricEncryption.AES_GCM_256); + const dataCipher = symmetricCipherService(kmsDoc.internalKms?.encryptionAlgorithm as SymmetricEncryption); + const kmsKey = keyCipher.decrypt(kmsDoc.internalKms?.encryptedKey as Buffer, ROOT_ENCRYPTION_KEY); + + return ({ cipherTextBlob: versionedCipherTextBlob }: Pick) => { + const cipherTextBlob = versionedCipherTextBlob.subarray(0, -KMS_VERSION_BLOB_LENGTH); + const decryptedBlob = dataCipher.decrypt(cipherTextBlob, kmsKey); + return 
Promise.resolve(decryptedBlob); + }; + }; + + const encryptWithKmsKey = async ({ kmsId }: Omit, tx?: Knex) => { + const kmsDoc = await kmsDAL.findByIdWithAssociatedKms(kmsId, tx); + if (!kmsDoc) { + throw new NotFoundError({ message: `KMS with ID '${kmsId}' not found` }); + } + + if (kmsDoc.externalKms) { + let externalKms: TExternalKmsProviderFns; + if (!kmsDoc.orgKms.id || !kmsDoc.orgKms.encryptedDataKey) { + throw new Error("Invalid organization KMS"); + } + + const orgKmsDecryptor = await decryptWithKmsKey({ + kmsId: kmsDoc.orgKms.id + }); + + const orgKmsDataKey = await orgKmsDecryptor({ + cipherTextBlob: kmsDoc.orgKms.encryptedDataKey + }); + + const kmsDecryptor = await decryptWithInputKey({ + key: orgKmsDataKey + }); + + const decryptedProviderInputBlob = kmsDecryptor({ + cipherTextBlob: kmsDoc.externalKms.encryptedProviderInput + }); + + switch (kmsDoc.externalKms.provider) { + case KmsProviders.Aws: { + const decryptedProviderInput = await ExternalKmsAwsSchema.parseAsync( + JSON.parse(decryptedProviderInputBlob.toString("utf8")) + ); + + externalKms = await AwsKmsProviderFactory({ + inputs: decryptedProviderInput + }); + break; + } + default: + throw new Error("Invalid KMS provider."); + } + + return async ({ plainText }: Pick) => { + const { encryptedBlob } = await externalKms.encrypt(plainText); + + return { cipherTextBlob: encryptedBlob }; + }; + } + + // internal KMS + const keyCipher = symmetricCipherService(SymmetricEncryption.AES_GCM_256); + const dataCipher = symmetricCipherService(kmsDoc.internalKms?.encryptionAlgorithm as SymmetricEncryption); + return ({ plainText }: Pick) => { + const kmsKey = keyCipher.decrypt(kmsDoc.internalKms?.encryptedKey as Buffer, ROOT_ENCRYPTION_KEY); + const encryptedPlainTextBlob = dataCipher.encrypt(plainText, kmsKey); + + // Buffer#1 encrypted text + Buffer#2 version number + const versionBlob = Buffer.from(KMS_VERSION, "utf8"); // length is 3 + const cipherTextBlob = Buffer.concat([encryptedPlainTextBlob, versionBlob]); + + return Promise.resolve({ cipherTextBlob }); + }; + }; + + const $getOrgKmsDataKey = async (orgId: string, trx?: Knex) => { + const kmsKeyId = await getOrgKmsKeyId(orgId, trx); + let org = await orgDAL.findById(orgId, trx); + + if (!org) { + throw new NotFoundError({ message: `Organization with ID '${orgId}' not found` }); + } + + if (!org.kmsEncryptedDataKey) { + const lock = await keyStore + .acquireLock([KeyStorePrefixes.KmsOrgDataKeyCreation, orgId], 500, { retryCount: 0 }) + .catch(() => null); + + try { + if (!lock) { + await keyStore.waitTillReady({ + key: `${KeyStorePrefixes.WaitUntilReadyKmsOrgDataKeyCreation}${orgId}`, + keyCheckCb: (val) => val === "true", + waitingCb: () => logger.info("KMS. 
Waiting for org data key to be created") + }); + + org = await orgDAL.findById(orgId, trx); + } else { + const orgDataKey = await (trx || orgDAL).transaction(async (tx) => { + org = await orgDAL.findById(orgId, tx); + if (org.kmsEncryptedDataKey) { + return; + } + + const dataKey = randomSecureBytes(); + const kmsEncryptor = await encryptWithKmsKey( + { + kmsId: kmsKeyId + }, + tx + ); + + const { cipherTextBlob } = await kmsEncryptor({ + plainText: dataKey + }); + + await orgDAL.updateById( + org.id, + { + kmsEncryptedDataKey: cipherTextBlob + }, + tx + ); + + await keyStore.setItemWithExpiry( + `${KeyStorePrefixes.WaitUntilReadyKmsOrgDataKeyCreation}${orgId}`, + 10, + "true" + ); + + return dataKey; + }); + + if (orgDataKey) { + return orgDataKey; + } + } + } finally { + await lock?.release(); + } + } + + if (!org.kmsEncryptedDataKey) { + throw new Error("Invalid organization KMS"); + } + + const kmsDecryptor = await decryptWithKmsKey({ + kmsId: kmsKeyId + }); + + return kmsDecryptor({ + cipherTextBlob: org.kmsEncryptedDataKey + }); + }; + + const getProjectSecretManagerKmsKeyId = async (projectId: string, trx?: Knex) => { + let project = await projectDAL.findById(projectId, trx); + if (!project) { + throw new NotFoundError({ message: `Project with ID '${projectId}' not found` }); + } + + if (!project.kmsSecretManagerKeyId) { + const lock = await keyStore + .acquireLock([KeyStorePrefixes.KmsProjectKeyCreation, projectId], 3000, { retryCount: 0 }) + .catch(() => null); + + try { + if (!lock) { + await keyStore.waitTillReady({ + key: `${KeyStorePrefixes.WaitUntilReadyKmsProjectKeyCreation}${projectId}`, + keyCheckCb: (val) => val === "true", + waitingCb: () => logger.debug("KMS. Waiting for project key to be created"), + delay: 500 + }); + + project = await projectDAL.findById(projectId); + } else { + const kmsKeyId = await (trx || projectDAL).transaction(async (tx) => { + project = await projectDAL.findById(projectId, tx); + if (project.kmsSecretManagerKeyId) { + return project.kmsSecretManagerKeyId; + } + + const key = await generateKmsKey({ + isReserved: true, + orgId: project.orgId, + tx + }); + + await projectDAL.updateById( + projectId, + { + kmsSecretManagerKeyId: key.id + }, + tx + ); + + return key.id; + }); + + await keyStore.setItemWithExpiry( + `${KeyStorePrefixes.WaitUntilReadyKmsProjectKeyCreation}${projectId}`, + 10, + "true" + ); + + return kmsKeyId; + } + } finally { + await lock?.release(); + } + } + + if (!project.kmsSecretManagerKeyId) { + throw new Error("Missing project KMS key ID"); + } + + return project.kmsSecretManagerKeyId; + }; + + const $getProjectSecretManagerKmsDataKey = async (projectId: string, trx?: Knex) => { + const kmsKeyId = await getProjectSecretManagerKmsKeyId(projectId, trx); + let project = await projectDAL.findById(projectId, trx); + + if (!project.kmsSecretManagerEncryptedDataKey) { + const lock = await keyStore + .acquireLock([KeyStorePrefixes.KmsProjectDataKeyCreation, projectId], 3000, { retryCount: 0 }) + .catch(() => null); + + try { + if (!lock) { + await keyStore.waitTillReady({ + key: `${KeyStorePrefixes.WaitUntilReadyKmsProjectDataKeyCreation}${projectId}`, + keyCheckCb: (val) => val === "true", + waitingCb: () => logger.debug("KMS. 
Waiting for secret manager data key to be created"), + delay: 500 + }); + + project = await projectDAL.findById(projectId, trx); + } else { + const projectDataKey = await (trx || projectDAL).transaction(async (tx) => { + project = await projectDAL.findById(projectId, tx); + if (project.kmsSecretManagerEncryptedDataKey) { + return; + } + + const dataKey = randomSecureBytes(); + const kmsEncryptor = await encryptWithKmsKey( + { + kmsId: kmsKeyId + }, + tx + ); + + const { cipherTextBlob } = await kmsEncryptor({ + plainText: dataKey + }); + + await projectDAL.updateById( + projectId, + { + kmsSecretManagerEncryptedDataKey: cipherTextBlob + }, + tx + ); + + await keyStore.setItemWithExpiry( + `${KeyStorePrefixes.WaitUntilReadyKmsProjectDataKeyCreation}${projectId}`, + 10, + "true" + ); + return dataKey; + }); + + if (projectDataKey) { + return projectDataKey; + } + } + } finally { + await lock?.release(); + } + } + + if (!project.kmsSecretManagerEncryptedDataKey) { + throw new Error("Missing project data key"); + } + + const kmsDecryptor = await decryptWithKmsKey({ + kmsId: kmsKeyId, + tx: trx + }); + + return kmsDecryptor({ + cipherTextBlob: project.kmsSecretManagerEncryptedDataKey + }); + }; + + const $getDataKey = async (dto: TEncryptWithKmsDataKeyDTO, trx?: Knex) => { + switch (dto.type) { + case KmsDataKey.SecretManager: { + return $getProjectSecretManagerKmsDataKey(dto.projectId, trx); + } + default: { + return $getOrgKmsDataKey(dto.orgId, trx); + } + } + }; + + const $getBasicEncryptionKey = () => { const appCfg = getConfig(); - // This will switch to a seal process and HMS flow in future + const encryptionKey = appCfg.ENCRYPTION_KEY || appCfg.ROOT_ENCRYPTION_KEY; - // if root key its base64 encoded const isBase64 = !appCfg.ENCRYPTION_KEY; - if (!encryptionKey) throw new Error("Root encryption key not found for KMS service."); + if (!encryptionKey) + throw new Error( + "Root encryption key not found for KMS service. Did you set the ENCRYPTION_KEY or ROOT_ENCRYPTION_KEY environment variables?" + ); + const encryptionKeyBuffer = Buffer.from(encryptionKey, isBase64 ? "base64" : "utf8"); + return encryptionKeyBuffer; + }; + + const $decryptRootKey = async (kmsRootConfig: TKmsRootConfig) => { + // case 1: root key is encrypted with HSM + if (kmsRootConfig.encryptionStrategy === RootKeyEncryptionStrategy.HSM) { + const hsmIsActive = await hsmService.isActive(); + if (!hsmIsActive) { + throw new Error("Unable to decrypt root KMS key. HSM service is inactive. Did you configure the HSM?"); + } + + const decryptedKey = await hsmService.decrypt(kmsRootConfig.encryptedRootKey); + return decryptedKey; + } + + // case 2: root key is encrypted with software encryption + if (kmsRootConfig.encryptionStrategy === RootKeyEncryptionStrategy.Software) { + const cipher = symmetricCipherService(SymmetricEncryption.AES_GCM_256); + const encryptionKeyBuffer = $getBasicEncryptionKey(); + + return cipher.decrypt(kmsRootConfig.encryptedRootKey, encryptionKeyBuffer); + } + + throw new Error(`Invalid root key encryption strategy: ${kmsRootConfig.encryptionStrategy}`); + }; + + const $encryptRootKey = async (plainKeyBuffer: Buffer, strategy: RootKeyEncryptionStrategy) => { + if (strategy === RootKeyEncryptionStrategy.HSM) { + const hsmIsActive = await hsmService.isActive(); + if (!hsmIsActive) { + throw new Error("Unable to encrypt root KMS key. HSM service is inactive. 
Did you configure the HSM?"); + } + const encrypted = await hsmService.encrypt(plainKeyBuffer); + return encrypted; + } + + if (strategy === RootKeyEncryptionStrategy.Software) { + const cipher = symmetricCipherService(SymmetricEncryption.AES_GCM_256); + const encryptionKeyBuffer = $getBasicEncryptionKey(); + + return cipher.encrypt(plainKeyBuffer, encryptionKeyBuffer); + } + + // eslint-disable-next-line @typescript-eslint/restrict-template-expressions + throw new Error(`Invalid root key encryption strategy: ${strategy}`); + }; + + // by keeping the decrypted data key in inner scope + // none of the entities outside can interact directly or expose the data key + // NOTICE: If changing here update migrations/utils/kms + const createCipherPairWithDataKey = async (encryptionContext: TEncryptWithKmsDataKeyDTO, trx?: Knex) => { + const dataKey = await $getDataKey(encryptionContext, trx); + + const cipher = symmetricCipherService(SymmetricEncryption.AES_GCM_256); + + return { + encryptor: ({ plainText }: Pick) => { + const encryptedPlainTextBlob = cipher.encrypt(plainText, dataKey); + + // Buffer#1 encrypted text + Buffer#2 version number + const versionBlob = Buffer.from(KMS_VERSION, "utf8"); // length is 3 + const cipherTextBlob = Buffer.concat([encryptedPlainTextBlob, versionBlob]); + return { cipherTextBlob }; + }, + decryptor: ({ cipherTextBlob: versionedCipherTextBlob }: Pick) => { + const cipherTextBlob = versionedCipherTextBlob.subarray(0, -KMS_VERSION_BLOB_LENGTH); + const decryptedBlob = cipher.decrypt(cipherTextBlob, dataKey); + return decryptedBlob; + } + }; + }; + + const updateProjectSecretManagerKmsKey = async ({ projectId, kms }: TUpdateProjectSecretManagerKmsKeyDTO) => { + const kmsKeyId = await getProjectSecretManagerKmsKeyId(projectId); + const currentKms = await kmsDAL.findById(kmsKeyId); + + // case: internal kms -> internal kms. no change needed + if (kms.type === KmsType.Internal && currentKms.isReserved) { + return KmsSanitizedSchema.parseAsync({ isExternal: false, ...currentKms }); + } + + if (kms.type === KmsType.External) { + // validate kms is scoped in org + const { kmsId } = kms; + const project = await projectDAL.findById(projectId); + if (!project) { + throw new NotFoundError({ + message: `Project with ID '${projectId}' not found` + }); + } + const kmsDoc = await kmsDAL.findByIdWithAssociatedKms(kmsId); + if (!kmsDoc) { + throw new NotFoundError({ message: `KMS with ID '${kmsId}' not found` }); + } + + if (kmsDoc.orgId !== project.orgId) { + throw new ForbiddenRequestError({ + message: "KMS ID does not belong in the organization." 
+
+  const updateProjectSecretManagerKmsKey = async ({ projectId, kms }: TUpdateProjectSecretManagerKmsKeyDTO) => {
+    const kmsKeyId = await getProjectSecretManagerKmsKeyId(projectId);
+    const currentKms = await kmsDAL.findById(kmsKeyId);
+
+    // case: internal kms -> internal kms. no change needed
+    if (kms.type === KmsType.Internal && currentKms.isReserved) {
+      return KmsSanitizedSchema.parseAsync({ isExternal: false, ...currentKms });
+    }
+
+    if (kms.type === KmsType.External) {
+      // validate that the KMS is scoped to the org
+      const { kmsId } = kms;
+      const project = await projectDAL.findById(projectId);
+      if (!project) {
+        throw new NotFoundError({
+          message: `Project with ID '${projectId}' not found`
+        });
+      }
+      const kmsDoc = await kmsDAL.findByIdWithAssociatedKms(kmsId);
+      if (!kmsDoc) {
+        throw new NotFoundError({ message: `KMS with ID '${kmsId}' not found` });
+      }
+
+      if (kmsDoc.orgId !== project.orgId) {
+        throw new ForbiddenRequestError({
+          message: "KMS ID does not belong to the organization."
+        });
+      }
+    }
+
+    const dataKey = await $getProjectSecretManagerKmsDataKey(projectId);
+    return kmsDAL.transaction(async (tx) => {
+      const project = await projectDAL.findById(projectId, tx);
+      let kmsId;
+      if (kms.type === KmsType.Internal) {
+        const internalKms = await generateKmsKey({
+          isReserved: true,
+          orgId: project.orgId,
+          tx
+        });
+        kmsId = internalKms.id;
+      } else {
+        kmsId = kms.kmsId;
+      }
+
+      const kmsEncryptor = await encryptWithKmsKey({ kmsId }, tx);
+      const { cipherTextBlob } = await kmsEncryptor({ plainText: dataKey });
+      await projectDAL.updateById(
+        projectId,
+        {
+          kmsSecretManagerKeyId: kmsId,
+          kmsSecretManagerEncryptedDataKey: cipherTextBlob
+        },
+        tx
+      );
+      if (currentKms.isReserved) {
+        await kmsDAL.deleteById(currentKms.id, tx);
+      }
+      const newKms = await kmsDAL.findById(kmsId, tx);
+      return KmsSanitizedSchema.parseAsync({ isExternal: kms.type === KmsType.External, ...newKms });
+    });
+  };
+
+  const getProjectKeyBackup = async (projectId: string) => {
+    const project = await projectDAL.findById(projectId);
+    if (!project) {
+      throw new NotFoundError({
+        message: `Project with ID '${projectId}' not found`
+      });
+    }
+
+    const secretManagerDataKey = await $getProjectSecretManagerKmsDataKey(projectId);
+    const kmsKeyIdForEncrypt = await getOrgKmsKeyId(project.orgId);
+    const kmsEncryptor = await encryptWithKmsKey({ kmsId: kmsKeyIdForEncrypt });
+    const { cipherTextBlob: encryptedSecretManagerDataKeyWithOrgKms } = await kmsEncryptor({
+      plainText: secretManagerDataKey
+    });
+
+    // backup format: version.projectId.kmsFunction.kmsId.Base64(encryptedDataKey).verificationHash
+    let secretManagerBackup = `v1.${projectId}.secretManager.${kmsKeyIdForEncrypt}.${encryptedSecretManagerDataKeyWithOrgKms.toString(
+      "base64"
+    )}`;
+
+    const verificationHash = generateHash(secretManagerBackup);
+    secretManagerBackup = `${secretManagerBackup}.${verificationHash}`;
+
+    return {
+      secretManager: secretManagerBackup
+    };
+  };
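The dot-delimited backup string is safe to split because standard base64 never contains a dot. An illustrative parser/verifier for that format (generateHash in the diff is assumed to be a stable digest; SHA-256 hex stands in here):

import { createHash } from "node:crypto";

const generateHash = (value: string) => createHash("sha256").update(value).digest("hex");

// Parses v1.<projectId>.secretManager.<kmsKeyId>.<base64DataKey>.<hash>
const parseProjectKeyBackup = (backup: string) => {
  const lastDot = backup.lastIndexOf(".");
  const payload = backup.substring(0, lastDot);
  const hash = backup.substring(lastDot + 1);
  if (generateHash(payload) !== hash) throw new Error("Invalid backup");

  const [version, projectId, kmsFunction, kmsKeyId, encryptedDataKeyB64] = payload.split(".");
  return {
    version,
    projectId,
    kmsFunction,
    kmsKeyId,
    encryptedDataKey: Buffer.from(encryptedDataKeyB64, "base64")
  };
};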
+
+  const loadProjectKeyBackup = async (projectId: string, backup: string) => {
+    const project = await projectDAL.findById(projectId);
+    if (!project) {
+      throw new NotFoundError({
+        message: `Project with ID '${projectId}' not found`
+      });
+    }
+
+    const [, backupProjectId, , backupKmsKeyId, backupBase64EncryptedDataKey, backupHash] = backup.split(".");
+    const computedHash = generateHash(backup.substring(0, backup.lastIndexOf(".")));
+    if (computedHash !== backupHash) {
+      throw new BadRequestError({
+        message: "Invalid backup"
+      });
+    }
+
+    if (backupProjectId !== projectId) {
+      throw new ForbiddenRequestError({
+        message: "Backup does not belong to project"
+      });
+    }
+
+    const kmsDecryptor = await decryptWithKmsKey({ kmsId: backupKmsKeyId });
+    const dataKey = await kmsDecryptor({
+      cipherTextBlob: Buffer.from(backupBase64EncryptedDataKey, "base64")
+    });
+
+    const newKms = await kmsDAL.transaction(async (tx) => {
+      const key = await generateKmsKey({
+        isReserved: true,
+        orgId: project.orgId,
+        tx
+      });
+
+      const kmsEncryptor = await encryptWithKmsKey({ kmsId: key.id }, tx);
+      const { cipherTextBlob } = await kmsEncryptor({ plainText: dataKey });
+
+      await projectDAL.updateById(
+        projectId,
+        {
+          kmsSecretManagerKeyId: key.id,
+          kmsSecretManagerEncryptedDataKey: cipherTextBlob
+        },
+        tx
+      );
+      return kmsDAL.findByIdWithAssociatedKms(key.id, tx);
+    });
+
+    return {
+      secretManagerKmsKey: newKms
+    };
+  };
+
+  const getKmsById = async (kmsKeyId: string, tx?: Knex) => {
+    const kms = await kmsDAL.findByIdWithAssociatedKms(kmsKeyId, tx);
+
+    if (!kms.id) {
+      throw new NotFoundError({
+        message: `KMS with ID '${kmsKeyId}' not found`
+      });
+    }
+    const { id, name, orgId, isExternal } = kms;
+    return { id, name, orgId, isExternal };
+  };
+
+  // akhilmhdh: a copy of this is made in migrations/utils/kms
+  const startService = async () => {
     const lock = await keyStore.acquireLock([`KMS_ROOT_CFG_LOCK`], 3000, { retryCount: 3 }).catch(() => null);
     if (!lock) {
       await keyStore.waitTillReady({
@@ -92,35 +865,86 @@ export const kmsServiceFactory = ({ kmsDAL, kmsRootConfigDAL, keyStore }: TKmsSe
     // check if KMS root key was already generated and saved in DB
     const kmsRootConfig = await kmsRootConfigDAL.findById(KMS_ROOT_CONFIG_UUID);
-    const cipher = symmetricCipherService(SymmetricEncryption.AES_GCM_256);
+
+    // case 1: a root key already exists in the DB
     if (kmsRootConfig) {
       if (lock) await lock.release();
-      logger.info("KMS: Encrypted ROOT Key found from DB. Decrypting.");
-      const decryptedRootKey = cipher.decrypt(kmsRootConfig.encryptedRootKey, encryptionKeyBuffer);
-      // set the flag so that other instancen nodes can start
+      logger.info(`KMS: Encrypted ROOT Key found from DB. Decrypting. [strategy=${kmsRootConfig.encryptionStrategy}]`);
+
+      const decryptedRootKey = await $decryptRootKey(kmsRootConfig);
+
+      // set the flag so that other instance nodes can start
       await keyStore.setItemWithExpiry(KMS_ROOT_CREATION_WAIT_KEY, KMS_ROOT_CREATION_WAIT_TIME, "true");
       logger.info("KMS: Loading ROOT Key into Memory.");
       ROOT_ENCRYPTION_KEY = decryptedRootKey;
       return;
     }
 
-    logger.info("KMS: Generating ROOT Key");
+    // case 2: no config is found, so we create a new root key with basic encryption
+    logger.info("KMS: Generating new ROOT Key");
     const newRootKey = randomSecureBytes(32);
-    const encryptedRootKey = cipher.encrypt(newRootKey, encryptionKeyBuffer);
-    // @ts-expect-error id is kept as fixed for idempotence and to avoid race condition
-    await kmsRootConfigDAL.create({ encryptedRootKey, id: KMS_ROOT_CONFIG_UUID });
+    const encryptedRootKey = await $encryptRootKey(newRootKey, RootKeyEncryptionStrategy.Software).catch((err) => {
+      logger.error({ hsmEnabled: hsmService.isActive() }, "KMS: Failed to encrypt ROOT Key");
+      throw err;
+    });
 
-    // set the flag so that other instancen nodes can start
+    await kmsRootConfigDAL.create({
+      // @ts-expect-error id is kept as fixed for idempotence and to avoid race condition
+      id: KMS_ROOT_CONFIG_UUID,
+      encryptedRootKey,
+      encryptionStrategy: RootKeyEncryptionStrategy.Software
+    });
+
+    // set the flag so that other instance nodes can start
     await keyStore.setItemWithExpiry(KMS_ROOT_CREATION_WAIT_KEY, KMS_ROOT_CREATION_WAIT_TIME, "true");
     logger.info("KMS: Saved and loaded ROOT Key into memory");
     if (lock) await lock.release();
 
     ROOT_ENCRYPTION_KEY = newRootKey;
   };
 
+  const updateEncryptionStrategy = async (strategy: RootKeyEncryptionStrategy) => {
+    const kmsRootConfig = await kmsRootConfigDAL.findById(KMS_ROOT_CONFIG_UUID);
+    if (!kmsRootConfig) {
+      throw new NotFoundError({ message: "KMS root config not found" });
+    }
+
+    if (kmsRootConfig.encryptionStrategy === strategy) {
+      return;
+    }
+
+    const decryptedRootKey = await $decryptRootKey(kmsRootConfig);
+    const encryptedRootKey = await $encryptRootKey(decryptedRootKey, strategy);
+
+    if (!encryptedRootKey) {
+      logger.error("KMS: Failed to re-encrypt ROOT Key with selected strategy");
+      throw new Error("Failed to re-encrypt ROOT Key with selected strategy");
+    }
+
+    await kmsRootConfigDAL.updateById(KMS_ROOT_CONFIG_UUID, {
+      encryptedRootKey,
+      encryptionStrategy: strategy
+    });
+
+    ROOT_ENCRYPTION_KEY = decryptedRootKey;
+  };
+
   return {
     startService,
     generateKmsKey,
-    encrypt,
-    decrypt
+    deleteInternalKms,
+    encryptWithKmsKey,
+    decryptWithKmsKey,
+    encryptWithInputKey,
+    decryptWithInputKey,
+    encryptWithRootKey,
+    decryptWithRootKey,
+    getOrgKmsKeyId,
+    updateEncryptionStrategy,
+    getProjectSecretManagerKmsKeyId,
+    updateProjectSecretManagerKmsKey,
+    getProjectKeyBackup,
+    loadProjectKeyBackup,
+    getKmsById,
+    createCipherPairWithDataKey
   };
 };
diff --git a/backend/src/services/kms/kms-types.ts b/backend/src/services/kms/kms-types.ts
index 96ad25f6e0..f655d4b5d1 100644
--- a/backend/src/services/kms/kms-types.ts
+++ b/backend/src/services/kms/kms-types.ts
@@ -1,7 +1,35 @@
+import { Knex } from "knex";
+
+import { SymmetricEncryption } from "@app/lib/crypto/cipher";
+
+export enum KmsDataKey {
+  Organization,
+  SecretManager
+  // CertificateManager
+}
+
+export enum KmsType {
+  External = "external",
+  Internal = "internal"
+}
+
+export type TEncryptWithKmsDataKeyDTO =
+  | { type: KmsDataKey.Organization; orgId: string }
+  | { type: KmsDataKey.SecretManager; projectId: string };
+// akhilmhdh: not implemented yet
+// | {
+//     type: KmsDataKey.CertificateManager;
+//     projectId: string;
+//   };
+
 export type TGenerateKMSDTO = {
-  scopeType: "project" | "org";
-  scopeId: string;
+  orgId: string;
+  projectId?: string;
+  encryptionAlgorithm?: SymmetricEncryption;
   isReserved?: boolean;
+  name?: string;
+  description?: string;
+  tx?: Knex;
 };
 
 export type TEncryptWithKmsDTO = {
@@ -9,7 +37,27 @@ export type TEncryptWithKmsDTO = {
   plainText: Buffer;
 };
 
+export type TEncryptionWithKeyDTO = {
+  key: Buffer;
+  plainText: Buffer;
+};
+
 export type TDecryptWithKmsDTO = {
   kmsId: string;
   cipherTextBlob: Buffer;
 };
+
+export type TDecryptWithKeyDTO = {
+  key: Buffer;
+  cipherTextBlob: Buffer;
+};
+
+export type TUpdateProjectSecretManagerKmsKeyDTO = {
+  projectId: string;
+  kms: { type: KmsType.Internal } | { type: KmsType.External; kmsId: string };
+};
+
+export enum RootKeyEncryptionStrategy {
+  Software = "SOFTWARE",
+  HSM = "HSM"
+}
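With the expanded return surface, a call site asks the service for a scoped cipher pair and never touches the underlying data key itself. A hypothetical usage sketch (kmsService stands for the value returned by kmsServiceFactory; dependency wiring and the project ID are illustrative):

import { KmsDataKey } from "@app/services/kms/kms-types";

// Encrypt and decrypt a value under the project's secret-manager data key.
const { encryptor, decryptor } = await kmsService.createCipherPairWithDataKey({
  type: KmsDataKey.SecretManager,
  projectId: "proj_123" // illustrative ID
});

const { cipherTextBlob } = encryptor({ plainText: Buffer.from("super-secret") });
const plainText = decryptor({ cipherTextBlob });
console.log(plainText.toString()); // "super-secret"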
"../project-membership/project-membership-dal"; +import { TProjectUserMembershipRoleDALFactory } from "../project-membership/project-user-membership-role-dal"; +import { TUserDALFactory } from "../user/user-dal"; +import { TAccessProjectDTO, TListOrgProjectsDTO } from "./org-admin-types"; + +type TOrgAdminServiceFactoryDep = { + permissionService: Pick; + projectDAL: Pick; + projectMembershipDAL: Pick; + projectKeyDAL: Pick; + projectBotDAL: Pick; + userDAL: Pick; + projectUserMembershipRoleDAL: Pick; +}; + +export type TOrgAdminServiceFactory = ReturnType; + +export const orgAdminServiceFactory = ({ + permissionService, + projectDAL, + projectMembershipDAL, + projectKeyDAL, + projectBotDAL, + userDAL, + projectUserMembershipRoleDAL +}: TOrgAdminServiceFactoryDep) => { + const listOrgProjects = async ({ + actor, + limit, + actorId, + offset, + search, + actorOrgId, + actorAuthMethod + }: TListOrgProjectsDTO) => { + const { permission } = await permissionService.getOrgPermission( + actor, + actorId, + actorOrgId, + actorAuthMethod, + actorOrgId + ); + ForbiddenError.from(permission).throwUnlessCan( + OrgPermissionAdminConsoleAction.AccessAllProjects, + OrgPermissionSubjects.AdminConsole + ); + const projects = await projectDAL.find( + { + orgId: actorOrgId, + $search: { + name: search ? `%${search}%` : undefined + } + }, + { offset, limit, sort: [["name", "asc"]], count: true } + ); + + const count = projects?.[0]?.count ? parseInt(projects?.[0]?.count, 10) : 0; + return { projects, count }; + }; + + const grantProjectAdminAccess = async ({ + actor, + actorId, + actorOrgId, + actorAuthMethod, + projectId + }: TAccessProjectDTO) => { + const { permission, membership } = await permissionService.getOrgPermission( + actor, + actorId, + actorOrgId, + actorAuthMethod, + actorOrgId + ); + ForbiddenError.from(permission).throwUnlessCan( + OrgPermissionAdminConsoleAction.AccessAllProjects, + OrgPermissionSubjects.AdminConsole + ); + + const project = await projectDAL.findById(projectId); + if (!project) throw new NotFoundError({ message: `Project with ID '${projectId}' not found` }); + + if (project.version === ProjectVersion.V1) { + throw new BadRequestError({ message: "Please upgrade your project on your dashboard" }); + } + + // check already there exist a membership if there return it + const projectMembership = await projectMembershipDAL.findOne({ + projectId, + userId: actorId + }); + if (projectMembership) { + // reset and make the user admin + await projectMembershipDAL.transaction(async (tx) => { + await projectUserMembershipRoleDAL.delete({ projectMembershipId: projectMembership.id }, tx); + await projectUserMembershipRoleDAL.create( + { + projectMembershipId: projectMembership.id, + role: ProjectMembershipRole.Admin + }, + tx + ); + }); + return { isExistingMember: true, membership: projectMembership }; + } + + // missing membership thus add admin back as admin to project + const ghostUser = await projectDAL.findProjectGhostUser(projectId); + if (!ghostUser) { + throw new NotFoundError({ + message: `Project owner of project with ID '${projectId}' not found` + }); + } + + const ghostUserLatestKey = await projectKeyDAL.findLatestProjectKey(ghostUser.id, projectId); + if (!ghostUserLatestKey) { + throw new NotFoundError({ + message: `Project owner's latest key of project with ID '${projectId}' not found` + }); + } + + const bot = await projectBotDAL.findOne({ projectId }); + if (!bot) { + throw new NotFoundError({ + message: `Project bot for project with ID '${projectId}' not found` + }); + 
+
+  const grantProjectAdminAccess = async ({
+    actor,
+    actorId,
+    actorOrgId,
+    actorAuthMethod,
+    projectId
+  }: TAccessProjectDTO) => {
+    const { permission, membership } = await permissionService.getOrgPermission(
+      actor,
+      actorId,
+      actorOrgId,
+      actorAuthMethod,
+      actorOrgId
+    );
+    ForbiddenError.from(permission).throwUnlessCan(
+      OrgPermissionAdminConsoleAction.AccessAllProjects,
+      OrgPermissionSubjects.AdminConsole
+    );
+
+    const project = await projectDAL.findById(projectId);
+    if (!project) throw new NotFoundError({ message: `Project with ID '${projectId}' not found` });
+
+    if (project.version === ProjectVersion.V1) {
+      throw new BadRequestError({ message: "Please upgrade your project on your dashboard" });
+    }
+
+    // check if a membership already exists; if so, reset it to admin and return it
+    const projectMembership = await projectMembershipDAL.findOne({
+      projectId,
+      userId: actorId
+    });
+    if (projectMembership) {
+      // reset and make the user admin
+      await projectMembershipDAL.transaction(async (tx) => {
+        await projectUserMembershipRoleDAL.delete({ projectMembershipId: projectMembership.id }, tx);
+        await projectUserMembershipRoleDAL.create(
+          {
+            projectMembershipId: projectMembership.id,
+            role: ProjectMembershipRole.Admin
+          },
+          tx
+        );
+      });
+      return { isExistingMember: true, membership: projectMembership };
+    }
+
+    // no membership exists, so add the user to the project as an admin
+    const ghostUser = await projectDAL.findProjectGhostUser(projectId);
+    if (!ghostUser) {
+      throw new NotFoundError({
+        message: `Project owner of project with ID '${projectId}' not found`
+      });
+    }
+
+    const ghostUserLatestKey = await projectKeyDAL.findLatestProjectKey(ghostUser.id, projectId);
+    if (!ghostUserLatestKey) {
+      throw new NotFoundError({
+        message: `Project owner's latest key of project with ID '${projectId}' not found`
+      });
+    }
+
+    const bot = await projectBotDAL.findOne({ projectId });
+    if (!bot) {
+      throw new NotFoundError({
+        message: `Project bot for project with ID '${projectId}' not found`
+      });
+    }
+
+    const botPrivateKey = infisicalSymmetricDecrypt({
+      keyEncoding: bot.keyEncoding as SecretKeyEncoding,
+      iv: bot.iv,
+      tag: bot.tag,
+      ciphertext: bot.encryptedPrivateKey
+    });
+
+    const userEncryptionKey = await userDAL.findUserEncKeyByUserId(actorId);
+    if (!userEncryptionKey)
+      throw new NotFoundError({ message: `User encryption key for user with ID '${actorId}' not found` });
+
+    const [newWsMember] = assignWorkspaceKeysToMembers({
+      decryptKey: ghostUserLatestKey,
+      userPrivateKey: botPrivateKey,
+      members: [
+        {
+          orgMembershipId: membership.id,
+          userPublicKey: userEncryptionKey.publicKey
+        }
+      ]
+    });
+
+    const updatedMembership = await projectMembershipDAL.transaction(async (tx) => {
+      const newProjectMembership = await projectMembershipDAL.create(
+        {
+          projectId,
+          userId: actorId
+        },
+        tx
+      );
+      await projectUserMembershipRoleDAL.create(
+        { projectMembershipId: newProjectMembership.id, role: ProjectMembershipRole.Admin },
+        tx
+      );
+
+      await projectKeyDAL.create(
+        {
+          encryptedKey: newWsMember.workspaceEncryptedKey,
+          nonce: newWsMember.workspaceEncryptedNonce,
+          senderId: ghostUser.id,
+          receiverId: actorId,
+          projectId
+        },
+        tx
+      );
+      return newProjectMembership;
+    });
+    return { isExistingMember: false, membership: updatedMembership };
+  };
+
+  return { listOrgProjects, grantProjectAdminAccess };
+};
diff --git a/backend/src/services/org-admin/org-admin-types.ts b/backend/src/services/org-admin/org-admin-types.ts
new file mode 100644
index 0000000000..85669fc568
--- /dev/null
+++ b/backend/src/services/org-admin/org-admin-types.ts
@@ -0,0 +1,11 @@
+import { TOrgPermission } from "@app/lib/types";
+
+export type TListOrgProjectsDTO = {
+  limit?: number;
+  offset?: number;
+  search?: string;
+} & Omit<TOrgPermission, "orgId">;
+
+export type TAccessProjectDTO = {
+  projectId: string;
+} & Omit<TOrgPermission, "orgId">;
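grantProjectAdminAccess works because every project key is wrapped per member: the ghost user's copy is opened with the bot's private key and re-wrapped under the joining admin's public key. A conceptual sketch of that re-wrap using tweetnacl box primitives as a stand-in; Infisical's assignWorkspaceKeysToMembers uses its own crypto helpers, so this is an illustration of the idea, not the exact scheme:

import nacl from "tweetnacl";

const rewrapProjectKey = (
  wrappedForGhost: { nonce: Uint8Array; ciphertext: Uint8Array; senderPublicKey: Uint8Array },
  botPrivateKey: Uint8Array, // can open the ghost user's copy
  newMemberPublicKey: Uint8Array
) => {
  // 1. unwrap the symmetric project key using the bot's private key
  const projectKey = nacl.box.open(
    wrappedForGhost.ciphertext,
    wrappedForGhost.nonce,
    wrappedForGhost.senderPublicKey,
    botPrivateKey
  );
  if (!projectKey) throw new Error("Failed to unwrap project key");

  // 2. wrap it again for the new member with a fresh nonce and ephemeral key pair
  const nonce = nacl.randomBytes(nacl.box.nonceLength);
  const ephemeral = nacl.box.keyPair();
  const ciphertext = nacl.box(projectKey, nonce, newMemberPublicKey, ephemeral.secretKey);
  return { nonce, ciphertext, senderPublicKey: ephemeral.publicKey };
};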
db.ref("roleId").withSchema(TableName.OrgMembership), + db.ref("status").withSchema(TableName.OrgMembership), + db.ref("isActive").withSchema(TableName.OrgMembership), + db.ref("email").withSchema(TableName.Users), + db.ref("username").withSchema(TableName.Users), + db.ref("firstName").withSchema(TableName.Users), + db.ref("lastName").withSchema(TableName.Users), + db.ref("isEmailVerified").withSchema(TableName.Users), + db.ref("id").withSchema(TableName.Users).as("userId"), + db.ref("publicKey").withSchema(TableName.UserEncryptionKey), + db.ref("id").withSchema(TableName.IdentityMetadata).as("metadataId"), + db.ref("key").withSchema(TableName.IdentityMetadata).as("metadataKey"), + db.ref("value").withSchema(TableName.IdentityMetadata).as("metadataValue") + ) + .where({ isGhost: false }); // MAKE SURE USER IS NOT A GHOST USER + + if (!member) return undefined; + + const doc = sqlNestRelationships({ + data: member, + key: "id", + parentMapper: ({ + email, + isEmailVerified, + username, + firstName, + lastName, + userId, + publicKey, + roleId, + orgId, + id, + role, + status, + isActive, + inviteEmail + }) => ({ + roleId, + orgId, + id, + role, + status, + isActive, + inviteEmail, + user: { + id: userId, + email, + isEmailVerified, + username, + firstName, + lastName, + userId, + publicKey + } + }), + childrenMapper: [ + { + key: "metadataId", + label: "metadata" as const, + mapper: ({ metadataKey, metadataValue, metadataId }) => ({ + id: metadataId, + key: metadataKey, + value: metadataValue + }) + } + ] + }); + + return doc?.[0]; + } catch (error) { + throw new DatabaseError({ error, name: "Find org membership by id" }); + } + }; + return { - ...orgMembershipOrm + ...orgMembershipOrm, + findOrgMembershipById }; }; diff --git a/backend/src/services/org/incident-contacts-dal.ts b/backend/src/services/org/incident-contacts-dal.ts index 1979a9c3e5..9db87b5175 100644 --- a/backend/src/services/org/incident-contacts-dal.ts +++ b/backend/src/services/org/incident-contacts-dal.ts @@ -16,7 +16,7 @@ export const incidentContactDALFactory = (db: TDbClient) => { const findByOrgId = async (orgId: string) => { try { - const incidentContacts = await db(TableName.IncidentContact).where({ orgId }); + const incidentContacts = await db.replicaNode()(TableName.IncidentContact).where({ orgId }); return incidentContacts; } catch (error) { throw new DatabaseError({ name: "Incident contact list", error }); @@ -25,7 +25,8 @@ export const incidentContactDALFactory = (db: TDbClient) => { const findOne = async (orgId: string, data: Partial) => { try { - const incidentContacts = await db(TableName.IncidentContact) + const incidentContacts = await db + .replicaNode()(TableName.IncidentContact) .where({ orgId, ...data }) .first(); return incidentContacts; diff --git a/backend/src/services/org/org-dal.ts b/backend/src/services/org/org-dal.ts index 1e52053b2a..24f1d55b0d 100644 --- a/backend/src/services/org/org-dal.ts +++ b/backend/src/services/org/org-dal.ts @@ -12,6 +12,7 @@ import { } from "@app/db/schemas"; import { DatabaseError } from "@app/lib/errors"; import { buildFindFilter, ormify, selectAllTableCols, TFindFilter, TFindOpt, withTransaction } from "@app/lib/knex"; +import { generateKnexQueryFromScim } from "@app/lib/knex/scim"; export type TOrgDALFactory = ReturnType; @@ -20,7 +21,7 @@ export const orgDALFactory = (db: TDbClient) => { const findOrgById = async (orgId: string) => { try { - const org = await db(TableName.Organization).where({ id: orgId }).first(); + const org = await 
diff --git a/backend/src/services/org/incident-contacts-dal.ts b/backend/src/services/org/incident-contacts-dal.ts
index 1979a9c3e5..9db87b5175 100644
--- a/backend/src/services/org/incident-contacts-dal.ts
+++ b/backend/src/services/org/incident-contacts-dal.ts
@@ -16,7 +16,7 @@ export const incidentContactDALFactory = (db: TDbClient) => {
 
   const findByOrgId = async (orgId: string) => {
     try {
-      const incidentContacts = await db(TableName.IncidentContact).where({ orgId });
+      const incidentContacts = await db.replicaNode()(TableName.IncidentContact).where({ orgId });
       return incidentContacts;
     } catch (error) {
       throw new DatabaseError({ name: "Incident contact list", error });
@@ -25,7 +25,8 @@ export const incidentContactDALFactory = (db: TDbClient) => {
 
   const findOne = async (orgId: string, data: Partial<TIncidentContacts>) => {
     try {
-      const incidentContacts = await db(TableName.IncidentContact)
+      const incidentContacts = await db
+        .replicaNode()(TableName.IncidentContact)
         .where({ orgId, ...data })
         .first();
       return incidentContacts;
diff --git a/backend/src/services/org/org-dal.ts b/backend/src/services/org/org-dal.ts
index 1e52053b2a..24f1d55b0d 100644
--- a/backend/src/services/org/org-dal.ts
+++ b/backend/src/services/org/org-dal.ts
@@ -12,6 +12,7 @@ import {
 } from "@app/db/schemas";
 import { DatabaseError } from "@app/lib/errors";
 import { buildFindFilter, ormify, selectAllTableCols, TFindFilter, TFindOpt, withTransaction } from "@app/lib/knex";
+import { generateKnexQueryFromScim } from "@app/lib/knex/scim";
 
 export type TOrgDALFactory = ReturnType<typeof orgDALFactory>;
 
@@ -20,7 +21,7 @@ export const orgDALFactory = (db: TDbClient) => {
 
   const findOrgById = async (orgId: string) => {
     try {
-      const org = await db(TableName.Organization).where({ id: orgId }).first();
+      const org = await db.replicaNode()(TableName.Organization).where({ id: orgId }).first();
       return org;
     } catch (error) {
       throw new DatabaseError({ error, name: "Find org by id" });
@@ -28,12 +29,37 @@ export const orgDALFactory = (db: TDbClient) => {
   };
 
   // special query
-  const findAllOrgsByUserId = async (userId: string): Promise<TOrganizations[]> => {
+  const findAllOrgsByUserId = async (userId: string): Promise<(TOrganizations & { orgAuthMethod: string })[]> => {
     try {
-      const org = await db(TableName.OrgMembership)
+      const org = (await db
+        .replicaNode()(TableName.OrgMembership)
         .where({ userId })
         .join(TableName.Organization, `${TableName.OrgMembership}.orgId`, `${TableName.Organization}.id`)
-        .select(selectAllTableCols(TableName.Organization));
+        .leftJoin(TableName.SamlConfig, (qb) => {
+          qb.on(`${TableName.SamlConfig}.orgId`, "=", `${TableName.Organization}.id`).andOn(
+            `${TableName.SamlConfig}.isActive`,
+            "=",
+            db.raw("true")
+          );
+        })
+        .leftJoin(TableName.OidcConfig, (qb) => {
+          qb.on(`${TableName.OidcConfig}.orgId`, "=", `${TableName.Organization}.id`).andOn(
+            `${TableName.OidcConfig}.isActive`,
+            "=",
+            db.raw("true")
+          );
+        })
+        .select(selectAllTableCols(TableName.Organization))
+        .select(
+          db.raw(`
+            CASE
+              WHEN ${TableName.SamlConfig}."orgId" IS NOT NULL THEN 'saml'
+              WHEN ${TableName.OidcConfig}."orgId" IS NOT NULL THEN 'oidc'
+              ELSE ''
+            END as "orgAuthMethod"
+          `)
+        )) as (TOrganizations & { orgAuthMethod: string })[];
+
       return org;
     } catch (error) {
       throw new DatabaseError({ error, name: "Find all org by user id" });
@@ -42,7 +68,8 @@ export const orgDALFactory = (db: TDbClient) => {
 
   const findOrgByProjectId = async (projectId: string): Promise<TOrganizations> => {
     try {
-      const [org] = await db(TableName.Project)
+      const [org] = await db
+        .replicaNode()(TableName.Project)
         .where({ [`${TableName.Project}.id` as "id"]: projectId })
         .join(TableName.Organization, `${TableName.Project}.orgId`, `${TableName.Organization}.id`)
         .select(selectAllTableCols(TableName.Organization));
@@ -56,7 +83,8 @@ export const orgDALFactory = (db: TDbClient) => {
   // special query
   const findAllOrgMembers = async (orgId: string) => {
     try {
-      const members = await db(TableName.OrgMembership)
+      const members = await db
+        .replicaNode()(TableName.OrgMembership)
         .where(`${TableName.OrgMembership}.orgId`, orgId)
         .join(TableName.Users, `${TableName.OrgMembership}.userId`, `${TableName.Users}.id`)
         .leftJoin(
@@ -71,19 +99,26 @@ export const orgDALFactory = (db: TDbClient) => {
           db.ref("role").withSchema(TableName.OrgMembership),
           db.ref("roleId").withSchema(TableName.OrgMembership),
           db.ref("status").withSchema(TableName.OrgMembership),
+          db.ref("isActive").withSchema(TableName.OrgMembership),
           db.ref("email").withSchema(TableName.Users),
+          db.ref("isEmailVerified").withSchema(TableName.Users),
           db.ref("username").withSchema(TableName.Users),
           db.ref("firstName").withSchema(TableName.Users),
           db.ref("lastName").withSchema(TableName.Users),
           db.ref("id").withSchema(TableName.Users).as("userId"),
+          db.ref("superAdmin").withSchema(TableName.Users),
           db.ref("publicKey").withSchema(TableName.UserEncryptionKey)
         )
-        .where({ isGhost: false }); // MAKE SURE USER IS NOT A GHOST USER
+        .where({ isGhost: false }) // MAKE SURE USER IS NOT A GHOST USER
+        .orderBy("firstName")
+        .orderBy("lastName");
 
-      return members.map(({ email, username, firstName, lastName, userId, publicKey, ...data }) => ({
-        ...data,
-        user: { email, username, firstName, lastName, id: userId, publicKey }
-      }));
+      return members.map(
+        ({ email, isEmailVerified, username, firstName, lastName, userId, publicKey, superAdmin, ...data }) => ({
+          ...data,
+          user: { email, isEmailVerified, username, firstName, lastName, id: userId, publicKey, superAdmin }
+        })
+      );
     } catch (error) {
       throw new DatabaseError({ error, name: "Find all org members" });
     }
@@ -95,7 +130,8 @@ export const orgDALFactory = (db: TDbClient) => {
       count: string;
     }
 
-    const count = await db(TableName.OrgMembership)
+    const count = await db
+      .replicaNode()(TableName.OrgMembership)
       .where(`${TableName.OrgMembership}.orgId`, orgId)
       .count("*")
       .join(TableName.Users, `${TableName.OrgMembership}.userId`, `${TableName.Users}.id`)
@@ -108,9 +144,11 @@ export const orgDALFactory = (db: TDbClient) => {
     }
   };
 
-  const findOrgMembersByUsername = async (orgId: string, usernames: string[]) => {
+  const findOrgMembersByUsername = async (orgId: string, usernames: string[], tx?: Knex) => {
     try {
-      const members = await db(TableName.OrgMembership)
+      const conn = tx || db;
+      const members = await conn(TableName.OrgMembership)
+        // .replicaNode()(TableName.OrgMembership)
         .where(`${TableName.OrgMembership}.orgId`, orgId)
         .join(TableName.Users, `${TableName.OrgMembership}.userId`, `${TableName.Users}.id`)
         .leftJoin(
@@ -119,18 +157,18 @@ export const orgDALFactory = (db: TDbClient) => {
           `${TableName.Users}.id`
         )
         .select(
-          db.ref("id").withSchema(TableName.OrgMembership),
-          db.ref("inviteEmail").withSchema(TableName.OrgMembership),
-          db.ref("orgId").withSchema(TableName.OrgMembership),
-          db.ref("role").withSchema(TableName.OrgMembership),
-          db.ref("roleId").withSchema(TableName.OrgMembership),
-          db.ref("status").withSchema(TableName.OrgMembership),
-          db.ref("username").withSchema(TableName.Users),
-          db.ref("email").withSchema(TableName.Users),
-          db.ref("firstName").withSchema(TableName.Users),
-          db.ref("lastName").withSchema(TableName.Users),
-          db.ref("id").withSchema(TableName.Users).as("userId"),
-          db.ref("publicKey").withSchema(TableName.UserEncryptionKey)
+          conn.ref("id").withSchema(TableName.OrgMembership),
+          conn.ref("inviteEmail").withSchema(TableName.OrgMembership),
+          conn.ref("orgId").withSchema(TableName.OrgMembership),
+          conn.ref("role").withSchema(TableName.OrgMembership),
+          conn.ref("roleId").withSchema(TableName.OrgMembership),
+          conn.ref("status").withSchema(TableName.OrgMembership),
+          conn.ref("username").withSchema(TableName.Users),
+          conn.ref("email").withSchema(TableName.Users),
+          conn.ref("firstName").withSchema(TableName.Users),
+          conn.ref("lastName").withSchema(TableName.Users),
+          conn.ref("id").withSchema(TableName.Users).as("userId"),
+          conn.ref("publicKey").withSchema(TableName.UserEncryptionKey)
         )
         .where({ isGhost: false })
         .whereIn("username", usernames);
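Two read-routing conventions coexist in this DAL: plain reads go through db.replicaNode() so a read replica can serve them, while findOrgMembersByUsername accepts an optional transaction and falls back to the primary connection (note the commented-out replicaNode call), since reads inside a transaction must share its connection. A condensed sketch of the convention, with replicaNode typed after the wrapper used in this diff and table names illustrative:

import { Knex } from "knex";

type TDbClient = Knex & { replicaNode: () => Knex };

// Replica-eligible read: no transaction involved, so a replica may serve it.
const findContacts = async (db: TDbClient, orgId: string) =>
  db.replicaNode()("incident_contacts").where({ orgId });

// Transaction-aware read: a caller-supplied tx pins the query to the
// primary connection so it observes the transaction's own writes.
const findMembers = async (db: TDbClient, orgId: string, tx?: Knex) => {
  const conn = tx || db;
  return conn("org_memberships").where({ orgId });
};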
@@ -145,7 +183,8 @@ export const orgDALFactory = (db: TDbClient) => {
 
   const findOrgGhostUser = async (orgId: string) => {
     try {
-      const member = await db(TableName.OrgMembership)
+      const member = await db
+        .replicaNode()(TableName.OrgMembership)
         .where({ orgId })
         .join(TableName.Users, `${TableName.OrgMembership}.userId`, `${TableName.Users}.id`)
         .leftJoin(TableName.UserEncryptionKey, `${TableName.UserEncryptionKey}.userId`, `${TableName.Users}.id`)
@@ -169,7 +208,8 @@ export const orgDALFactory = (db: TDbClient) => {
 
   const ghostUserExists = async (orgId: string) => {
     try {
-      const member = await db(TableName.OrgMembership)
+      const member = await db
+        .replicaNode()(TableName.OrgMembership)
         .where({ orgId })
         .join(TableName.Users, `${TableName.OrgMembership}.userId`, `${TableName.Users}.id`)
         .leftJoin(TableName.UserEncryptionKey, `${TableName.UserEncryptionKey}.userId`, `${TableName.Users}.id`)
@@ -200,9 +240,9 @@ export const orgDALFactory = (db: TDbClient) => {
     }
   };
 
-  const updateById = async (orgId: string, data: Partial<TOrganizations>) => {
+  const updateById = async (orgId: string, data: Partial<TOrganizations>, tx?: Knex) => {
     try {
-      const [org] = await db(TableName.Organization)
+      const [org] = await (tx || db)(TableName.Organization)
         .where({ id: orgId })
         .update({ ...data })
         .returning("*");
@@ -257,7 +297,7 @@ export const orgDALFactory = (db: TDbClient) => {
     { offset, limit, sort, tx }: TFindOpt<TOrgMemberships> = {}
   ) => {
     try {
-      const query = (tx || db)(TableName.OrgMembership)
+      const query = (tx || db.replicaNode())(TableName.OrgMembership)
         // eslint-disable-next-line
         .where(buildFindFilter(filter))
         .join(TableName.Users, `${TableName.Users}.id`, `${TableName.OrgMembership}.userId`)
@@ -270,6 +310,7 @@ export const orgDALFactory = (db: TDbClient) => {
         .select(
           selectAllTableCols(TableName.OrgMembership),
           db.ref("email").withSchema(TableName.Users),
+          db.ref("isEmailVerified").withSchema(TableName.Users),
           db.ref("username").withSchema(TableName.Users),
           db.ref("firstName").withSchema(TableName.Users),
           db.ref("lastName").withSchema(TableName.Users),
@@ -290,6 +331,67 @@ export const orgDALFactory = (db: TDbClient) => {
     }
   };
 
+  const findMembershipWithScimFilter = async (
+    orgId: string,
+    scimFilter: string | undefined,
+    { offset, limit, sort, tx }: TFindOpt<TOrgMemberships> = {}
+  ) => {
+    try {
+      const query = (tx || db.replicaNode())(TableName.OrgMembership)
+        // eslint-disable-next-line
+        .where(`${TableName.OrgMembership}.orgId`, orgId)
+        .where((qb) => {
+          if (scimFilter) {
+            void generateKnexQueryFromScim(qb, scimFilter, (attrPath) => {
+              switch (attrPath) {
+                case "active":
+                  return `${TableName.OrgMembership}.isActive`;
+                case "userName":
+                  return `${TableName.UserAliases}.externalId`;
+                case "name.givenName":
+                  return `${TableName.Users}.firstName`;
+                case "name.familyName":
+                  return `${TableName.Users}.lastName`;
+                case "email.value":
+                  return `${TableName.Users}.email`;
+                default:
+                  return null;
+              }
+            });
+          }
+        })
+        .join(TableName.Users, `${TableName.Users}.id`, `${TableName.OrgMembership}.userId`)
+        .join(TableName.Organization, `${TableName.Organization}.id`, `${TableName.OrgMembership}.orgId`)
+        .leftJoin(TableName.UserAliases, function joinUserAlias() {
+          this.on(`${TableName.UserAliases}.userId`, "=", `${TableName.OrgMembership}.userId`)
+            .andOn(`${TableName.UserAliases}.orgId`, "=", `${TableName.OrgMembership}.orgId`)
+            .andOn(`${TableName.UserAliases}.aliasType`, "=", (tx || db).raw("?", ["saml"]));
+        })
+        .select(
+          selectAllTableCols(TableName.OrgMembership),
+          db.ref("email").withSchema(TableName.Users),
+          db.ref("isEmailVerified").withSchema(TableName.Users),
+          db.ref("username").withSchema(TableName.Users),
+          db.ref("firstName").withSchema(TableName.Users),
+          db.ref("lastName").withSchema(TableName.Users),
+          db.ref("scimEnabled").withSchema(TableName.Organization),
+          db.ref("defaultMembershipRole").withSchema(TableName.Organization),
+          db.ref("externalId").withSchema(TableName.UserAliases)
+        )
+        .where({ isGhost: false });
+
+      if (limit) void query.limit(limit);
+      if (offset) void query.offset(offset);
+      if (sort) {
+        void query.orderBy(sort.map(([column, order, nulls]) => ({ column: column as string, order, nulls })));
+      }
+      const res = await query;
+      return res;
+    } catch (error) {
+      throw new DatabaseError({ error, name: "Find one" });
+    }
+  };
+
   return withTransaction(db, {
     ...orgOrm,
     findOrgByProjectId,
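generateKnexQueryFromScim translates a SCIM filter expression into Knex where clauses; the callback maps SCIM attribute paths onto concrete columns, and returning null marks an attribute this resource doesn't map. A hedged usage sketch with an example filter an identity provider might send (how the helper treats unmapped attributes is up to its implementation):

const scimFilter = 'userName eq "jane@acme.com" and active eq true'; // illustrative

const rows = await db(TableName.OrgMembership)
  .where((qb) => {
    void generateKnexQueryFromScim(qb, scimFilter, (attrPath) => {
      switch (attrPath) {
        case "active":
          return `${TableName.OrgMembership}.isActive`;
        case "userName":
          return `${TableName.UserAliases}.externalId`;
        default:
          return null; // attribute not supported for this resource
      }
    });
  })
  .select("*");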
@@ -304,6 +406,7 @@ export const orgDALFactory = (db: TDbClient) => {
     updateById,
     deleteById,
     findMembership,
+    findMembershipWithScimFilter,
     createMembership,
     updateMembershipById,
     deleteMembershipById,
diff --git a/backend/src/services/org/org-fns.ts b/backend/src/services/org/org-fns.ts
index a63ffabee8..559ccc7db3 100644
--- a/backend/src/services/org/org-fns.ts
+++ b/backend/src/services/org/org-fns.ts
@@ -1,4 +1,5 @@
 import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
+import { TProjectUserAdditionalPrivilegeDALFactory } from "@app/ee/services/project-user-additional-privilege/project-user-additional-privilege-dal";
 import { TOrgDALFactory } from "@app/services/org/org-dal";
 import { TProjectKeyDALFactory } from "@app/services/project-key/project-key-dal";
 import { TProjectMembershipDALFactory } from "@app/services/project-membership/project-membership-dal";
@@ -12,6 +13,7 @@ type TDeleteOrgMembership = {
   projectKeyDAL: Pick<TProjectKeyDALFactory, "find" | "delete">;
   userAliasDAL: Pick<TUserAliasDALFactory, "delete">;
   licenseService: Pick<TLicenseServiceFactory, "getPlan" | "updateSubscriptionOrgMemberCount">;
+  projectUserAdditionalPrivilegeDAL: Pick<TProjectUserAdditionalPrivilegeDALFactory, "delete">;
 };
 
 export const deleteOrgMembershipFn = async ({
@@ -19,6 +21,7 @@ export const deleteOrgMembershipFn = async ({
   orgId,
   orgDAL,
   projectMembershipDAL,
+  projectUserAdditionalPrivilegeDAL,
   projectKeyDAL,
   userAliasDAL,
   licenseService
@@ -39,6 +42,13 @@ export const deleteOrgMembershipFn = async ({
     tx
   );
 
+  await projectUserAdditionalPrivilegeDAL.delete(
+    {
+      userId: orgMembership.userId
+    },
+    tx
+  );
+
   // Get all the project memberships of the user in the organization
   const projectMemberships = await projectMembershipDAL.findProjectMembershipsByUserId(orgId, orgMembership.userId);
diff --git a/backend/src/services/org/org-role-fns.ts b/backend/src/services/org/org-role-fns.ts
new file mode 100644
index 0000000000..5bff1e324a
--- /dev/null
+++ b/backend/src/services/org/org-role-fns.ts
@@ -0,0 +1,57 @@
+import { OrgMembershipRole } from "@app/db/schemas";
+import { TFeatureSet } from "@app/ee/services/license/license-types";
+import { BadRequestError, NotFoundError } from "@app/lib/errors";
+import { TOrgRoleDALFactory } from "@app/services/org/org-role-dal";
+
+const RESERVED_ORG_ROLE_SLUGS = Object.values(OrgMembershipRole).filter((role) => role !== "custom");
+
+export const isCustomOrgRole = (roleSlug: string) => !RESERVED_ORG_ROLE_SLUGS.find((r) => r === roleSlug);
+
+// this is only for updating an org
+export const getDefaultOrgMembershipRoleForUpdateOrg = async ({
+  membershipRoleSlug,
+  orgRoleDAL,
+  plan,
+  orgId
+}: {
+  orgId: string;
+  membershipRoleSlug: string;
+  orgRoleDAL: TOrgRoleDALFactory;
+  plan: TFeatureSet;
+}) => {
+  if (isCustomOrgRole(membershipRoleSlug)) {
+    if (!plan?.rbac)
+      throw new BadRequestError({
+        message:
+          "Failed to set custom default role due to plan RBAC restriction. Upgrade plan to set custom default org membership role."
+      });
+
+    const customRole = await orgRoleDAL.findOne({ slug: membershipRoleSlug, orgId });
+    if (!customRole) {
+      throw new NotFoundError({
+        name: "UpdateOrg",
+        message: `Organization role with slug '${membershipRoleSlug}' not found`
+      });
+    }
+
+    // use ID for default role
+    return customRole.id;
+  }
+
+  // not custom, use reserved slug
+  return membershipRoleSlug;
+};
+
+// this is only for creating an org membership
+export const getDefaultOrgMembershipRole = async (
+  defaultOrgMembershipRole: string // can either be ID or reserved slug
+) => {
+  if (isCustomOrgRole(defaultOrgMembershipRole))
+    return {
+      roleId: defaultOrgMembershipRole,
+      role: OrgMembershipRole.Custom
+    };
+
+  // will be reserved slug
+  return { roleId: undefined, role: defaultOrgMembershipRole as OrgMembershipRole };
+};
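The two helpers above encode one rule: anything that isn't a reserved slug must be a custom role, and custom roles are stored by ID with the role column set to "custom". A quick illustration of the slug check (the enum values mirror the reserved slugs used in this diff):

enum OrgMembershipRole {
  Admin = "admin",
  Member = "member",
  NoAccess = "no-access",
  Custom = "custom"
}

const RESERVED = Object.values(OrgMembershipRole).filter((r) => r !== OrgMembershipRole.Custom);
const isCustomOrgRole = (slug: string) => !RESERVED.includes(slug as OrgMembershipRole);

console.log(isCustomOrgRole("member"));     // false -> store role = "member", roleId = null
console.log(isCustomOrgRole("sre-oncall")); // true  -> store role = "custom", roleId = <custom role id>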
diff --git a/backend/src/services/org/org-role-service.ts b/backend/src/services/org/org-role-service.ts
index 70c54ff183..1243055eb7 100644
--- a/backend/src/services/org/org-role-service.ts
+++ b/backend/src/services/org/org-role-service.ts
@@ -10,7 +10,9 @@ import {
   OrgPermissionSubjects
 } from "@app/ee/services/permission/org-permission";
 import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
-import { BadRequestError } from "@app/lib/errors";
+import { BadRequestError, NotFoundError } from "@app/lib/errors";
+import { TExternalGroupOrgRoleMappingDALFactory } from "@app/services/external-group-org-role-mapping/external-group-org-role-mapping-dal";
+import { TOrgDALFactory } from "@app/services/org/org-dal";
 
 import { ActorAuthMethod } from "../auth/auth-type";
 import { TOrgRoleDALFactory } from "./org-role-dal";
@@ -18,11 +20,18 @@ import { TOrgRoleDALFactory } from "./org-role-dal";
 type TOrgRoleServiceFactoryDep = {
   orgRoleDAL: TOrgRoleDALFactory;
   permissionService: TPermissionServiceFactory;
+  orgDAL: TOrgDALFactory;
+  externalGroupOrgRoleMappingDAL: TExternalGroupOrgRoleMappingDALFactory;
 };
 
 export type TOrgRoleServiceFactory = ReturnType<typeof orgRoleServiceFactory>;
 
-export const orgRoleServiceFactory = ({ orgRoleDAL, permissionService }: TOrgRoleServiceFactoryDep) => {
+export const orgRoleServiceFactory = ({
+  orgRoleDAL,
+  orgDAL,
+  permissionService,
+  externalGroupOrgRoleMappingDAL
+}: TOrgRoleServiceFactoryDep) => {
   const createRole = async (
     userId: string,
     orgId: string,
@@ -42,6 +51,61 @@ export const orgRoleServiceFactory = ({ orgRoleDAL, permissionService }: TOrgRol
     return role;
   };
 
+  const getRole = async (
+    userId: string,
+    orgId: string,
+    roleId: string,
+    actorAuthMethod: ActorAuthMethod,
+    actorOrgId: string | undefined
+  ) => {
+    const { permission } = await permissionService.getUserOrgPermission(userId, orgId, actorAuthMethod, actorOrgId);
+    ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Read, OrgPermissionSubjects.Role);
+
+    switch (roleId) {
+      case "b11b49a9-09a9-4443-916a-4246f9ff2c69": {
+        return {
+          id: roleId,
+          orgId,
+          name: "Admin",
+          slug: "admin",
+          description: "Complete administration access over the organization",
+          permissions: packRules(orgAdminPermissions),
+          createdAt: new Date(),
+          updatedAt: new Date()
+        };
+      }
+      case "b11b49a9-09a9-4443-916a-4246f9ff2c70": {
+        return {
+          id: roleId,
+          orgId,
+          name: "Member",
+          slug: "member",
+          description: "Non-administrative role in an organization",
+          permissions: packRules(orgMemberPermissions),
+          createdAt: new Date(),
+          updatedAt: new Date()
+        };
+      }
+      case "b10d49a9-09a9-4443-916a-4246f9ff2c72": {
+        return {
+          id: "b10d49a9-09a9-4443-916a-4246f9ff2c72", // dummy ID for zod validation in response
+          orgId,
+          name: "No Access",
+          slug: "no-access",
+          description: "No access to any resources in the organization",
+          permissions: packRules(orgNoAccessPermissions),
+          createdAt: new Date(),
+          updatedAt: new Date()
+        };
+      }
+      default: {
+        const role = await orgRoleDAL.findOne({ id: roleId, orgId });
+        if (!role) throw new NotFoundError({ message: `Organization role with ID '${roleId}' not found` });
+        return role;
+      }
+    }
+  };
+
   const updateRole = async (
     userId: string,
     orgId: string,
@@ -61,7 +125,7 @@ export const orgRoleServiceFactory = ({ orgRoleDAL, permissionService }: TOrgRol
       { id: roleId, orgId },
       { ...data, permissions: data.permissions ? JSON.stringify(data.permissions) : undefined }
     );
-    if (!updatedRole) throw new BadRequestError({ message: "Role not found", name: "Update role" });
+    if (!updatedRole) throw new NotFoundError({ message: `Organization role with ID '${roleId}' not found` });
     return updatedRole;
   };
 
@@ -74,8 +138,33 @@ export const orgRoleServiceFactory = ({ orgRoleDAL, permissionService }: TOrgRol
   ) => {
     const { permission } = await permissionService.getUserOrgPermission(userId, orgId, actorAuthMethod, actorOrgId);
     ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Delete, OrgPermissionSubjects.Role);
+
+    const org = await orgDAL.findOrgById(orgId);
+
+    if (!org)
+      throw new NotFoundError({
+        message: `Organization with ID '${orgId}' not found`
+      });
+
+    if (org.defaultMembershipRole === roleId)
+      throw new BadRequestError({
+        message: "Cannot delete default org membership role. Please re-assign and try again."
+      });
+
+    const externalGroupMapping = await externalGroupOrgRoleMappingDAL.findOne({
+      orgId,
+      roleId
+    });
+
+    if (externalGroupMapping)
+      throw new BadRequestError({
+        message:
+          "Cannot delete role assigned to external group organization role mapping. Please re-assign external mapping and try again."
+      });
+
     const [deletedRole] = await orgRoleDAL.delete({ id: roleId, orgId });
-    if (!deletedRole) throw new BadRequestError({ message: "Role not found", name: "Update role" });
+    if (!deletedRole)
+      throw new NotFoundError({ message: `Organization role with ID '${roleId}' not found`, name: "UpdateRole" });
 
     return deletedRole;
   };
@@ -96,7 +185,7 @@ export const orgRoleServiceFactory = ({ orgRoleDAL, permissionService }: TOrgRol
       name: "Admin",
       slug: "admin",
       description: "Complete administration access over the organization",
-      permissions: packRules(orgAdminPermissions.rules),
+      permissions: packRules(orgAdminPermissions),
       createdAt: new Date(),
       updatedAt: new Date()
     },
@@ -106,7 +195,7 @@ export const orgRoleServiceFactory = ({ orgRoleDAL, permissionService }: TOrgRol
       name: "Member",
       slug: "member",
       description: "Non-administrative role in an organization",
-      permissions: packRules(orgMemberPermissions.rules),
+      permissions: packRules(orgMemberPermissions),
       createdAt: new Date(),
       updatedAt: new Date()
     },
@@ -116,7 +205,7 @@ export const orgRoleServiceFactory = ({ orgRoleDAL, permissionService }: TOrgRol
       name: "No Access",
       slug: "no-access",
       description: "No access to any resources in the organization",
-      permissions: packRules(orgNoAccessPermissions.rules),
+      permissions: packRules(orgNoAccessPermissions),
       createdAt: new Date(),
       updatedAt: new Date()
     },
@@ -144,5 +233,5 @@ export const orgRoleServiceFactory = ({ orgRoleDAL, permissionService }: TOrgRol
     return { permissions: packRules(permission.rules), membership };
   };
 
-  return { createRole, updateRole, deleteRole, listRoles, getUserPermission };
+  return { createRole, getRole, updateRole, deleteRole, listRoles, getUserPermission };
 };
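packRules compresses CASL rule objects into compact tuples for transport, and the switch from packRules(orgAdminPermissions.rules) to packRules(orgAdminPermissions) implies those permission constants are now plain rule arrays rather than built ability instances. A small round-trip sketch with the public @casl/ability API (rule contents are illustrative):

import { createMongoAbility, MongoAbility, RawRuleOf } from "@casl/ability";
import { packRules, unpackRules } from "@casl/ability/extra";

const rules: RawRuleOf<MongoAbility>[] = [
  { action: "read", subject: "Role" },
  { action: "edit", subject: "Settings" }
];

const packed = packRules(rules); // compact tuples suitable for a JSON response
const ability = createMongoAbility(unpackRules(packed)); // rebuilt on the consumer side
console.log(ability.can("read", "Role")); // true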
"@app/lib/crypto/srp"; -import { BadRequestError, UnauthorizedError } from "@app/lib/errors"; +import { BadRequestError, ForbiddenRequestError, NotFoundError, UnauthorizedError } from "@app/lib/errors"; +import { groupBy } from "@app/lib/fn"; import { alphaNumericNanoId } from "@app/lib/nanoid"; import { isDisposableEmail } from "@app/lib/validator"; +import { getDefaultOrgMembershipRoleForUpdateOrg } from "@app/services/org/org-role-fns"; +import { TOrgMembershipDALFactory } from "@app/services/org-membership/org-membership-dal"; import { TUserAliasDALFactory } from "@app/services/user-alias/user-alias-dal"; import { ActorAuthMethod, ActorType, AuthMethod, AuthTokenType } from "../auth/auth-type"; import { TAuthTokenServiceFactory } from "../auth-token/auth-token-service"; import { TokenType } from "../auth-token/auth-token-types"; +import { TIdentityMetadataDALFactory } from "../identity/identity-metadata-dal"; import { TProjectDALFactory } from "../project/project-dal"; +import { assignWorkspaceKeysToMembers, createProjectKey } from "../project/project-fns"; +import { TProjectBotDALFactory } from "../project-bot/project-bot-dal"; +import { TProjectBotServiceFactory } from "../project-bot/project-bot-service"; import { TProjectKeyDALFactory } from "../project-key/project-key-dal"; import { TProjectMembershipDALFactory } from "../project-membership/project-membership-dal"; +import { TProjectUserMembershipRoleDALFactory } from "../project-membership/project-user-membership-role-dal"; +import { TProjectRoleDALFactory } from "../project-role/project-role-dal"; import { SmtpTemplates, TSmtpService } from "../smtp/smtp-service"; import { TUserDALFactory } from "../user/user-dal"; import { TIncidentContactsDALFactory } from "./incident-contacts-dal"; @@ -38,7 +60,9 @@ import { TFindAllWorkspacesDTO, TFindOrgMembersByEmailDTO, TGetOrgGroupsDTO, + TGetOrgMembershipDTO, TInviteUserToOrgDTO, + TListProjectMembershipsByOrgMembershipIdDTO, TUpdateOrgDTO, TUpdateOrgMembershipDTO, TVerifyUserToOrgDTO @@ -52,10 +76,16 @@ type TOrgServiceFactoryDep = { userDAL: TUserDALFactory; groupDAL: TGroupDALFactory; projectDAL: TProjectDALFactory; - projectMembershipDAL: Pick; - projectKeyDAL: Pick; + identityMetadataDAL: Pick; + projectMembershipDAL: Pick< + TProjectMembershipDALFactory, + "findProjectMembershipsByUserId" | "delete" | "create" | "find" | "insertMany" | "transaction" + >; + projectKeyDAL: Pick; + orgMembershipDAL: Pick; incidentContactDAL: TIncidentContactsDALFactory; samlConfigDAL: Pick; + oidcConfigDAL: Pick; smtpService: TSmtpService; tokenService: TAuthTokenServiceFactory; permissionService: TPermissionServiceFactory; @@ -63,6 +93,11 @@ type TOrgServiceFactoryDep = { TLicenseServiceFactory, "getPlan" | "updateSubscriptionOrgMemberCount" | "generateOrgCustomerId" | "removeOrgCustomer" >; + projectUserAdditionalPrivilegeDAL: Pick; + projectRoleDAL: Pick; + projectBotDAL: Pick; + projectUserMembershipRoleDAL: Pick; + projectBotService: Pick; }; export type TOrgServiceFactory = ReturnType; @@ -79,10 +114,18 @@ export const orgServiceFactory = ({ projectDAL, projectMembershipDAL, projectKeyDAL, + orgMembershipDAL, + projectUserAdditionalPrivilegeDAL, tokenService, orgBotDAL, licenseService, - samlConfigDAL + projectRoleDAL, + samlConfigDAL, + oidcConfigDAL, + projectBotDAL, + projectUserMembershipRoleDAL, + identityMetadataDAL, + projectBotService }: TOrgServiceFactoryDep) => { /* * Get organization details by the organization id @@ -95,7 +138,7 @@ export const orgServiceFactory = ({ ) => { await 
@@ -79,10 +114,18 @@ export const orgServiceFactory = ({
   projectDAL,
   projectMembershipDAL,
   projectKeyDAL,
+  orgMembershipDAL,
+  projectUserAdditionalPrivilegeDAL,
   tokenService,
   orgBotDAL,
   licenseService,
-  samlConfigDAL
+  projectRoleDAL,
+  samlConfigDAL,
+  oidcConfigDAL,
+  projectBotDAL,
+  projectUserMembershipRoleDAL,
+  identityMetadataDAL,
+  projectBotService
 }: TOrgServiceFactoryDep) => {
   /*
    * Get organization details by the organization id
@@ -95,7 +138,7 @@ export const orgServiceFactory = ({
   ) => {
     await permissionService.getUserOrgPermission(userId, orgId, actorAuthMethod, actorOrgId);
     const org = await orgDAL.findOrgById(orgId);
-    if (!org) throw new BadRequestError({ name: "Org not found", message: "Organization not found" });
+    if (!org) throw new NotFoundError({ message: `Organization with ID '${orgId}' not found` });
     return org;
   };
   /*
@@ -144,10 +187,7 @@ export const orgServiceFactory = ({
     return members;
   };
 
-  const findAllWorkspaces = async ({ actor, actorId, actorOrgId, actorAuthMethod, orgId }: TFindAllWorkspacesDTO) => {
-    const { permission } = await permissionService.getOrgPermission(actor, actorId, orgId, actorAuthMethod, actorOrgId);
-    ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Read, OrgPermissionSubjects.Workspace);
-
+  const findAllWorkspaces = async ({ actor, actorId, orgId }: TFindAllWorkspacesDTO) => {
     const organizationWorkspaceIds = new Set((await projectDAL.find({ orgId })).map((workspace) => workspace.id));
 
     let workspaces: (TProjects & { organization: string } & {
@@ -207,7 +247,8 @@ export const orgServiceFactory = ({
       orgId,
       userId: user.id,
       role: OrgMembershipRole.Admin,
-      status: OrgMembershipStatus.Accepted
+      status: OrgMembershipStatus.Accepted,
+      isActive: true
     };
 
     await orgDAL.createMembership(createMembershipData, tx);
@@ -227,18 +268,32 @@ export const orgServiceFactory = ({
     actorOrgId,
     actorAuthMethod,
     orgId,
-    data: { name, slug, authEnforced, scimEnabled }
+    data: { name, slug, authEnforced, scimEnabled, defaultMembershipRoleSlug, enforceMfa, selectedMfaMethod }
   }: TUpdateOrgDTO) => {
+    const appCfg = getConfig();
     const { permission } = await permissionService.getOrgPermission(actor, actorId, orgId, actorAuthMethod, actorOrgId);
     ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Edit, OrgPermissionSubjects.Settings);
 
     const plan = await licenseService.getPlan(orgId);
 
-    if (authEnforced !== undefined) {
-      if (!plan?.samlSSO)
+    if (enforceMfa !== undefined) {
+      if (!plan.enforceMfa) {
         throw new BadRequestError({
-          message:
-            "Failed to enforce/un-enforce SAML SSO due to plan restriction. Upgrade plan to enforce/un-enforce SAML SSO."
+          message: "Failed to enforce user MFA due to plan restriction. Upgrade plan to enforce/un-enforce MFA."
+        });
+      }
+
+      if (!appCfg.isSmtpConfigured) {
+        throw new BadRequestError({
+          message: "Failed to enforce user MFA due to missing instance SMTP configuration."
+        });
+      }
+    }
+
+    if (authEnforced !== undefined) {
+      if (!plan?.samlSSO && !plan.oidcSSO)
+        throw new BadRequestError({
+          message: "Failed to enforce/un-enforce SSO due to plan restriction. Upgrade plan to enforce/un-enforce SSO."
        });
      ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Edit, OrgPermissionSubjects.Sso);
    }
@@ -254,20 +309,34 @@ export const orgServiceFactory = ({
     if (authEnforced) {
       const samlCfg = await samlConfigDAL.findEnforceableSamlCfg(orgId);
-      if (!samlCfg)
-        throw new BadRequestError({
-          name: "No enforceable SAML config found",
-          message: "No enforceable SAML config found"
+      const oidcCfg = await oidcConfigDAL.findEnforceableOidcCfg(orgId);
+
+      if (!samlCfg && !oidcCfg)
+        throw new NotFoundError({
+          message: `SAML or OIDC configuration for organization with ID '${orgId}' not found`
         });
     }
 
+    let defaultMembershipRole: string | undefined;
+    if (defaultMembershipRoleSlug) {
+      defaultMembershipRole = await getDefaultOrgMembershipRoleForUpdateOrg({
+        membershipRoleSlug: defaultMembershipRoleSlug,
+        orgId,
+        orgRoleDAL,
+        plan
+      });
+    }
+
     const org = await orgDAL.updateById(orgId, {
       name,
       slug: slug ? slugify(slug) : undefined,
       authEnforced,
-      scimEnabled
+      scimEnabled,
+      defaultMembershipRole,
+      enforceMfa,
+      selectedMfaMethod
     });
-    if (!org) throw new BadRequestError({ name: "Org not found", message: "Organization not found" });
+    if (!org) throw new NotFoundError({ message: `Organization with ID '${orgId}' not found` });
     return org;
   };
   /*
@@ -311,11 +380,11 @@ export const orgServiceFactory = ({
           userId,
           orgId: org.id,
           role: OrgMembershipRole.Admin,
-          status: OrgMembershipStatus.Accepted
+          status: OrgMembershipStatus.Accepted,
+          isActive: true
         },
         tx
       );
-      await licenseService.updateSubscriptionOrgMemberCount(org.id);
       await orgBotDAL.create(
         {
           name: org.name,
@@ -337,6 +406,7 @@ export const orgServiceFactory = ({
       return org;
     });
 
+    await licenseService.updateSubscriptionOrgMemberCount(organization.id);
     return organization;
   };
 
@@ -351,7 +421,10 @@ export const orgServiceFactory = ({
   ) => {
     const { membership } = await permissionService.getUserOrgPermission(userId, orgId, actorAuthMethod, actorOrgId);
     if ((membership.role as OrgMembershipRole) !== OrgMembershipRole.Admin)
-      throw new UnauthorizedError({ name: "Delete org by id", message: "Not an admin" });
+      throw new ForbiddenRequestError({
+        name: "DeleteOrganizationById",
+        message: "Insufficient privileges"
+      });
 
     const organization = await orgDAL.deleteById(orgId);
     if (organization.customerId) {
membership", message: "Role not found" }); + if (!customRole) throw new BadRequestError({ name: "UpdateMembership", message: "Organization role not found" }); const plan = await licenseService.getPlan(orgId); if (!plan?.rbac) @@ -385,17 +470,31 @@ export const orgServiceFactory = ({ message: "Failed to assign custom role due to RBAC restriction. Upgrade plan to assign custom role to member." }); - const [membership] = await orgDAL.updateMembership( - { id: membershipId, orgId }, - { - role: OrgMembershipRole.Custom, - roleId: customRole.id - } - ); - return membership; + userRole = OrgMembershipRole.Custom; + userRoleId = customRole.id; } + const membership = await orgDAL.transaction(async (tx) => { + const [updatedOrgMembership] = await orgDAL.updateMembership( + { id: membershipId, orgId }, + { role: userRole, roleId: userRoleId, isActive } + ); - const [membership] = await orgDAL.updateMembership({ id: membershipId, orgId }, { role, roleId: null }); + if (metadata) { + await identityMetadataDAL.delete({ userId: updatedOrgMembership.userId, orgId }, tx); + if (metadata.length) { + await identityMetadataDAL.insertMany( + metadata.map(({ key, value }) => ({ + userId: updatedOrgMembership.userId, + orgId, + key, + value + })), + tx + ); + } + } + return updatedOrgMembership; + }); return membership; }; /* @@ -403,121 +502,439 @@ export const orgServiceFactory = ({ */ const inviteUserToOrganization = async ({ orgId, - userId, - inviteeEmail, + actorId, + actor, + inviteeEmails, + organizationRoleSlug, + projects: invitedProjects, actorAuthMethod, actorOrgId }: TInviteUserToOrgDTO) => { - const { permission } = await permissionService.getUserOrgPermission(userId, orgId, actorAuthMethod, actorOrgId); - ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Create, OrgPermissionSubjects.Member); + const appCfg = getConfig(); + + const { permission } = await permissionService.getOrgPermission(actor, actorId, orgId, actorAuthMethod, actorOrgId); const org = await orgDAL.findOrgById(orgId); - if (org?.authEnforced) { + const isEmailInvalid = await isDisposableEmail(inviteeEmails); + if (isEmailInvalid) { throw new BadRequestError({ - message: "Failed to invite user due to org-level auth enforced for organization" + message: "Disposable emails are not allowed", + name: "InviteUser" + }); + } + const plan = await licenseService.getPlan(orgId); + const isCustomOrgRole = !Object.values(OrgMembershipRole).includes(organizationRoleSlug as OrgMembershipRole); + if (isCustomOrgRole) { + if (!plan?.rbac) + throw new BadRequestError({ + message: "Failed to assign custom role due to RBAC restriction. Upgrade plan to assign custom role to member." + }); + } + + const projectsToInvite = invitedProjects?.length + ? await projectDAL.find({ + orgId, + $in: { + id: invitedProjects?.map(({ id }) => id) + } + }) + : []; + if (projectsToInvite.length !== invitedProjects?.length) { + throw new ForbiddenRequestError({ + message: "Access denied to one or more of the specified projects" }); } - const plan = await licenseService.getPlan(orgId); - if (plan.memberLimit !== null && plan.membersUsed >= plan.memberLimit) { - // case: limit imposed on number of members allowed - // case: number of members used exceeds the number of members allowed + if (projectsToInvite.some((el) => el.version !== ProjectVersion.V3)) { throw new BadRequestError({ - message: "Failed to invite member due to member limit reached. Upgrade plan to invite more members." 
+ message: "One or more selected projects are not compatible with this operation. Please upgrade your projects." }); } - const invitee = await orgDAL.transaction(async (tx) => { - const inviteeUser = await userDAL.findUserByUsername(inviteeEmail, tx); - if (inviteeUser) { - // if user already exist means its already part of infisical - // Thus the signup flow is not needed anymore - const [inviteeMembership] = await orgDAL.findMembership( + + const mailsForOrgInvitation: { email: string; userId: string; firstName: string; lastName: string }[] = []; + const mailsForProjectInvitation: { email: string[]; projectName: string }[] = []; + const newProjectMemberships: TProjectMemberships[] = []; + await orgDAL.transaction(async (tx) => { + const users: Pick[] = []; + + for await (const inviteeEmail of inviteeEmails) { + let inviteeUser = await userDAL.findUserByUsername(inviteeEmail, tx); + + // if the user doesn't exist we create the user with the email + if (!inviteeUser) { + inviteeUser = await userDAL.create( + { + isAccepted: false, + email: inviteeEmail, + username: inviteeEmail, + authMethods: [AuthMethod.EMAIL], + isGhost: false + }, + tx + ); + } + + const inviteeUserId = inviteeUser?.id; + const existingEncrytionKey = await userDAL.findUserEncKeyByUserId(inviteeUserId, tx); + + // when user is missing the encrytion keys + // this could happen either if user doesn't exist or user didn't find step 3 of generating the encryption keys of srp + // So what we do is we generate a random secure password and then encrypt it with a random pub-private key + // Then when user sign in (as login is not possible as isAccepted is false) we rencrypt the private key with the user password + if (!inviteeUser || (inviteeUser && !inviteeUser?.isAccepted && !existingEncrytionKey)) { + const serverGeneratedPassword = crypto.randomBytes(32).toString("hex"); + const { tag, encoding, ciphertext, iv } = infisicalSymmetricEncypt(serverGeneratedPassword); + const encKeys = await generateUserSrpKeys(inviteeEmail, serverGeneratedPassword); + await userDAL.createUserEncryption( + { + userId: inviteeUserId, + encryptionVersion: 2, + protectedKey: encKeys.protectedKey, + protectedKeyIV: encKeys.protectedKeyIV, + protectedKeyTag: encKeys.protectedKeyTag, + publicKey: encKeys.publicKey, + encryptedPrivateKey: encKeys.encryptedPrivateKey, + iv: encKeys.encryptedPrivateKeyIV, + tag: encKeys.encryptedPrivateKeyTag, + salt: encKeys.salt, + verifier: encKeys.verifier, + serverEncryptedPrivateKeyEncoding: encoding, + serverEncryptedPrivateKeyTag: tag, + serverEncryptedPrivateKeyIV: iv, + serverEncryptedPrivateKey: ciphertext + }, + tx + ); + } + + const [inviteeOrgMembership] = await orgDAL.findMembership( { [`${TableName.OrgMembership}.orgId` as "orgId"]: orgId, - [`${TableName.OrgMembership}.userId` as "userId"]: inviteeUser.id + [`${TableName.OrgMembership}.userId` as "userId"]: inviteeUserId }, { tx } ); - if (inviteeMembership && inviteeMembership.status === OrgMembershipStatus.Accepted) { - throw new BadRequestError({ - message: "Failed to invite an existing member of org", - name: "Invite user to org" - }); - } - if (!inviteeMembership) { + // if there exist no org membership we set is as given by the request + if (!inviteeOrgMembership) { + if (plan?.slug !== "enterprise" && plan?.memberLimit && plan.membersUsed >= plan.memberLimit) { + // limit imposed on number of members allowed / number of members used exceeds the number of members allowed + throw new BadRequestError({ + name: "InviteUser", + message: "Failed to 
+
+          if (plan?.slug !== "enterprise" && plan?.identityLimit && plan.identitiesUsed >= plan.identityLimit) {
+            // a limit is imposed on the number of identities allowed, and the number of identities used has reached it
+            throw new BadRequestError({
+              name: "InviteUser",
+              message: "Failed to invite member due to identity limit reached. Upgrade plan to invite more members."
+            });
+          }
+
+          if (org?.authEnforced) {
+            throw new ForbiddenRequestError({
+              name: "InviteUser",
+              message: "Failed to invite user due to org-level auth enforced for organization"
+            });
+          }
+
+          // checked here because this flow is also used by project invites
+          ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Create, OrgPermissionSubjects.Member);
+          let roleId;
+          const orgRole = isCustomOrgRole ? OrgMembershipRole.Custom : organizationRoleSlug;
+          if (isCustomOrgRole) {
+            const customRole = await orgRoleDAL.findOne({ slug: organizationRoleSlug, orgId });
+            if (!customRole) {
+              throw new NotFoundError({
+                name: "InviteUser",
+                message: `Custom organization role with slug '${orgRole}' not found`
+              });
+            }
+            roleId = customRole.id;
+          }
+
           await orgDAL.createMembership(
             {
               userId: inviteeUser.id,
               inviteEmail: inviteeEmail,
               orgId,
-              role: OrgMembershipRole.Member,
-              status: OrgMembershipStatus.Invited
+              role: orgRole,
+              status: OrgMembershipStatus.Invited,
+              isActive: true,
+              roleId
             },
             tx
           );
+          mailsForOrgInvitation.push({
+            email: inviteeEmail,
+            userId: inviteeUser.id,
+            firstName: inviteeUser?.firstName || "",
+            lastName: inviteeUser.lastName || ""
+          });
         }
-      return inviteeUser;
+
+        users.push(inviteeUser);
       }
-    const isEmailInvalid = await isDisposableEmail(inviteeEmail);
-    if (isEmailInvalid) {
-      throw new BadRequestError({
-        message: "Provided a disposable email",
-        name: "Org invite"
+
+      const userIds = users.map(({ id }) => id);
+      const userEncryptionKeys = await userDAL.findUserEncKeyByUserIdsBatch({ userIds }, tx);
+      // no need to spam users with email: an org invitation already covers it, so those users are skipped for project invitation emails
+      const userIdsWithOrgInvitation = new Set(mailsForOrgInvitation.map((el) => el.userId));
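The role handling in the loop above follows one rule: built-in slugs are stored directly, while any other slug must resolve to a custom role owned by the org, whose id is stored alongside the sentinel `custom` role value. A hedged sketch of that resolution (the enum values are assumptions):

```ts
// Assumed built-in role slugs; anything else must be a custom role.
enum OrgMembershipRole {
  Admin = "admin",
  Member = "member",
  NoAccess = "no-access",
  Custom = "custom"
}

type TOrgRole = { id: string; slug: string; orgId: string };

const resolveOrgRole = (slug: string, customRoles: TOrgRole[]) => {
  if ((Object.values(OrgMembershipRole) as string[]).includes(slug)) {
    // built-in role: stored by slug, no custom role id needed
    return { role: slug, roleId: null as string | null };
  }
  const customRole = customRoles.find((r) => r.slug === slug);
  if (!customRole) throw new Error(`Custom organization role with slug '${slug}' not found`);
  // custom role: stored as the sentinel "custom" plus the concrete role id
  return { role: OrgMembershipRole.Custom as string, roleId: customRole.id as string | null };
};

console.log(resolveOrgRole("member", [])); // { role: "member", roleId: null }
console.log(resolveOrgRole("auditor", [{ id: "r1", slug: "auditor", orgId: "org1" }]));
// { role: "custom", roleId: "r1" }
```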
+
+      // if no project membership exists yet, we create one as given by the request
+      for await (const project of projectsToInvite) {
+        const projectId = project.id;
+        const { permission: projectPermission } = await permissionService.getProjectPermission(
+          actor,
+          actorId,
+          projectId,
+          actorAuthMethod,
+          actorOrgId
+        );
+        ForbiddenError.from(projectPermission).throwUnlessCan(
+          ProjectPermissionActions.Create,
+          ProjectPermissionSub.Member
+        );
+        const existingMembers = await projectMembershipDAL.find(
+          {
+            projectId: project.id,
+            $in: { userId: userIds }
+          },
+          { tx }
+        );
+        const existingMembersGroupByUserId = groupBy(existingMembers, (i) => i.userId);
+        const userWithEncryptionKeyInvitedToProject = userEncryptionKeys.filter(
+          (user) => !existingMembersGroupByUserId?.[user.userId]
+        );
+
+        // eslint-disable-next-line no-continue
+        if (!userWithEncryptionKeyInvitedToProject.length) continue;
+
+        // validate custom project roles
+        const invitedProjectRoles = invitedProjects.find((el) => el.id === project.id)?.projectRoleSlug || [
+          ProjectMembershipRole.Member
+        ];
+
+        const customProjectRoles = invitedProjectRoles.filter(
+          (role) => !Object.values(ProjectMembershipRole).includes(role as ProjectMembershipRole)
+        );
+        const hasCustomRole = Boolean(customProjectRoles.length);
+        if (hasCustomRole) {
+          if (!plan?.rbac)
+            throw new BadRequestError({
+              name: "InviteUser",
+              message:
+                "Failed to assign custom role due to RBAC restriction. Upgrade plan to assign custom role to member."
+            });
+        }
+
+        const customRoles = hasCustomRole
+          ? await projectRoleDAL.find({
+              projectId,
+              $in: { slug: customProjectRoles.map((role) => role) }
+            })
+          : [];
+        if (customRoles.length !== customProjectRoles.length) {
+          throw new NotFoundError({ name: "InviteUser", message: "Custom project role not found" });
+        }
+
+        const customRolesGroupBySlug = groupBy(customRoles, ({ slug }) => slug);
+
+        // this will auto-generate the bot if one doesn't exist yet
+        const { botKey, bot: autoGeneratedBot } = await projectBotService.getBotKey(projectId, true);
+
+        const ghostUser = await projectDAL.findProjectGhostUser(projectId, tx);
+        let ghostUserId = ghostUser?.id;
+
+        // backfill a missing ghost user
+        if (!ghostUserId) {
+          const newGhostUser = await addGhostUser(project.orgId, tx);
+          const projectMembership = await projectMembershipDAL.create(
+            {
+              userId: newGhostUser.user.id,
+              projectId: project.id
+            },
+            tx
+          );
+          await projectUserMembershipRoleDAL.create(
+            { projectMembershipId: projectMembership.id, role: ProjectMembershipRole.Admin },
+            tx
+          );
+
+          const { key: encryptedProjectKey, iv: encryptedProjectKeyIv } = createProjectKey({
+            publicKey: newGhostUser.keys.publicKey,
+            privateKey: newGhostUser.keys.plainPrivateKey,
+            plainProjectKey: botKey
+          });
+
+          // save the project key for the ghost user
+          await projectKeyDAL.create(
+            {
+              projectId: project.id,
+              receiverId: newGhostUser.user.id,
+              encryptedKey: encryptedProjectKey,
+              nonce: encryptedProjectKeyIv,
+              senderId: newGhostUser.user.id
+            },
+            tx
+          );
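The ghost-user backfill above wraps the plaintext project key for a recipient and stores only the ciphertext plus nonce. A minimal sketch of that wrapping, under the assumption that `createProjectKey` is an x25519 box construction (tweetnacl is used here purely for illustration):

```ts
import nacl from "tweetnacl";

// Seal the plaintext project key for the recipient's public key using the
// sender's secret key and a fresh random nonce.
const wrapProjectKey = (
  plainProjectKey: Uint8Array,
  recipientPublicKey: Uint8Array,
  senderSecretKey: Uint8Array
) => {
  const nonce = nacl.randomBytes(nacl.box.nonceLength);
  const key = nacl.box(plainProjectKey, nonce, recipientPublicKey, senderSecretKey);
  return { key, nonce }; // maps onto the encryptedKey / nonce columns above
};

// In the backfill path the ghost user wraps the key for itself, which is why
// senderId === receiverId in the projectKeyDAL.create call.
const ghost = nacl.box.keyPair(); // stands in for the ghost user's key pair
const projectKey = nacl.randomBytes(32);
const wrapped = wrapProjectKey(projectKey, ghost.publicKey, ghost.secretKey);
console.log(wrapped.key.byteLength, wrapped.nonce.byteLength); // 48 24
```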
+
+          const { iv, tag, ciphertext, encoding, algorithm } = infisicalSymmetricEncypt(
+            newGhostUser.keys.plainPrivateKey
+          );
+          if (autoGeneratedBot) {
+            await projectBotDAL.updateById(
+              autoGeneratedBot.id,
+              {
+                tag,
+                iv,
+                encryptedProjectKey,
+                encryptedProjectKeyNonce: encryptedProjectKeyIv,
+                encryptedPrivateKey: ciphertext,
+                isActive: true,
+                publicKey: newGhostUser.keys.publicKey,
+                senderId: newGhostUser.user.id,
+                algorithm,
+                keyEncoding: encoding
+              },
+              tx
+            );
+          }
+          ghostUserId = newGhostUser.user.id;
+        }
+
+        const bot = await projectBotDAL.findOne({ projectId }, tx);
+        if (!bot) {
+          throw new NotFoundError({
+            name: "InviteUser",
+            message: `Failed to find project bot for project with ID '${projectId}'`
+          });
+        }
+
+        const ghostUserLatestKey = await projectKeyDAL.findLatestProjectKey(ghostUserId, projectId, tx);
+        if (!ghostUserLatestKey) {
+          throw new NotFoundError({
+            name: "InviteUser",
+            message: `Failed to find project owner's latest key for project with ID '${projectId}'`
+          });
+        }
+
+        const botPrivateKey = infisicalSymmetricDecrypt({
+          keyEncoding: bot.keyEncoding as SecretKeyEncoding,
+          iv: bot.iv,
+          tag: bot.tag,
+          ciphertext: bot.encryptedPrivateKey
+        });
+
+        const newWsMembers = assignWorkspaceKeysToMembers({
+          decryptKey: ghostUserLatestKey,
+          userPrivateKey: botPrivateKey,
+          members: userWithEncryptionKeyInvitedToProject.map((userEnc) => ({
+            orgMembershipId: userEnc.userId,
+            projectMembershipRole: ProjectMembershipRole.Admin,
+            userPublicKey: userEnc.publicKey
+          }))
+        });
+
+        const projectMemberships = await projectMembershipDAL.insertMany(
+          userWithEncryptionKeyInvitedToProject.map((userEnc) => ({
+            projectId,
+            userId: userEnc.userId
+          })),
+          tx
+        );
+        newProjectMemberships.push(...projectMemberships);
+
+        const sanitizedProjectMembershipRoles: TProjectUserMembershipRolesInsert[] = [];
+        invitedProjectRoles.forEach((projectRole) => {
+          const isCustomRole = Boolean(customRolesGroupBySlug?.[projectRole]?.[0]);
+          projectMemberships.forEach((membership) => {
+            sanitizedProjectMembershipRoles.push({
+              projectMembershipId: membership.id,
+              role: isCustomRole ? ProjectMembershipRole.Custom : projectRole,
+              customRoleId: customRolesGroupBySlug[projectRole] ? 
customRolesGroupBySlug[projectRole][0].id : null + }); + }); + }); + await projectUserMembershipRoleDAL.insertMany(sanitizedProjectMembershipRoles, tx); + + await projectKeyDAL.insertMany( + newWsMembers.map((el) => ({ + encryptedKey: el.workspaceEncryptedKey, + nonce: el.workspaceEncryptedNonce, + senderId: ghostUserId, + receiverId: el.orgMembershipId, + projectId + })), + tx + ); + mailsForProjectInvitation.push({ + email: userWithEncryptionKeyInvitedToProject + .filter((el) => !userIdsWithOrgInvitation.has(el.userId)) + .map((el) => el.email || el.username), + projectName: project.name }); } - // not invited before - const user = await userDAL.create( - { - username: inviteeEmail, - email: inviteeEmail, - isAccepted: false, - authMethods: [AuthMethod.EMAIL], - isGhost: false - }, - tx - ); - await orgDAL.createMembership( - { - inviteEmail: inviteeEmail, - orgId, - userId: user.id, - role: OrgMembershipRole.Member, - status: OrgMembershipStatus.Invited - }, - tx - ); - return user; - }); - - const token = await tokenService.createTokenForUser({ - type: TokenType.TOKEN_EMAIL_ORG_INVITATION, - userId: invitee.id, - orgId - }); - - const user = await userDAL.findById(userId); - const appCfg = getConfig(); - await smtpService.sendMail({ - template: SmtpTemplates.OrgInvite, - subjectLine: "Infisical organization invitation", - recipients: [inviteeEmail], - substitutions: { - inviterFirstName: user.firstName, - inviterUsername: user.username, - organizationName: org?.name, - email: inviteeEmail, - organizationId: org?.id.toString(), - token, - callback_url: `${appCfg.SITE_URL}/signupinvite` - } + return users; }); await licenseService.updateSubscriptionOrgMemberCount(orgId); + const signupTokens: { email: string; link: string }[] = []; + // send org invite mail + await Promise.allSettled( + mailsForOrgInvitation.map(async (el) => { + const token = await tokenService.createTokenForUser({ + type: TokenType.TOKEN_EMAIL_ORG_INVITATION, + userId: el.userId, + orgId + }); + + signupTokens.push({ + email: el.email, + link: `${appCfg.SITE_URL}/signupinvite?token=${token}&to=${el.email}&organization_id=${org?.id}` + }); + + return smtpService.sendMail({ + template: SmtpTemplates.OrgInvite, + subjectLine: "Infisical organization invitation", + recipients: [el.email], + substitutions: { + inviterFirstName: el.firstName, + inviterUsername: el.email, + organizationName: org?.name, + email: el.email, + organizationId: org?.id.toString(), + token, + callback_url: `${appCfg.SITE_URL}/signupinvite` + } + }); + }) + ); + + await Promise.allSettled( + mailsForProjectInvitation + .filter((el) => Boolean(el.email.length)) + .map(async (el) => { + return smtpService.sendMail({ + template: SmtpTemplates.WorkspaceInvite, + subjectLine: "Infisical project invitation", + recipients: el.email, + substitutions: { + workspaceName: el.projectName, + callback_url: `${appCfg.SITE_URL}/login` + } + }); + }) + ); + if (!appCfg.isSmtpConfigured) { - return `${appCfg.SITE_URL}/signupinvite?token=${token}&to=${inviteeEmail}&organization_id=${org?.id}`; + return { signupTokens, projectMemberships: newProjectMemberships }; } + + return { signupTokens: undefined, projectMemberships: newProjectMemberships }; }; /** @@ -527,17 +944,17 @@ export const orgServiceFactory = ({ const verifyUserToOrg = async ({ orgId, email, code }: TVerifyUserToOrgDTO) => { const user = await userDAL.findUserByUsername(email); if (!user) { - throw new BadRequestError({ message: "Invalid request", name: "Verify user to org" }); + throw new NotFoundError({ 
message: "User not found" }); } const [orgMembership] = await orgDAL.findMembership({ [`${TableName.OrgMembership}.userId` as "userId"]: user.id, status: OrgMembershipStatus.Invited, [`${TableName.OrgMembership}.orgId` as "orgId"]: orgId }); + if (!orgMembership) - throw new BadRequestError({ - message: "Failed to find invitation", - name: "Verify user to org" + throw new NotFoundError({ + message: "No pending invitation found" }); await tokenService.validateTokenForUser({ @@ -577,6 +994,28 @@ export const orgServiceFactory = ({ return { token, user }; }; + const getOrgMembership = async ({ + membershipId, + orgId, + actor, + actorId, + actorAuthMethod, + actorOrgId + }: TGetOrgMembershipDTO) => { + const { permission } = await permissionService.getOrgPermission(actor, actorId, orgId, actorAuthMethod, actorOrgId); + ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Read, OrgPermissionSubjects.Member); + + const membership = await orgMembershipDAL.findOrgMembershipById(membershipId); + if (!membership) { + throw new NotFoundError({ message: `Organization membership with ID '${membershipId}' not found` }); + } + if (membership.orgId !== orgId) { + throw new ForbiddenRequestError({ message: "Membership does not belong to organization" }); + } + + return membership; + }; + const deleteOrgMembership = async ({ orgId, userId, @@ -592,6 +1031,7 @@ export const orgServiceFactory = ({ orgId, orgDAL, projectMembershipDAL, + projectUserAdditionalPrivilegeDAL, projectKeyDAL, userAliasDAL, licenseService @@ -600,6 +1040,28 @@ export const orgServiceFactory = ({ return deletedMembership; }; + const listProjectMembershipsByOrgMembershipId = async ({ + orgMembershipId, + orgId, + actor, + actorId, + actorAuthMethod, + actorOrgId + }: TListProjectMembershipsByOrgMembershipIdDTO) => { + const { permission } = await permissionService.getOrgPermission(actor, actorId, orgId, actorAuthMethod, actorOrgId); + ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Read, OrgPermissionSubjects.Member); + + const membership = await orgMembershipDAL.findOrgMembershipById(orgMembershipId); + if (!membership) { + throw new NotFoundError({ message: `Organization membership with ID '${orgMembershipId}' not found` }); + } + if (membership.orgId !== orgId) throw new NotFoundError({ message: "Failed to find organization membership" }); + + const projectMemberships = await projectMembershipDAL.findProjectMembershipsByUserId(orgId, membership.user.id); + + return projectMemberships; + }; + /* * CRUD operations of incident contacts * */ @@ -660,6 +1122,7 @@ export const orgServiceFactory = ({ findOrgMembersByUsername, createOrganization, deleteOrganizationById, + getOrgMembership, deleteOrgMembership, findAllWorkspaces, addGhostUser, @@ -668,6 +1131,7 @@ export const orgServiceFactory = ({ findIncidentContacts, createIncidentContact, deleteIncidentContact, - getOrgGroups + getOrgGroups, + listProjectMembershipsByOrgMembershipId }; }; diff --git a/backend/src/services/org/org-types.ts b/backend/src/services/org/org-types.ts index 0efc7ffe13..7d4a203de6 100644 --- a/backend/src/services/org/org-types.ts +++ b/backend/src/services/org/org-types.ts @@ -1,16 +1,22 @@ import { TOrgPermission } from "@app/lib/types"; -import { ActorAuthMethod, ActorType } from "../auth/auth-type"; +import { ActorAuthMethod, ActorType, MfaMethod } from "../auth/auth-type"; export type TUpdateOrgMembershipDTO = { userId: string; orgId: string; membershipId: string; - role: string; + role?: string; + isActive?: boolean; 
 actorOrgId: string | undefined;
+  metadata?: { key: string; value: string }[];
   actorAuthMethod: ActorAuthMethod;
 };
 
+export type TGetOrgMembershipDTO = {
+  membershipId: string;
+} & TOrgPermission;
+
 export type TDeleteOrgMembershipDTO = {
   userId: string;
   orgId: string;
@@ -20,12 +26,13 @@ export type TDeleteOrgMembershipDTO = {
 };
 
 export type TInviteUserToOrgDTO = {
-  userId: string;
-  orgId: string;
-  actorOrgId: string | undefined;
-  actorAuthMethod: ActorAuthMethod;
-  inviteeEmail: string;
-};
+  inviteeEmails: string[];
+  organizationRoleSlug: string;
+  projects?: {
+    id: string;
+    projectRoleSlug?: string[];
+  }[];
+} & TOrgPermission;
 
 export type TVerifyUserToOrgDTO = {
   email: string;
@@ -51,7 +58,19 @@ export type TFindAllWorkspacesDTO = {
 };
 
 export type TUpdateOrgDTO = {
-  data: Partial<{ name: string; slug: string; authEnforced: boolean; scimEnabled: boolean }>;
+  data: Partial<{
+    name: string;
+    slug: string;
+    authEnforced: boolean;
+    scimEnabled: boolean;
+    defaultMembershipRoleSlug: string;
+    enforceMfa: boolean;
+    selectedMfaMethod: MfaMethod;
+  }>;
 } & TOrgPermission;
 
 export type TGetOrgGroupsDTO = TOrgPermission;
+
+export type TListProjectMembershipsByOrgMembershipIdDTO = {
+  orgMembershipId: string;
+} & TOrgPermission;
diff --git a/backend/src/services/pki-alert/expiring-pki-item-alert-queue.ts b/backend/src/services/pki-alert/expiring-pki-item-alert-queue.ts
new file mode 100644
index 0000000000..a592e92710
--- /dev/null
+++ b/backend/src/services/pki-alert/expiring-pki-item-alert-queue.ts
@@ -0,0 +1,48 @@
+import { logger } from "@app/lib/logger";
+import { QueueJobs, QueueName, TQueueServiceFactory } from "@app/queue";
+import { TPkiAlertServiceFactory } from "@app/services/pki-alert/pki-alert-service";
+
+type TDailyExpiringPkiItemAlertQueueServiceFactoryDep = {
+  queueService: TQueueServiceFactory;
+  pkiAlertService: Pick<TPkiAlertServiceFactory, "sendPkiItemExpiryNotices">;
+};
+
+export type TDailyExpiringPkiItemAlertQueueServiceFactory = ReturnType<
+  typeof dailyExpiringPkiItemAlertQueueServiceFactory
+>;
+
+export const dailyExpiringPkiItemAlertQueueServiceFactory = ({
+  queueService,
+  pkiAlertService
+}: TDailyExpiringPkiItemAlertQueueServiceFactoryDep) => {
+  queueService.start(QueueName.DailyExpiringPkiItemAlert, async () => {
+    logger.info(`${QueueName.DailyExpiringPkiItemAlert}: queue task started`);
+    await pkiAlertService.sendPkiItemExpiryNotices();
+    logger.info(`${QueueName.DailyExpiringPkiItemAlert}: queue task completed`);
+  });
+
+  // register a repeating cron job, in the UTC timezone, that runs at midnight each day
+  const startSendingAlerts = async () => {
+    // clear any previously scheduled job
+    await queueService.stopRepeatableJob(
+      QueueName.DailyExpiringPkiItemAlert,
+      QueueJobs.DailyExpiringPkiItemAlert,
+      { pattern: "0 0 * * *", utc: true },
+      QueueName.DailyExpiringPkiItemAlert // used as the job id
+    );
+
+    await queueService.queue(QueueName.DailyExpiringPkiItemAlert, QueueJobs.DailyExpiringPkiItemAlert, undefined, {
+      delay: 5000,
+      jobId: QueueName.DailyExpiringPkiItemAlert,
+      repeat: { pattern: "0 0 * * *", utc: true }
+    });
+  };
+
+  queueService.listen(QueueName.DailyExpiringPkiItemAlert, "failed", (_, err) => {
+    logger.error(err, `${QueueName.DailyExpiringPkiItemAlert}: Expiring PKI item alert failed`);
+  });
+
+  return {
+    startSendingAlerts
+  };
+};
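The queue wiring above delegates to the project's queueService, but the underlying pattern is a standard BullMQ repeatable job: deregister any previous schedule, then re-register under a fixed job id so the schedule is never duplicated. A rough standalone sketch, assuming BullMQ and a local Redis (the queue and job names here are hypothetical):

```ts
import { Queue, Worker } from "bullmq";

const connection = { host: "localhost", port: 6379 };
const QUEUE_NAME = "daily-expiring-pki-item-alert"; // hypothetical name

const queue = new Queue(QUEUE_NAME, { connection });

const startSendingAlerts = async () => {
  // drop any previously registered schedule so the job is never duplicated
  await queue.removeRepeatable(QUEUE_NAME, { pattern: "0 0 * * *" }, QUEUE_NAME);
  // "0 0 * * *" fires once a day at midnight; the fixed jobId deduplicates
  await queue.add(QUEUE_NAME, {}, { jobId: QUEUE_NAME, repeat: { pattern: "0 0 * * *" } });
};

const worker = new Worker(
  QUEUE_NAME,
  async () => {
    // stand-in for pkiAlertService.sendPkiItemExpiryNotices()
    console.log("sending PKI expiry notices");
  },
  { connection }
);

worker.on("failed", (_job, err) => console.error("expiring PKI item alert failed", err));
void startSendingAlerts();
```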
"@app/db"; +import { TableName } from "@app/db/schemas"; +import { DatabaseError } from "@app/lib/errors"; +import { ormify } from "@app/lib/knex"; + +import { PkiItemType } from "../pki-collection/pki-collection-types"; + +export type TPkiAlertDALFactory = ReturnType; + +export const pkiAlertDALFactory = (db: TDbClient) => { + const pkiAlertOrm = ormify(db, TableName.PkiAlert); + + const getExpiringPkiCollectionItemsForAlerting = async () => { + try { + type AlertItem = { + type: PkiItemType; + id: string; // id of the CA or certificate + expiryDate: Date; + serialNumber: string; + friendlyName: string; + pkiCollectionId: string; + alertId: string; + alertName: string; + alertBeforeDays: number; + recipientEmails: string; + }; + + // gets CAs and certificates as part of PKI collection items + const combinedQuery = db + .replicaNode() + .select( + db.raw("? as type", [PkiItemType.CA]), + `${PkiItemType.CA}.id`, + `${PkiItemType.CA}.notAfter as expiryDate`, + `${PkiItemType.CA}.serialNumber`, + `${PkiItemType.CA}.friendlyName`, + "pci.pkiCollectionId" + ) + .from(`${TableName.CertificateAuthority} as ${PkiItemType.CA}`) + .join(`${TableName.PkiCollectionItem} as pci`, `${PkiItemType.CA}.id`, "pci.caId") + .unionAll((qb) => { + void qb + .select( + db.raw("? as type", [PkiItemType.CERTIFICATE]), + `${PkiItemType.CERTIFICATE}.id`, + `${PkiItemType.CERTIFICATE}.notAfter as expiryDate`, + `${PkiItemType.CERTIFICATE}.serialNumber`, + `${PkiItemType.CERTIFICATE}.friendlyName`, + "pci.pkiCollectionId" + ) + .from(`${TableName.Certificate} as ${PkiItemType.CERTIFICATE}`) + .join(`${TableName.PkiCollectionItem} as pci`, `${PkiItemType.CERTIFICATE}.id`, "pci.certId"); + }); + + /** + * Gets alerts to send based on alertBeforeDays on PKI alerts connected to PKI collection items + * Note: Results are clamped to 1-day window to avoid sending multiple alerts for the same item + */ + const alertQuery = db + .replicaNode() + .select("combined.*", "pa.id as alertId", "pa.name as alertName", "pa.alertBeforeDays", "pa.recipientEmails") + .from(db.raw("(?) 
as combined", [combinedQuery])) + .join(`${TableName.PkiAlert} as pa`, "combined.pkiCollectionId", "pa.pkiCollectionId") + .whereRaw( + ` + combined."expiryDate" <= CURRENT_TIMESTAMP + (pa."alertBeforeDays" * INTERVAL '1 day') + AND combined."expiryDate" > CURRENT_TIMESTAMP + ((pa."alertBeforeDays" - 1) * INTERVAL '1 day') + ` + ) + .orderBy("combined.expiryDate"); + + const results = (await alertQuery) as AlertItem[]; + + return results; + } catch (error) { + throw new DatabaseError({ error, name: "Get expiring PKI collection items for alerting" }); + } + }; + + return { + getExpiringPkiCollectionItemsForAlerting, + ...pkiAlertOrm + }; +}; diff --git a/backend/src/services/pki-alert/pki-alert-service.ts b/backend/src/services/pki-alert/pki-alert-service.ts new file mode 100644 index 0000000000..1e7d268258 --- /dev/null +++ b/backend/src/services/pki-alert/pki-alert-service.ts @@ -0,0 +1,183 @@ +import { ForbiddenError } from "@casl/ability"; + +import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service"; +import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission"; +import { ForbiddenRequestError, NotFoundError } from "@app/lib/errors"; +import { groupBy } from "@app/lib/fn"; +import { TPkiCollectionDALFactory } from "@app/services/pki-collection/pki-collection-dal"; +import { pkiItemTypeToNameMap } from "@app/services/pki-collection/pki-collection-types"; +import { SmtpTemplates, TSmtpService } from "@app/services/smtp/smtp-service"; + +import { TPkiAlertDALFactory } from "./pki-alert-dal"; +import { TCreateAlertDTO, TDeleteAlertDTO, TGetAlertByIdDTO, TUpdateAlertDTO } from "./pki-alert-types"; + +type TPkiAlertServiceFactoryDep = { + pkiAlertDAL: Pick< + TPkiAlertDALFactory, + "create" | "findById" | "updateById" | "deleteById" | "getExpiringPkiCollectionItemsForAlerting" + >; + pkiCollectionDAL: Pick; + permissionService: Pick; + smtpService: Pick; +}; + +export type TPkiAlertServiceFactory = ReturnType; + +export const pkiAlertServiceFactory = ({ + pkiAlertDAL, + pkiCollectionDAL, + permissionService, + smtpService +}: TPkiAlertServiceFactoryDep) => { + const sendPkiItemExpiryNotices = async () => { + const allAlertItems = await pkiAlertDAL.getExpiringPkiCollectionItemsForAlerting(); + + const flattenedResults = allAlertItems.flatMap(({ recipientEmails, ...item }) => + recipientEmails.split(",").map((email) => ({ + ...item, + recipientEmail: email.trim() + })) + ); + + const groupedByEmail = groupBy(flattenedResults, (item) => item.recipientEmail); + + for await (const [email, items] of Object.entries(groupedByEmail)) { + const groupedByAlert = groupBy(items, (item) => item.alertId); + for await (const [, alertItems] of Object.entries(groupedByAlert)) { + await smtpService.sendMail({ + recipients: [email], + subjectLine: `Infisical CA/Certificate expiration notice: ${alertItems[0].alertName}`, + substitutions: { + alertName: alertItems[0].alertName, + alertBeforeDays: items[0].alertBeforeDays, + items: alertItems.map((alertItem) => ({ + ...alertItem, + type: pkiItemTypeToNameMap[alertItem.type], + expiryDate: new Date(alertItem.expiryDate).toString() + })) + }, + template: SmtpTemplates.PkiExpirationAlert + }); + } + } + }; + + const createPkiAlert = async ({ + projectId, + name, + pkiCollectionId, + alertBeforeDays, + emails, + actorId, + actorAuthMethod, + actor, + actorOrgId + }: TCreateAlertDTO) => { + const { permission } = await permissionService.getProjectPermission( + actor, + actorId, + 
projectId, + actorAuthMethod, + actorOrgId + ); + + ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Create, ProjectPermissionSub.PkiAlerts); + + const pkiCollection = await pkiCollectionDAL.findById(pkiCollectionId); + if (!pkiCollection) throw new NotFoundError({ message: `PKI collection with ID '${pkiCollectionId}' not found` }); + if (pkiCollection.projectId !== projectId) + throw new ForbiddenRequestError({ message: "PKI collection does not belong to the specified project." }); + + const alert = await pkiAlertDAL.create({ + projectId, + pkiCollectionId, + name, + alertBeforeDays, + recipientEmails: emails.join(",") + }); + return alert; + }; + + const getPkiAlertById = async ({ alertId, actorId, actorAuthMethod, actor, actorOrgId }: TGetAlertByIdDTO) => { + const alert = await pkiAlertDAL.findById(alertId); + if (!alert) throw new NotFoundError({ message: `Alert with ID '${alertId}' not found` }); + + const { permission } = await permissionService.getProjectPermission( + actor, + actorId, + alert.projectId, + actorAuthMethod, + actorOrgId + ); + + ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.PkiAlerts); + return alert; + }; + + const updatePkiAlert = async ({ + alertId, + name, + pkiCollectionId, + alertBeforeDays, + emails, + actorId, + actorAuthMethod, + actor, + actorOrgId + }: TUpdateAlertDTO) => { + let alert = await pkiAlertDAL.findById(alertId); + if (!alert) throw new NotFoundError({ message: `Alert with ID '${alertId}' not found` }); + + const { permission } = await permissionService.getProjectPermission( + actor, + actorId, + alert.projectId, + actorAuthMethod, + actorOrgId + ); + + ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Edit, ProjectPermissionSub.PkiAlerts); + + if (pkiCollectionId) { + const pkiCollection = await pkiCollectionDAL.findById(pkiCollectionId); + if (!pkiCollection) throw new NotFoundError({ message: `PKI collection with ID '${pkiCollectionId}' not found` }); + if (pkiCollection.projectId !== alert.projectId) { + throw new ForbiddenRequestError({ message: "PKI collection does not belong to the specified project." 
}); + } + } + + alert = await pkiAlertDAL.updateById(alertId, { + name, + alertBeforeDays, + ...(pkiCollectionId && { pkiCollectionId }), + ...(emails && { recipientEmails: emails.join(",") }) + }); + + return alert; + }; + + const deletePkiAlert = async ({ alertId, actorId, actorAuthMethod, actor, actorOrgId }: TDeleteAlertDTO) => { + let alert = await pkiAlertDAL.findById(alertId); + if (!alert) throw new NotFoundError({ message: `Alert with ID '${alertId}' not found` }); + + const { permission } = await permissionService.getProjectPermission( + actor, + actorId, + alert.projectId, + actorAuthMethod, + actorOrgId + ); + + ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Delete, ProjectPermissionSub.PkiAlerts); + alert = await pkiAlertDAL.deleteById(alertId); + return alert; + }; + + return { + sendPkiItemExpiryNotices, + createPkiAlert, + getPkiAlertById, + updatePkiAlert, + deletePkiAlert + }; +}; diff --git a/backend/src/services/pki-alert/pki-alert-types.ts b/backend/src/services/pki-alert/pki-alert-types.ts new file mode 100644 index 0000000000..c00be0e038 --- /dev/null +++ b/backend/src/services/pki-alert/pki-alert-types.ts @@ -0,0 +1,24 @@ +import { TProjectPermission } from "@app/lib/types"; + +export type TCreateAlertDTO = { + name: string; + pkiCollectionId: string; + alertBeforeDays: number; + emails: string[]; +} & TProjectPermission; + +export type TGetAlertByIdDTO = { + alertId: string; +} & Omit; + +export type TUpdateAlertDTO = { + alertId: string; + name?: string; + pkiCollectionId?: string; + alertBeforeDays?: number; + emails?: string[]; +} & Omit; + +export type TDeleteAlertDTO = { + alertId: string; +} & Omit; diff --git a/backend/src/services/pki-collection/pki-collection-dal.ts b/backend/src/services/pki-collection/pki-collection-dal.ts new file mode 100644 index 0000000000..382b4c6ef9 --- /dev/null +++ b/backend/src/services/pki-collection/pki-collection-dal.ts @@ -0,0 +1,13 @@ +import { TDbClient } from "@app/db"; +import { TableName } from "@app/db/schemas"; +import { ormify } from "@app/lib/knex"; + +export type TPkiCollectionDALFactory = ReturnType; + +export const pkiCollectionDALFactory = (db: TDbClient) => { + const pkiCollectionOrm = ormify(db, TableName.PkiCollection); + + return { + ...pkiCollectionOrm + }; +}; diff --git a/backend/src/services/pki-collection/pki-collection-fns.ts b/backend/src/services/pki-collection/pki-collection-fns.ts new file mode 100644 index 0000000000..2b74bcf5b6 --- /dev/null +++ b/backend/src/services/pki-collection/pki-collection-fns.ts @@ -0,0 +1,30 @@ +import { TPkiCollectionItems } from "@app/db/schemas"; + +import { PkiItemType } from "./pki-collection-types"; + +/** + * Transforms a PKI Collection Item from the database to the expected API response format + */ +export const transformPkiCollectionItem = (pkiCollectionItem: TPkiCollectionItems) => { + let type: PkiItemType; + let itemId: string; + + if (pkiCollectionItem.caId) { + type = PkiItemType.CA; + itemId = pkiCollectionItem.caId; + } else if (pkiCollectionItem.certId) { + type = PkiItemType.CERTIFICATE; + itemId = pkiCollectionItem.certId; + } else { + throw new Error("Invalid PKI Collection Item: must have either caId or certId"); + } + + return { + id: pkiCollectionItem.id, + pkiCollectionId: pkiCollectionItem.pkiCollectionId, + type, + itemId, + createdAt: pkiCollectionItem.createdAt, + updatedAt: pkiCollectionItem.updatedAt + }; +}; diff --git a/backend/src/services/pki-collection/pki-collection-item-dal.ts 
b/backend/src/services/pki-collection/pki-collection-item-dal.ts new file mode 100644 index 0000000000..de896e15c6 --- /dev/null +++ b/backend/src/services/pki-collection/pki-collection-item-dal.ts @@ -0,0 +1,93 @@ +import { TDbClient } from "@app/db"; +import { TableName, TPkiCollectionItems } from "@app/db/schemas"; +import { DatabaseError } from "@app/lib/errors"; +import { ormify } from "@app/lib/knex"; + +import { PkiItemType } from "./pki-collection-types"; + +export type TPkiCollectionItemDALFactory = ReturnType; + +export const pkiCollectionItemDALFactory = (db: TDbClient) => { + const pkiCollectionItemOrm = ormify(db, TableName.PkiCollectionItem); + + const findPkiCollectionItems = async ({ + collectionId, + type, + offset, + limit + }: { + collectionId: string; + type?: PkiItemType; + offset?: number; + limit?: number; + }) => { + try { + const query = db + .replicaNode()(TableName.PkiCollectionItem) + .select( + "pki_collection_items.*", + db.raw( + `COALESCE("${TableName.CertificateAuthority}"."notBefore", "${TableName.Certificate}"."notBefore") as "notBefore"` + ), + db.raw( + `COALESCE("${TableName.CertificateAuthority}"."notAfter", "${TableName.Certificate}"."notAfter") as "notAfter"` + ), + db.raw( + `COALESCE("${TableName.CertificateAuthority}"."friendlyName", "${TableName.Certificate}"."friendlyName") as "friendlyName"` + ) + ) + .leftJoin( + TableName.CertificateAuthority, + `${TableName.PkiCollectionItem}.caId`, + `${TableName.CertificateAuthority}.id` + ) + .leftJoin(TableName.Certificate, `${TableName.PkiCollectionItem}.certId`, `${TableName.Certificate}.id`) + .where((builder) => { + void builder.where(`${TableName.PkiCollectionItem}.pkiCollectionId`, collectionId); + if (type === PkiItemType.CA) { + void builder.whereNull(`${TableName.PkiCollectionItem}.certId`); + } else if (type === PkiItemType.CERTIFICATE) { + void builder.whereNull(`${TableName.PkiCollectionItem}.caId`); + } + }); + + if (offset) { + void query.offset(offset); + } + if (limit) { + void query.limit(limit); + } + + void query.orderBy(`${TableName.PkiCollectionItem}.createdAt`, "desc"); + + const result = await query; + return result as (TPkiCollectionItems & { notAfter: Date; notBefore: Date; friendlyName: string })[]; + } catch (error) { + throw new DatabaseError({ error, name: "Find all PKI collection items" }); + } + }; + + const countItemsInPkiCollection = async (collectionId: string) => { + try { + interface CountResult { + count: string; + } + + const query = db + .replicaNode()(TableName.PkiCollectionItem) + .where(`${TableName.PkiCollectionItem}.pkiCollectionId`, collectionId); + + const count = await query.count("*").first(); + + return parseInt((count as unknown as CountResult).count || "0", 10); + } catch (error) { + throw new DatabaseError({ error, name: "Count all PKI collection items" }); + } + }; + + return { + ...pkiCollectionItemOrm, + findPkiCollectionItems, + countItemsInPkiCollection + }; +}; diff --git a/backend/src/services/pki-collection/pki-collection-service.ts b/backend/src/services/pki-collection/pki-collection-service.ts new file mode 100644 index 0000000000..ef849c54f9 --- /dev/null +++ b/backend/src/services/pki-collection/pki-collection-service.ts @@ -0,0 +1,331 @@ +import { ForbiddenError } from "@casl/ability"; + +import { TPkiCollectionItems } from "@app/db/schemas"; +import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service"; +import { ProjectPermissionActions, ProjectPermissionSub } from 
"@app/ee/services/permission/project-permission"; +import { BadRequestError, NotFoundError } from "@app/lib/errors"; +import { TCertificateDALFactory } from "@app/services/certificate/certificate-dal"; +import { TCertificateAuthorityDALFactory } from "@app/services/certificate-authority/certificate-authority-dal"; + +import { TPkiCollectionDALFactory } from "./pki-collection-dal"; +import { transformPkiCollectionItem } from "./pki-collection-fns"; +import { TPkiCollectionItemDALFactory } from "./pki-collection-item-dal"; +import { + PkiItemType, + TAddItemToPkiCollectionDTO, + TCreatePkiCollectionDTO, + TDeletePkiCollectionDTO, + TGetPkiCollectionByIdDTO, + TGetPkiCollectionItems, + TRemoveItemFromPkiCollectionDTO, + TUpdatePkiCollectionDTO +} from "./pki-collection-types"; + +type TPkiCollectionServiceFactoryDep = { + pkiCollectionDAL: Pick; + pkiCollectionItemDAL: Pick< + TPkiCollectionItemDALFactory, + "findOne" | "create" | "deleteById" | "findPkiCollectionItems" | "countItemsInPkiCollection" + >; + certificateAuthorityDAL: Pick; + certificateDAL: Pick; + permissionService: Pick; +}; + +export type TPkiCollectionServiceFactory = ReturnType; + +export const pkiCollectionServiceFactory = ({ + pkiCollectionDAL, + pkiCollectionItemDAL, + certificateAuthorityDAL, + certificateDAL, + permissionService +}: TPkiCollectionServiceFactoryDep) => { + const createPkiCollection = async ({ + name, + description, + projectId, + actorId, + actorAuthMethod, + actor, + actorOrgId + }: TCreatePkiCollectionDTO) => { + const { permission } = await permissionService.getProjectPermission( + actor, + actorId, + projectId, + actorAuthMethod, + actorOrgId + ); + + ForbiddenError.from(permission).throwUnlessCan( + ProjectPermissionActions.Create, + ProjectPermissionSub.PkiCollections + ); + + const pkiCollection = await pkiCollectionDAL.create({ + projectId, + name, + description + }); + + return pkiCollection; + }; + + const getPkiCollectionById = async ({ + collectionId, + actorId, + actorAuthMethod, + actor, + actorOrgId + }: TGetPkiCollectionByIdDTO) => { + const pkiCollection = await pkiCollectionDAL.findById(collectionId); + if (!pkiCollection) throw new NotFoundError({ message: `PKI collection with ID '${collectionId}' not found` }); + + const { permission } = await permissionService.getProjectPermission( + actor, + actorId, + pkiCollection.projectId, + actorAuthMethod, + actorOrgId + ); + + ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.PkiCollections); + return pkiCollection; + }; + + const updatePkiCollection = async ({ + collectionId, + name, + description, + actorId, + actorAuthMethod, + actor, + actorOrgId + }: TUpdatePkiCollectionDTO) => { + let pkiCollection = await pkiCollectionDAL.findById(collectionId); + if (!pkiCollection) throw new NotFoundError({ message: `PKI collection with ID '${collectionId}' not found` }); + + const { permission } = await permissionService.getProjectPermission( + actor, + actorId, + pkiCollection.projectId, + actorAuthMethod, + actorOrgId + ); + + ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Edit, ProjectPermissionSub.PkiCollections); + pkiCollection = await pkiCollectionDAL.updateById(collectionId, { + name, + description + }); + + return pkiCollection; + }; + + const deletePkiCollection = async ({ + collectionId, + actorId, + actorAuthMethod, + actor, + actorOrgId + }: TDeletePkiCollectionDTO) => { + let pkiCollection = await pkiCollectionDAL.findById(collectionId); + if (!pkiCollection) 
throw new NotFoundError({ message: `PKI collection with ID '${collectionId}' not found` }); + + const { permission } = await permissionService.getProjectPermission( + actor, + actorId, + pkiCollection.projectId, + actorAuthMethod, + actorOrgId + ); + + ForbiddenError.from(permission).throwUnlessCan( + ProjectPermissionActions.Delete, + ProjectPermissionSub.PkiCollections + ); + pkiCollection = await pkiCollectionDAL.deleteById(collectionId); + return pkiCollection; + }; + + const getPkiCollectionItems = async ({ + collectionId, + type, + offset = 0, + limit = 25, + actorId, + actorAuthMethod, + actor, + actorOrgId + }: TGetPkiCollectionItems) => { + const pkiCollection = await pkiCollectionDAL.findById(collectionId); + if (!pkiCollection) throw new NotFoundError({ message: `PKI collection with ID '${collectionId}' not found` }); + + const { permission } = await permissionService.getProjectPermission( + actor, + actorId, + pkiCollection.projectId, + actorAuthMethod, + actorOrgId + ); + + ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.PkiCollections); + + const pkiCollectionItems = await pkiCollectionItemDAL.findPkiCollectionItems({ + collectionId, + type, + offset, + limit + }); + + const count = await pkiCollectionItemDAL.countItemsInPkiCollection(collectionId); + + return { + pkiCollection, + pkiCollectionItems: pkiCollectionItems.map((p) => ({ + ...transformPkiCollectionItem(p), + notBefore: p.notBefore, + notAfter: p.notAfter, + friendlyName: p.friendlyName + })), + totalCount: count + }; + }; + + const addItemToPkiCollection = async ({ + collectionId, + actorId, + actorAuthMethod, + actor, + actorOrgId, + type, + itemId + }: TAddItemToPkiCollectionDTO) => { + const pkiCollection = await pkiCollectionDAL.findById(collectionId); + if (!pkiCollection) throw new NotFoundError({ message: `PKI collection with ID '${collectionId}' not found` }); + + const { permission } = await permissionService.getProjectPermission( + actor, + actorId, + pkiCollection.projectId, + actorAuthMethod, + actorOrgId + ); + + ForbiddenError.from(permission).throwUnlessCan( + ProjectPermissionActions.Create, + ProjectPermissionSub.PkiCollections + ); + + let pkiCollectionItem: TPkiCollectionItems; + switch (type) { + case PkiItemType.CA: { + // validate that CA has not already been added to PKI collection + const isCaAdded = await pkiCollectionItemDAL.findOne({ + pkiCollectionId: collectionId, + caId: itemId + }); + + if (isCaAdded) throw new BadRequestError({ message: "CA is already part of the PKI collection" }); + + // validate that there exists a CA in same project as PKI collection + const ca = await certificateAuthorityDAL.findOne({ + id: itemId, + projectId: pkiCollection.projectId + }); + + if (!ca) throw new NotFoundError({ message: `CA with ID '${itemId}' not found` }); + + pkiCollectionItem = await pkiCollectionItemDAL.create({ + pkiCollectionId: collectionId, + caId: itemId + }); + break; + } + case PkiItemType.CERTIFICATE: { + // validate that certificate has not already been added to PKI collection + const isCertAdded = await pkiCollectionItemDAL.findOne({ + pkiCollectionId: collectionId, + certId: itemId + }); + if (isCertAdded) throw new BadRequestError({ message: "Certificate already part of the PKI collection" }); + + // validate that there exists a certificate in same project as PKI collection + const cas = await certificateAuthorityDAL.find({ projectId: pkiCollection.projectId }); + + // TODO: consider making this more efficient + const 
[certificate] = await certificateDAL.find({ + $in: { + caId: cas.map((ca) => ca.id) + }, + id: itemId + }); + if (!certificate) throw new NotFoundError({ message: `Certificate with ID '${itemId}' not found` }); + + pkiCollectionItem = await pkiCollectionItemDAL.create({ + pkiCollectionId: collectionId, + certId: itemId + }); + break; + } + default: { + throw new BadRequestError({ message: "Invalid PKI item type" }); + } + } + + return { + pkiCollection, + pkiCollectionItem: transformPkiCollectionItem(pkiCollectionItem) + }; + }; + + const removeItemFromPkiCollection = async ({ + collectionId, + actorId, + actorAuthMethod, + actor, + actorOrgId, + itemId + }: TRemoveItemFromPkiCollectionDTO) => { + const pkiCollection = await pkiCollectionDAL.findById(collectionId); + if (!pkiCollection) throw new NotFoundError({ message: `PKI collection with ID '${collectionId}' not found` }); + + let pkiCollectionItem = await pkiCollectionItemDAL.findOne({ + pkiCollectionId: collectionId, + id: itemId + }); + + if (!pkiCollectionItem) throw new NotFoundError({ message: `PKI collection item with ID '${itemId}' not found` }); + + const { permission } = await permissionService.getProjectPermission( + actor, + actorId, + pkiCollection.projectId, + actorAuthMethod, + actorOrgId + ); + + ForbiddenError.from(permission).throwUnlessCan( + ProjectPermissionActions.Delete, + ProjectPermissionSub.PkiCollections + ); + + pkiCollectionItem = await pkiCollectionItemDAL.deleteById(itemId); + + return { + pkiCollection, + pkiCollectionItem: transformPkiCollectionItem(pkiCollectionItem) + }; + }; + + return { + createPkiCollection, + getPkiCollectionById, + updatePkiCollection, + deletePkiCollection, + getPkiCollectionItems, + addItemToPkiCollection, + removeItemFromPkiCollection + }; +}; diff --git a/backend/src/services/pki-collection/pki-collection-types.ts b/backend/src/services/pki-collection/pki-collection-types.ts new file mode 100644 index 0000000000..e82ca31d0f --- /dev/null +++ b/backend/src/services/pki-collection/pki-collection-types.ts @@ -0,0 +1,48 @@ +import { TProjectPermission } from "@app/lib/types"; + +export type TCreatePkiCollectionDTO = { + name: string; + description: string; +} & TProjectPermission; + +export type TGetPkiCollectionByIdDTO = { + collectionId: string; +} & Omit; + +export type TUpdatePkiCollectionDTO = { + collectionId: string; + name?: string; + description?: string; +} & Omit; + +export type TDeletePkiCollectionDTO = { + collectionId: string; +} & Omit; + +export enum PkiItemType { + CERTIFICATE = "certificate", + CA = "ca" +} + +export const pkiItemTypeToNameMap: { [K in PkiItemType]: string } = { + [PkiItemType.CA]: "CA", + [PkiItemType.CERTIFICATE]: "Certificate" +}; + +export type TGetPkiCollectionItems = { + collectionId: string; + type?: PkiItemType; + offset: number; + limit: number; +} & Omit; + +export type TAddItemToPkiCollectionDTO = { + collectionId: string; + type: PkiItemType; + itemId: string; +} & Omit; + +export type TRemoveItemFromPkiCollectionDTO = { + collectionId: string; + itemId: string; +} & Omit; diff --git a/backend/src/services/project-bot/project-bot-dal.ts b/backend/src/services/project-bot/project-bot-dal.ts index 74abf8f210..ecb23f78b9 100644 --- a/backend/src/services/project-bot/project-bot-dal.ts +++ b/backend/src/services/project-bot/project-bot-dal.ts @@ -1,7 +1,7 @@ import { Knex } from "knex"; import { TDbClient } from "@app/db"; -import { TableName, TProjectBots } from "@app/db/schemas"; +import { TableName, TProjectBots, TUserEncryptionKeys } 
from "@app/db/schemas"; import { DatabaseError } from "@app/lib/errors"; import { ormify, selectAllTableCols } from "@app/lib/knex"; @@ -12,7 +12,7 @@ export const projectBotDALFactory = (db: TDbClient) => { const findOne = async (filter: Partial, tx?: Knex) => { try { - const bot = await (tx || db)(TableName.ProjectBot) + const bot = await (tx || db.replicaNode())(TableName.ProjectBot) .where(filter) .leftJoin(TableName.Users, `${TableName.ProjectBot}.senderId`, `${TableName.Users}.id`) .leftJoin(TableName.UserEncryptionKey, `${TableName.UserEncryptionKey}.userId`, `${TableName.Users}.id`) @@ -41,5 +41,44 @@ export const projectBotDALFactory = (db: TDbClient) => { } }; - return { ...projectBotOrm, findOne, findProjectByBotId }; + const findProjectUserWorkspaceKey = async (projectId: string) => { + try { + const doc = await db + .replicaNode()(TableName.ProjectMembership) + .where(`${TableName.ProjectMembership}.projectId` as "projectId", projectId) + .where(`${TableName.ProjectKeys}.projectId` as "projectId", projectId) + .where(`${TableName.Users}.isGhost` as "isGhost", false) + .join(TableName.Users, `${TableName.ProjectMembership}.userId`, `${TableName.Users}.id`) + .join(TableName.ProjectKeys, `${TableName.ProjectMembership}.userId`, `${TableName.ProjectKeys}.receiverId`) + .join( + TableName.UserEncryptionKey, + `${TableName.UserEncryptionKey}.userId`, + `${TableName.Users}.id` + ) + .join( + db(TableName.UserEncryptionKey).as("senderUserEncryption"), + `${TableName.ProjectKeys}.senderId`, + `senderUserEncryption.userId` + ) + .whereNotNull(`${TableName.UserEncryptionKey}.serverEncryptedPrivateKey`) + .whereNotNull(`${TableName.UserEncryptionKey}.serverEncryptedPrivateKeyIV`) + .whereNotNull(`${TableName.UserEncryptionKey}.serverEncryptedPrivateKeyTag`) + .select( + db.ref("serverEncryptedPrivateKey").withSchema(TableName.UserEncryptionKey), + db.ref("serverEncryptedPrivateKeyTag").withSchema(TableName.UserEncryptionKey), + db.ref("serverEncryptedPrivateKeyIV").withSchema(TableName.UserEncryptionKey), + db.ref("serverEncryptedPrivateKeyEncoding").withSchema(TableName.UserEncryptionKey), + db.ref("encryptedKey").withSchema(TableName.ProjectKeys).as("projectEncryptedKey"), + db.ref("nonce").withSchema(TableName.ProjectKeys).as("projectKeyNonce"), + db.ref("publicKey").withSchema("senderUserEncryption").as("senderPublicKey"), + db.ref("id").withSchema(TableName.Users).as("userId") + ) + .first(); + return doc; + } catch (error) { + throw new DatabaseError({ error, name: "Find all project members" }); + } + }; + + return { ...projectBotOrm, findOne, findProjectByBotId, findProjectUserWorkspaceKey }; }; diff --git a/backend/src/services/project-bot/project-bot-fns.ts b/backend/src/services/project-bot/project-bot-fns.ts index 00604b37fd..a8e507bc75 100644 --- a/backend/src/services/project-bot/project-bot-fns.ts +++ b/backend/src/services/project-bot/project-bot-fns.ts @@ -1,6 +1,12 @@ import { SecretKeyEncoding } from "@app/db/schemas"; -import { decryptAsymmetric, infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption"; -import { BadRequestError } from "@app/lib/errors"; +import { + decryptAsymmetric, + encryptAsymmetric, + generateAsymmetricKeyPair, + infisicalSymmetricDecrypt, + infisicalSymmetricEncypt +} from "@app/lib/crypto/encryption"; +import { NotFoundError } from "@app/lib/errors"; import { TProjectBotDALFactory } from "@app/services/project-bot/project-bot-dal"; import { TProjectDALFactory } from "../project/project-dal"; @@ -18,25 +24,94 @@ export const 
getBotKeyFnFactory = ( projectBotDAL: TProjectBotDALFactory, projectDAL: Pick ) => { - const getBotKeyFn = async (projectId: string) => { + const getBotKeyFn = async (projectId: string, shouldGetBotKey?: boolean) => { const project = await projectDAL.findById(projectId); - if (!project) throw new BadRequestError({ message: "Project not found during bot lookup." }); + if (!project) + throw new NotFoundError({ + message: `Project with ID '${projectId}' not found during bot lookup. Are you sure you are using the correct project ID?` + }); + + if (project.version === 3 && !shouldGetBotKey) { + return { project, shouldUseSecretV2Bridge: true }; + } const bot = await projectBotDAL.findOne({ projectId: project.id }); + if (!bot || !bot.isActive || !bot.encryptedProjectKey || !bot.encryptedProjectKeyNonce) { + // trying to set bot automatically + const projectV1Keys = await projectBotDAL.findProjectUserWorkspaceKey(projectId); + if (!projectV1Keys) { + throw new NotFoundError({ + message: `Project bot not found for project with ID '${projectId}'. Please ask an administrator to log-in to the Infisical Console.` + }); + } + let userPrivateKey = ""; + if ( + projectV1Keys?.serverEncryptedPrivateKey && + projectV1Keys.serverEncryptedPrivateKeyIV && + projectV1Keys.serverEncryptedPrivateKeyTag && + projectV1Keys.serverEncryptedPrivateKeyEncoding + ) { + userPrivateKey = infisicalSymmetricDecrypt({ + iv: projectV1Keys.serverEncryptedPrivateKeyIV, + tag: projectV1Keys.serverEncryptedPrivateKeyTag, + ciphertext: projectV1Keys.serverEncryptedPrivateKey, + keyEncoding: projectV1Keys.serverEncryptedPrivateKeyEncoding as SecretKeyEncoding + }); + } + const workspaceKey = decryptAsymmetric({ + ciphertext: projectV1Keys.projectEncryptedKey, + nonce: projectV1Keys.projectKeyNonce, + publicKey: projectV1Keys.senderPublicKey, + privateKey: userPrivateKey + }); + const botKey = generateAsymmetricKeyPair(); + const { iv, tag, ciphertext, encoding, algorithm } = infisicalSymmetricEncypt(botKey.privateKey); + const encryptedWorkspaceKey = encryptAsymmetric(workspaceKey, botKey.publicKey, userPrivateKey); - if (!bot) throw new BadRequestError({ message: "Failed to find bot key" }); - if (!bot.isActive) throw new BadRequestError({ message: "Bot is not active" }); - if (!bot.encryptedProjectKeyNonce || !bot.encryptedProjectKey) - throw new BadRequestError({ message: "Encryption key missing" }); + let botId; + if (!bot) { + const newBot = await projectBotDAL.create({ + name: "Infisical Bot (Ghost)", + projectId, + isActive: true, + tag, + iv, + encryptedPrivateKey: ciphertext, + publicKey: botKey.publicKey, + algorithm, + keyEncoding: encoding, + encryptedProjectKey: encryptedWorkspaceKey.ciphertext, + encryptedProjectKeyNonce: encryptedWorkspaceKey.nonce, + senderId: projectV1Keys.userId + }); + botId = newBot.id; + } else { + const updatedBot = await projectBotDAL.updateById(bot.id, { + isActive: true, + tag, + iv, + encryptedPrivateKey: ciphertext, + publicKey: botKey.publicKey, + algorithm, + keyEncoding: encoding, + encryptedProjectKey: encryptedWorkspaceKey.ciphertext, + encryptedProjectKeyNonce: encryptedWorkspaceKey.nonce, + senderId: projectV1Keys.userId + }); + botId = updatedBot.id; + } + + return { botKey: workspaceKey, project, shouldUseSecretV2Bridge: false, bot: { id: botId } }; + } const botPrivateKey = getBotPrivateKey({ bot }); - - return decryptAsymmetric({ + const botKey = decryptAsymmetric({ ciphertext: bot.encryptedProjectKey, privateKey: botPrivateKey, nonce: bot.encryptedProjectKeyNonce, publicKey: 
bot.sender.publicKey }); + return { botKey, project, shouldUseSecretV2Bridge: false, bot: { id: bot.id } }; }; return getBotKeyFn; diff --git a/backend/src/services/project-bot/project-bot-service.ts b/backend/src/services/project-bot/project-bot-service.ts index ce7782a80f..5d7f78c1bb 100644 --- a/backend/src/services/project-bot/project-bot-service.ts +++ b/backend/src/services/project-bot/project-bot-service.ts @@ -5,7 +5,7 @@ import { TPermissionServiceFactory } from "@app/ee/services/permission/permissio import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission"; import { generateAsymmetricKeyPair } from "@app/lib/crypto"; import { infisicalSymmetricEncypt } from "@app/lib/crypto/encryption"; -import { BadRequestError } from "@app/lib/errors"; +import { BadRequestError, NotFoundError } from "@app/lib/errors"; import { TProjectDALFactory } from "../project/project-dal"; import { TProjectBotDALFactory } from "./project-bot-dal"; @@ -27,8 +27,8 @@ export const projectBotServiceFactory = ({ }: TProjectBotServiceFactoryDep) => { const getBotKeyFn = getBotKeyFnFactory(projectBotDAL, projectDAL); - const getBotKey = async (projectId: string) => { - return getBotKeyFn(projectId); + const getBotKey = async (projectId: string, shouldGetBotKey?: boolean) => { + return getBotKeyFn(projectId, shouldGetBotKey); }; const findBotByProjectId = async ({ @@ -60,7 +60,7 @@ export const projectBotServiceFactory = ({ const project = await projectDAL.findById(projectId, tx); - if (project.version === ProjectVersion.V2) { + if (project.version === ProjectVersion.V2 || project.version === ProjectVersion.V3) { throw new BadRequestError({ message: "Failed to create bot, project is upgraded." }); } @@ -91,7 +91,7 @@ export const projectBotServiceFactory = ({ const bot = await projectBotDAL.findProjectByBotId(botId); return bot; } catch (e) { - throw new BadRequestError({ message: "Failed to find bot by ID" }); + throw new NotFoundError({ message: `Project bot with ID '${botId}' not found` }); } }; @@ -105,7 +105,7 @@ export const projectBotServiceFactory = ({ isActive }: TSetActiveStateDTO) => { const bot = await projectBotDAL.findById(botId); - if (!bot) throw new BadRequestError({ message: "Bot not found" }); + if (!bot) throw new NotFoundError({ message: `Project bot with ID '${botId}' not found` }); const { permission } = await permissionService.getProjectPermission( actor, @@ -119,7 +119,7 @@ export const projectBotServiceFactory = ({ const project = await projectBotDAL.findProjectByBotId(botId); if (!project) { - throw new BadRequestError({ message: "Failed to find project by bot ID" }); + throw new NotFoundError({ message: `Project not found for bot with ID '${botId}'` }); } if (project.version === ProjectVersion.V2) { @@ -128,7 +128,9 @@ export const projectBotServiceFactory = ({ if (isActive) { if (!botKey?.nonce || !botKey?.encryptedKey) { - throw new BadRequestError({ message: "Failed to set bot active - missing bot key" }); + throw new NotFoundError({ + message: `Bot key not found for bot in project with ID '${botId}'. Failed to set bot state to active.` + }); } const doc = await projectBotDAL.updateById(botId, { isActive: true, @@ -136,7 +138,8 @@ export const projectBotServiceFactory = ({ encryptedProjectKeyNonce: botKey.nonce, senderId: actorId }); - if (!doc) throw new BadRequestError({ message: "Failed to update bot active state" }); + if (!doc) + throw new BadRequestError({ message: `Project bot with ID '${botId}' not found. 
diff --git a/backend/src/services/project-bot/project-bot-service.ts b/backend/src/services/project-bot/project-bot-service.ts
index ce7782a80f..5d7f78c1bb 100644
--- a/backend/src/services/project-bot/project-bot-service.ts
+++ b/backend/src/services/project-bot/project-bot-service.ts
@@ -5,7 +5,7 @@ import { TPermissionServiceFactory } from "@app/ee/services/permission/permissio
 import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
 import { generateAsymmetricKeyPair } from "@app/lib/crypto";
 import { infisicalSymmetricEncypt } from "@app/lib/crypto/encryption";
-import { BadRequestError } from "@app/lib/errors";
+import { BadRequestError, NotFoundError } from "@app/lib/errors";
 import { TProjectDALFactory } from "../project/project-dal";
 import { TProjectBotDALFactory } from "./project-bot-dal";
@@ -27,8 +27,8 @@ export const projectBotServiceFactory = ({
 }: TProjectBotServiceFactoryDep) => {
   const getBotKeyFn = getBotKeyFnFactory(projectBotDAL, projectDAL);
-  const getBotKey = async (projectId: string) => {
-    return getBotKeyFn(projectId);
+  const getBotKey = async (projectId: string, shouldGetBotKey?: boolean) => {
+    return getBotKeyFn(projectId, shouldGetBotKey);
   };
   const findBotByProjectId = async ({
@@ -60,7 +60,7 @@ export const projectBotServiceFactory = ({
   const project = await projectDAL.findById(projectId, tx);
-  if (project.version === ProjectVersion.V2) {
+  if (project.version === ProjectVersion.V2 || project.version === ProjectVersion.V3) {
     throw new BadRequestError({ message: "Failed to create bot, project is upgraded." });
   }
@@ -91,7 +91,7 @@ export const projectBotServiceFactory = ({
     const bot = await projectBotDAL.findProjectByBotId(botId);
     return bot;
   } catch (e) {
-    throw new BadRequestError({ message: "Failed to find bot by ID" });
+    throw new NotFoundError({ message: `Project bot with ID '${botId}' not found` });
   }
 };
@@ -105,7 +105,7 @@ export const projectBotServiceFactory = ({
   isActive
 }: TSetActiveStateDTO) => {
   const bot = await projectBotDAL.findById(botId);
-  if (!bot) throw new BadRequestError({ message: "Bot not found" });
+  if (!bot) throw new NotFoundError({ message: `Project bot with ID '${botId}' not found` });
   const { permission } = await permissionService.getProjectPermission(
     actor,
@@ -119,7 +119,7 @@ export const projectBotServiceFactory = ({
   const project = await projectBotDAL.findProjectByBotId(botId);
   if (!project) {
-    throw new BadRequestError({ message: "Failed to find project by bot ID" });
+    throw new NotFoundError({ message: `Project not found for bot with ID '${botId}'` });
   }
   if (project.version === ProjectVersion.V2) {
@@ -128,7 +128,9 @@ export const projectBotServiceFactory = ({
   if (isActive) {
     if (!botKey?.nonce || !botKey?.encryptedKey) {
-      throw new BadRequestError({ message: "Failed to set bot active - missing bot key" });
+      throw new NotFoundError({
+        message: `Bot key not found for bot with ID '${botId}'. Failed to set bot state to active.`
+      });
     }
     const doc = await projectBotDAL.updateById(botId, {
       isActive: true,
@@ -136,7 +138,8 @@ export const projectBotServiceFactory = ({
       encryptedProjectKeyNonce: botKey.nonce,
       senderId: actorId
     });
-    if (!doc) throw new BadRequestError({ message: "Failed to update bot active state" });
+    if (!doc)
+      throw new BadRequestError({ message: `Project bot with ID '${botId}' not found. Failed to update bot.` });
    return doc;
  }
@@ -145,7 +148,7 @@ export const projectBotServiceFactory = ({
   encryptedProjectKey: null,
   encryptedProjectKeyNonce: null
 });
-  if (!doc) throw new BadRequestError({ message: "Failed to update bot active state" });
+  if (!doc) throw new BadRequestError({ message: `Project bot with ID '${botId}' not found. Failed to update bot.` });
  return doc;
};

diff --git a/backend/src/services/project-env/project-env-dal.ts b/backend/src/services/project-env/project-env-dal.ts
index 42a2342987..15e37bfdc9 100644
--- a/backend/src/services/project-env/project-env-dal.ts
+++ b/backend/src/services/project-env/project-env-dal.ts
@@ -12,7 +12,9 @@ export const projectEnvDALFactory = (db: TDbClient) => {
   const findBySlugs = async (projectId: string, env: string[], tx?: Knex) => {
     try {
-      const envs = await (tx || db)(TableName.Environment).where("projectId", projectId).whereIn("slug", env);
+      const envs = await (tx || db.replicaNode())(TableName.Environment)
+        .where("projectId", projectId)
+        .whereIn("slug", env);
       return envs;
     } catch (error) {
       throw new DatabaseError({ error, name: "Find by slugs" });
@@ -22,10 +24,15 @@ export const projectEnvDALFactory = (db: TDbClient) => {
   // we are using position based sorting as it's a small list
   // this will return the last value of the position in a folder with secret imports
   const findLastEnvPosition = async (projectId: string, tx?: Knex) => {
+    // acquire update lock on project environments.
+    // this ensures that concurrent invocations will wait and execute sequentially
+    await (tx || db)(TableName.Environment).where({ projectId }).forUpdate();
+
     const lastPos = await (tx || db)(TableName.Environment)
       .where({ projectId })
       .max("position", { as: "position" })
       .first();
+
     return lastPos?.position || 0;
   };
@@ -58,10 +65,16 @@ export const projectEnvDALFactory = (db: TDbClient) => {
     }
   };
+  const shiftPositions = async (projectId: string, pos: number, tx?: Knex) => {
+    // Shift all positions >= the new position up by 1
+    await (tx || db)(TableName.Environment).where({ projectId }).where("position", ">=", pos).increment("position", 1);
+  };
+
   return {
     ...projectEnvOrm,
     findBySlugs,
     findLastEnvPosition,
-    updateAllPosition
+    updateAllPosition,
+    shiftPositions
   };
 };

diff --git a/backend/src/services/project-env/project-env-service.ts b/backend/src/services/project-env/project-env-service.ts
index 2acda33c00..a54e8de43e 100644
--- a/backend/src/services/project-env/project-env-service.ts
+++ b/backend/src/services/project-env/project-env-service.ts
@@ -3,12 +3,14 @@ import { ForbiddenError } from "@casl/ability";
 import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
 import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
 import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
-import { BadRequestError } from "@app/lib/errors";
+import { KeyStorePrefixes, TKeyStoreFactory } from "@app/keystore/keystore";
+import { BadRequestError, NotFoundError } from "@app/lib/errors";
+import { logger } from "@app/lib/logger";
 import { TProjectDALFactory } from "../project/project-dal";
 import { TSecretFolderDALFactory } from "../secret-folder/secret-folder-dal";
 import { TProjectEnvDALFactory } from "./project-env-dal";
-import { TCreateEnvDTO, TDeleteEnvDTO, TUpdateEnvDTO } from "./project-env-types";
+import { TCreateEnvDTO, TDeleteEnvDTO, TGetEnvDTO, TUpdateEnvDTO } from "./project-env-types";
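Together, the new shiftPositions helper and the position-aware create path that follows implement insert-at-position: bump every row at or after the target slot, then write the new row into the gap, all in one transaction. A self-contained sketch of the same pattern against a plain Knex handle, with a hypothetical project_environments table standing in for TableName.Environment:

import { Knex } from "knex";

// hypothetical table/column names; the real DAL resolves them through TableName and the ORM layer
const insertEnvironmentAt = async (db: Knex, projectId: string, position: number, env: { name: string; slug: string }) =>
  db.transaction(async (trx) => {
    // make room: bump every environment at or after the target slot
    await trx("project_environments")
      .where({ projectId })
      .where("position", ">=", position)
      .increment("position", 1);

    // the target slot is now free, so the new row can take it directly
    const [doc] = await trx("project_environments")
      .insert({ ...env, projectId, position })
      .returning("*");
    return doc;
  });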
type TProjectEnvServiceFactoryDep = {
  projectEnvDAL: TProjectEnvDALFactory;
@@ -16,6 +18,7 @@ type TProjectEnvServiceFactoryDep = {
  projectDAL: Pick;
  permissionService: Pick;
  licenseService: Pick;
+  keyStore: Pick;
};

export type TProjectEnvServiceFactory = ReturnType;
@@ -24,6 +27,7 @@ export const projectEnvServiceFactory = ({
  projectEnvDAL,
  permissionService,
  licenseService,
+  keyStore,
  projectDAL,
  folderDAL
}: TProjectEnvServiceFactoryDep) => {
@@ -33,6 +37,7 @@ export const projectEnvServiceFactory = ({
    actor,
    actorOrgId,
    actorAuthMethod,
+   position,
    name,
    slug
  }: TCreateEnvDTO) => {
@@ -45,32 +50,72 @@ export const projectEnvServiceFactory = ({
    );
    ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Create, ProjectPermissionSub.Environments);
-    const envs = await projectEnvDAL.find({ projectId });
-    const existingEnv = envs.find(({ slug: envSlug }) => envSlug === slug);
-    if (existingEnv)
-      throw new BadRequestError({
-        message: "Environment with slug already exist",
-        name: "Create envv"
+    const lock = await keyStore
+      .acquireLock([KeyStorePrefixes.ProjectEnvironmentLock(projectId)], 5000)
+      .catch(() => null);
+
+    try {
+      if (!lock) {
+        await keyStore.waitTillReady({
+          key: KeyStorePrefixes.WaitUntilReadyProjectEnvironmentOperation(projectId),
+          keyCheckCb: (val) => val === "true",
+          waitingCb: () => logger.debug("Create project environment. Waiting for project environment operation to complete"),
+          delay: 500
+        });
+      }
+
+      const envs = await projectEnvDAL.find({ projectId });
+      const existingEnv = envs.find(({ slug: envSlug }) => envSlug === slug);
+      if (existingEnv)
+        throw new BadRequestError({
+          message: "Environment with slug already exists",
+          name: "CreateEnvironment"
+        });
+
+      const project = await projectDAL.findById(projectId);
+      const plan = await licenseService.getPlan(project.orgId);
+      if (plan.environmentLimit !== null && envs.length >= plan.environmentLimit) {
+        // case: limit imposed on number of environments allowed
+        // case: number of environments used exceeds the number of environments allowed
+        throw new BadRequestError({
+          message:
+            "Failed to create environment due to environment limit reached. Upgrade plan to create more environments."
+        });
+      }
+
+      const env = await projectEnvDAL.transaction(async (tx) => {
+        if (position !== undefined) {
+          // Check if there's an environment at the specified position
+          const existingEnvWithPosition = await projectEnvDAL.findOne({ projectId, position }, tx);
+
+          // If there is, then shift positions
+          if (existingEnvWithPosition) {
+            await projectEnvDAL.shiftPositions(projectId, position, tx);
+          }
+
+          const doc = await projectEnvDAL.create({ slug, name, projectId, position }, tx);
+          await folderDAL.create({ name: "root", parentId: null, envId: doc.id, version: 1 }, tx);
+
+          return doc;
+        }
+        // If no position is specified, add to the end
+        const lastPos = await projectEnvDAL.findLastEnvPosition(projectId, tx);
+        const doc = await projectEnvDAL.create({ slug, name, projectId, position: lastPos + 1 }, tx);
+        await folderDAL.create({ name: "root", parentId: null, envId: doc.id, version: 1 }, tx);
+
+        return doc;
      });
-    const project = await projectDAL.findById(projectId);
-    const plan = await licenseService.getPlan(project.orgId);
-    if (plan.environmentLimit !== null && envs.length >= plan.environmentLimit) {
-      // case: limit imposed on number of environments allowed
-      // case: number of environments used exceeds the number of environments allowed
-      throw new BadRequestError({
-        message:
-          "Failed to create environment due to environment limit reached.
Upgrade plan to create more environments." - }); + await keyStore.setItemWithExpiry( + KeyStorePrefixes.WaitUntilReadyProjectEnvironmentOperation(projectId), + 10, + "true" + ); + + return env; + } finally { + await lock?.release(); } - - const env = await projectEnvDAL.transaction(async (tx) => { - const lastPos = await projectEnvDAL.findLastEnvPosition(projectId, tx); - const doc = await projectEnvDAL.create({ slug, name, projectId, position: lastPos + 1 }, tx); - await folderDAL.create({ name: "root", parentId: null, envId: doc.id, version: 1 }, tx); - return doc; - }); - return env; }; const updateEnvironment = async ({ @@ -93,26 +138,57 @@ export const projectEnvServiceFactory = ({ ); ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Edit, ProjectPermissionSub.Environments); - const oldEnv = await projectEnvDAL.findOne({ id, projectId }); - if (!oldEnv) throw new BadRequestError({ message: "Environment not found" }); + const lock = await keyStore + .acquireLock([KeyStorePrefixes.ProjectEnvironmentLock(projectId)], 5000) + .catch(() => null); - if (slug) { - const existingEnv = await projectEnvDAL.findOne({ slug, projectId }); - if (existingEnv && existingEnv.id !== id) { - throw new BadRequestError({ - message: "Environment with slug already exist", - name: "Create envv" + try { + if (!lock) { + await keyStore.waitTillReady({ + key: KeyStorePrefixes.WaitUntilReadyProjectEnvironmentOperation(projectId), + keyCheckCb: (val) => val === "true", + waitingCb: () => logger.debug("Update project environment. Waiting for project environment update"), + delay: 500 }); } - } - const env = await projectEnvDAL.transaction(async (tx) => { - if (position) { - await projectEnvDAL.updateAllPosition(projectId, oldEnv.position, position, tx); + const oldEnv = await projectEnvDAL.findOne({ id, projectId }); + if (!oldEnv) { + throw new NotFoundError({ + message: `Environment with id '${id}' in project with ID '${projectId}' not found` + }); } - return projectEnvDAL.updateById(oldEnv.id, { name, slug, position }, tx); - }); - return { environment: env, old: oldEnv }; + if (slug) { + const existingEnv = await projectEnvDAL.findOne({ slug, projectId }); + if (existingEnv && existingEnv.id !== id) { + throw new BadRequestError({ + message: "Environment with slug already exist", + name: "UpdateEnvironment" + }); + } + } + + const env = await projectEnvDAL.transaction(async (tx) => { + if (position) { + const existingEnvWithPosition = await projectEnvDAL.findOne({ projectId, position }, tx); + + if (existingEnvWithPosition && existingEnvWithPosition.id !== oldEnv.id) { + await projectEnvDAL.updateAllPosition(projectId, oldEnv.position, position, tx); + } + } + return projectEnvDAL.updateById(oldEnv.id, { name, slug, position }, tx); + }); + + await keyStore.setItemWithExpiry( + KeyStorePrefixes.WaitUntilReadyProjectEnvironmentOperation(projectId), + 10, + "true" + ); + + return { environment: env, old: oldEnv }; + } finally { + await lock?.release(); + } }; const deleteEnvironment = async ({ projectId, actor, actorId, actorOrgId, actorAuthMethod, id }: TDeleteEnvDTO) => { @@ -125,23 +201,69 @@ export const projectEnvServiceFactory = ({ ); ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Delete, ProjectPermissionSub.Environments); - const env = await projectEnvDAL.transaction(async (tx) => { - const [doc] = await projectEnvDAL.delete({ id, projectId }, tx); - if (!doc) - throw new BadRequestError({ - message: "Env doesn't exist", - name: "Re-order env" - }); + const 
lock = await keyStore
+      .acquireLock([KeyStorePrefixes.ProjectEnvironmentLock(projectId)], 5000)
+      .catch(() => null);

-      await projectEnvDAL.updateAllPosition(projectId, doc.position, -1, tx);
-      return doc;
-    });
-    return env;
+    try {
+      if (!lock) {
+        await keyStore.waitTillReady({
+          key: KeyStorePrefixes.WaitUntilReadyProjectEnvironmentOperation(projectId),
+          keyCheckCb: (val) => val === "true",
+          waitingCb: () => logger.debug("Delete project environment. Waiting for project environment operation to complete"),
+          delay: 500
+        });
+      }
+
+      const env = await projectEnvDAL.transaction(async (tx) => {
+        const [doc] = await projectEnvDAL.delete({ id, projectId }, tx);
+        if (!doc)
+          throw new NotFoundError({
+            message: `Environment with id '${id}' in project with ID '${projectId}' not found`,
+            name: "DeleteEnvironment"
+          });
+
+        return doc;
+      });
+
+      await keyStore.setItemWithExpiry(
+        KeyStorePrefixes.WaitUntilReadyProjectEnvironmentOperation(projectId),
+        10,
+        "true"
+      );
+
+      return env;
+    } finally {
+      await lock?.release();
+    }
+  };
+
+  const getEnvironmentById = async ({ actor, actorId, actorOrgId, actorAuthMethod, id }: TGetEnvDTO) => {
+    const environment = await projectEnvDAL.findById(id);
+
+    if (!environment) {
+      throw new NotFoundError({
+        message: `Environment with ID '${id}' not found`
+      });
+    }
+
+    const { permission } = await permissionService.getProjectPermission(
+      actor,
+      actorId,
+      environment.projectId,
+      actorAuthMethod,
+      actorOrgId
+    );
+
+    ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Environments);
+
+    return environment;
  };

  return {
    createEnvironment,
    updateEnvironment,
-    deleteEnvironment
+    deleteEnvironment,
+    getEnvironmentById
  };
};

diff --git a/backend/src/services/project-env/project-env-types.ts b/backend/src/services/project-env/project-env-types.ts
index 1cd8c8dd03..a87c76d4db 100644
--- a/backend/src/services/project-env/project-env-types.ts
+++ b/backend/src/services/project-env/project-env-types.ts
@@ -3,6 +3,7 @@ import { TProjectPermission } from "@app/lib/types";
 export type TCreateEnvDTO = {
   name: string;
   slug: string;
+  position?: number;
 } & TProjectPermission;
 export type TUpdateEnvDTO = {
@@ -20,3 +21,7 @@ export type TReorderEnvDTO = {
   id: string;
   pos: number;
 } & TProjectPermission;
+
+export type TGetEnvDTO = {
+  id: string;
+} & Omit;
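The three environment mutations above all share one concurrency shape: try to take the Redis-backed lock, fall back to waiting on a readiness flag when another writer holds it, do the work, then publish the flag with a short TTL before releasing. A condensed sketch of that shape; the TKeyStore type mirrors the keyStore calls visible in these hunks, while withEnvironmentLock is a hypothetical helper for illustration only:

type TLock = { release: () => Promise<void> };

type TKeyStore = {
  acquireLock: (keys: string[], timeoutMs: number) => Promise<TLock>;
  waitTillReady: (opts: {
    key: string;
    keyCheckCb: (val: string | null) => boolean;
    waitingCb?: () => void;
    delay?: number;
  }) => Promise<void>;
  setItemWithExpiry: (key: string, expiryInSeconds: number, value: string) => Promise<unknown>;
};

const withEnvironmentLock = async <T>(
  keyStore: TKeyStore,
  lockKey: string,
  readyKey: string,
  fn: () => Promise<T>
): Promise<T> => {
  // try to become the writer; on contention, wait for the current writer
  // to publish the "ready" flag instead of failing outright
  const lock = await keyStore.acquireLock([lockKey], 5000).catch(() => null);
  try {
    if (!lock) {
      await keyStore.waitTillReady({ key: readyKey, keyCheckCb: (val) => val === "true", delay: 500 });
    }
    const result = await fn();
    // signal waiters that the mutation completed; the flag expires after 10 seconds
    await keyStore.setItemWithExpiry(readyKey, 10, "true");
    return result;
  } finally {
    await lock?.release();
  }
};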
diff --git a/backend/src/services/project-key/project-key-dal.ts b/backend/src/services/project-key/project-key-dal.ts
index d1b4053d0c..ea4ed813c1 100644
--- a/backend/src/services/project-key/project-key-dal.ts
+++ b/backend/src/services/project-key/project-key-dal.ts
@@ -16,7 +16,7 @@ export const projectKeyDALFactory = (db: TDbClient) => {
     tx?: Knex
   ): Promise<(TProjectKeys & { sender: { publicKey: string } }) | undefined> => {
     try {
-      const projectKey = await (tx || db)(TableName.ProjectKeys)
+      const projectKey = await (tx || db.replicaNode())(TableName.ProjectKeys)
         .join(TableName.Users, `${TableName.ProjectKeys}.senderId`, `${TableName.Users}.id`)
         .join(TableName.UserEncryptionKey, `${TableName.UserEncryptionKey}.userId`, `${TableName.Users}.id`)
         .where({ projectId, receiverId: userId })
@@ -34,7 +34,7 @@ export const projectKeyDALFactory = (db: TDbClient) => {
   const findAllProjectUserPubKeys = async (projectId: string, tx?: Knex) => {
     try {
-      const pubKeys = await (tx || db)(TableName.ProjectMembership)
+      const pubKeys = await (tx || db.replicaNode())(TableName.ProjectMembership)
         .where({ projectId })
         .join(TableName.Users, `${TableName.ProjectMembership}.userId`, `${TableName.Users}.id`)
         .join(TableName.UserEncryptionKey, `${TableName.Users}.id`, `${TableName.UserEncryptionKey}.userId`)

diff --git a/backend/src/services/project-membership/project-membership-dal.ts b/backend/src/services/project-membership/project-membership-dal.ts
index 590c26ecce..bfd0c6f850 100644
--- a/backend/src/services/project-membership/project-membership-dal.ts
+++ b/backend/src/services/project-membership/project-membership-dal.ts
@@ -11,10 +11,15 @@ export const projectMembershipDALFactory = (db: TDbClient) => {
   const projectMemberOrm = ormify(db, TableName.ProjectMembership);
   // special query
-  const findAllProjectMembers = async (projectId: string, filter: { usernames?: string[]; username?: string } = {}) => {
+  const findAllProjectMembers = async (
+    projectId: string,
+    filter: { usernames?: string[]; username?: string; id?: string } = {}
+  ) => {
     try {
-      const docs = await db(TableName.ProjectMembership)
+      const docs = await db
+        .replicaNode()(TableName.ProjectMembership)
         .where({ [`${TableName.ProjectMembership}.projectId` as "projectId"]: projectId })
+        .join(TableName.Project, `${TableName.ProjectMembership}.projectId`, `${TableName.Project}.id`)
         .join(TableName.Users, `${TableName.ProjectMembership}.userId`, `${TableName.Users}.id`)
         .where((qb) => {
           if (filter.usernames) {
@@ -23,6 +28,9 @@ export const projectMembershipDALFactory = (db: TDbClient) => {
           if (filter.username) {
             void qb.where("username", filter.username);
           }
+          if (filter.id) {
+            void qb.where(`${TableName.ProjectMembership}.id`, filter.id);
+          }
         })
         .join(
           TableName.UserEncryptionKey,
@@ -41,6 +49,7 @@ export const projectMembershipDALFactory = (db: TDbClient) => {
         )
         .select(
           db.ref("id").withSchema(TableName.ProjectMembership),
+          db.ref("createdAt").withSchema(TableName.ProjectMembership),
           db.ref("isGhost").withSchema(TableName.Users),
           db.ref("username").withSchema(TableName.Users),
           db.ref("email").withSchema(TableName.Users),
@@ -57,17 +66,34 @@ export const projectMembershipDALFactory = (db: TDbClient) => {
           db.ref("isTemporary").withSchema(TableName.ProjectUserMembershipRole),
           db.ref("temporaryRange").withSchema(TableName.ProjectUserMembershipRole),
           db.ref("temporaryAccessStartTime").withSchema(TableName.ProjectUserMembershipRole),
-          db.ref("temporaryAccessEndTime").withSchema(TableName.ProjectUserMembershipRole)
+          db.ref("temporaryAccessEndTime").withSchema(TableName.ProjectUserMembershipRole),
+          db.ref("name").as("projectName").withSchema(TableName.Project)
         )
         .where({ isGhost: false });
       const members = sqlNestRelationships({
         data: docs,
-        parentMapper: ({ email, firstName, username, lastName, publicKey, isGhost, id, userId }) => ({
+        parentMapper: ({
+          email,
+          firstName,
+          username,
+          lastName,
+          publicKey,
+          isGhost,
+          id,
+          userId,
+          projectName,
+          createdAt
+        }) => ({
           id,
           userId,
           projectId,
-          user: { email, username, firstName, lastName, id: userId, publicKey, isGhost }
+          user: { email, username, firstName, lastName, id: userId, publicKey, isGhost },
+          project: {
+            id: projectId,
+            name: projectName
+          },
+          createdAt
         }),
         key: "id",
         childrenMapper: [
@@ -108,7 +134,7 @@ export const projectMembershipDALFactory = (db: TDbClient) => {
   const findProjectGhostUser = async (projectId: string, tx?: Knex) => {
     try {
-      const ghostUser = await (tx || db)(TableName.ProjectMembership)
+      const ghostUser = await (tx || db.replicaNode())(TableName.ProjectMembership)
         .where({ projectId })
         .join(TableName.Users, `${TableName.ProjectMembership}.userId`, `${TableName.Users}.id`)
         .select(selectAllTableCols(TableName.Users))
@@ -123,7
+149,8 @@ export const projectMembershipDALFactory = (db: TDbClient) => { const findMembershipsByUsername = async (projectId: string, usernames: string[]) => { try { - const members = await db(TableName.ProjectMembership) + const members = await db + .replicaNode()(TableName.ProjectMembership) .where({ projectId }) .join(TableName.Users, `${TableName.ProjectMembership}.userId`, `${TableName.Users}.id`) .join( @@ -149,13 +176,95 @@ export const projectMembershipDALFactory = (db: TDbClient) => { const findProjectMembershipsByUserId = async (orgId: string, userId: string) => { try { - const memberships = await db(TableName.ProjectMembership) - .where({ userId }) + const docs = await db + .replicaNode()(TableName.ProjectMembership) .join(TableName.Project, `${TableName.ProjectMembership}.projectId`, `${TableName.Project}.id`) - .where({ [`${TableName.Project}.orgId` as "orgId"]: orgId }) - .select(selectAllTableCols(TableName.ProjectMembership)); + .join(TableName.Users, `${TableName.ProjectMembership}.userId`, `${TableName.Users}.id`) + .where(`${TableName.Users}.id`, userId) + .where(`${TableName.Project}.orgId`, orgId) + .join( + TableName.UserEncryptionKey, + `${TableName.UserEncryptionKey}.userId`, + `${TableName.Users}.id` + ) + .join( + TableName.ProjectUserMembershipRole, + `${TableName.ProjectUserMembershipRole}.projectMembershipId`, + `${TableName.ProjectMembership}.id` + ) + .leftJoin( + TableName.ProjectRoles, + `${TableName.ProjectUserMembershipRole}.customRoleId`, + `${TableName.ProjectRoles}.id` + ) + .select( + db.ref("id").withSchema(TableName.ProjectMembership), + db.ref("isGhost").withSchema(TableName.Users), + db.ref("username").withSchema(TableName.Users), + db.ref("email").withSchema(TableName.Users), + db.ref("publicKey").withSchema(TableName.UserEncryptionKey), + db.ref("firstName").withSchema(TableName.Users), + db.ref("lastName").withSchema(TableName.Users), + db.ref("id").withSchema(TableName.Users).as("userId"), + db.ref("role").withSchema(TableName.ProjectUserMembershipRole), + db.ref("id").withSchema(TableName.ProjectUserMembershipRole).as("membershipRoleId"), + db.ref("customRoleId").withSchema(TableName.ProjectUserMembershipRole), + db.ref("name").withSchema(TableName.ProjectRoles).as("customRoleName"), + db.ref("slug").withSchema(TableName.ProjectRoles).as("customRoleSlug"), + db.ref("temporaryMode").withSchema(TableName.ProjectUserMembershipRole), + db.ref("isTemporary").withSchema(TableName.ProjectUserMembershipRole), + db.ref("temporaryRange").withSchema(TableName.ProjectUserMembershipRole), + db.ref("temporaryAccessStartTime").withSchema(TableName.ProjectUserMembershipRole), + db.ref("temporaryAccessEndTime").withSchema(TableName.ProjectUserMembershipRole), + db.ref("name").as("projectName").withSchema(TableName.Project), + db.ref("id").as("projectId").withSchema(TableName.Project) + ) + .where({ isGhost: false }); - return memberships; + const members = sqlNestRelationships({ + data: docs, + parentMapper: ({ email, firstName, username, lastName, publicKey, isGhost, id, projectId, projectName }) => ({ + id, + userId, + projectId, + user: { email, username, firstName, lastName, id: userId, publicKey, isGhost }, + project: { + id: projectId, + name: projectName + } + }), + key: "id", + childrenMapper: [ + { + label: "roles" as const, + key: "membershipRoleId", + mapper: ({ + role, + customRoleId, + customRoleName, + customRoleSlug, + membershipRoleId, + temporaryRange, + temporaryMode, + temporaryAccessEndTime, + temporaryAccessStartTime, + isTemporary + }) 
=> ({ + id: membershipRoleId, + role, + customRoleId, + customRoleName, + customRoleSlug, + temporaryRange, + temporaryMode, + temporaryAccessEndTime, + temporaryAccessStartTime, + isTemporary + }) + } + ] + }); + return members; } catch (error) { throw new DatabaseError({ error, name: "Find project memberships by user id" }); } diff --git a/backend/src/services/project-membership/project-membership-service.ts b/backend/src/services/project-membership/project-membership-service.ts index a6682465f0..74b830c6df 100644 --- a/backend/src/services/project-membership/project-membership-service.ts +++ b/backend/src/services/project-membership/project-membership-service.ts @@ -2,26 +2,21 @@ import { ForbiddenError } from "@casl/ability"; import ms from "ms"; -import { - ProjectMembershipRole, - ProjectVersion, - SecretKeyEncoding, - TableName, - TProjectMemberships -} from "@app/db/schemas"; +import { ProjectMembershipRole, ProjectVersion, TableName } from "@app/db/schemas"; import { TLicenseServiceFactory } from "@app/ee/services/license/license-service"; import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service"; import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission"; +import { TProjectUserAdditionalPrivilegeDALFactory } from "@app/ee/services/project-user-additional-privilege/project-user-additional-privilege-dal"; +import { isAtLeastAsPrivileged } from "@app/lib/casl"; import { getConfig } from "@app/lib/config/env"; -import { infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption"; -import { BadRequestError } from "@app/lib/errors"; +import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors"; import { groupBy } from "@app/lib/fn"; import { TUserGroupMembershipDALFactory } from "../../ee/services/group/user-group-membership-dal"; import { ActorType } from "../auth/auth-type"; +import { TGroupProjectDALFactory } from "../group-project/group-project-dal"; import { TOrgDALFactory } from "../org/org-dal"; import { TProjectDALFactory } from "../project/project-dal"; -import { assignWorkspaceKeysToMembers } from "../project/project-fns"; import { TProjectBotDALFactory } from "../project-bot/project-bot-dal"; import { TProjectKeyDALFactory } from "../project-key/project-key-dal"; import { TProjectRoleDALFactory } from "../project-role/project-role-dal"; @@ -31,28 +26,31 @@ import { TProjectMembershipDALFactory } from "./project-membership-dal"; import { ProjectUserMembershipTemporaryMode, TAddUsersToWorkspaceDTO, - TAddUsersToWorkspaceNonE2EEDTO, TDeleteProjectMembershipOldDTO, TDeleteProjectMembershipsDTO, + TGetProjectMembershipByIdDTO, TGetProjectMembershipByUsernameDTO, TGetProjectMembershipDTO, + TLeaveProjectDTO, TUpdateProjectMembershipDTO } from "./project-membership-types"; import { TProjectUserMembershipRoleDALFactory } from "./project-user-membership-role-dal"; type TProjectMembershipServiceFactoryDep = { - permissionService: Pick; + permissionService: Pick; smtpService: TSmtpService; projectBotDAL: TProjectBotDALFactory; projectMembershipDAL: TProjectMembershipDALFactory; projectUserMembershipRoleDAL: Pick; userDAL: Pick; userGroupMembershipDAL: TUserGroupMembershipDALFactory; - projectRoleDAL: Pick; + projectRoleDAL: Pick; orgDAL: Pick; - projectDAL: Pick; + projectDAL: Pick; projectKeyDAL: Pick; licenseService: Pick; + projectUserAdditionalPrivilegeDAL: Pick; + groupProjectDAL: TGroupProjectDALFactory; }; export type TProjectMembershipServiceFactory = ReturnType; 
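Both membership queries above join memberships against users, roles, and the project, then fold the flat join rows back into one object per membership with a nested roles array via sqlNestRelationships. A simplified stand-in for that fold, assuming rows keyed by membership id and role id (nestRows is illustrative, not the actual @app/lib/knex helper):

type NestOpts<Row, Parent, Child> = {
  key: keyof Row; // parent identity column, e.g. the membership id
  childKey: keyof Row; // child identity column, e.g. the membership role id
  parentMapper: (row: Row) => Parent;
  childMapper: (row: Row) => Child;
};

const nestRows = <Row, Parent, Child>(
  rows: Row[],
  opts: NestOpts<Row, Parent, Child>
): (Parent & { roles: Child[] })[] => {
  const parents = new Map<unknown, Parent & { roles: Child[] }>();
  const seenChildren = new Set<string>();

  for (const row of rows) {
    const id = row[opts.key];
    if (!parents.has(id)) {
      parents.set(id, { ...opts.parentMapper(row), roles: [] });
    }
    // the join yields one row per (membership, role) pair; skip duplicate role rows
    const childId = `${String(id)}:${String(row[opts.childKey])}`;
    if (row[opts.childKey] != null && !seenChildren.has(childId)) {
      seenChildren.add(childId);
      parents.get(id)!.roles.push(opts.childMapper(row));
    }
  }
  return [...parents.values()];
};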
@@ -63,10 +61,11 @@ export const projectMembershipServiceFactory = ({
   projectUserMembershipRoleDAL,
   smtpService,
   projectRoleDAL,
-  projectBotDAL,
   orgDAL,
+  projectUserAdditionalPrivilegeDAL,
   userDAL,
   userGroupMembershipDAL,
+  groupProjectDAL,
   projectDAL,
   projectKeyDAL,
   licenseService
@@ -76,6 +75,7 @@ export const projectMembershipServiceFactory = ({
     actor,
     actorOrgId,
     actorAuthMethod,
+    includeGroupMembers,
     projectId
   }: TGetProjectMembershipDTO) => {
     const { permission } = await permissionService.getProjectPermission(
@@ -87,7 +87,30 @@ export const projectMembershipServiceFactory = ({
     );
     ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Member);
-    return projectMembershipDAL.findAllProjectMembers(projectId);
+    const projectMembers = await projectMembershipDAL.findAllProjectMembers(projectId);
+
+    if (includeGroupMembers) {
+      const groupMembers = await groupProjectDAL.findAllProjectGroupMembers(projectId);
+      const allMembers = [
+        ...projectMembers.map((m) => ({ ...m, isGroupMember: false })),
+        ...groupMembers.map((m) => ({ ...m, isGroupMember: true }))
+      ];
+
+      // Ensure the userId is unique
+      const uniqueMembers: typeof allMembers = [];
+      const addedUserIds = new Set();
+      allMembers.forEach((member) => {
+        if (!addedUserIds.has(member.user.id)) {
+          uniqueMembers.push(member);
+          addedUserIds.add(member.user.id);
+        }
+      });
+
+      return uniqueMembers;
+    }
+
+    return projectMembers.map((m) => ({ ...m, isGroupMember: false }));
  };

  const getProjectMembershipByUsername = async ({
@@ -108,7 +131,29 @@ export const projectMembershipServiceFactory = ({
    ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Member);
    const [membership] = await projectMembershipDAL.findAllProjectMembers(projectId, { username });
-    if (!membership) throw new BadRequestError({ message: `Project membership not found for user ${username}` });
+    if (!membership) throw new NotFoundError({ message: `Project membership not found for user '${username}'` });
    return membership;
  };
+
+  const getProjectMembershipById = async ({
+    actorId,
+    actor,
+    actorOrgId,
+    actorAuthMethod,
+    projectId,
+    id
+  }: TGetProjectMembershipByIdDTO) => {
+    const { permission } = await permissionService.getProjectPermission(
+      actor,
+      actorId,
+      projectId,
+      actorAuthMethod,
+      actorOrgId
+    );
+    ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Member);
+
+    const [membership] = await projectMembershipDAL.findAllProjectMembers(projectId, { id });
+    if (!membership) throw new NotFoundError({ message: `Project membership with ID '${id}' not found` });
+    return membership;
+  };
@@ -122,7 +167,7 @@ export const projectMembershipServiceFactory = ({
    sendEmails = true
  }: TAddUsersToWorkspaceDTO) => {
    const project = await projectDAL.findById(projectId);
-    if (!project) throw new BadRequestError({ message: "Project not found" });
+    if (!project) throw new NotFoundError({ message: `Project with ID '${projectId}' not found` });
    const { permission } = await permissionService.getProjectPermission(
      actor,
@@ -195,146 +240,6 @@ export const projectMembershipServiceFactory = ({
    return orgMembers;
  };
-  const addUsersToProjectNonE2EE = async ({
-    projectId,
-    actorId,
-    actorAuthMethod,
-    actor,
-    actorOrgId,
-    emails,
-    usernames,
-    sendEmails = true
-  }: TAddUsersToWorkspaceNonE2EEDTO) => {
-    const project = await projectDAL.findById(projectId);
-    if (!project) throw new BadRequestError({
message: "Project not found" }); - - if (project.version === ProjectVersion.V1) { - throw new BadRequestError({ message: "Please upgrade your project on your dashboard" }); - } - - const { permission } = await permissionService.getProjectPermission( - actor, - actorId, - projectId, - actorAuthMethod, - actorOrgId - ); - ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Create, ProjectPermissionSub.Member); - - const usernamesAndEmails = [...emails, ...usernames]; - - const orgMembers = await orgDAL.findOrgMembersByUsername(project.orgId, [ - ...new Set(usernamesAndEmails.map((element) => element.toLowerCase())) - ]); - - if (orgMembers.length !== usernamesAndEmails.length) - throw new BadRequestError({ message: "Some users are not part of org" }); - - if (!orgMembers.length) return []; - - const existingMembers = await projectMembershipDAL.find({ - projectId, - $in: { userId: orgMembers.map(({ user }) => user.id).filter(Boolean) } - }); - if (existingMembers.length) throw new BadRequestError({ message: "Some users are already part of project" }); - - const ghostUser = await projectDAL.findProjectGhostUser(projectId); - - if (!ghostUser) { - throw new BadRequestError({ - message: "Failed to find sudo user" - }); - } - - const ghostUserLatestKey = await projectKeyDAL.findLatestProjectKey(ghostUser.id, projectId); - - if (!ghostUserLatestKey) { - throw new BadRequestError({ - message: "Failed to find sudo user latest key" - }); - } - - const bot = await projectBotDAL.findOne({ projectId }); - - if (!bot) { - throw new BadRequestError({ - message: "Failed to find bot" - }); - } - - const botPrivateKey = infisicalSymmetricDecrypt({ - keyEncoding: bot.keyEncoding as SecretKeyEncoding, - iv: bot.iv, - tag: bot.tag, - ciphertext: bot.encryptedPrivateKey - }); - - const newWsMembers = assignWorkspaceKeysToMembers({ - decryptKey: ghostUserLatestKey, - userPrivateKey: botPrivateKey, - members: orgMembers.map((membership) => ({ - orgMembershipId: membership.id, - projectMembershipRole: ProjectMembershipRole.Member, - userPublicKey: membership.user.publicKey - })) - }); - - const members: TProjectMemberships[] = []; - - const userIdsToExcludeForProjectKeyAddition = new Set( - await userGroupMembershipDAL.findUserGroupMembershipsInProject(usernamesAndEmails, projectId) - ); - - await projectMembershipDAL.transaction(async (tx) => { - const projectMemberships = await projectMembershipDAL.insertMany( - orgMembers.map(({ user }) => ({ - projectId, - userId: user.id - })), - tx - ); - await projectUserMembershipRoleDAL.insertMany( - projectMemberships.map(({ id }) => ({ projectMembershipId: id, role: ProjectMembershipRole.Member })), - tx - ); - - members.push(...projectMemberships); - - const encKeyGroupByOrgMembId = groupBy(newWsMembers, (i) => i.orgMembershipId); - await projectKeyDAL.insertMany( - orgMembers - .filter(({ user }) => !userIdsToExcludeForProjectKeyAddition.has(user.id)) - .map(({ user, id }) => ({ - encryptedKey: encKeyGroupByOrgMembId[id][0].workspaceEncryptedKey, - nonce: encKeyGroupByOrgMembId[id][0].workspaceEncryptedNonce, - senderId: ghostUser.id, - receiverId: user.id, - projectId - })), - tx - ); - }); - - if (sendEmails) { - const recipients = orgMembers.filter((i) => i.user.email).map((i) => i.user.email as string); - - const appCfg = getConfig(); - - if (recipients.length) { - await smtpService.sendMail({ - template: SmtpTemplates.WorkspaceInvite, - subjectLine: "Infisical project invitation", - recipients: orgMembers.filter((i) => i.user.email).map((i) => 
i.user.email as string),
-          substitutions: {
-            workspaceName: project.name,
-            callback_url: `${appCfg.SITE_URL}/login`
-          }
-        });
-      }
-    }
-    return members;
-  };

  const updateProjectMembership = async ({
    actorId,
    actor,
@@ -355,10 +260,22 @@ export const projectMembershipServiceFactory = ({
    const membershipUser = await userDAL.findUserByProjectMembershipId(membershipId);
    if (membershipUser?.isGhost || membershipUser?.projectId !== projectId) {
-      throw new BadRequestError({
-        message: "Unauthorized member update",
-        name: "Update project membership"
-      });
+      throw new ForbiddenRequestError({ message: "Forbidden member update" });
+    }
+
+    for await (const { role: requestedRoleChange } of roles) {
+      const { permission: rolePermission } = await permissionService.getProjectPermissionByRole(
+        requestedRoleChange,
+        projectId
+      );
+
+      const hasRequiredPrivileges = isAtLeastAsPrivileged(permission, rolePermission);
+
+      if (!hasRequiredPrivileges) {
+        throw new ForbiddenRequestError({
+          message: `Failed to change to a more privileged role ${requestedRoleChange}`
+        });
+      }
    }

    // validate custom roles input
@@ -380,7 +297,9 @@ export const projectMembershipServiceFactory = ({
          $in: { slug: customInputRoles.map(({ role }) => role) }
        })
      : [];
-    if (customRoles.length !== customInputRoles.length) throw new BadRequestError({ message: "Custom role not found" });
+    if (customRoles.length !== customInputRoles.length) {
+      throw new NotFoundError({ message: "One or more custom roles not found" });
+    }
    const customRolesGroupBySlug = groupBy(customRoles, ({ slug }) => slug);
    const sanitizedProjectMembershipRoles = roles.map((inputRole) => {
@@ -436,9 +355,9 @@ export const projectMembershipServiceFactory = ({
    const member = await userDAL.findUserByProjectMembershipId(membershipId);
    if (member?.isGhost) {
-      throw new BadRequestError({
-        message: "Unauthorized member delete",
-        name: "Delete project membership"
+      throw new ForbiddenRequestError({
+        message: "Forbidden membership deletion",
+        name: "DeleteProjectMembership"
      });
    }
@@ -471,9 +390,8 @@ export const projectMembershipServiceFactory = ({
    const project = await projectDAL.findById(projectId);
    if (!project) {
-      throw new BadRequestError({
-        message: "Project not found",
-        name: "Delete project membership"
+      throw new NotFoundError({
+        message: `Project with ID '${projectId}' not found`
      });
    }
@@ -502,6 +420,16 @@ export const projectMembershipServiceFactory = ({
    );
    const memberships = await projectMembershipDAL.transaction(async (tx) => {
+      await projectUserAdditionalPrivilegeDAL.delete(
+        {
+          projectId,
+          $in: {
+            userId: projectMembers.map((membership) => membership.user.id)
+          }
+        },
+        tx
+      );
+
      const deletedMemberships = await projectMembershipDAL.delete(
        {
          projectId,
@@ -531,13 +459,74 @@ export const projectMembershipServiceFactory = ({
    return memberships;
  };

+  const leaveProject = async ({ projectId, actorId, actor }: TLeaveProjectDTO) => {
+    if (actor !== ActorType.USER) {
+      throw new BadRequestError({ message: "Only users can leave projects" });
+    }
+
+    const project = await projectDAL.findById(projectId);
+    if (!project) throw new NotFoundError({ message: `Project with ID '${projectId}' not found` });
+
+    if (project.version === ProjectVersion.V1) {
+      throw new BadRequestError({
+        message: "Please ask your project administrator to upgrade the project before leaving."
+ }); + } + + const projectMembers = await projectMembershipDAL.findAllProjectMembers(projectId); + + if (!projectMembers?.length) { + throw new NotFoundError({ message: `Project members not found for project with ID '${projectId}'` }); + } + + if (projectMembers.length < 2) { + throw new BadRequestError({ message: "You cannot leave the project as you are the only member" }); + } + + const adminMembers = projectMembers.filter( + (member) => member.roles.map((r) => r.role).includes("admin") && member.userId !== actorId + ); + if (!adminMembers.length) { + throw new BadRequestError({ + message: "You cannot leave the project as you are the only admin. Promote another user to admin before leaving." + }); + } + + const deletedMembership = await projectMembershipDAL.transaction(async (tx) => { + await projectUserAdditionalPrivilegeDAL.delete( + { + projectId: project.id, + userId: actorId + }, + tx + ); + const membership = ( + await projectMembershipDAL.delete( + { + projectId: project.id, + userId: actorId + }, + tx + ) + )?.[0]; + return membership; + }); + + if (!deletedMembership) { + throw new BadRequestError({ message: "Failed to leave project" }); + } + + return deletedMembership; + }; + return { getProjectMemberships, getProjectMembershipByUsername, updateProjectMembership, - addUsersToProjectNonE2EE, deleteProjectMemberships, deleteProjectMembership, // TODO: Remove this - addUsersToProject + addUsersToProject, + leaveProject, + getProjectMembershipById }; }; diff --git a/backend/src/services/project-membership/project-membership-types.ts b/backend/src/services/project-membership/project-membership-types.ts index 1eab752657..68819f5ae6 100644 --- a/backend/src/services/project-membership/project-membership-types.ts +++ b/backend/src/services/project-membership/project-membership-types.ts @@ -1,6 +1,7 @@ import { TProjectPermission } from "@app/lib/types"; -export type TGetProjectMembershipDTO = TProjectPermission; +export type TGetProjectMembershipDTO = { includeGroupMembers?: boolean } & TProjectPermission; +export type TLeaveProjectDTO = Omit; export enum ProjectUserMembershipTemporaryMode { Relative = "relative" } @@ -13,6 +14,10 @@ export type TGetProjectMembershipByUsernameDTO = { username: string; } & TProjectPermission; +export type TGetProjectMembershipByIdDTO = { + id: string; +} & TProjectPermission; + export type TUpdateProjectMembershipDTO = { membershipId: string; roles: ( @@ -52,4 +57,5 @@ export type TAddUsersToWorkspaceNonE2EEDTO = { sendEmails?: boolean; emails: string[]; usernames: string[]; + roleSlugs?: string[]; } & TProjectPermission; diff --git a/backend/src/services/project-role/project-role-fns.ts b/backend/src/services/project-role/project-role-fns.ts new file mode 100644 index 0000000000..c465715a77 --- /dev/null +++ b/backend/src/services/project-role/project-role-fns.ts @@ -0,0 +1,52 @@ +import { ProjectMembershipRole } from "@app/db/schemas"; +import { + projectAdminPermissions, + projectMemberPermissions, + projectNoAccessPermissions, + projectViewerPermission +} from "@app/ee/services/permission/project-permission"; + +export const getPredefinedRoles = (projectId: string, roleFilter?: ProjectMembershipRole) => { + return [ + { + id: "b11b49a9-09a9-4443-916a-4246f9ff2c69", // dummy userid + projectId, + name: "Admin", + slug: ProjectMembershipRole.Admin, + permissions: projectAdminPermissions, + description: "Full administrative access over a project", + createdAt: new Date(), + updatedAt: new Date() + }, + { + id: "b11b49a9-09a9-4443-916a-4246f9ff2c70", 
// dummy user for zod validation in response + projectId, + name: "Developer", + slug: ProjectMembershipRole.Member, + permissions: projectMemberPermissions, + description: "Limited read/write role in a project", + createdAt: new Date(), + updatedAt: new Date() + }, + { + id: "b11b49a9-09a9-4443-916a-4246f9ff2c71", // dummy user for zod validation in response + projectId, + name: "Viewer", + slug: ProjectMembershipRole.Viewer, + permissions: projectViewerPermission, + description: "Only read role in a project", + createdAt: new Date(), + updatedAt: new Date() + }, + { + id: "b11b49a9-09a9-4443-916a-4246f9ff2c72", // dummy user for zod validation in response + projectId, + name: "No Access", + slug: ProjectMembershipRole.NoAccess, + permissions: projectNoAccessPermissions, + description: "No access to any resources in the project", + createdAt: new Date(), + updatedAt: new Date() + } + ].filter(({ slug }) => !roleFilter || roleFilter.includes(slug)); +}; diff --git a/backend/src/services/project-role/project-role-service.ts b/backend/src/services/project-role/project-role-service.ts index ffd446fadd..55564bc17e 100644 --- a/backend/src/services/project-role/project-role-service.ts +++ b/backend/src/services/project-role/project-role-service.ts @@ -1,26 +1,30 @@ import { ForbiddenError, MongoAbility, RawRuleOf } from "@casl/ability"; import { PackRule, packRules, unpackRules } from "@casl/ability/extra"; -import { ProjectMembershipRole } from "@app/db/schemas"; -import { UnpackedPermissionSchema } from "@app/ee/services/identity-project-additional-privilege/identity-project-additional-privilege-service"; +import { ProjectMembershipRole, TableName } from "@app/db/schemas"; import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service"; import { - projectAdminPermissions, - projectMemberPermissions, - projectNoAccessPermissions, ProjectPermissionActions, ProjectPermissionSet, - ProjectPermissionSub, - projectViewerPermission + ProjectPermissionSub } from "@app/ee/services/permission/project-permission"; -import { BadRequestError } from "@app/lib/errors"; +import { BadRequestError, NotFoundError } from "@app/lib/errors"; +import { UnpackedPermissionSchema } from "@app/server/routes/santizedSchemas/permission"; import { ActorAuthMethod } from "../auth/auth-type"; import { TIdentityProjectMembershipRoleDALFactory } from "../identity-project/identity-project-membership-role-dal"; import { TProjectDALFactory } from "../project/project-dal"; import { TProjectUserMembershipRoleDALFactory } from "../project-membership/project-user-membership-role-dal"; import { TProjectRoleDALFactory } from "./project-role-dal"; -import { TCreateRoleDTO, TDeleteRoleDTO, TGetRoleBySlugDTO, TListRolesDTO, TUpdateRoleDTO } from "./project-role-types"; +import { getPredefinedRoles } from "./project-role-fns"; +import { + ProjectRoleServiceIdentifierType, + TCreateRoleDTO, + TDeleteRoleDTO, + TGetRoleDetailsDTO, + TListRolesDTO, + TUpdateRoleDTO +} from "./project-role-types"; type TProjectRoleServiceFactoryDep = { projectRoleDAL: TProjectRoleDALFactory; @@ -37,51 +41,6 @@ const unpackPermissions = (permissions: unknown) => unpackRules((permissions || []) as PackRule>>[]) ); -const getPredefinedRoles = (projectId: string, roleFilter?: ProjectMembershipRole) => { - return [ - { - id: "b11b49a9-09a9-4443-916a-4246f9ff2c69", // dummy userid - projectId, - name: "Admin", - slug: ProjectMembershipRole.Admin, - permissions: projectAdminPermissions, - description: "Full administrative access over a 
project", - createdAt: new Date(), - updatedAt: new Date() - }, - { - id: "b11b49a9-09a9-4443-916a-4246f9ff2c70", // dummy user for zod validation in response - projectId, - name: "Developer", - slug: ProjectMembershipRole.Member, - permissions: projectMemberPermissions, - description: "Limited read/write role in a project", - createdAt: new Date(), - updatedAt: new Date() - }, - { - id: "b11b49a9-09a9-4443-916a-4246f9ff2c71", // dummy user for zod validation in response - projectId, - name: "Viewer", - slug: ProjectMembershipRole.Viewer, - permissions: projectViewerPermission, - description: "Only read role in a project", - createdAt: new Date(), - updatedAt: new Date() - }, - { - id: "b11b49a9-09a9-4443-916a-4246f9ff2c72", // dummy user for zod validation in response - projectId, - name: "No Access", - slug: ProjectMembershipRole.NoAccess, - permissions: projectNoAccessPermissions, - description: "No access to any resources in the project", - createdAt: new Date(), - updatedAt: new Date() - } - ].filter(({ slug }) => !roleFilter || roleFilter.includes(slug)); -}; - export const projectRoleServiceFactory = ({ projectRoleDAL, permissionService, @@ -89,10 +48,15 @@ export const projectRoleServiceFactory = ({ projectUserMembershipRoleDAL, projectDAL }: TProjectRoleServiceFactoryDep) => { - const createRole = async ({ projectSlug, data, actor, actorId, actorAuthMethod, actorOrgId }: TCreateRoleDTO) => { - const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId); - if (!project) throw new BadRequestError({ message: "Project not found" }); - const projectId = project.id; + const createRole = async ({ data, actor, actorId, actorAuthMethod, actorOrgId, filter }: TCreateRoleDTO) => { + let projectId = ""; + if (filter.type === ProjectRoleServiceIdentifierType.SLUG) { + const project = await projectDAL.findProjectBySlug(filter.projectSlug, actorOrgId); + if (!project) throw new NotFoundError({ message: "Project not found" }); + projectId = project.id; + } else { + projectId = filter.projectId; + } const { permission } = await permissionService.getProjectPermission( actor, @@ -103,7 +67,10 @@ export const projectRoleServiceFactory = ({ ); ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Create, ProjectPermissionSub.Role); const existingRole = await projectRoleDAL.findOne({ slug: data.slug, projectId }); - if (existingRole) throw new BadRequestError({ name: "Create Role", message: "Duplicate role" }); + if (existingRole) { + throw new BadRequestError({ name: "Create Role", message: "Project role with same slug already exists" }); + } + const role = await projectRoleDAL.create({ ...data, projectId @@ -114,14 +81,19 @@ export const projectRoleServiceFactory = ({ const getRoleBySlug = async ({ actor, actorId, - projectSlug, actorAuthMethod, actorOrgId, - roleSlug - }: TGetRoleBySlugDTO) => { - const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId); - if (!project) throw new BadRequestError({ message: "Project not found" }); - const projectId = project.id; + roleSlug, + filter + }: TGetRoleDetailsDTO) => { + let projectId = ""; + if (filter.type === ProjectRoleServiceIdentifierType.SLUG) { + const project = await projectDAL.findProjectBySlug(filter.projectSlug, actorOrgId); + if (!project) throw new NotFoundError({ message: "Project not found" }); + projectId = project.id; + } else { + projectId = filter.projectId; + } const { permission } = await permissionService.getProjectPermission( actor, @@ -137,50 +109,45 @@ export const 
projectRoleServiceFactory = ({
    }
    const customRole = await projectRoleDAL.findOne({ slug: roleSlug, projectId });
-    if (!customRole) throw new BadRequestError({ message: "Role not found" });
+    if (!customRole) throw new NotFoundError({ message: `Project role with slug '${roleSlug}' not found` });
    return { ...customRole, permissions: unpackPermissions(customRole.permissions) };
  };

-  const updateRole = async ({
-    roleId,
-    projectSlug,
-    actorOrgId,
-    actorAuthMethod,
-    actorId,
-    actor,
-    data
-  }: TUpdateRoleDTO) => {
-    const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId);
-    if (!project) throw new BadRequestError({ message: "Project not found" });
-    const projectId = project.id;
+  const updateRole = async ({ roleId, actorOrgId, actorAuthMethod, actorId, actor, data }: TUpdateRoleDTO) => {
+    const projectRole = await projectRoleDAL.findById(roleId);
+    if (!projectRole) throw new NotFoundError({ message: "Project role not found", name: "Update role" });
    const { permission } = await permissionService.getProjectPermission(
      actor,
      actorId,
-      projectId,
+      projectRole.projectId,
      actorAuthMethod,
      actorOrgId
    );
    ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Edit, ProjectPermissionSub.Role);
+
    if (data?.slug) {
-      const existingRole = await projectRoleDAL.findOne({ slug: data.slug, projectId });
+      const existingRole = await projectRoleDAL.findOne({ slug: data.slug, projectId: projectRole.projectId });
      if (existingRole && existingRole.id !== roleId)
-        throw new BadRequestError({ name: "Update Role", message: "Duplicate role" });
+        throw new BadRequestError({ name: "Update Role", message: "Project role with the same slug already exists" });
    }
-    const [updatedRole] = await projectRoleDAL.update({ id: roleId, projectId }, data);
-    if (!updatedRole) throw new BadRequestError({ message: "Role not found", name: "Update role" });
+
+    const updatedRole = await projectRoleDAL.updateById(projectRole.id, {
+      ...data,
+      permissions: data.permissions ? 
data.permissions : undefined
+    });
+    if (!updatedRole) throw new NotFoundError({ message: "Project role not found", name: "Update role" });
+
    return { ...updatedRole, permissions: unpackPermissions(updatedRole.permissions) };
  };

-  const deleteRole = async ({ actor, actorId, actorAuthMethod, actorOrgId, projectSlug, roleId }: TDeleteRoleDTO) => {
-    const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId);
-    if (!project) throw new BadRequestError({ message: "Project not found" });
-    const projectId = project.id;
-
+  const deleteRole = async ({ actor, actorId, actorAuthMethod, actorOrgId, roleId }: TDeleteRoleDTO) => {
+    const projectRole = await projectRoleDAL.findById(roleId);
+    if (!projectRole) throw new NotFoundError({ message: "Project role not found", name: "Delete role" });
    const { permission } = await permissionService.getProjectPermission(
      actor,
      actorId,
-      projectId,
+      projectRole.projectId,
      actorAuthMethod,
      actorOrgId
    );
@@ -202,16 +169,21 @@ export const projectRoleServiceFactory = ({
      });
    }

-    const [deletedRole] = await projectRoleDAL.delete({ id: roleId, projectId });
-    if (!deletedRole) throw new BadRequestError({ message: "Role not found", name: "Delete role" });
+    const deletedRole = await projectRoleDAL.deleteById(roleId);
+    if (!deletedRole) throw new NotFoundError({ message: "Project role not found", name: "Delete role" });

    return { ...deletedRole, permissions: unpackPermissions(deletedRole.permissions) };
  };

-  const listRoles = async ({ projectSlug, actorOrgId, actorAuthMethod, actorId, actor }: TListRolesDTO) => {
-    const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId);
-    if (!project) throw new BadRequestError({ message: "Project not found" });
-    const projectId = project.id;
+  const listRoles = async ({ actorOrgId, actorAuthMethod, actorId, actor, filter }: TListRolesDTO) => {
+    let projectId = "";
+    if (filter.type === ProjectRoleServiceIdentifierType.SLUG) {
+      const project = await projectDAL.findProjectBySlug(filter.projectSlug, actorOrgId);
+      if (!project) throw new NotFoundError({ message: "Project not found" });
+      projectId = project.id;
+    } else {
+      projectId = filter.projectId;
+    }

    const { permission } = await permissionService.getProjectPermission(
      actor,
@@ -221,7 +193,10 @@ export const projectRoleServiceFactory = ({
      actorOrgId
    );
    ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Role);
-    const customRoles = await projectRoleDAL.find({ projectId });
+    const customRoles = await projectRoleDAL.find(
+      { projectId },
+      { sort: [[`${TableName.ProjectRoles}.slug` as "slug", "asc"]] }
+    );
    const roles = [...getPredefinedRoles(projectId), ...(customRoles || [])];

    return roles;

diff --git a/backend/src/services/project-role/project-role-types.ts b/backend/src/services/project-role/project-role-types.ts
index 62b627a796..a71c731136 100644
--- a/backend/src/services/project-role/project-role-types.ts
+++ b/backend/src/services/project-role/project-role-types.ts
@@ -1,27 +1,36 @@
 import { TOrgRolesUpdate, TProjectRolesInsert } from "@app/db/schemas";
 import { TProjectPermission } from "@app/lib/types";
+export enum ProjectRoleServiceIdentifierType {
+  ID = "id",
+  SLUG = "slug"
+}
+
 export type TCreateRoleDTO = {
   data: Omit;
-  projectSlug: string;
+  filter:
+    | { type: ProjectRoleServiceIdentifierType.SLUG; projectSlug: string }
+    | { type: ProjectRoleServiceIdentifierType.ID; projectId: string };
 } & Omit;
-export type TGetRoleBySlugDTO = {
+export type TGetRoleDetailsDTO = {
roleSlug: string; - projectSlug: string; + filter: + | { type: ProjectRoleServiceIdentifierType.SLUG; projectSlug: string } + | { type: ProjectRoleServiceIdentifierType.ID; projectId: string }; } & Omit; export type TUpdateRoleDTO = { roleId: string; data: Omit; - projectSlug: string; } & Omit; export type TDeleteRoleDTO = { roleId: string; - projectSlug: string; } & Omit; export type TListRolesDTO = { - projectSlug: string; + filter: + | { type: ProjectRoleServiceIdentifierType.SLUG; projectSlug: string } + | { type: ProjectRoleServiceIdentifierType.ID; projectId: string }; } & Omit; diff --git a/backend/src/services/project/project-dal.ts b/backend/src/services/project/project-dal.ts index a4ec991571..4e74253263 100644 --- a/backend/src/services/project/project-dal.ts +++ b/backend/src/services/project/project-dal.ts @@ -2,7 +2,7 @@ import { Knex } from "knex"; import { TDbClient } from "@app/db"; import { ProjectsSchema, ProjectUpgradeStatus, ProjectVersion, TableName, TProjectsUpdate } from "@app/db/schemas"; -import { BadRequestError, DatabaseError } from "@app/lib/errors"; +import { BadRequestError, DatabaseError, NotFoundError, UnauthorizedError } from "@app/lib/errors"; import { ormify, selectAllTableCols, sqlNestRelationships } from "@app/lib/knex"; import { Filter, ProjectFilterType } from "./project-types"; @@ -14,7 +14,8 @@ export const projectDALFactory = (db: TDbClient) => { const findAllProjects = async (userId: string) => { try { - const workspaces = await db(TableName.ProjectMembership) + const workspaces = await db + .replicaNode()(TableName.ProjectMembership) .where({ userId }) .join(TableName.Project, `${TableName.ProjectMembership}.projectId`, `${TableName.Project}.id`) .leftJoin(TableName.Environment, `${TableName.Environment}.projectId`, `${TableName.Project}.id`) @@ -83,7 +84,7 @@ export const projectDALFactory = (db: TDbClient) => { const findProjectGhostUser = async (projectId: string, tx?: Knex) => { try { - const ghostUser = await (tx || db)(TableName.ProjectMembership) + const ghostUser = await (tx || db.replicaNode())(TableName.ProjectMembership) .where({ projectId }) .join(TableName.Users, `${TableName.ProjectMembership}.userId`, `${TableName.Users}.id`) .select(selectAllTableCols(TableName.Users)) @@ -109,7 +110,8 @@ export const projectDALFactory = (db: TDbClient) => { const findAllProjectsByIdentity = async (identityId: string) => { try { - const workspaces = await db(TableName.IdentityProjectMembership) + const workspaces = await db + .replicaNode()(TableName.IdentityProjectMembership) .where({ identityId }) .join(TableName.Project, `${TableName.IdentityProjectMembership}.projectId`, `${TableName.Project}.id`) .leftJoin(TableName.Environment, `${TableName.Environment}.projectId`, `${TableName.Project}.id`) @@ -151,7 +153,8 @@ export const projectDALFactory = (db: TDbClient) => { const findProjectById = async (id: string) => { try { - const workspaces = await db(TableName.Project) + const workspaces = await db + .replicaNode()(TableName.Project) .where(`${TableName.Project}.id`, id) .leftJoin(TableName.Environment, `${TableName.Environment}.projectId`, `${TableName.Project}.id`) .select( @@ -183,7 +186,7 @@ export const projectDALFactory = (db: TDbClient) => { })?.[0]; if (!project) { - throw new BadRequestError({ message: "Project not found" }); + throw new NotFoundError({ message: `Project with ID '${id}' not found` }); } return project; @@ -195,10 +198,11 @@ export const projectDALFactory = (db: TDbClient) => { const findProjectBySlug = async (slug: 
string, orgId: string | undefined) => { try { if (!orgId) { - throw new BadRequestError({ message: "Organization ID is required when querying with slugs" }); + throw new UnauthorizedError({ message: "Organization ID is required when querying with slugs" }); } - const projects = await db(TableName.Project) + const projects = await db + .replicaNode()(TableName.Project) .where(`${TableName.Project}.slug`, slug) .where(`${TableName.Project}.orgId`, orgId) .leftJoin(TableName.Environment, `${TableName.Environment}.projectId`, `${TableName.Project}.id`) @@ -231,7 +235,7 @@ export const projectDALFactory = (db: TDbClient) => { })?.[0]; if (!project) { - throw new BadRequestError({ message: "Project not found" }); + throw new NotFoundError({ message: `Project with slug '${slug}' not found` }); } return project; @@ -247,7 +251,7 @@ export const projectDALFactory = (db: TDbClient) => { } if (filter.type === ProjectFilterType.SLUG) { if (!filter.orgId) { - throw new BadRequestError({ + throw new UnauthorizedError({ message: "Organization ID is required when querying with slugs" }); } @@ -275,6 +279,34 @@ export const projectDALFactory = (db: TDbClient) => { } }; + const findProjectWithOrg = async (projectId: string) => { + // we just need the project, and we need to include a new .organization field that includes the org from the orgId reference + + const project = await db(TableName.Project) + .where({ [`${TableName.Project}.id` as "id"]: projectId }) + + .join(TableName.Organization, `${TableName.Organization}.id`, `${TableName.Project}.orgId`) + + .select( + db.ref("id").withSchema(TableName.Organization).as("organizationId"), + db.ref("name").withSchema(TableName.Organization).as("organizationName") + ) + .select(selectAllTableCols(TableName.Project)) + .first(); + + if (!project) { + throw new NotFoundError({ message: `Project with ID '${projectId}' not found` }); + } + + return { + ...ProjectsSchema.parse(project), + organization: { + id: project.organizationId, + name: project.organizationName + } + }; + }; + return { ...projectOrm, findAllProjects, @@ -284,6 +316,7 @@ export const projectDALFactory = (db: TDbClient) => { findProjectById, findProjectByFilter, findProjectBySlug, + findProjectWithOrg, checkProjectUpgradeStatus }; }; diff --git a/backend/src/services/project/project-fns.ts b/backend/src/services/project/project-fns.ts index 3ac75248df..92d0dfc39d 100644 --- a/backend/src/services/project/project-fns.ts +++ b/backend/src/services/project/project-fns.ts @@ -1,6 +1,10 @@ import crypto from "crypto"; +import { ProjectVersion, TProjects } from "@app/db/schemas"; import { decryptAsymmetric, encryptAsymmetric } from "@app/lib/crypto"; +import { NotFoundError } from "@app/lib/errors"; +import { TKmsServiceFactory } from "@app/services/kms/kms-service"; +import { TProjectDALFactory } from "@app/services/project/project-dal"; import { AddUserToWsDTO } from "./project-types"; @@ -12,7 +16,7 @@ export const assignWorkspaceKeysToMembers = ({ members, decryptKey, userPrivateK privateKey: userPrivateKey }); - const newWsMembers = members.map(({ orgMembershipId, userPublicKey, projectMembershipRole }) => { + const newWsMembers = members.map(({ orgMembershipId, userPublicKey }) => { const { ciphertext: inviteeCipherText, nonce: inviteeNonce } = encryptAsymmetric( plaintextProjectKey, userPublicKey, @@ -21,7 +25,6 @@ export const assignWorkspaceKeysToMembers = ({ members, decryptKey, userPrivateK return { orgMembershipId, - projectRole: projectMembershipRole, workspaceEncryptedKey: 
inviteeCipherText, workspaceEncryptedNonce: inviteeNonce }; @@ -49,3 +52,53 @@ export const createProjectKey = ({ publicKey, privateKey, plainProjectKey }: TCr return { key: encryptedProjectKey, iv: encryptedProjectKeyIv }; }; + +export const verifyProjectVersions = (projects: Pick[], version: ProjectVersion) => { + for (const project of projects) { + if (project.version !== version) { + return false; + } + } + + return true; +}; + +export const getProjectKmsCertificateKeyId = async ({ + projectId, + projectDAL, + kmsService +}: { + projectId: string; + projectDAL: Pick; + kmsService: Pick; +}) => { + const keyId = await projectDAL.transaction(async (tx) => { + const project = await projectDAL.findOne({ id: projectId }, tx); + if (!project) { + throw new NotFoundError({ message: `Project with ID '${projectId}' not found` }); + } + + if (!project.kmsCertificateKeyId) { + // create default kms key for certificate service + const key = await kmsService.generateKmsKey({ + isReserved: true, + orgId: project.orgId, + tx + }); + + await projectDAL.updateById( + projectId, + { + kmsCertificateKeyId: key.id + }, + tx + ); + + return key.id; + } + + return project.kmsCertificateKeyId; + }); + + return keyId; +}; diff --git a/backend/src/services/project/project-queue.ts b/backend/src/services/project/project-queue.ts index 8f1e3fc3f0..d59bde6c17 100644 --- a/backend/src/services/project/project-queue.ts +++ b/backend/src/services/project/project-queue.ts @@ -300,8 +300,7 @@ export const projectQueueFactory = ({ members: [ { userPublicKey: user.publicKey, - orgMembershipId: orgMembership.id, - projectMembershipRole: ProjectMembershipRole.Admin + orgMembershipId: orgMembership.id } ] }); diff --git a/backend/src/services/project/project-service.ts b/backend/src/services/project/project-service.ts index f58fd77885..dfe2ce3ec9 100644 --- a/backend/src/services/project/project-service.ts +++ b/backend/src/services/project/project-service.ts @@ -1,33 +1,43 @@ import { ForbiddenError } from "@casl/ability"; import slugify from "@sindresorhus/slugify"; -import { OrgMembershipRole, ProjectMembershipRole, ProjectVersion } from "@app/db/schemas"; +import { OrgMembershipRole, ProjectMembershipRole, ProjectVersion, TProjectEnvironments } from "@app/db/schemas"; import { TLicenseServiceFactory } from "@app/ee/services/license/license-service"; import { OrgPermissionActions, OrgPermissionSubjects } from "@app/ee/services/permission/org-permission"; import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service"; import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission"; +import { TProjectTemplateServiceFactory } from "@app/ee/services/project-template/project-template-service"; +import { InfisicalProjectTemplate } from "@app/ee/services/project-template/project-template-types"; import { TKeyStoreFactory } from "@app/keystore/keystore"; import { isAtLeastAsPrivileged } from "@app/lib/casl"; -import { getConfig } from "@app/lib/config/env"; -import { createSecretBlindIndex } from "@app/lib/crypto"; import { infisicalSymmetricEncypt } from "@app/lib/crypto/encryption"; -import { BadRequestError, ForbiddenRequestError } from "@app/lib/errors"; +import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors"; +import { groupBy } from "@app/lib/fn"; import { alphaNumericNanoId } from "@app/lib/nanoid"; import { TProjectPermission } from "@app/lib/types"; import { ActorType } from "../auth/auth-type"; +import { 
TCertificateDALFactory } from "../certificate/certificate-dal"; +import { TCertificateAuthorityDALFactory } from "../certificate-authority/certificate-authority-dal"; +import { TCertificateTemplateDALFactory } from "../certificate-template/certificate-template-dal"; import { TIdentityOrgDALFactory } from "../identity/identity-org-dal"; import { TIdentityProjectDALFactory } from "../identity-project/identity-project-dal"; import { TIdentityProjectMembershipRoleDALFactory } from "../identity-project/identity-project-membership-role-dal"; +import { TKmsServiceFactory } from "../kms/kms-service"; import { TOrgDALFactory } from "../org/org-dal"; import { TOrgServiceFactory } from "../org/org-service"; +import { TPkiAlertDALFactory } from "../pki-alert/pki-alert-dal"; +import { TPkiCollectionDALFactory } from "../pki-collection/pki-collection-dal"; import { TProjectBotDALFactory } from "../project-bot/project-bot-dal"; import { TProjectEnvDALFactory } from "../project-env/project-env-dal"; import { TProjectKeyDALFactory } from "../project-key/project-key-dal"; import { TProjectMembershipDALFactory } from "../project-membership/project-membership-dal"; import { TProjectUserMembershipRoleDALFactory } from "../project-membership/project-user-membership-role-dal"; -import { TSecretBlindIndexDALFactory } from "../secret-blind-index/secret-blind-index-dal"; +import { TProjectRoleDALFactory } from "../project-role/project-role-dal"; +import { getPredefinedRoles } from "../project-role/project-role-fns"; import { ROOT_FOLDER_NAME, TSecretFolderDALFactory } from "../secret-folder/secret-folder-dal"; +import { TProjectSlackConfigDALFactory } from "../slack/project-slack-config-dal"; +import { TSlackIntegrationDALFactory } from "../slack/slack-integration-dal"; import { TUserDALFactory } from "../user/user-dal"; import { TProjectDALFactory } from "./project-dal"; import { assignWorkspaceKeysToMembers, createProjectKey } from "./project-fns"; @@ -36,9 +46,21 @@ import { TCreateProjectDTO, TDeleteProjectDTO, TGetProjectDTO, + TGetProjectKmsKey, + TGetProjectSlackConfig, + TListProjectAlertsDTO, + TListProjectCasDTO, + TListProjectCertificateTemplatesDTO, + TListProjectCertsDTO, + TListProjectsDTO, + TLoadProjectKmsBackupDTO, TToggleProjectAutoCapitalizationDTO, + TUpdateAuditLogsRetentionDTO, TUpdateProjectDTO, + TUpdateProjectKmsDTO, TUpdateProjectNameDTO, + TUpdateProjectSlackConfig, + TUpdateProjectVersionLimitDTO, TUpgradeProjectDTO } from "./project-types"; @@ -49,6 +71,7 @@ export const DEFAULT_PROJECT_ENVS = [ ]; type TProjectServiceFactoryDep = { + // TODO: Pick projectDAL: TProjectDALFactory; projectQueue: TProjectQueueFactory; userDAL: TUserDALFactory; @@ -58,15 +81,32 @@ type TProjectServiceFactoryDep = { identityProjectDAL: TIdentityProjectDALFactory; identityProjectMembershipRoleDAL: Pick; projectKeyDAL: Pick; - projectBotDAL: Pick; projectMembershipDAL: Pick; + projectSlackConfigDAL: Pick; + slackIntegrationDAL: Pick; projectUserMembershipRoleDAL: Pick; - secretBlindIndexDAL: Pick; + certificateAuthorityDAL: Pick; + certificateDAL: Pick; + certificateTemplateDAL: Pick; + pkiAlertDAL: Pick; + pkiCollectionDAL: Pick; permissionService: TPermissionServiceFactory; orgService: Pick; licenseService: Pick; orgDAL: Pick; keyStore: Pick; + projectBotDAL: Pick; + projectRoleDAL: Pick; + kmsService: Pick< + TKmsServiceFactory, + | "updateProjectSecretManagerKmsKey" + | "getProjectKeyBackup" + | "loadProjectKeyBackup" + | "getKmsById" + | "getProjectSecretManagerKmsKeyId" + | "deleteInternalKms" + >; + 
projectTemplateService: TProjectTemplateServiceFactory; }; export type TProjectServiceFactory = ReturnType; @@ -81,15 +121,24 @@ export const projectServiceFactory = ({ folderDAL, orgService, identityProjectDAL, - projectBotDAL, identityOrgMembershipDAL, - secretBlindIndexDAL, projectMembershipDAL, projectEnvDAL, licenseService, projectUserMembershipRoleDAL, + projectRoleDAL, identityProjectMembershipRoleDAL, - keyStore + certificateAuthorityDAL, + certificateDAL, + certificateTemplateDAL, + pkiCollectionDAL, + pkiAlertDAL, + keyStore, + kmsService, + projectBotDAL, + projectSlackConfigDAL, + slackIntegrationDAL, + projectTemplateService }: TProjectServiceFactoryDep) => { /* * Create workspace. Make user the admin @@ -100,7 +149,11 @@ export const projectServiceFactory = ({ actorOrgId, actorAuthMethod, workspaceName, - slug: projectSlug + slug: projectSlug, + kmsKeyId, + tx: trx, + createDefaultEnvs = true, + template = InfisicalProjectTemplate.Default }: TCreateProjectDTO) => { const organization = await orgDAL.findOne({ id: actorOrgId }); @@ -113,9 +166,6 @@ export const projectServiceFactory = ({ ); ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Create, OrgPermissionSubjects.Workspace); - const appCfg = getConfig(); - const blindIndex = createSecretBlindIndex(appCfg.ROOT_ENCRYPTION_KEY, appCfg.ENCRYPTION_KEY); - const plan = await licenseService.getPlan(organization.id); if (plan.workspaceLimit !== null && plan.workspacesUsed >= plan.workspaceLimit) { // case: limit imposed on number of workspaces allowed @@ -125,18 +175,46 @@ export const projectServiceFactory = ({ }); } - const results = await projectDAL.transaction(async (tx) => { + const results = await (trx || projectDAL).transaction(async (tx) => { const ghostUser = await orgService.addGhostUser(organization.id, tx); + if (kmsKeyId) { + const kms = await kmsService.getKmsById(kmsKeyId, tx); + + if (kms.orgId !== organization.id) { + throw new ForbiddenRequestError({ + message: "KMS does not belong in the organization" + }); + } + } + + let projectTemplate: Awaited> | null = null; + + switch (template) { + case InfisicalProjectTemplate.Default: + projectTemplate = null; + break; + default: + projectTemplate = await projectTemplateService.findProjectTemplateByName(template, { + id: actorId, + orgId: organization.id, + type: actor, + authMethod: actorAuthMethod + }); + } + const project = await projectDAL.create( { name: workspaceName, orgId: organization.id, slug: projectSlug || slugify(`${workspaceName}-${alphaNumericNanoId(4)}`), - version: ProjectVersion.V2 + kmsSecretManagerKeyId: kmsKeyId, + version: ProjectVersion.V3, + pitVersionLimit: 10 }, tx ); + // set ghost user as admin of project const projectMembership = await projectMembershipDAL.create( { @@ -150,27 +228,35 @@ export const projectServiceFactory = ({ tx ); - // generate the blind index for project - await secretBlindIndexDAL.create( - { - projectId: project.id, - keyEncoding: blindIndex.keyEncoding, - saltIV: blindIndex.iv, - saltTag: blindIndex.tag, - algorithm: blindIndex.algorithm, - encryptedSaltCipherText: blindIndex.ciphertext - }, - tx - ); // set default environments and root folder for provided environments - const envs = await projectEnvDAL.insertMany( - DEFAULT_PROJECT_ENVS.map((el, i) => ({ ...el, projectId: project.id, position: i + 1 })), - tx - ); - await folderDAL.insertMany( - envs.map(({ id }) => ({ name: ROOT_FOLDER_NAME, envId: id, version: 1 })), - tx - ); + let envs: TProjectEnvironments[] = []; + if (projectTemplate) { + 
envs = await projectEnvDAL.insertMany( + projectTemplate.environments.map((env) => ({ ...env, projectId: project.id })), + tx + ); + await folderDAL.insertMany( + envs.map(({ id }) => ({ name: ROOT_FOLDER_NAME, envId: id, version: 1 })), + tx + ); + await projectRoleDAL.insertMany( + projectTemplate.packedRoles.map((role) => ({ + ...role, + permissions: JSON.stringify(role.permissions), + projectId: project.id + })), + tx + ); + } else if (createDefaultEnvs) { + envs = await projectEnvDAL.insertMany( + DEFAULT_PROJECT_ENVS.map((el, i) => ({ ...el, projectId: project.id, position: i + 1 })), + tx + ); + await folderDAL.insertMany( + envs.map(({ id }) => ({ name: ROOT_FOLDER_NAME, envId: id, version: 1 })), + tx + ); + } // 3. Create a random key that we'll use as the project key. const { key: encryptedProjectKey, iv: encryptedProjectKeyIv } = createProjectKey({ @@ -233,8 +319,7 @@ export const projectServiceFactory = ({ members: [ { userPublicKey: user.publicKey, - orgMembershipId: orgMembership.id, - projectMembershipRole: ProjectMembershipRole.Admin + orgMembershipId: orgMembership.id } ] }); @@ -248,7 +333,7 @@ export const projectServiceFactory = ({ tx ); await projectUserMembershipRoleDAL.create( - { projectMembershipId: userProjectMembership.id, role: projectAdmin.projectRole }, + { projectMembershipId: userProjectMembership.id, role: ProjectMembershipRole.Admin }, tx ); @@ -278,8 +363,8 @@ export const projectServiceFactory = ({ // If identity org membership not found, throw error if (!identityOrgMembership) { - throw new BadRequestError({ - message: `Failed to find identity with id ${actorId}` + throw new NotFoundError({ + message: `Failed to find identity with id '${actorId}'` }); } @@ -341,7 +426,12 @@ export const projectServiceFactory = ({ const deletedProject = await projectDAL.transaction(async (tx) => { const delProject = await projectDAL.deleteById(project.id, tx); const projectGhostUser = await projectMembershipDAL.findProjectGhostUser(project.id, tx).catch(() => null); - + if (delProject.kmsCertificateKeyId) { + await kmsService.deleteInternalKms(delProject.kmsCertificateKeyId, delProject.orgId, tx); + } + if (delProject.kmsSecretManagerKeyId) { + await kmsService.deleteInternalKms(delProject.kmsSecretManagerKeyId, delProject.orgId, tx); + } // Delete the org membership for the ghost user if it's found. if (projectGhostUser) { await userDAL.deleteById(projectGhostUser.id, tx); @@ -354,8 +444,34 @@ export const projectServiceFactory = ({ return deletedProject; }; - const getProjects = async (actorId: string) => { + const getProjects = async ({ actorId, includeRoles, actorAuthMethod, actorOrgId }: TListProjectsDTO) => { const workspaces = await projectDAL.findAllProjects(actorId); + + if (includeRoles) { + const { permission } = await permissionService.getUserOrgPermission(actorId, actorOrgId, actorAuthMethod); + + // `includeRoles` is specifically used by organization admins when inviting new users to the organizations to avoid looping redundant api calls. 
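// [illustrative sketch — not part of the upstream change] The `includeRoles`
// branch below fetches custom roles for every workspace in one query (the `$in`
// filter) and then buckets them by projectId before merging in the predefined
// roles. `groupByProjectId` here only models the behaviour assumed of `groupBy`
// from @app/lib/fn; the name and row shape are hypothetical.
type RoleRow = { projectId: string; slug: string };
const groupByProjectId = (roles: RoleRow[]): Record<string, RoleRow[]> =>
  roles.reduce<Record<string, RoleRow[]>>((acc, role) => {
    (acc[role.projectId] ??= []).push(role);
    return acc;
  }, {});
// groupByProjectId([{ projectId: "p1", slug: "dev" }, { projectId: "p2", slug: "auditor" }])
// -> { p1: [{ projectId: "p1", slug: "dev" }], p2: [{ projectId: "p2", slug: "auditor" }] }
// [end sketch]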
+ ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Create, OrgPermissionSubjects.Member); + const customRoles = await projectRoleDAL.find({ + $in: { + projectId: workspaces.map((workspace) => workspace.id) + } + }); + + const workspaceMappedToRoles = groupBy(customRoles, (role) => role.projectId); + + const workspacesWithRoles = await Promise.all( + workspaces.map(async (workspace) => { + return { + ...workspace, + roles: [...(workspaceMappedToRoles[workspace.id] || []), ...getPredefinedRoles(workspace.id)] + }; + }) + ); + + return workspacesWithRoles; + } + return workspaces; }; @@ -406,6 +522,76 @@ export const projectServiceFactory = ({ return updatedProject; }; + const updateVersionLimit = async ({ + actor, + actorId, + actorOrgId, + actorAuthMethod, + pitVersionLimit, + workspaceSlug + }: TUpdateProjectVersionLimitDTO) => { + const project = await projectDAL.findProjectBySlug(workspaceSlug, actorOrgId); + if (!project) { + throw new NotFoundError({ + message: `Project with slug '${workspaceSlug}' not found` + }); + } + + const { hasRole } = await permissionService.getProjectPermission( + actor, + actorId, + project.id, + actorAuthMethod, + actorOrgId + ); + + if (!hasRole(ProjectMembershipRole.Admin)) + throw new ForbiddenRequestError({ + message: "Insufficient privileges, only admins are allowed to take this action" + }); + + return projectDAL.updateById(project.id, { pitVersionLimit }); + }; + + const updateAuditLogsRetention = async ({ + actor, + actorId, + actorOrgId, + actorAuthMethod, + auditLogsRetentionDays, + workspaceSlug + }: TUpdateAuditLogsRetentionDTO) => { + const project = await projectDAL.findProjectBySlug(workspaceSlug, actorOrgId); + if (!project) { + throw new NotFoundError({ + message: `Project with slug '${workspaceSlug}' not found` + }); + } + + const { hasRole } = await permissionService.getProjectPermission( + actor, + actorId, + project.id, + actorAuthMethod, + actorOrgId + ); + + if (!hasRole(ProjectMembershipRole.Admin)) { + throw new ForbiddenRequestError({ + message: "Insufficient privileges, only admins are allowed to take this action" + }); + } + + const plan = await licenseService.getPlan(project.orgId); + if (!plan.auditLogs || auditLogsRetentionDays > plan.auditLogsRetentionDays) { + throw new BadRequestError({ + message: "Failed to update audit logs retention due to plan limit reached. Upgrade plan to increase." 
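// [illustrative sketch — not part of the upstream change] updateVersionLimit and
// updateAuditLogsRetention above share one shape: resolve the project by slug,
// require the Admin project role, then clamp the requested value against the
// org's plan. A standalone model of the retention check; the plan type mirrors
// only the fields used here and the helper name is hypothetical.
type AuditLogsPlan = { auditLogs: boolean; auditLogsRetentionDays: number };
const retentionAllowed = (plan: AuditLogsPlan, requestedDays: number): boolean =>
  plan.auditLogs && requestedDays <= plan.auditLogsRetentionDays;
// retentionAllowed({ auditLogs: true, auditLogsRetentionDays: 30 }, 90) -> false (service throws)
// retentionAllowed({ auditLogs: true, auditLogsRetentionDays: 90 }, 30) -> true
// [end sketch]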
+ }); + } + + return projectDAL.updateById(project.id, { auditLogsRetentionDays }); + }; + const updateName = async ({ projectId, actor, @@ -484,14 +670,410 @@ export const projectServiceFactory = ({ const project = await projectDAL.findProjectById(projectId); if (!project) { - throw new BadRequestError({ - message: `Project with id ${projectId} not found` + throw new NotFoundError({ + message: `Project with ID '${projectId}' not found` }); } return project.upgradeStatus || null; }; + /** + * Return list of CAs for project + */ + const listProjectCas = async ({ + status, + friendlyName, + commonName, + limit = 25, + offset = 0, + actorId, + actorOrgId, + actorAuthMethod, + filter, + actor + }: TListProjectCasDTO) => { + const project = await projectDAL.findProjectByFilter(filter); + + const { permission } = await permissionService.getProjectPermission( + actor, + actorId, + project.id, + actorAuthMethod, + actorOrgId + ); + + ForbiddenError.from(permission).throwUnlessCan( + ProjectPermissionActions.Read, + ProjectPermissionSub.CertificateAuthorities + ); + + const cas = await certificateAuthorityDAL.find( + { + projectId: project.id, + ...(status && { status }), + ...(friendlyName && { friendlyName }), + ...(commonName && { commonName }) + }, + { offset, limit, sort: [["updatedAt", "desc"]] } + ); + + return cas; + }; + + /** + * Return list of certificates for project + */ + const listProjectCertificates = async ({ + limit = 25, + offset = 0, + friendlyName, + commonName, + actorId, + actorOrgId, + actorAuthMethod, + filter, + actor + }: TListProjectCertsDTO) => { + const project = await projectDAL.findProjectByFilter(filter); + + const { permission } = await permissionService.getProjectPermission( + actor, + actorId, + project.id, + actorAuthMethod, + actorOrgId + ); + + ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Certificates); + + const cas = await certificateAuthorityDAL.find({ projectId: project.id }); + + const certificates = await certificateDAL.find( + { + $in: { + caId: cas.map((ca) => ca.id) + }, + ...(friendlyName && { friendlyName }), + ...(commonName && { commonName }) + }, + { offset, limit, sort: [["updatedAt", "desc"]] } + ); + + const count = await certificateDAL.countCertificatesInProject({ + projectId: project.id, + friendlyName, + commonName + }); + + return { + certificates, + totalCount: count + }; + }; + + /** + * Return list of (PKI) alerts configured for project + */ + const listProjectAlerts = async ({ + projectId, + actor, + actorId, + actorAuthMethod, + actorOrgId + }: TListProjectAlertsDTO) => { + const { permission } = await permissionService.getProjectPermission( + actor, + actorId, + projectId, + actorAuthMethod, + actorOrgId + ); + + ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.PkiAlerts); + + const alerts = await pkiAlertDAL.find({ projectId }); + + return { + alerts + }; + }; + + /** + * Return list of PKI collections for project + */ + const listProjectPkiCollections = async ({ + projectId, + actor, + actorId, + actorAuthMethod, + actorOrgId + }: TListProjectAlertsDTO) => { + const { permission } = await permissionService.getProjectPermission( + actor, + actorId, + projectId, + actorAuthMethod, + actorOrgId + ); + + ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.PkiCollections); + + const pkiCollections = await pkiCollectionDAL.find({ projectId }); + + return { + pkiCollections + }; + }; + + 
/** + * Return list of certificate templates for project + */ + const listProjectCertificateTemplates = async ({ + projectId, + actorId, + actorOrgId, + actorAuthMethod, + actor + }: TListProjectCertificateTemplatesDTO) => { + const { permission } = await permissionService.getProjectPermission( + actor, + actorId, + projectId, + actorAuthMethod, + actorOrgId + ); + + ForbiddenError.from(permission).throwUnlessCan( + ProjectPermissionActions.Read, + ProjectPermissionSub.CertificateTemplates + ); + + const certificateTemplates = await certificateTemplateDAL.getCertTemplatesByProjectId(projectId); + + return { + certificateTemplates + }; + }; + + const updateProjectKmsKey = async ({ + projectId, + kms, + actor, + actorId, + actorAuthMethod, + actorOrgId + }: TUpdateProjectKmsDTO) => { + const { permission } = await permissionService.getProjectPermission( + actor, + actorId, + projectId, + actorAuthMethod, + actorOrgId + ); + + ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Edit, ProjectPermissionSub.Kms); + + const secretManagerKmsKey = await kmsService.updateProjectSecretManagerKmsKey({ + projectId, + kms + }); + + return { + secretManagerKmsKey + }; + }; + + const getProjectKmsBackup = async ({ + projectId, + actor, + actorId, + actorAuthMethod, + actorOrgId + }: TProjectPermission) => { + const { permission } = await permissionService.getProjectPermission( + actor, + actorId, + projectId, + actorAuthMethod, + actorOrgId + ); + + ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Edit, ProjectPermissionSub.Kms); + + const plan = await licenseService.getPlan(actorOrgId); + if (!plan.externalKms) { + throw new BadRequestError({ + message: "Failed to get KMS backup due to plan restriction. Upgrade to the enterprise plan." + }); + } + + const kmsBackup = await kmsService.getProjectKeyBackup(projectId); + return kmsBackup; + }; + + const loadProjectKmsBackup = async ({ + projectId, + actor, + actorId, + actorAuthMethod, + actorOrgId, + backup + }: TLoadProjectKmsBackupDTO) => { + const { permission } = await permissionService.getProjectPermission( + actor, + actorId, + projectId, + actorAuthMethod, + actorOrgId + ); + + ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Edit, ProjectPermissionSub.Kms); + + const plan = await licenseService.getPlan(actorOrgId); + if (!plan.externalKms) { + throw new BadRequestError({ + message: "Failed to load KMS backup due to plan restriction. Upgrade to the enterprise plan." 
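// [illustrative sketch — not part of the upstream change] updateProjectKmsKey
// above takes `kms` as a discriminated union — see TUpdateProjectKmsDTO later in
// this diff: { type: KmsType.Internal } | { type: KmsType.External; kmsId: string }.
// A sketch of narrowing it; the enum string values are assumptions for
// illustration only, not the real KmsType definition.
enum KmsTypeSketch { Internal = "internal", External = "external" }
type ProjectKmsSketch = { type: KmsTypeSketch.Internal } | { type: KmsTypeSketch.External; kmsId: string };
const describeKms = (kms: ProjectKmsSketch): string =>
  kms.type === KmsTypeSketch.Internal
    ? "Infisical-managed key"
    : `external KMS '${kms.kmsId}'`; // kmsId only exists on the External branch
// describeKms({ type: KmsTypeSketch.External, kmsId: "kms-123" }) -> "external KMS 'kms-123'"
// [end sketch]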
+ }); + } + + const kmsBackup = await kmsService.loadProjectKeyBackup(projectId, backup); + return kmsBackup; + }; + + const getProjectKmsKeys = async ({ projectId, actor, actorId, actorAuthMethod, actorOrgId }: TGetProjectKmsKey) => { + const { membership } = await permissionService.getProjectPermission( + actor, + actorId, + projectId, + actorAuthMethod, + actorOrgId + ); + + if (!membership) { + throw new ForbiddenRequestError({ message: "You are not a member of this project" }); + } + + const kmsKeyId = await kmsService.getProjectSecretManagerKmsKeyId(projectId); + const kmsKey = await kmsService.getKmsById(kmsKeyId); + + return { secretManagerKmsKey: kmsKey }; + }; + + const getProjectSlackConfig = async ({ + actorId, + actor, + actorOrgId, + actorAuthMethod, + projectId + }: TGetProjectSlackConfig) => { + const project = await projectDAL.findById(projectId); + if (!project) { + throw new NotFoundError({ + message: `Project with ID '${projectId}' not found` + }); + } + + const { permission } = await permissionService.getProjectPermission( + actor, + actorId, + projectId, + actorAuthMethod, + actorOrgId + ); + + ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Settings); + + return projectSlackConfigDAL.findOne({ + projectId: project.id + }); + }; + + const updateProjectSlackConfig = async ({ + actorId, + actor, + actorOrgId, + actorAuthMethod, + projectId, + slackIntegrationId, + isAccessRequestNotificationEnabled, + accessRequestChannels, + isSecretRequestNotificationEnabled, + secretRequestChannels + }: TUpdateProjectSlackConfig) => { + const project = await projectDAL.findById(projectId); + if (!project) { + throw new NotFoundError({ + message: `Project with ID '${projectId}' not found` + }); + } + + const slackIntegration = await slackIntegrationDAL.findByIdWithWorkflowIntegrationDetails(slackIntegrationId); + + if (!slackIntegration) { + throw new NotFoundError({ + message: `Slack integration with ID '${slackIntegrationId}' not found` + }); + } + + if (slackIntegration.orgId !== actorOrgId) { + throw new ForbiddenRequestError({ + message: "Selected slack integration is not in the same organization" + }); + } + + const { permission } = await permissionService.getProjectPermission( + actor, + actorId, + projectId, + actorAuthMethod, + actorOrgId + ); + + ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Edit, ProjectPermissionSub.Settings); + + if (slackIntegration.orgId !== project.orgId) { + throw new ForbiddenRequestError({ + message: "Selected slack integration is not in the same organization" + }); + } + + return projectSlackConfigDAL.transaction(async (tx) => { + const slackConfig = await projectSlackConfigDAL.findOne( + { + projectId + }, + tx + ); + + if (slackConfig) { + return projectSlackConfigDAL.updateById( + slackConfig.id, + { + slackIntegrationId, + isAccessRequestNotificationEnabled, + accessRequestChannels, + isSecretRequestNotificationEnabled, + secretRequestChannels + }, + tx + ); + } + + return projectSlackConfigDAL.create( + { + projectId, + slackIntegrationId, + isAccessRequestNotificationEnabled, + accessRequestChannels, + isSecretRequestNotificationEnabled, + secretRequestChannels + }, + tx + ); + }); + }; + return { createProject, deleteProject, @@ -501,6 +1083,19 @@ export const projectServiceFactory = ({ getAProject, toggleAutoCapitalization, updateName, - upgradeProject + upgradeProject, + listProjectCas, + listProjectCertificates, + listProjectAlerts, + listProjectPkiCollections, + 
listProjectCertificateTemplates, + updateVersionLimit, + updateAuditLogsRetention, + updateProjectKmsKey, + getProjectKmsBackup, + loadProjectKmsBackup, + getProjectKmsKeys, + getProjectSlackConfig, + updateProjectSlackConfig }; }; diff --git a/backend/src/services/project/project-types.ts b/backend/src/services/project/project-types.ts index dcd424e18c..28cda2d951 100644 --- a/backend/src/services/project/project-types.ts +++ b/backend/src/services/project/project-types.ts @@ -1,7 +1,11 @@ -import { ProjectMembershipRole, TProjectKeys } from "@app/db/schemas"; +import { Knex } from "knex"; + +import { TProjectKeys } from "@app/db/schemas"; import { TProjectPermission } from "@app/lib/types"; import { ActorAuthMethod, ActorType } from "../auth/auth-type"; +import { CaStatus } from "../certificate-authority/certificate-authority-types"; +import { KmsType } from "../kms/kms-types"; export enum ProjectFilterType { ID = "id", @@ -26,6 +30,10 @@ export type TCreateProjectDTO = { actorOrgId?: string; workspaceName: string; slug?: string; + kmsKeyId?: string; + createDefaultEnvs?: boolean; + template?: string; + tx?: Knex; }; export type TDeleteProjectBySlugDTO = { @@ -43,6 +51,16 @@ export type TToggleProjectAutoCapitalizationDTO = { autoCapitalization: boolean; } & TProjectPermission; +export type TUpdateProjectVersionLimitDTO = { + pitVersionLimit: number; + workspaceSlug: string; +} & Omit; + +export type TUpdateAuditLogsRetentionDTO = { + auditLogsRetentionDays: number; + workspaceSlug: string; +} & Omit; + export type TUpdateProjectNameDTO = { name: string; } & TProjectPermission; @@ -62,6 +80,10 @@ export type TDeleteProjectDTO = { actorOrgId: string | undefined; } & Omit; +export type TListProjectsDTO = { + includeRoles: boolean; +} & Omit; + export type TUpgradeProjectDTO = { userPrivateKey: string; } & TProjectPermission; @@ -71,7 +93,47 @@ export type AddUserToWsDTO = { userPrivateKey: string; members: { orgMembershipId: string; - projectMembershipRole: ProjectMembershipRole; userPublicKey: string; }[]; }; + +export type TListProjectCasDTO = { + status?: CaStatus; + friendlyName?: string; + offset?: number; + limit?: number; + commonName?: string; + filter: Filter; +} & Omit; + +export type TListProjectCertsDTO = { + filter: Filter; + offset: number; + limit: number; + friendlyName?: string; + commonName?: string; +} & Omit; + +export type TListProjectAlertsDTO = TProjectPermission; + +export type TUpdateProjectKmsDTO = { + kms: { type: KmsType.Internal } | { type: KmsType.External; kmsId: string }; +} & TProjectPermission; + +export type TLoadProjectKmsBackupDTO = { + backup: string; +} & TProjectPermission; + +export type TGetProjectKmsKey = TProjectPermission; + +export type TListProjectCertificateTemplatesDTO = TProjectPermission; + +export type TGetProjectSlackConfig = TProjectPermission; + +export type TUpdateProjectSlackConfig = { + slackIntegrationId: string; + isAccessRequestNotificationEnabled: boolean; + accessRequestChannels: string; + isSecretRequestNotificationEnabled: boolean; + secretRequestChannels: string; +} & TProjectPermission; diff --git a/backend/src/services/resource-cleanup/resource-cleanup-queue.ts b/backend/src/services/resource-cleanup/resource-cleanup-queue.ts index afae2677f7..dab70806f4 100644 --- a/backend/src/services/resource-cleanup/resource-cleanup-queue.ts +++ b/backend/src/services/resource-cleanup/resource-cleanup-queue.ts @@ -1,13 +1,23 @@ import { TAuditLogDALFactory } from "@app/ee/services/audit-log/audit-log-dal"; +import { 
TSnapshotDALFactory } from "@app/ee/services/secret-snapshot/snapshot-dal"; import { logger } from "@app/lib/logger"; import { QueueJobs, QueueName, TQueueServiceFactory } from "@app/queue"; import { TIdentityAccessTokenDALFactory } from "../identity-access-token/identity-access-token-dal"; +import { TIdentityUaClientSecretDALFactory } from "../identity-ua/identity-ua-client-secret-dal"; +import { TSecretVersionDALFactory } from "../secret/secret-version-dal"; +import { TSecretFolderVersionDALFactory } from "../secret-folder/secret-folder-version-dal"; import { TSecretSharingDALFactory } from "../secret-sharing/secret-sharing-dal"; +import { TSecretVersionV2DALFactory } from "../secret-v2-bridge/secret-version-dal"; type TDailyResourceCleanUpQueueServiceFactoryDep = { auditLogDAL: Pick; identityAccessTokenDAL: Pick; + identityUniversalAuthClientSecretDAL: Pick; + secretVersionDAL: Pick; + secretVersionV2DAL: Pick; + secretFolderVersionDAL: Pick; + snapshotDAL: Pick; secretSharingDAL: Pick; queueService: TQueueServiceFactory; }; @@ -17,14 +27,24 @@ export type TDailyResourceCleanUpQueueServiceFactory = ReturnType { queueService.start(QueueName.DailyResourceCleanUp, async () => { logger.info(`${QueueName.DailyResourceCleanUp}: queue task started`); await auditLogDAL.pruneAuditLog(); await identityAccessTokenDAL.removeExpiredTokens(); + await identityUniversalAuthClientSecretDAL.removeExpiredClientSecrets(); await secretSharingDAL.pruneExpiredSharedSecrets(); + await snapshotDAL.pruneExcessSnapshots(); + await secretVersionDAL.pruneExcessVersions(); + await secretVersionV2DAL.pruneExcessVersions(); + await secretFolderVersionDAL.pruneExcessVersions(); logger.info(`${QueueName.DailyResourceCleanUp}: queue task completed`); }); diff --git a/backend/src/services/secret-blind-index/secret-blind-index-dal.ts b/backend/src/services/secret-blind-index/secret-blind-index-dal.ts index 8fa60cde7c..e26495ce30 100644 --- a/backend/src/services/secret-blind-index/secret-blind-index-dal.ts +++ b/backend/src/services/secret-blind-index/secret-blind-index-dal.ts @@ -12,7 +12,7 @@ export const secretBlindIndexDALFactory = (db: TDbClient) => { const countOfSecretsWithNullSecretBlindIndex = async (projectId: string, tx?: Knex) => { try { - const doc = await (tx || db)(TableName.Secret) + const doc = await (tx || db.replicaNode())(TableName.Secret) .leftJoin(TableName.SecretFolder, `${TableName.SecretFolder}.id`, `${TableName.Secret}.folderId`) .leftJoin(TableName.Environment, `${TableName.Environment}.id`, `${TableName.SecretFolder}.envId`) .where({ projectId }) @@ -26,11 +26,10 @@ export const secretBlindIndexDALFactory = (db: TDbClient) => { const findAllSecretsByProjectId = async (projectId: string, tx?: Knex) => { try { - const docs = await (tx || db)(TableName.Secret) + const docs = await (tx || db.replicaNode())(TableName.Secret) .leftJoin(TableName.SecretFolder, `${TableName.SecretFolder}.id`, `${TableName.Secret}.folderId`) .leftJoin(TableName.Environment, `${TableName.Environment}.id`, `${TableName.SecretFolder}.envId`) .where({ projectId }) - .whereNull("secretBlindIndex") .select(selectAllTableCols(TableName.Secret)) .select( db.ref("slug").withSchema(TableName.Environment).as("environment"), @@ -44,12 +43,11 @@ export const secretBlindIndexDALFactory = (db: TDbClient) => { const findSecretsByProjectId = async (projectId: string, secretIds: string[], tx?: Knex) => { try { - const docs = await (tx || db)(TableName.Secret) + const docs = await (tx || db.replicaNode())(TableName.Secret) 
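// [illustrative sketch — not part of the upstream change] A recurring edit in
// this diff: read-only DAL queries move from `db` to `db.replicaNode()`, while
// transactional work keeps using `tx` so it stays on the primary — hence the
// `(tx || db.replicaNode())` pattern. A minimal model of that routing with plain
// Knex instances; `makeDb` is hypothetical, the real wiring lives in TDbClient.
import { Knex } from "knex";
const makeDb = (primary: Knex, replica?: Knex): Knex & { replicaNode: () => Knex } =>
  Object.assign(primary, { replicaNode: () => replica ?? primary });
// usage: const docs = await (tx || db.replicaNode())("secrets").where({ projectId });
// -> reads hit the replica when one is configured; `tx` always wins, so queries
//    inside a transaction stay consistent on the primary.
// [end sketch]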
.leftJoin(TableName.SecretFolder, `${TableName.SecretFolder}.id`, `${TableName.Secret}.folderId`) .leftJoin(TableName.Environment, `${TableName.Environment}.id`, `${TableName.SecretFolder}.envId`) .where({ projectId }) .whereIn(`${TableName.Secret}.id`, secretIds) - .whereNull("secretBlindIndex") .select(selectAllTableCols(TableName.Secret)) .select( db.ref("slug").withSchema(TableName.Environment).as("environment"), diff --git a/backend/src/services/secret-blind-index/secret-blind-index-service.ts b/backend/src/services/secret-blind-index/secret-blind-index-service.ts index bf2728e951..57746307a5 100644 --- a/backend/src/services/secret-blind-index/secret-blind-index-service.ts +++ b/backend/src/services/secret-blind-index/secret-blind-index-service.ts @@ -1,6 +1,6 @@ import { ProjectMembershipRole } from "@app/db/schemas"; import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service"; -import { BadRequestError, UnauthorizedError } from "@app/lib/errors"; +import { ForbiddenRequestError, NotFoundError } from "@app/lib/errors"; import { TSecretDALFactory } from "../secret/secret-dal"; import { generateSecretBlindIndexBySalt } from "../secret/secret-fns"; @@ -52,7 +52,7 @@ export const secretBlindIndexServiceFactory = ({ actorOrgId ); if (!hasRole(ProjectMembershipRole.Admin)) { - throw new UnauthorizedError({ message: "User must be admin" }); + throw new ForbiddenRequestError({ message: "Insufficient privileges, user must be admin" }); } const secrets = await secretBlindIndexDAL.findAllSecretsByProjectId(projectId); @@ -75,17 +75,19 @@ export const secretBlindIndexServiceFactory = ({ actorOrgId ); if (!hasRole(ProjectMembershipRole.Admin)) { - throw new UnauthorizedError({ message: "User must be admin" }); + throw new ForbiddenRequestError({ message: "Insufficient privileges, user must be admin" }); } const blindIndexCfg = await secretBlindIndexDAL.findOne({ projectId }); - if (!blindIndexCfg) throw new BadRequestError({ message: "Blind index not found", name: "CreateSecret" }); + if (!blindIndexCfg) + throw new NotFoundError({ message: `Blind index for project with ID '${projectId}' not found` }); const secrets = await secretBlindIndexDAL.findSecretsByProjectId( projectId, secretsToUpdate.map(({ secretId }) => secretId) ); - if (secrets.length !== secretsToUpdate.length) throw new BadRequestError({ message: "Secret not found" }); + if (secrets.length !== secretsToUpdate.length) + throw new NotFoundError({ message: "One or more secrets to update not found" }); const operations = await Promise.all( secretsToUpdate.map(async ({ secretName, secretId: id }) => { diff --git a/backend/src/services/secret-folder/secret-folder-dal.ts b/backend/src/services/secret-folder/secret-folder-dal.ts index 0e896d0c6f..7b25ad4ed7 100644 --- a/backend/src/services/secret-folder/secret-folder-dal.ts +++ b/backend/src/services/secret-folder/secret-folder-dal.ts @@ -5,6 +5,11 @@ import { TableName, TProjectEnvironments, TSecretFolders, TSecretFoldersUpdate } import { BadRequestError, DatabaseError } from "@app/lib/errors"; import { groupBy, removeTrailingSlash } from "@app/lib/fn"; import { ormify, selectAllTableCols } from "@app/lib/knex"; +import { OrderByDirection } from "@app/lib/types"; +import { isValidSecretPath } from "@app/lib/validator"; +import { SecretsOrderBy } from "@app/services/secret/secret-types"; + +import { TFindFoldersDeepByParentIdsDTO } from "./secret-folder-types"; export const validateFolderName = (folderName: string) => { const validNameRegex = 
/^[a-zA-Z0-9-_]+$/;
@@ -83,7 +88,7 @@ const sqlFindMultipleFolderByEnvPathQuery = (db: Knex, query: Array<{ envId: str
.from("parent");
};
-const sqlFindFolderByPathQuery = (db: Knex, projectId: string, environment: string, secretPath: string) => {
+const sqlFindFolderByPathQuery = (db: Knex, projectId: string, environments: string[], secretPath: string) => {
// this removes a trailing slash, e.g. /folder1/folder2/ -> /folder1/folder2
const formatedPath = secretPath.at(-1) === "/" && secretPath.length > 1 ? secretPath.slice(0, -1) : secretPath;
// next goal: sanitize the input so the raw sql query stays safe
@@ -111,7 +116,7 @@ const sqlFindFolderByPathQuery = (db: Knex, projectId: stri
projectId, parentId: null })
- .where(`${TableName.Environment}.slug`, environment)
+ .whereIn(`${TableName.Environment}.slug`, environments)
.select(selectAllTableCols(TableName.SecretFolder))
.union( (qb) =>
@@ -139,14 +144,14 @@ const sqlFindFolderByPathQuery = (db: Knex, projectId: stri
.from("parent")
.leftJoin(TableName.Environment, `${TableName.Environment}.id`, "parent.envId")
.select<
- TSecretFolders & {
+ (TSecretFolders & {
depth: number;
path: string;
envId: string;
envSlug: string;
envName: string;
projectId: string;
- }
+ })[]
>(
selectAllTableCols("parent" as TableName.SecretFolder),
db.ref("id").withSchema(TableName.Environment).as("envId"),
@@ -210,8 +215,19 @@ export const secretFolderDALFactory = (db: TDbClient) => {
const secretFolderOrm = ormify(db, TableName.SecretFolder);
const findBySecretPath = async (projectId: string, environment: string, path: string, tx?: Knex) => {
+ const isValidPath = isValidSecretPath(path);
+ if (!isValidPath)
+ throw new BadRequestError({
+ message: "Invalid secret path. Only alphanumeric characters, dashes, and underscores are allowed."
+ });
+
try {
- const folder = await sqlFindFolderByPathQuery(tx || db, projectId, environment, removeTrailingSlash(path))
+ const folder = await sqlFindFolderByPathQuery(
+ tx || db.replicaNode(),
+ projectId,
+ [environment],
+ removeTrailingSlash(path)
+ )
.orderBy("depth", "desc")
.first();
if (folder && folder.path !== removeTrailingSlash(path)) {
@@ -225,12 +241,58 @@ export const secretFolderDALFactory = (db: TDbClient) => {
}
};
+ // finds folders by path for multiple envs
+ const findBySecretPathMultiEnv = async (projectId: string, environments: string[], path: string, tx?: Knex) => {
+ const isValidPath = isValidSecretPath(path);
+ if (!isValidPath)
+ throw new BadRequestError({
+ message: "Invalid secret path. Only alphanumeric characters, dashes, and underscores are allowed."
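// [illustrative sketch — not part of the upstream change] isValidSecretPath
// comes from @app/lib/validator and is not shown in this diff. One plausible
// shape consistent with the error message above (slash-separated segments of
// alphanumerics, dashes, and underscores); treat this as an assumption, not the
// actual implementation.
const isValidSecretPathSketch = (path: string): boolean =>
  path === "/" || /^\/?[a-zA-Z0-9_-]+(\/[a-zA-Z0-9_-]+)*\/?$/.test(path);
// isValidSecretPathSketch("/folder1/folder2") -> true
// isValidSecretPathSketch("/folder one") -> false (space rejected)
// [end sketch]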
+ }); + + try { + const pathDepth = removeTrailingSlash(path).split("/").filter(Boolean).length + 1; + + const folders = await sqlFindFolderByPathQuery( + tx || db.replicaNode(), + projectId, + environments, + removeTrailingSlash(path) + ) + .orderBy("depth", "desc") + .where("depth", pathDepth); + + const firstFolder = folders[0]; + + if (firstFolder && firstFolder.path !== removeTrailingSlash(path)) { + return []; + } + + return folders.map((folder) => { + const { envId: id, envName: name, envSlug: slug, ...el } = folder; + return { ...el, envId: id, environment: { id, name, slug } }; + }); + } catch (error) { + throw new DatabaseError({ error, name: "Find folders by secret path multi env" }); + } + }; + // used in folder creation // even if its the original given /path1/path2 // it will stop automatically at /path2 const findClosestFolder = async (projectId: string, environment: string, path: string, tx?: Knex) => { + const isValidPath = isValidSecretPath(path); + if (!isValidPath) + throw new BadRequestError({ + message: "Invalid secret path. Only alphanumeric characters, dashes, and underscores are allowed." + }); + try { - const folder = await sqlFindFolderByPathQuery(tx || db, projectId, environment, removeTrailingSlash(path)) + const folder = await sqlFindFolderByPathQuery( + tx || db.replicaNode(), + projectId, + [environment], + removeTrailingSlash(path) + ) .orderBy("depth", "desc") .first(); if (!folder) return; @@ -247,7 +309,7 @@ export const secretFolderDALFactory = (db: TDbClient) => { envId, secretPath: removeTrailingSlash(secretPath) })); - const folders = await sqlFindMultipleFolderByEnvPathQuery(tx || db, formatedQuery); + const folders = await sqlFindMultipleFolderByEnvPathQuery(tx || db.replicaNode(), formatedQuery); return formatedQuery.map(({ envId, secretPath }) => folders.find(({ path: targetPath, envId: targetEnvId }) => targetPath === secretPath && targetEnvId === envId) ); @@ -260,7 +322,7 @@ export const secretFolderDALFactory = (db: TDbClient) => { // that is instances in which for a given folderid find the secret path const findSecretPathByFolderIds = async (projectId: string, folderIds: string[], tx?: Knex) => { try { - const folders = await sqlFindSecretPathByFolderId(tx || db, projectId, folderIds); + const folders = await sqlFindSecretPathByFolderId(tx || db.replicaNode(), projectId, folderIds); // travelling all the way from leaf node to root contains real path const rootFolders = groupBy( @@ -299,33 +361,163 @@ export const secretFolderDALFactory = (db: TDbClient) => { const findById = async (id: string, tx?: Knex) => { try { - const folder = await (tx || db)(TableName.SecretFolder) + const folder = await (tx || db.replicaNode())(TableName.SecretFolder) .where({ [`${TableName.SecretFolder}.id` as "id"]: id }) .join(TableName.Environment, `${TableName.SecretFolder}.envId`, `${TableName.Environment}.id`) + .join(TableName.Project, `${TableName.Environment}.projectId`, `${TableName.Project}.id`) .select(selectAllTableCols(TableName.SecretFolder)) .select( db.ref("id").withSchema(TableName.Environment).as("envId"), db.ref("slug").withSchema(TableName.Environment).as("envSlug"), db.ref("name").withSchema(TableName.Environment).as("envName"), - db.ref("projectId").withSchema(TableName.Environment) + db.ref("projectId").withSchema(TableName.Environment), + db.ref("version").withSchema(TableName.Project).as("projectVersion") ) .first(); if (folder) { const { envId, envName, envSlug, ...el } = folder; - return { ...el, environment: { envId, envName, envSlug } 
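// [illustrative sketch — not part of the upstream change] findBySecretPathMultiEnv
// above filters on an exact `depth` instead of taking the deepest row per
// environment. A worked example of that arithmetic, with a local stand-in for
// removeTrailingSlash from @app/lib/fn:
const removeTrailingSlashSketch = (s: string): string =>
  s.length > 1 && s.endsWith("/") ? s.slice(0, -1) : s;
const pathDepthOf = (path: string): number =>
  removeTrailingSlashSketch(path).split("/").filter(Boolean).length + 1;
// pathDepthOf("/") -> 1 (root), pathDepthOf("/folder1") -> 2, pathDepthOf("/folder1/folder2/") -> 3,
// so a lookup for "/folder1/folder2" keeps only rows whose recursive depth equals 3 —
// at most one folder per requested environment at that exact path.
// [end sketch]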
}; + return { ...el, environment: { envId, envName, envSlug }, envId }; } } catch (error) { throw new DatabaseError({ error, name: "Find by id" }); } }; + // special query for project migration + const findByProjectId = async (projectId: string, tx?: Knex) => { + try { + const folders = await (tx || db.replicaNode())(TableName.SecretFolder) + .join(TableName.Environment, `${TableName.SecretFolder}.envId`, `${TableName.Environment}.id`) + .join(TableName.Project, `${TableName.Environment}.projectId`, `${TableName.Project}.id`) + .select(selectAllTableCols(TableName.SecretFolder)) + .where({ projectId }) + .select( + db.ref("id").withSchema(TableName.Environment).as("envId"), + db.ref("slug").withSchema(TableName.Environment).as("envSlug"), + db.ref("name").withSchema(TableName.Environment).as("envName"), + db.ref("projectId").withSchema(TableName.Environment), + db.ref("version").withSchema(TableName.Project).as("projectVersion") + ); + return folders; + } catch (error) { + throw new DatabaseError({ error, name: "Find by id" }); + } + }; + + // find project folders for multiple envs + const findByMultiEnv = async ( + { + environmentIds, + parentIds, + search, + limit, + offset = 0, + orderBy = SecretsOrderBy.Name, + orderDirection = OrderByDirection.ASC + }: { + environmentIds: string[]; + parentIds: string[]; + search?: string; + limit?: number; + offset?: number; + orderBy?: SecretsOrderBy; + orderDirection?: OrderByDirection; + }, + tx?: Knex + ) => { + try { + const query = (tx || db.replicaNode())(TableName.SecretFolder) + .whereIn("parentId", parentIds) + .whereIn("envId", environmentIds) + .where("isReserved", false) + .where((bd) => { + if (search) { + void bd.whereILike(`${TableName.SecretFolder}.name`, `%${search}%`); + } + }) + .leftJoin(TableName.Environment, `${TableName.Environment}.id`, `${TableName.SecretFolder}.envId`) + .select( + selectAllTableCols(TableName.SecretFolder), + db.raw( + `DENSE_RANK() OVER (ORDER BY ${TableName.SecretFolder}."name" ${ + orderDirection ?? 
OrderByDirection.ASC + }) as rank` + ), + db.ref("slug").withSchema(TableName.Environment).as("environment") + ) + .orderBy(`${TableName.SecretFolder}.${orderBy}`, orderDirection); + + if (limit) { + const rankOffset = offset + 1; // ranks start from 1 + return await (tx || db) + .with("w", query) + .select("*") + .from[number]>("w") + .where("w.rank", ">=", rankOffset) + .andWhere("w.rank", "<", rankOffset + limit); + } + + const folders = await query; + + return folders; + } catch (error) { + throw new DatabaseError({ error, name: "Find folders multi env" }); + } + }; + + const findByEnvsDeep = async ({ parentIds }: TFindFoldersDeepByParentIdsDTO, tx?: Knex) => { + try { + const folders = await (tx || db.replicaNode()) + .withRecursive("parents", (qb) => + qb + .select( + selectAllTableCols(TableName.SecretFolder), + db.raw("0 as depth"), + db.raw(`'/' as path`), + db.ref(`${TableName.Environment}.slug`).as("environment") + ) + .from(TableName.SecretFolder) + .join(TableName.Environment, `${TableName.SecretFolder}.envId`, `${TableName.Environment}.id`) + .whereIn(`${TableName.SecretFolder}.id`, parentIds) + .union((un) => { + void un + .select( + selectAllTableCols(TableName.SecretFolder), + db.raw("parents.depth + 1 as depth"), + db.raw( + `CONCAT( + CASE WHEN parents.path = '/' THEN '' ELSE parents.path END, + CASE WHEN ${TableName.SecretFolder}."parentId" is NULL THEN '' ELSE CONCAT('/', secret_folders.name) END + )` + ), + db.ref("parents.environment") + ) + .from(TableName.SecretFolder) + .join("parents", `${TableName.SecretFolder}.parentId`, "parents.id"); + }) + ) + .select<(TSecretFolders & { path: string; depth: number; environment: string })[]>("*") + .from("parents") + .orderBy("depth") + .orderBy(`name`); + + return folders; + } catch (error) { + throw new DatabaseError({ error, name: "FindByEnvsDeep" }); + } + }; + return { ...secretFolderOrm, update, findBySecretPath, + findBySecretPathMultiEnv, findById, findByManySecretPath, findSecretPathByFolderIds, - findClosestFolder + findClosestFolder, + findByProjectId, + findByMultiEnv, + findByEnvsDeep }; }; diff --git a/backend/src/services/secret-folder/secret-folder-service.ts b/backend/src/services/secret-folder/secret-folder-service.ts index 97258c006c..d787520a2d 100644 --- a/backend/src/services/secret-folder/secret-folder-service.ts +++ b/backend/src/services/secret-folder/secret-folder-service.ts @@ -6,7 +6,8 @@ import { TSecretFoldersInsert } from "@app/db/schemas"; import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service"; import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission"; import { TSecretSnapshotServiceFactory } from "@app/ee/services/secret-snapshot/secret-snapshot-service"; -import { BadRequestError } from "@app/lib/errors"; +import { BadRequestError, NotFoundError } from "@app/lib/errors"; +import { OrderByDirection, OrgServiceActor } from "@app/lib/types"; import { TProjectDALFactory } from "../project/project-dal"; import { TProjectEnvDALFactory } from "../project-env/project-env-dal"; @@ -14,7 +15,9 @@ import { TSecretFolderDALFactory } from "./secret-folder-dal"; import { TCreateFolderDTO, TDeleteFolderDTO, + TGetFolderByIdDTO, TGetFolderDTO, + TGetFoldersDeepByEnvsDTO, TUpdateFolderDTO, TUpdateManyFoldersDTO } from "./secret-folder-types"; @@ -24,7 +27,7 @@ type TSecretFolderServiceFactoryDep = { permissionService: Pick; snapshotService: Pick; folderDAL: TSecretFolderDALFactory; - projectEnvDAL: Pick; + 
projectEnvDAL: Pick;
folderVersionDAL: TSecretFolderVersionDALFactory;
projectDAL: Pick;
};
@@ -56,13 +59,18 @@ export const secretFolderServiceFactory = ({ actorAuthMethod, actorOrgId );
+
ForbiddenError.from(permission).throwUnlessCan( ProjectPermissionActions.Create,
- subject(ProjectPermissionSub.Secrets, { environment, secretPath })
+ subject(ProjectPermissionSub.SecretFolders, { environment, secretPath })
);
const env = await projectEnvDAL.findOne({ projectId, slug: environment });
- if (!env) throw new BadRequestError({ message: "Environment not found", name: "Create folder" });
+ if (!env) {
+ throw new NotFoundError({
+ message: `Environment with slug '${environment}' in project with ID '${projectId}' not found`
+ });
+ }
const folder = await folderDAL.transaction(async (tx) => {
// the logic is simple: we need to avoid creating the same folder in the same path multiple times
@@ -72,7 +80,11 @@ export const secretFolderServiceFactory = ({ const pathWithFolder = path.join(secretPath, name);
const parentFolder = await folderDAL.findClosestFolder(projectId, environment, pathWithFolder, tx);
// a missing folder is not possible; the root should be there
- if (!parentFolder) throw new BadRequestError({ message: "Secret path not found" });
+ if (!parentFolder) {
+ throw new NotFoundError({
+ message: `Folder with path '${pathWithFolder}' in environment with slug '${environment}' not found`
+ });
+ }
// exact folder
if (parentFolder.path === pathWithFolder) return parentFolder;
@@ -135,7 +147,7 @@ }: TUpdateManyFoldersDTO) => {
const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId);
if (!project) {
- throw new BadRequestError({ message: "Project not found" });
+ throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` });
}
const { permission } = await permissionService.getProjectPermission(
@@ -149,7 +161,7 @@ folders.forEach(({ environment, path: secretPath }) => {
ForbiddenError.from(permission).throwUnlessCan( ProjectPermissionActions.Edit,
- subject(ProjectPermissionSub.Secrets, { environment, secretPath })
+ subject(ProjectPermissionSub.SecretFolders, { environment, secretPath })
);
});
@@ -160,12 +172,18 @@ const parentFolder = await folderDAL.findBySecretPath(project.id, environment, secretPath);
if (!parentFolder) {
- throw new BadRequestError({ message: "Secret path not found", name: "Batch update folder" });
+ throw new NotFoundError({
+ message: `Folder with path '${secretPath}' in environment with slug '${environment}' not found`,
+ name: "UpdateManyFolders"
+ });
}
const env = await projectEnvDAL.findOne({ projectId: project.id, slug: environment });
if (!env) {
- throw new BadRequestError({ message: "Environment not found", name: "Batch update folder" });
+ throw new NotFoundError({
+ message: `Environment with slug '${environment}' in project with ID '${project.id}' not found`,
+ name: "UpdateManyFolders"
+ });
}
const folder = await folderDAL
.findOne({ envId: env.id, id, parentId: parentFolder.id })
@@ -174,7 +192,10 @@ .catch(() => folderDAL.findOne({ envId: env.id, name: id, parentId: parentFolder.id }));
if (!folder) {
- throw new BadRequestError({ message: "Folder not found" });
+ throw new NotFoundError({
+ message: `Folder with id '${id}' in environment with slug '${env.slug}' not found`,
+ name: "UpdateManyFolders"
+ });
}
if (name !== folder.name) {
// ensure that new folder name is unique
@@ -207,7 +228,10 @@ tx );
if (!doc) {
- throw new BadRequestError({ message: "Folder not found", name: "Batch update folder" });
+ throw new NotFoundError({
+ message: `Failed to update folder with id '${id}', not found`,
+ name: "UpdateManyFolders"
+ });
}
return { oldFolder: folder, newFolder: doc };
@@ -242,23 +266,30 @@ actorAuthMethod, actorOrgId );
+
ForbiddenError.from(permission).throwUnlessCan( ProjectPermissionActions.Edit,
- subject(ProjectPermissionSub.Secrets, { environment, secretPath })
+ subject(ProjectPermissionSub.SecretFolders, { environment, secretPath })
);
const parentFolder = await folderDAL.findBySecretPath(projectId, environment, secretPath);
- if (!parentFolder) throw new BadRequestError({ message: "Secret path not found" });
+ if (!parentFolder)
+ throw new NotFoundError({
+ message: `Folder with path '${secretPath}' in environment with slug '${environment}' not found`,
+ name: "UpdateFolder"
+ });
const env = await projectEnvDAL.findOne({ projectId, slug: environment });
- if (!env) throw new BadRequestError({ message: "Environment not found", name: "Update folder" });
+ if (!env) {
+ throw new NotFoundError({ message: `Environment with slug '${environment}' not found`, name: "UpdateFolder" });
+ }
const folder = await folderDAL
.findOne({ envId: env.id, id, parentId: parentFolder.id, isReserved: false })
// the folder api now accepts id-based changes
// this is for cli backward compatibility; when the cli removes this, we will remove this logic
.catch(() => folderDAL.findOne({ envId: env.id, name: id, parentId: parentFolder.id }));
- if (!folder) throw new BadRequestError({ message: "Folder not found" });
+ if (!folder) throw new NotFoundError({ message: `Folder with ID '${id}' not found`, name: "UpdateFolder" });
if (name !== folder.name) {
// ensure that the new folder name is unique
const folderToCheck = await folderDAL.findOne({
@@ -270,7 +301,7 @@ if (folderToCheck) {
throw new BadRequestError({
message: "Folder with specified name already exists",
- name: "Update folder"
+ name: "UpdateFolder"
});
}
}
@@ -290,7 +321,7 @@ }, tx );
- if (!doc) throw new BadRequestError({ message: "Folder not found", name: "Update folder" });
+ if (!doc) throw new NotFoundError({ message: `Failed to update folder with ID '${id}'`, name: "UpdateFolder" });
return doc;
});
@@ -315,17 +346,21 @@ actorAuthMethod, actorOrgId );
+
ForbiddenError.from(permission).throwUnlessCan( ProjectPermissionActions.Delete,
- subject(ProjectPermissionSub.Secrets, { environment, secretPath })
+ subject(ProjectPermissionSub.SecretFolders, { environment, secretPath })
);
const env = await projectEnvDAL.findOne({ projectId, slug: environment });
- if (!env) throw new BadRequestError({ message: "Environment not found", name: "Create folder" });
+ if (!env) throw new NotFoundError({ message: `Environment with slug '${environment}' not found` });
const folder = await folderDAL.transaction(async (tx) => {
const parentFolder = await folderDAL.findBySecretPath(projectId, environment, secretPath, tx);
- if (!parentFolder) throw new BadRequestError({ message: "Secret path not found" });
+ if (!parentFolder)
+ throw new NotFoundError({
+ message: `Folder with path '${secretPath}' in environment with slug '${environment}' not found`
+ });
const [doc] = await
folderDAL.delete( { @@ -336,7 +371,7 @@ export const secretFolderServiceFactory = ({ }, tx ); - if (!doc) throw new BadRequestError({ message: "Folder not found", name: "Delete folder" }); + if (!doc) throw new NotFoundError({ message: `Failed to delete folder with ID '${idOrName}', not found` }); return doc; }); @@ -351,19 +386,152 @@ export const secretFolderServiceFactory = ({ actorOrgId, actorAuthMethod, environment, - path: secretPath + path: secretPath, + search, + orderBy, + orderDirection, + limit, + offset }: TGetFolderDTO) => { // folder list is allowed to be read by anyone // permission to check does user has access await permissionService.getProjectPermission(actor, actorId, projectId, actorAuthMethod, actorOrgId); const env = await projectEnvDAL.findOne({ projectId, slug: environment }); - if (!env) throw new BadRequestError({ message: "Environment not found", name: "get folders" }); + if (!env) throw new NotFoundError({ message: `Environment with slug '${environment}' not found` }); const parentFolder = await folderDAL.findBySecretPath(projectId, environment, secretPath); if (!parentFolder) return []; - const folders = await folderDAL.find({ envId: env.id, parentId: parentFolder.id, isReserved: false }); + const folders = await folderDAL.find( + { + envId: env.id, + parentId: parentFolder.id, + isReserved: false, + $search: search ? { name: `%${search}%` } : undefined + }, + { + sort: orderBy ? [[orderBy, orderDirection ?? OrderByDirection.ASC]] : undefined, + limit, + offset + } + ); + return folders; + }; + + // get folders for multiple envs + const getFoldersMultiEnv = async ({ + projectId, + actor, + actorId, + actorOrgId, + actorAuthMethod, + environments, + path: secretPath, + ...params + }: Omit & { environments: string[] }) => { + // folder list is allowed to be read by anyone + // permission to check does user has access + await permissionService.getProjectPermission(actor, actorId, projectId, actorAuthMethod, actorOrgId); + + const envs = await projectEnvDAL.findBySlugs(projectId, environments); + + if (!envs.length) + throw new NotFoundError({ + message: `Environments '${environments.join(", ")}' not found`, + name: "GetFoldersMultiEnv" + }); + + const parentFolders = await folderDAL.findBySecretPathMultiEnv(projectId, environments, secretPath); + if (!parentFolders.length) return []; + + const folders = await folderDAL.findByMultiEnv({ + environmentIds: envs.map((env) => env.id), + parentIds: parentFolders.map((folder) => folder.id), + ...params + }); + + return folders; + }; + + // get the unique count of folders within a project path + const getProjectFolderCount = async ({ + projectId, + actor, + actorId, + actorOrgId, + actorAuthMethod, + environments, + path: secretPath, + search + }: Omit & { environments: string[] }) => { + // folder list is allowed to be read by anyone + // permission to check does user has access + await permissionService.getProjectPermission(actor, actorId, projectId, actorAuthMethod, actorOrgId); + + const envs = await projectEnvDAL.findBySlugs(projectId, environments); + + if (!envs.length) throw new NotFoundError({ message: `Environments '${environments.join(", ")}' not found` }); + + const parentFolders = await folderDAL.findBySecretPathMultiEnv(projectId, environments, secretPath); + if (!parentFolders.length) return 0; + + const folders = await folderDAL.find( + { + $in: { + envId: envs.map((env) => env.id), + parentId: parentFolders.map((folder) => folder.id) + }, + isReserved: false, + $search: search ? 
{ name: `%${search}%` } : undefined + }, + { countDistinct: "name" } + ); + + return Number(folders[0]?.count ?? 0); + }; + + const getFolderById = async ({ actor, actorId, actorOrgId, actorAuthMethod, id }: TGetFolderByIdDTO) => { + const folder = await folderDAL.findById(id); + if (!folder) throw new NotFoundError({ message: `Folder with ID '${id}' not found` }); + // folder list is allowed to be read by anyone + // permission to check does user has access + await permissionService.getProjectPermission(actor, actorId, folder.projectId, actorAuthMethod, actorOrgId); + + const [folderWithPath] = await folderDAL.findSecretPathByFolderIds(folder.projectId, [folder.id]); + + if (!folderWithPath) { + throw new NotFoundError({ + message: `Folder with ID '${folder.id}' in project with ID '${folder.projectId}' not found` + }); + } + + return { + ...folder, + path: folderWithPath.path + }; + }; + + const getFoldersDeepByEnvs = async ( + { projectId, environments, secretPath }: TGetFoldersDeepByEnvsDTO, + actor: OrgServiceActor + ) => { + // folder list is allowed to be read by anyone + // permission to check does user have access + await permissionService.getProjectPermission(actor.type, actor.id, projectId, actor.authMethod, actor.orgId); + + const envs = await projectEnvDAL.findBySlugs(projectId, environments); + + if (!envs.length) + throw new NotFoundError({ + message: `Environments '${environments.join(", ")}' not found`, + name: "GetFoldersDeep" + }); + + const parentFolders = await folderDAL.findBySecretPathMultiEnv(projectId, environments, secretPath); + if (!parentFolders.length) return []; + + const folders = await folderDAL.findByEnvsDeep({ parentIds: parentFolders.map((parent) => parent.id) }); return folders; }; @@ -373,6 +541,10 @@ export const secretFolderServiceFactory = ({ updateFolder, updateManyFolders, deleteFolder, - getFolders + getFolders, + getFolderById, + getProjectFolderCount, + getFoldersMultiEnv, + getFoldersDeepByEnvs }; }; diff --git a/backend/src/services/secret-folder/secret-folder-types.ts b/backend/src/services/secret-folder/secret-folder-types.ts index c01d5f7b8e..eb98809cd1 100644 --- a/backend/src/services/secret-folder/secret-folder-types.ts +++ b/backend/src/services/secret-folder/secret-folder-types.ts @@ -1,4 +1,5 @@ -import { TProjectPermission } from "@app/lib/types"; +import { OrderByDirection, TProjectPermission } from "@app/lib/types"; +import { SecretsOrderBy } from "@app/services/secret/secret-types"; export enum ReservedFolders { SecretReplication = "__reserve_replication_" @@ -36,4 +37,23 @@ export type TDeleteFolderDTO = { export type TGetFolderDTO = { environment: string; path: string; + search?: string; + orderBy?: SecretsOrderBy; + orderDirection?: OrderByDirection; + limit?: number; + offset?: number; } & TProjectPermission; + +export type TGetFolderByIdDTO = { + id: string; +} & Omit; + +export type TGetFoldersDeepByEnvsDTO = { + projectId: string; + environments: string[]; + secretPath: string; +}; + +export type TFindFoldersDeepByParentIdsDTO = { + parentIds: string[]; +}; diff --git a/backend/src/services/secret-folder/secret-folder-version-dal.ts b/backend/src/services/secret-folder/secret-folder-version-dal.ts index 73b536b48e..78186333d6 100644 --- a/backend/src/services/secret-folder/secret-folder-version-dal.ts +++ b/backend/src/services/secret-folder/secret-folder-version-dal.ts @@ -4,6 +4,8 @@ import { TDbClient } from "@app/db"; import { TableName, TSecretFolderVersions } from "@app/db/schemas"; import { DatabaseError } from 
"@app/lib/errors"; import { ormify, selectAllTableCols } from "@app/lib/knex"; +import { logger } from "@app/lib/logger"; +import { QueueName } from "@app/queue"; export type TSecretFolderVersionDALFactory = ReturnType; @@ -13,7 +15,7 @@ export const secretFolderVersionDALFactory = (db: TDbClient) => { // This will fetch all latest secret versions from a folder const findLatestVersionByFolderId = async (folderId: string, tx?: Knex) => { try { - const docs = await (tx || db)(TableName.SecretFolderVersion) + const docs = await (tx || db.replicaNode())(TableName.SecretFolderVersion) .join(TableName.SecretFolder, `${TableName.SecretFolderVersion}.folderId`, `${TableName.SecretFolder}.id`) .where({ parentId: folderId, isReserved: false }) .join( @@ -38,7 +40,9 @@ export const secretFolderVersionDALFactory = (db: TDbClient) => { const findLatestFolderVersions = async (folderIds: string[], tx?: Knex) => { try { - const docs: Array = await (tx || db)(TableName.SecretFolderVersion) + const docs: Array = await (tx || db.replicaNode())( + TableName.SecretFolderVersion + ) .whereIn("folderId", folderIds) .join( (tx || db)(TableName.SecretFolderVersion) @@ -62,5 +66,34 @@ export const secretFolderVersionDALFactory = (db: TDbClient) => { } }; - return { ...secretFolderVerOrm, findLatestFolderVersions, findLatestVersionByFolderId }; + const pruneExcessVersions = async () => { + logger.info(`${QueueName.DailyResourceCleanUp}: pruning secret folder versions started`); + try { + await db(TableName.SecretFolderVersion) + .with("folder_cte", (qb) => { + void qb + .from(TableName.SecretFolderVersion) + .select( + "id", + "folderId", + db.raw( + `ROW_NUMBER() OVER (PARTITION BY ${TableName.SecretFolderVersion}."folderId" ORDER BY ${TableName.SecretFolderVersion}."createdAt" DESC) AS row_num` + ) + ); + }) + .join(TableName.Environment, `${TableName.Environment}.id`, `${TableName.SecretFolderVersion}.envId`) + .join(TableName.Project, `${TableName.Project}.id`, `${TableName.Environment}.projectId`) + .join("folder_cte", "folder_cte.id", `${TableName.SecretFolderVersion}.id`) + .whereRaw(`folder_cte.row_num > ${TableName.Project}."pitVersionLimit"`) + .delete(); + } catch (error) { + throw new DatabaseError({ + error, + name: "Secret Folder Version Prune" + }); + } + logger.info(`${QueueName.DailyResourceCleanUp}: pruning secret folder versions completed`); + }; + + return { ...secretFolderVerOrm, findLatestFolderVersions, findLatestVersionByFolderId, pruneExcessVersions }; }; diff --git a/backend/src/services/secret-import/secret-import-dal.ts b/backend/src/services/secret-import/secret-import-dal.ts index 0e73a8c230..da25f4d30f 100644 --- a/backend/src/services/secret-import/secret-import-dal.ts +++ b/backend/src/services/secret-import/secret-import-dal.ts @@ -49,10 +49,30 @@ export const secretImportDALFactory = (db: TDbClient) => { } }; - const find = async (filter: Partial, tx?: Knex) => { + const find = async ( + { + search, + limit, + offset, + ...filter + }: Partial< + TSecretImports & { + projectId: string; + search?: string; + limit?: number; + offset?: number; + } + >, + tx?: Knex + ) => { try { - const docs = await (tx || db)(TableName.SecretImport) + const query = (tx || db.replicaNode())(TableName.SecretImport) .where(filter) + .where((bd) => { + if (search) { + void bd.whereILike("importPath", `%${search}%`); + } + }) .join(TableName.Environment, `${TableName.SecretImport}.importEnv`, `${TableName.Environment}.id`) .select( db.ref("*").withSchema(TableName.SecretImport) as unknown as keyof 
TSecretImports, @@ -61,6 +81,13 @@ export const secretImportDALFactory = (db: TDbClient) => { db.ref("id").withSchema(TableName.Environment).as("envId") ) .orderBy("position", "asc"); + + if (limit) { + void query.limit(limit).offset(offset ?? 0); + } + + const docs = await query; + return docs.map(({ envId, slug, name, ...el }) => ({ ...el, importEnv: { id: envId, slug, name } @@ -70,9 +97,59 @@ export const secretImportDALFactory = (db: TDbClient) => { } }; + const findById = async (id: string, tx?: Knex) => { + try { + const doc = await (tx || db.replicaNode())(TableName.SecretImport) + .where({ [`${TableName.SecretImport}.id` as "id"]: id }) + .join(TableName.Environment, `${TableName.SecretImport}.importEnv`, `${TableName.Environment}.id`) + .select( + db.ref("*").withSchema(TableName.SecretImport) as unknown as keyof TSecretImports, + db.ref("slug").withSchema(TableName.Environment), + db.ref("name").withSchema(TableName.Environment), + db.ref("id").withSchema(TableName.Environment).as("envId") + ) + .first(); + + if (!doc) { + return null; + } + + const { envId, slug, name, ...el } = doc; + + return { + ...el, + importEnv: { id: envId, slug, name } + }; + } catch (error) { + throw new DatabaseError({ error, name: "Find secret imports" }); + } + }; + + const getProjectImportCount = async ( + { search, ...filter }: Partial, + tx?: Knex + ) => { + try { + const docs = await (tx || db.replicaNode())(TableName.SecretImport) + .where(filter) + .where("isReplication", false) + .where((bd) => { + if (search) { + void bd.whereILike("importPath", `%${search}%`); + } + }) + .join(TableName.Environment, `${TableName.SecretImport}.importEnv`, `${TableName.Environment}.id`) + .count(); + + return Number(docs[0]?.count ?? 0); + } catch (error) { + throw new DatabaseError({ error, name: "get secret imports count" }); + } + }; + const findByFolderIds = async (folderIds: string[], tx?: Knex) => { try { - const docs = await (tx || db)(TableName.SecretImport) + const docs = await (tx || db.replicaNode())(TableName.SecretImport) .whereIn("folderId", folderIds) .where("isReplication", false) .join(TableName.Environment, `${TableName.SecretImport}.importEnv`, `${TableName.Environment}.id`) @@ -95,8 +172,10 @@ export const secretImportDALFactory = (db: TDbClient) => { return { ...secretImportOrm, find, + findById, findByFolderIds, findLastImportPosition, - updateAllPosition + updateAllPosition, + getProjectImportCount }; }; diff --git a/backend/src/services/secret-import/secret-import-fns.ts b/backend/src/services/secret-import/secret-import-fns.ts index 06ffbc9030..d75a255140 100644 --- a/backend/src/services/secret-import/secret-import-fns.ts +++ b/backend/src/services/secret-import/secret-import-fns.ts @@ -1,8 +1,9 @@ -import { SecretType, TSecretImports, TSecrets } from "@app/db/schemas"; -import { groupBy } from "@app/lib/fn"; +import { SecretType, TSecretImports, TSecrets, TSecretsV2 } from "@app/db/schemas"; +import { groupBy, unique } from "@app/lib/fn"; import { TSecretDALFactory } from "../secret/secret-dal"; import { TSecretFolderDALFactory } from "../secret-folder/secret-folder-dal"; +import { TSecretV2BridgeDALFactory } from "../secret-v2-bridge/secret-v2-bridge-dal"; import { TSecretImportDALFactory } from "./secret-import-dal"; type TSecretImportSecrets = { @@ -18,6 +19,29 @@ type TSecretImportSecrets = { secrets: (TSecrets & { workspace: string; environment: string; _id: string })[]; }; +type TSecretImportSecretsV2 = { + secretPath: string; + environment: string; + environmentInfo: { + id: 
string; + slug: string; + name: string; + }; + id: string; + folderId: string | undefined; + importFolderId: string; + secrets: (TSecretsV2 & { + workspace: string; + environment: string; + _id: string; + secretKey: string; + // akhilmhdh: yes, I know these could be optional properties with ?. + // But for some reason TS treats an optional property and an explicitly undefined one as different types; just TS things + secretValue: string; + secretComment: string; + })[]; +}; + const LEVEL_BREAK = 10; const getImportUniqKey = (envSlug: string, path: string) => `${envSlug}=${path}`; export const fnSecretsFromImports = async ({ @@ -90,7 +114,7 @@ const secretsFromdeeperImportGroupedByFolderId = groupBy(secretsFromDeeperImports, (i) => i.importFolderId); const secrets = allowedImports.map(({ importPath, importEnv, id, folderId }, i) => { - const sourceImportFolder = importedFolderGroupBySourceImport[`${importEnv.id}-${importPath}`][0]; + const sourceImportFolder = importedFolderGroupBySourceImport?.[`${importEnv.id}-${importPath}`]?.[0]; const folderDeeperImportSecrets = secretsFromdeeperImportGroupedByFolderId?.[sourceImportFolder?.id || ""]?.[0]?.secrets || []; @@ -115,3 +139,145 @@ return secrets; }; + +/* eslint-disable no-await-in-loop, no-continue */ +export const fnSecretsV2FromImports = async ({ + secretImports: rootSecretImports, + folderDAL, + secretDAL, + secretImportDAL, + decryptor, + expandSecretReferences, + hasSecretAccess +}: { + secretImports: (Omit & { + importEnv: { id: string; slug: string; name: string }; + })[]; + folderDAL: Pick; + secretDAL: Pick; + secretImportDAL: Pick; + decryptor: (value?: Buffer | null) => string; + expandSecretReferences?: (inputSecret: { + value?: string; + skipMultilineEncoding?: boolean | null; + secretPath: string; + environment: string; + }) => Promise; + hasSecretAccess: (environment: string, secretPath: string, secretName: string, secretTagSlugs: string[]) => boolean; +}) => { + const cyclicDetector = new Set(); + const stack: { secretImports: typeof rootSecretImports; depth: number; parentImportedSecrets: TSecretsV2[] }[] = [ + { secretImports: rootSecretImports, depth: 0, parentImportedSecrets: [] } + ]; + + const processedImports: TSecretImportSecretsV2[] = []; + + while (stack.length) { + const { secretImports, depth, parentImportedSecrets } = stack.pop()!; + + if (depth > LEVEL_BREAK) continue; + const sanitizedImports = secretImports.filter( + ({ importPath, importEnv }) => !cyclicDetector.has(getImportUniqKey(importEnv.slug, importPath)) + ); + + if (!sanitizedImports.length) continue; + + const importedFolders = await folderDAL.findByManySecretPath( + sanitizedImports.map(({ importEnv, importPath }) => ({ + envId: importEnv.id, + secretPath: importPath + })) + ); + if (!importedFolders.length) continue; + + const importedFolderIds = importedFolders.map((el) => el?.id) as string[]; + const importedFolderGroupBySourceImport = groupBy(importedFolders, (i) => `${i?.envId}-${i?.path}`); + + const importedSecrets = await secretDAL.find( + { + $in: { folderId: importedFolderIds }, + type: SecretType.Shared + }, + { + sort: [["id", "asc"]] + } + ); + const importedSecretsGroupByFolderId = groupBy(importedSecrets, (i) => i.folderId); + + sanitizedImports.forEach(({ importPath, importEnv }) => { + cyclicDetector.add(getImportUniqKey(importEnv.slug, importPath)); + }); + // now we need to recursively check deeper imports made inside other imports + // we go level-wise, meaning we take all imports of a tree level and
then go deeper ones level by level + const deeperImports = await secretImportDAL.findByFolderIds(importedFolderIds); + const deeperImportsGroupByFolderId = groupBy(deeperImports, (i) => i.folderId); + + const isFirstIteration = !processedImports.length; + sanitizedImports.forEach(({ importPath, importEnv, id, folderId }, i) => { + const sourceImportFolder = importedFolderGroupBySourceImport[`${importEnv.id}-${importPath}`]?.[0]; + const secretsWithDuplicate = (importedSecretsGroupByFolderId?.[importedFolders?.[i]?.id as string] || []) + .filter((item) => + hasSecretAccess( + importEnv.slug, + importPath, + item.key, + item.tags.map((el) => el.slug) + ) + ) + .map((item) => ({ + ...item, + secretKey: item.key, + secretValue: decryptor(item.encryptedValue), + secretComment: decryptor(item.encryptedComment), + environment: importEnv.slug, + workspace: "", // This field should not be used, it's only here to keep the older Python SDK versions backwards compatible with the new Postgres backend. + _id: item.id // The old Python SDK depends on the _id field being returned. We return this to keep the older Python SDK versions backwards compatible with the new Postgres backend. + })); + + if (deeperImportsGroupByFolderId?.[sourceImportFolder?.id || ""]) { + stack.push({ + secretImports: deeperImportsGroupByFolderId[sourceImportFolder?.id || ""], + depth: depth + 1, + parentImportedSecrets: secretsWithDuplicate + }); + } + + if (isFirstIteration) { + processedImports.push({ + secretPath: importPath, + environment: importEnv.slug, + environmentInfo: importEnv, + folderId: importedFolders?.[i]?.id, + id, + importFolderId: folderId, + secrets: secretsWithDuplicate + }); + } else { + parentImportedSecrets.push(...secretsWithDuplicate); + } + }); + } + /* eslint-enable */ + if (expandSecretReferences) { + await Promise.allSettled( + processedImports.map((processedImport) => { + // eslint-disable-next-line + processedImport.secrets = unique(processedImport.secrets, (i) => i.key); + return Promise.allSettled( + processedImport.secrets.map(async (decryptedSecret, index) => { + const expandedSecretValue = await expandSecretReferences({ + value: decryptedSecret.secretValue, + secretPath: processedImport.secretPath, + environment: processedImport.environment, + skipMultilineEncoding: decryptedSecret.skipMultilineEncoding + }); + // eslint-disable-next-line no-param-reassign + processedImport.secrets[index].secretValue = expandedSecretValue || ""; + }) + ); + }) + ); + } + + return processedImports; +}; diff --git a/backend/src/services/secret-import/secret-import-service.ts b/backend/src/services/secret-import/secret-import-service.ts index 237c7cfe40..25e78fb659 100644 --- a/backend/src/services/secret-import/secret-import-service.ts +++ b/backend/src/services/secret-import/secret-import-service.ts @@ -7,18 +7,24 @@ import { TLicenseServiceFactory } from "@app/ee/services/license/license-service import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service"; import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission"; import { getReplicationFolderName } from "@app/ee/services/secret-replication/secret-replication-service"; -import { BadRequestError } from "@app/lib/errors"; +import { BadRequestError, NotFoundError } from "@app/lib/errors"; +import { TKmsServiceFactory } from "../kms/kms-service"; +import { KmsDataKey } from "../kms/kms-types"; import { TProjectDALFactory } from "../project/project-dal"; +import { 
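// Illustrative sketch, not part of this patch: once the import tree has been walked,
// fnSecretsV2FromImports deduplicates each import's secrets by key and expands secret
// references concurrently; Promise.allSettled is used so a single failed expansion
// cannot reject the whole batch. The core pattern, with a hypothetical `expand`
// function standing in for expandSecretReferences:
//
//   const secrets = unique(processedImport.secrets, (i) => i.key);
//   await Promise.allSettled(
//     secrets.map(async (secret, index) => {
//       secrets[index].secretValue = (await expand(secret.secretValue)) || "";
//     })
//   );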
TProjectBotServiceFactory } from "../project-bot/project-bot-service"; import { TProjectEnvDALFactory } from "../project-env/project-env-dal"; import { TSecretDALFactory } from "../secret/secret-dal"; +import { decryptSecretRaw } from "../secret/secret-fns"; import { TSecretQueueFactory } from "../secret/secret-queue"; import { TSecretFolderDALFactory } from "../secret-folder/secret-folder-dal"; +import { TSecretV2BridgeDALFactory } from "../secret-v2-bridge/secret-v2-bridge-dal"; import { TSecretImportDALFactory } from "./secret-import-dal"; -import { fnSecretsFromImports } from "./secret-import-fns"; +import { fnSecretsFromImports, fnSecretsV2FromImports } from "./secret-import-fns"; import { TCreateSecretImportDTO, TDeleteSecretImportDTO, + TGetSecretImportByIdDTO, TGetSecretImportsDTO, TGetSecretsFromImportDTO, TResyncSecretImportReplicationDTO, @@ -29,11 +35,14 @@ type TSecretImportServiceFactoryDep = { secretImportDAL: TSecretImportDALFactory; folderDAL: TSecretFolderDALFactory; secretDAL: Pick; + secretV2BridgeDAL: Pick; + projectBotService: Pick; projectDAL: Pick; projectEnvDAL: TProjectEnvDALFactory; permissionService: Pick; secretQueueService: Pick; licenseService: Pick; + kmsService: Pick; }; const ERR_SEC_IMP_NOT_FOUND = new BadRequestError({ message: "Secret import not found" }); @@ -48,7 +57,10 @@ export const secretImportServiceFactory = ({ projectDAL, secretDAL, secretQueueService, - licenseService + licenseService, + projectBotService, + secretV2BridgeDAL, + kmsService }: TSecretImportServiceFactoryDep) => { const createImport = async ({ environment, @@ -72,12 +84,12 @@ export const secretImportServiceFactory = ({ // check if user has permission to import into destination path ForbiddenError.from(permission).throwUnlessCan( ProjectPermissionActions.Create, - subject(ProjectPermissionSub.Secrets, { environment, secretPath }) + subject(ProjectPermissionSub.SecretImports, { environment, secretPath }) ); // check if user has permission to import from target path ForbiddenError.from(permission).throwUnlessCan( - ProjectPermissionActions.Create, + ProjectPermissionActions.Read, subject(ProjectPermissionSub.Secrets, { environment: data.environment, secretPath: data.path @@ -95,10 +107,17 @@ export const secretImportServiceFactory = ({ await projectDAL.checkProjectUpgradeStatus(projectId); const folder = await folderDAL.findBySecretPath(projectId, environment, secretPath); - if (!folder) throw new BadRequestError({ message: "Folder not found", name: "Create import" }); + if (!folder) + throw new NotFoundError({ + message: `Folder with path '${secretPath}' in environment with slug '${environment}' not found` + }); const [importEnv] = await projectEnvDAL.findBySlugs(projectId, [data.environment]); - if (!importEnv) throw new BadRequestError({ error: "Imported env not found", name: "Create import" }); + if (!importEnv) { + throw new NotFoundError({ + error: `Imported environment with slug '${data.environment}' in project with ID '${projectId}' not found` + }); + } const sourceFolder = await folderDAL.findBySecretPath(projectId, data.environment, data.path); if (sourceFolder) { @@ -107,7 +126,7 @@ export const secretImportServiceFactory = ({ importEnv: folder.environment.id, importPath: secretPath }); - if (existingImport) throw new BadRequestError({ message: "Cyclic import not allowed" }); + if (existingImport) throw new BadRequestError({ message: `Cyclic import not allowed` }); } const secImport = await secretImportDAL.transaction(async (tx) => { @@ -179,11 +198,15 @@ export const 
secretImportServiceFactory = ({ ); ForbiddenError.from(permission).throwUnlessCan( ProjectPermissionActions.Edit, - subject(ProjectPermissionSub.Secrets, { environment, secretPath }) + subject(ProjectPermissionSub.SecretImports, { environment, secretPath }) ); const folder = await folderDAL.findBySecretPath(projectId, environment, secretPath); - if (!folder) throw new BadRequestError({ message: "Folder not found", name: "Update import" }); + if (!folder) { + throw new NotFoundError({ + message: `Folder with path '${secretPath}' in environment with slug '${environment}' not found` + }); + } const secImpDoc = await secretImportDAL.findOne({ folderId: folder.id, id }); if (!secImpDoc) throw ERR_SEC_IMP_NOT_FOUND; @@ -191,7 +214,11 @@ export const secretImportServiceFactory = ({ const importedEnv = data.environment // this is get env information of new one or old one ? (await projectEnvDAL.findBySlugs(projectId, [data.environment]))?.[0] : await projectEnvDAL.findById(secImpDoc.importEnv); - if (!importedEnv) throw new BadRequestError({ error: "Imported env not found", name: "Create import" }); + if (!importedEnv) { + throw new NotFoundError({ + error: `Imported environment with slug '${data.environment}' in project with ID '${projectId}' not found` + }); + } const sourceFolder = await folderDAL.findBySecretPath( projectId, @@ -265,15 +292,18 @@ export const secretImportServiceFactory = ({ ); ForbiddenError.from(permission).throwUnlessCan( ProjectPermissionActions.Delete, - subject(ProjectPermissionSub.Secrets, { environment, secretPath }) + subject(ProjectPermissionSub.SecretImports, { environment, secretPath }) ); const folder = await folderDAL.findBySecretPath(projectId, environment, secretPath); - if (!folder) throw new BadRequestError({ message: "Folder not found", name: "Delete import" }); + if (!folder) + throw new NotFoundError({ + message: `Folder with path '${secretPath}' in environment with slug '${environment}' not found` + }); const secImport = await secretImportDAL.transaction(async (tx) => { const [doc] = await secretImportDAL.delete({ folderId: folder.id, id }, tx); - if (!doc) throw new BadRequestError({ name: "Sec imp del", message: "Secret import doc not found" }); + if (!doc) throw new NotFoundError({ message: `Secret import with folder ID '${id}' not found` }); if (doc.isReplication) { const replicationFolderPath = path.join(secretPath, getReplicationFolderName(doc.id)); const replicatedFolder = await folderDAL.findBySecretPath(projectId, environment, replicationFolderPath, tx); @@ -295,7 +325,11 @@ export const secretImportServiceFactory = ({ } const importEnv = await projectEnvDAL.findById(doc.importEnv); - if (!importEnv) throw new BadRequestError({ error: "Imported env not found", name: "Create import" }); + if (!importEnv) { + throw new NotFoundError({ + error: `Imported environment with ID '${doc.importEnv}' in project with ID '${projectId}' not found` + }); + } return { ...doc, importEnv }; }); @@ -330,8 +364,8 @@ export const secretImportServiceFactory = ({ // check if user has permission to import into destination path ForbiddenError.from(permission).throwUnlessCan( - ProjectPermissionActions.Create, - subject(ProjectPermissionSub.Secrets, { environment, secretPath }) + ProjectPermissionActions.Edit, + subject(ProjectPermissionSub.SecretImports, { environment, secretPath }) ); const plan = await licenseService.getPlan(actorOrgId); @@ -342,19 +376,24 @@ export const secretImportServiceFactory = ({ } const folder = await folderDAL.findBySecretPath(projectId, 
environment, secretPath); - if (!folder) throw new BadRequestError({ message: "Folder not found", name: "Update import" }); + if (!folder) { + throw new NotFoundError({ + message: `Folder with path '${secretPath}' in environment with slug '${environment}' not found` + }); + } const [secretImportDoc] = await secretImportDAL.find({ folderId: folder.id, [`${TableName.SecretImport}.id` as "id"]: secretImportDocId }); - if (!secretImportDoc) throw new BadRequestError({ message: "Failed to find secret import" }); + if (!secretImportDoc) + throw new NotFoundError({ message: `Secret import with ID '${secretImportDocId}' not found` }); if (!secretImportDoc.isReplication) throw new BadRequestError({ message: "Import is not in replication mode" }); // check if user has permission to import from target path ForbiddenError.from(permission).throwUnlessCan( - ProjectPermissionActions.Create, + ProjectPermissionActions.Read, subject(ProjectPermissionSub.Secrets, { environment: secretImportDoc.importEnv.slug, secretPath: secretImportDoc.importPath @@ -383,14 +422,15 @@ export const secretImportServiceFactory = ({ return { message: "replication started" }; }; - const getImports = async ({ + const getProjectImportCount = async ({ path: secretPath, environment, projectId, actor, actorId, actorAuthMethod, - actorOrgId + actorOrgId, + search }: TGetSecretImportsDTO) => { const { permission } = await permissionService.getProjectPermission( actor, @@ -401,16 +441,120 @@ export const secretImportServiceFactory = ({ ); ForbiddenError.from(permission).throwUnlessCan( ProjectPermissionActions.Read, - subject(ProjectPermissionSub.Secrets, { environment, secretPath }) + subject(ProjectPermissionSub.SecretImports, { environment, secretPath }) ); const folder = await folderDAL.findBySecretPath(projectId, environment, secretPath); - if (!folder) throw new BadRequestError({ message: "Folder not found", name: "Get imports" }); + if (!folder) + throw new NotFoundError({ + message: `Folder with path '${secretPath}' in environment with slug '${environment}' not found` + }); - const secImports = await secretImportDAL.find({ folderId: folder.id }); + const count = await secretImportDAL.getProjectImportCount({ folderId: folder.id, search }); + + return count; + }; + + const getImports = async ({ + path: secretPath, + environment, + projectId, + actor, + actorId, + actorAuthMethod, + actorOrgId, + search, + limit, + offset + }: TGetSecretImportsDTO) => { + const { permission } = await permissionService.getProjectPermission( + actor, + actorId, + projectId, + actorAuthMethod, + actorOrgId + ); + ForbiddenError.from(permission).throwUnlessCan( + ProjectPermissionActions.Read, + subject(ProjectPermissionSub.SecretImports, { environment, secretPath }) + ); + + const folder = await folderDAL.findBySecretPath(projectId, environment, secretPath); + if (!folder) + throw new NotFoundError({ + message: `Folder with path '${secretPath}' in environment with slug '${environment}' not found` + }); + + const secImports = await secretImportDAL.find({ folderId: folder.id, search, limit, offset }); return secImports; }; + const getImportById = async ({ + actor, + actorId, + actorAuthMethod, + actorOrgId, + id: importId + }: TGetSecretImportByIdDTO) => { + const importDoc = await secretImportDAL.findById(importId); + + if (!importDoc) { + throw new NotFoundError({ message: `Secret import with ID '${importId}' not found` }); + } + + // the folder to import into + const folder = await folderDAL.findById(importDoc.folderId); + + if (!folder) throw new 
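// Illustrative sketch, not part of this patch: getImports (with search/limit/offset)
// and getProjectImportCount are intended to be consumed together by a paginated
// listing endpoint. A plausible caller, assuming the service is already wired up and
// `dto` carries the usual actor/project fields:
//
//   const [imports, totalImportCount] = await Promise.all([
//     secretImportService.getImports({ ...dto, search, limit: 20, offset: 0 }),
//     secretImportService.getProjectImportCount({ ...dto, search })
//   ]);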
NotFoundError({ message: `Secret import folder with ID '${importDoc.folderId}' not found` }); + + // the folder to import into, with path + const [folderWithPath] = await folderDAL.findSecretPathByFolderIds(folder.projectId, [folder.id]); + + if (!folderWithPath) { + throw new NotFoundError({ + message: `Folder with ID '${folder.id}' in project with ID ${folder.projectId} not found` + }); + } + + const { permission } = await permissionService.getProjectPermission( + actor, + actorId, + folder.projectId, + actorAuthMethod, + actorOrgId + ); + + ForbiddenError.from(permission).throwUnlessCan( + ProjectPermissionActions.Read, + subject(ProjectPermissionSub.SecretImports, { + environment: folder.environment.envSlug, + secretPath: folderWithPath.path + }) + ); + + const importIntoEnv = await projectEnvDAL.findOne({ + projectId: folder.projectId, + slug: folder.environment.envSlug + }); + + if (!importIntoEnv) { + throw new NotFoundError({ + message: `Environment with slug '${folder.environment.envSlug}' in project with ID ${folder.projectId} not found` + }); + } + + return { + ...importDoc, + projectId: folder.projectId, + secretPath: folderWithPath.path, + environment: { + id: importIntoEnv.id, + slug: importIntoEnv.slug, + name: importIntoEnv.name + } + }; + }; + const getSecretsFromImports = async ({ path: secretPath, environment, @@ -429,7 +573,44 @@ export const secretImportServiceFactory = ({ ); ForbiddenError.from(permission).throwUnlessCan( ProjectPermissionActions.Read, - subject(ProjectPermissionSub.Secrets, { environment, secretPath }) + subject(ProjectPermissionSub.SecretImports, { environment, secretPath }) + ); + const folder = await folderDAL.findBySecretPath(projectId, environment, secretPath); + if (!folder) return []; + // this will already order by position + // so anything based on this order will also be in right position + const secretImports = await secretImportDAL.find({ folderId: folder.id, isReplication: false }); + const allowedImports = secretImports.filter((el) => + permission.can( + ProjectPermissionActions.Read, + subject(ProjectPermissionSub.Secrets, { + environment: el.importEnv.slug, + secretPath: el.importPath + }) + ) + ); + return fnSecretsFromImports({ allowedImports, folderDAL, secretDAL, secretImportDAL }); + }; + + const getRawSecretsFromImports = async ({ + path: secretPath, + environment, + projectId, + actor, + actorAuthMethod, + actorId, + actorOrgId + }: TGetSecretsFromImportDTO) => { + const { permission } = await permissionService.getProjectPermission( + actor, + actorId, + projectId, + actorAuthMethod, + actorOrgId + ); + ForbiddenError.from(permission).throwUnlessCan( + ProjectPermissionActions.Read, + subject(ProjectPermissionSub.SecretImports, { environment, secretPath }) ); const folder = await folderDAL.findBySecretPath(projectId, environment, secretPath); if (!folder) return []; @@ -437,16 +618,59 @@ export const secretImportServiceFactory = ({ // so anything based on this order will also be in right position const secretImports = await secretImportDAL.find({ folderId: folder.id, isReplication: false }); - const allowedImports = secretImports.filter(({ importEnv, importPath }) => + const { botKey, shouldUseSecretV2Bridge } = await projectBotService.getBotKey(projectId); + if (shouldUseSecretV2Bridge) { + const { decryptor: secretManagerDecryptor } = await kmsService.createCipherPairWithDataKey({ + type: KmsDataKey.SecretManager, + projectId + }); + const importedSecrets = await fnSecretsV2FromImports({ + secretImports, + folderDAL, + 
secretDAL: secretV2BridgeDAL, + secretImportDAL, + decryptor: (value) => (value ? secretManagerDecryptor({ cipherTextBlob: value }).toString() : ""), + hasSecretAccess: (expandEnvironment, expandSecretPath, expandSecretKey, expandSecretTags) => + permission.can( + ProjectPermissionActions.Read, + subject(ProjectPermissionSub.Secrets, { + environment: expandEnvironment, + secretPath: expandSecretPath, + secretName: expandSecretKey, + secretTags: expandSecretTags + }) + ) + }); + return importedSecrets; + } + + if (!botKey) + throw new NotFoundError({ + message: `Project bot not found for project with ID '${projectId}'. Please upgrade your project.`, + name: "bot_not_found_error" + }); + + const allowedImports = secretImports.filter((el) => permission.can( ProjectPermissionActions.Read, subject(ProjectPermissionSub.Secrets, { - environment: importEnv.slug, - secretPath: importPath + environment: el.importEnv.slug, + secretPath: el.importPath }) ) ); - return fnSecretsFromImports({ allowedImports, folderDAL, secretDAL, secretImportDAL }); + const importedSecrets = await fnSecretsFromImports({ + allowedImports, + folderDAL, + secretDAL, + secretImportDAL + }); + return importedSecrets.map((el) => ({ + ...el, + secrets: el.secrets.map((encryptedSecret) => + decryptSecretRaw({ ...encryptedSecret, workspace: projectId, environment, secretPath }, botKey) + ) + })); }; return { @@ -454,8 +678,11 @@ export const secretImportServiceFactory = ({ updateImport, deleteImport, getImports, + getImportById, getSecretsFromImports, + getRawSecretsFromImports, resyncSecretImportReplication, + getProjectImportCount, fnSecretsFromImports }; }; diff --git a/backend/src/services/secret-import/secret-import-types.ts b/backend/src/services/secret-import/secret-import-types.ts index 01847738be..638e36cb1b 100644 --- a/backend/src/services/secret-import/secret-import-types.ts +++ b/backend/src/services/secret-import/secret-import-types.ts @@ -32,8 +32,15 @@ export type TDeleteSecretImportDTO = { export type TGetSecretImportsDTO = { environment: string; path: string; + search?: string; + limit?: number; + offset?: number; } & TProjectPermission; +export type TGetSecretImportByIdDTO = { + id: string; +} & Omit; + export type TGetSecretsFromImportDTO = { environment: string; path: string; diff --git a/backend/src/services/secret-sharing/secret-sharing-dal.ts b/backend/src/services/secret-sharing/secret-sharing-dal.ts index 6b5090d664..5c690b2667 100644 --- a/backend/src/services/secret-sharing/secret-sharing-dal.ts +++ b/backend/src/services/secret-sharing/secret-sharing-dal.ts @@ -1,27 +1,92 @@ import { Knex } from "knex"; import { TDbClient } from "@app/db"; -import { TableName } from "@app/db/schemas"; +import { TableName, TSecretSharing } from "@app/db/schemas"; import { DatabaseError } from "@app/lib/errors"; -import { ormify } from "@app/lib/knex"; +import { ormify, selectAllTableCols } from "@app/lib/knex"; +import { logger } from "@app/lib/logger"; +import { QueueName } from "@app/queue"; export type TSecretSharingDALFactory = ReturnType; export const secretSharingDALFactory = (db: TDbClient) => { const sharedSecretOrm = ormify(db, TableName.SecretSharing); + const countAllUserOrgSharedSecrets = async ({ orgId, userId }: { orgId: string; userId: string }) => { + try { + interface CountResult { + count: string; + } + + const count = await db + .replicaNode()(TableName.SecretSharing) + .where(`${TableName.SecretSharing}.orgId`, orgId) + .where(`${TableName.SecretSharing}.userId`, userId) + .count("*") + .first(); + 
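// Illustrative note, not part of this patch: on Postgres, count() yields a bigint,
// which the pg driver surfaces as a string, hence the cast-and-parseInt below. The
// same pattern in isolation, with a hypothetical `users` table:
//
//   const row = await db("users").count("*").first(); // e.g. { count: "42" }
//   const total = parseInt((row as { count?: string })?.count ?? "0", 10);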
+ return parseInt((count as unknown as CountResult).count || "0", 10); + } catch (error) { + throw new DatabaseError({ error, name: "Count all user-org shared secrets" }); + } + }; + const pruneExpiredSharedSecrets = async (tx?: Knex) => { + logger.info(`${QueueName.DailyResourceCleanUp}: pruning expired shared secret started`); try { const today = new Date(); - const docs = await (tx || db)(TableName.SecretSharing).where("expiresAt", "<", today).del(); + const docs = await (tx || db)(TableName.SecretSharing) + .where("expiresAt", "<", today) + .andWhere("encryptedValue", "<>", "") + .update({ + encryptedValue: "", + tag: "", + iv: "" + }); + logger.info(`${QueueName.DailyResourceCleanUp}: pruning expired shared secret completed`); return docs; } catch (error) { throw new DatabaseError({ error, name: "pruneExpiredSharedSecrets" }); } }; + const findActiveSharedSecrets = async (filters: Partial, tx?: Knex) => { + try { + const now = new Date(); + return await (tx || db)(TableName.SecretSharing) + .where(filters) + .andWhere("expiresAt", ">", now) + .andWhere("encryptedValue", "<>", "") + .select(selectAllTableCols(TableName.SecretSharing)) + .orderBy("expiresAt", "asc"); + } catch (error) { + throw new DatabaseError({ + error, + name: "Find Active Shared Secrets" + }); + } + }; + + const softDeleteById = async (id: string) => { + try { + await sharedSecretOrm.updateById(id, { + encryptedValue: "", + iv: "", + tag: "" + }); + } catch (error) { + throw new DatabaseError({ + error, + name: "Soft Delete Shared Secret" + }); + } + }; + return { ...sharedSecretOrm, - pruneExpiredSharedSecrets + countAllUserOrgSharedSecrets, + pruneExpiredSharedSecrets, + softDeleteById, + findActiveSharedSecrets }; }; diff --git a/backend/src/services/secret-sharing/secret-sharing-service.ts b/backend/src/services/secret-sharing/secret-sharing-service.ts index ccbce0a524..171ab54db5 100644 --- a/backend/src/services/secret-sharing/secret-sharing-service.ts +++ b/backend/src/services/secret-sharing/secret-sharing-service.ts @@ -1,84 +1,290 @@ -import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service"; -import { UnauthorizedError } from "@app/lib/errors"; +import crypto from "node:crypto"; +import bcrypt from "bcrypt"; +import { z } from "zod"; + +import { TSecretSharing } from "@app/db/schemas"; +import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service"; +import { BadRequestError, ForbiddenRequestError, NotFoundError, UnauthorizedError } from "@app/lib/errors"; +import { SecretSharingAccessType } from "@app/lib/types"; + +import { TKmsServiceFactory } from "../kms/kms-service"; +import { TOrgDALFactory } from "../org/org-dal"; import { TSecretSharingDALFactory } from "./secret-sharing-dal"; -import { TCreateSharedSecretDTO, TDeleteSharedSecretDTO, TSharedSecretPermission } from "./secret-sharing-types"; +import { + TCreatePublicSharedSecretDTO, + TCreateSharedSecretDTO, + TDeleteSharedSecretDTO, + TGetActiveSharedSecretByIdDTO, + TGetSharedSecretsDTO +} from "./secret-sharing-types"; type TSecretSharingServiceFactoryDep = { permissionService: Pick; secretSharingDAL: TSecretSharingDALFactory; + orgDAL: TOrgDALFactory; + kmsService: TKmsServiceFactory; }; export type TSecretSharingServiceFactory = ReturnType; +const isUuidV4 = (uuid: string) => z.string().uuid().safeParse(uuid).success; + export const secretSharingServiceFactory = ({ permissionService, - secretSharingDAL + secretSharingDAL, + orgDAL, + kmsService }: TSecretSharingServiceFactoryDep) => 
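// Illustrative sketch, not part of this patch: share-link passwords are persisted only
// as bcrypt hashes (work factor 10, as used below) and verified later with
// bcrypt.compare; the plaintext is never stored. The round trip, inside any async
// context:
//
//   import bcrypt from "bcrypt";
//   const hashed = await bcrypt.hash("hunter2", 10);    // value persisted in `password`
//   const ok = await bcrypt.compare("hunter2", hashed); // true only for the right password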
{ - const createSharedSecret = async (createSharedSecretInput: TCreateSharedSecretDTO) => { - const { - actor, - actorId, - orgId, - actorAuthMethod, - actorOrgId, - encryptedValue, - iv, - tag, - hashedHex, - expiresAt, - expiresAfterViews - } = createSharedSecretInput; + const createSharedSecret = async ({ + actor, + actorId, + orgId, + actorAuthMethod, + actorOrgId, + secretValue, + name, + password, + accessType, + expiresAt, + expiresAfterViews + }: TCreateSharedSecretDTO) => { const { permission } = await permissionService.getOrgPermission(actor, actorId, orgId, actorAuthMethod, actorOrgId); - if (!permission) throw new UnauthorizedError({ name: "User not in org" }); + if (!permission) throw new ForbiddenRequestError({ name: "User is not a part of the specified organization" }); + + if (new Date(expiresAt) < new Date()) { + throw new BadRequestError({ message: "Expiration date cannot be in the past" }); + } + + // Limit Expiry Time to 1 month + const expiryTime = new Date(expiresAt).getTime(); + const currentTime = new Date().getTime(); + const thirtyDays = 30 * 24 * 60 * 60 * 1000; + if (expiryTime - currentTime > thirtyDays) { + throw new BadRequestError({ message: "Expiration date cannot be more than 30 days" }); + } + + if (secretValue.length > 10_000) { + throw new BadRequestError({ message: "Shared secret value too long" }); + } + + const encryptWithRoot = kmsService.encryptWithRootKey(); + + const encryptedSecret = encryptWithRoot(Buffer.from(secretValue)); + + const id = crypto.randomBytes(32).toString("hex"); + const hashedPassword = password ? await bcrypt.hash(password, 10) : null; + const newSharedSecret = await secretSharingDAL.create({ - encryptedValue, - iv, - tag, - hashedHex, - expiresAt, + identifier: id, + iv: null, + tag: null, + encryptedValue: null, + encryptedSecret, + name, + password: hashedPassword, + expiresAt: new Date(expiresAt), expiresAfterViews, userId: actorId, - orgId + orgId, + accessType }); - return { id: newSharedSecret.id }; + + const idToReturn = `${Buffer.from(newSharedSecret.identifier!, "hex").toString("base64url")}`; + + return { id: idToReturn }; }; - const getSharedSecrets = async (getSharedSecretsInput: TSharedSecretPermission) => { - const { actor, actorId, orgId, actorAuthMethod, actorOrgId } = getSharedSecretsInput; - const { permission } = await permissionService.getOrgPermission(actor, actorId, orgId, actorAuthMethod, actorOrgId); - if (!permission) throw new UnauthorizedError({ name: "User not in org" }); - const userSharedSecrets = await secretSharingDAL.find({ userId: actorId, orgId }, { sort: [["expiresAt", "asc"]] }); - return userSharedSecrets; - }; - - const getActiveSharedSecretByIdAndHashedHex = async (sharedSecretId: string, hashedHex: string) => { - const sharedSecret = await secretSharingDAL.findOne({ id: sharedSecretId, hashedHex }); - if (sharedSecret.expiresAt && sharedSecret.expiresAt < new Date()) { - return; + const createPublicSharedSecret = async ({ + password, + secretValue, + expiresAt, + expiresAfterViews, + accessType + }: TCreatePublicSharedSecretDTO) => { + if (new Date(expiresAt) < new Date()) { + throw new BadRequestError({ message: "Expiration date cannot be in the past" }); } - if (sharedSecret.expiresAfterViews != null && sharedSecret.expiresAfterViews >= 0) { - if (sharedSecret.expiresAfterViews === 0) { - await secretSharingDAL.deleteById(sharedSecretId); - return; + + // Limit Expiry Time to 1 month + const expiryTime = new Date(expiresAt).getTime(); + const currentTime = new Date().getTime(); + 
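// Illustrative note, not part of this patch: the expiry window below is computed in
// milliseconds (30 * 24 * 60 * 60 * 1000 = 2,592,000,000 ms). Factored into a
// hypothetical standalone guard, the same check reads:
//
//   const assertExpiryWithinDays = (expiresAt: string, days: number) => {
//     const deltaMs = new Date(expiresAt).getTime() - Date.now();
//     if (deltaMs < 0) throw new BadRequestError({ message: "Expiration date cannot be in the past" });
//     if (deltaMs > days * 24 * 60 * 60 * 1000)
//       throw new BadRequestError({ message: `Expiration date cannot be more than ${days} days in the future` });
//   };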
const thirtyDays = 30 * 24 * 60 * 60 * 1000; + if (expiryTime - currentTime > thirtyDays) { + throw new BadRequestError({ message: "Expiration date cannot be more than 30 days in the future" }); + } + + const encryptWithRoot = kmsService.encryptWithRootKey(); + const encryptedSecret = encryptWithRoot(Buffer.from(secretValue)); + + const id = crypto.randomBytes(32).toString("hex"); + const hashedPassword = password ? await bcrypt.hash(password, 10) : null; + + const newSharedSecret = await secretSharingDAL.create({ + identifier: id, + encryptedValue: null, + iv: null, + tag: null, + encryptedSecret, + password: hashedPassword, + expiresAt: new Date(expiresAt), + expiresAfterViews, + accessType + }); + + return { id: `${Buffer.from(newSharedSecret.identifier!, "hex").toString("base64url")}` }; + }; + + const getSharedSecrets = async ({ + actor, + actorId, + actorAuthMethod, + actorOrgId, + offset, + limit + }: TGetSharedSecretsDTO) => { + if (!actorOrgId) throw new ForbiddenRequestError(); + + const { permission } = await permissionService.getOrgPermission( + actor, + actorId, + actorOrgId, + actorAuthMethod, + actorOrgId + ); + if (!permission) throw new ForbiddenRequestError({ name: "User does not belong to the specified organization" }); + + const secrets = await secretSharingDAL.find( + { + userId: actorId, + orgId: actorOrgId + }, + { offset, limit, sort: [["createdAt", "desc"]] } + ); + + const count = await secretSharingDAL.countAllUserOrgSharedSecrets({ + orgId: actorOrgId, + userId: actorId + }); + + return { + secrets, + totalCount: count + }; + }; + + const $decrementSecretViewCount = async (sharedSecret: TSecretSharing) => { + const { expiresAfterViews } = sharedSecret; + + if (expiresAfterViews) { + // decrement view count if view count expiry set + await secretSharingDAL.updateById(sharedSecret.id, { $decr: { expiresAfterViews: 1 } }); + } + + await secretSharingDAL.updateById(sharedSecret.id, { + lastViewedAt: new Date() + }); + }; + + /** Gets a shared secret. Password-less secrets are validated and returned directly; password-protected ones additionally require the correct password. Every requested secret must still be unexpired. */ + const getSharedSecretById = async ({ sharedSecretId, hashedHex, orgId, password }: TGetActiveSharedSecretByIdDTO) => { + const sharedSecret = isUuidV4(sharedSecretId) + ? await secretSharingDAL.findOne({ + id: sharedSecretId, + hashedHex + }) + : await secretSharingDAL.findOne({ + identifier: Buffer.from(sharedSecretId, "base64url").toString("hex") + }); + + if (!sharedSecret) + throw new NotFoundError({ + message: `Shared secret with ID '${sharedSecretId}' not found` + }); + + const { accessType, expiresAt, expiresAfterViews } = sharedSecret; + + const orgName = sharedSecret.orgId ? (await orgDAL.findOrgById(sharedSecret.orgId))?.name : ""; + + if (accessType === SecretSharingAccessType.Organization && orgId !== sharedSecret.orgId) + throw new ForbiddenRequestError(); + + // all secrets pass through here, meaning we first check whether the secret has expired and then whether it needs verification + // or can be safely sent to the client.
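// Illustrative sketch, not part of this patch: new share links carry a 32-byte random
// identifier, stored as hex and handed to clients as base64url (legacy UUIDv4 ids keep
// working through the isUuidV4 branch above). The encoding round trip:
//
//   import crypto from "node:crypto";
//   const hexId = crypto.randomBytes(32).toString("hex");           // persisted as `identifier`
//   const urlId = Buffer.from(hexId, "hex").toString("base64url");  // returned to the client
//   const lookup = Buffer.from(urlId, "base64url").toString("hex"); // decoded on access
//   // lookup === hexId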
+ if (expiresAt !== null && expiresAt < new Date()) { + // check lifetime expiry + await secretSharingDAL.softDeleteById(sharedSecretId); + throw new ForbiddenRequestError({ + message: "Access denied: Secret has expired by lifetime" + }); + } + + if (expiresAfterViews !== null && expiresAfterViews === 0) { + // check view count expiry + await secretSharingDAL.softDeleteById(sharedSecretId); + throw new ForbiddenRequestError({ + message: "Access denied: Secret has expired by view count" + }); + } + + const isPasswordProtected = Boolean(sharedSecret.password); + const hasProvidedPassword = Boolean(password); + if (isPasswordProtected) { + if (hasProvidedPassword) { + const isMatch = await bcrypt.compare(password as string, sharedSecret.password as string); + if (!isMatch) throw new UnauthorizedError({ message: "Invalid credentials" }); + } else { + return { isPasswordProtected }; } - await secretSharingDAL.updateById(sharedSecretId, { $decr: { expiresAfterViews: 1 } }); } - return sharedSecret; + + // If encryptedSecret is set, we know that this secret has been encrypted using KMS, and we can therefore do server-side decryption. + let decryptedSecretValue: Buffer | undefined; + if (sharedSecret.encryptedSecret) { + const decryptWithRoot = kmsService.decryptWithRootKey(); + decryptedSecretValue = decryptWithRoot(sharedSecret.encryptedSecret); + } + + // decrement when we are sure the user will view secret. + await $decrementSecretViewCount(sharedSecret); + + return { + isPasswordProtected, + secret: { + ...sharedSecret, + ...(decryptedSecretValue && { + secretValue: Buffer.from(decryptedSecretValue).toString() + }), + orgName: + sharedSecret.accessType === SecretSharingAccessType.Organization && orgId === sharedSecret.orgId + ? orgName + : undefined + } + }; }; const deleteSharedSecretById = async (deleteSharedSecretInput: TDeleteSharedSecretDTO) => { const { actor, actorId, orgId, actorAuthMethod, actorOrgId, sharedSecretId } = deleteSharedSecretInput; const { permission } = await permissionService.getOrgPermission(actor, actorId, orgId, actorAuthMethod, actorOrgId); - if (!permission) throw new UnauthorizedError({ name: "User not in org" }); + if (!permission) throw new ForbiddenRequestError({ name: "User does not belong to the specified organization" }); + + const sharedSecret = isUuidV4(sharedSecretId) + ? 
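// Illustrative note, not part of this patch: the expiry paths above scrub the payload
// via softDeleteById (encryptedValue/iv/tag blanked, row retained for metadata),
// while this user-initiated path hard-deletes the row. The two levels side by side:
//
//   await secretSharingDAL.softDeleteById(id); // expired: keep row, drop secret material
//   await secretSharingDAL.deleteById(id);     // explicit delete: remove row entirely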
await secretSharingDAL.findById(sharedSecretId) + : await secretSharingDAL.findOne({ identifier: sharedSecretId }); + + if (sharedSecret.orgId && sharedSecret.orgId !== orgId) + throw new ForbiddenRequestError({ message: "User does not have permission to delete shared secret" }); + const deletedSharedSecret = await secretSharingDAL.deleteById(sharedSecretId); + return deletedSharedSecret; }; return { createSharedSecret, + createPublicSharedSecret, getSharedSecrets, deleteSharedSecretById, - getActiveSharedSecretByIdAndHashedHex + getSharedSecretById }; }; diff --git a/backend/src/services/secret-sharing/secret-sharing-types.ts b/backend/src/services/secret-sharing/secret-sharing-types.ts index 5f35b28483..1d9efa1e3a 100644 --- a/backend/src/services/secret-sharing/secret-sharing-types.ts +++ b/backend/src/services/secret-sharing/secret-sharing-types.ts @@ -1,21 +1,43 @@ +import { SecretSharingAccessType, TGenericPermission } from "@app/lib/types"; + import { ActorAuthMethod, ActorType } from "../auth/auth-type"; +export type TGetSharedSecretsDTO = { + offset: number; + limit: number; +} & TGenericPermission; + export type TSharedSecretPermission = { actor: ActorType; actorId: string; actorAuthMethod: ActorAuthMethod; actorOrgId: string; orgId: string; + accessType?: SecretSharingAccessType; + name?: string; + password?: string; }; -export type TCreateSharedSecretDTO = { - encryptedValue: string; - iv: string; - tag: string; - hashedHex: string; - expiresAt: Date; - expiresAfterViews: number; -} & TSharedSecretPermission; +export type TCreatePublicSharedSecretDTO = { + secretValue: string; + expiresAt: string; + expiresAfterViews?: number; + password?: string; + accessType: SecretSharingAccessType; +}; + +export type TGetActiveSharedSecretByIdDTO = { + sharedSecretId: string; + hashedHex?: string; + orgId?: string; + password?: string; +}; + +export type TValidateActiveSharedSecretDTO = TGetActiveSharedSecretByIdDTO & { + password: string; +}; + +export type TCreateSharedSecretDTO = TSharedSecretPermission & TCreatePublicSharedSecretDTO; export type TDeleteSharedSecretDTO = { sharedSecretId: string; diff --git a/backend/src/services/secret-tag/secret-tag-dal.ts b/backend/src/services/secret-tag/secret-tag-dal.ts index f1ae2424ad..1df64afa20 100644 --- a/backend/src/services/secret-tag/secret-tag-dal.ts +++ b/backend/src/services/secret-tag/secret-tag-dal.ts @@ -3,17 +3,18 @@ import { Knex } from "knex"; import { TDbClient } from "@app/db"; import { TableName } from "@app/db/schemas"; import { DatabaseError } from "@app/lib/errors"; -import { ormify } from "@app/lib/knex"; +import { ormify, selectAllTableCols } from "@app/lib/knex"; export type TSecretTagDALFactory = ReturnType; export const secretTagDALFactory = (db: TDbClient) => { const secretTagOrm = ormify(db, TableName.SecretTag); const secretJnTagOrm = ormify(db, TableName.JnSecretTag); + const secretV2JnTagOrm = ormify(db, TableName.SecretV2JnTag); const findManyTagsById = async (projectId: string, ids: string[], tx?: Knex) => { try { - const tags = await (tx || db)(TableName.SecretTag).where({ projectId }).whereIn("id", ids); + const tags = await (tx || db.replicaNode())(TableName.SecretTag).where({ projectId }).whereIn("id", ids); return tags; } catch (error) { throw new DatabaseError({ error, name: "Find all by ids" }); @@ -34,10 +35,25 @@ export const secretTagDALFactory = (db: TDbClient) => { } }; + // special query for migration + const findSecretTagsByProjectId = async (projectId: string, tx?: Knex) => { + try { + const tags = 
await (tx || db.replicaNode())(TableName.JnSecretTag) + .join(TableName.SecretTag, `${TableName.JnSecretTag}.secret_tagsId`, `${TableName.SecretTag}.id`) + .where({ projectId }) + .select(selectAllTableCols(TableName.JnSecretTag)); + return tags; + } catch (error) { + throw new DatabaseError({ error, name: "Find all by ids" }); + } + }; return { ...secretTagOrm, saveTagsToSecret: secretJnTagOrm.insertMany, deleteTagsToSecret: secretJnTagOrm.delete, + saveTagsToSecretV2: secretV2JnTagOrm.batchInsert, + deleteTagsToSecretV2: secretV2JnTagOrm.delete, + findSecretTagsByProjectId, deleteTagsManySecret, findManyTagsById }; diff --git a/backend/src/services/secret-tag/secret-tag-service.ts b/backend/src/services/secret-tag/secret-tag-service.ts index ed8f5fec78..6cae3997a8 100644 --- a/backend/src/services/secret-tag/secret-tag-service.ts +++ b/backend/src/services/secret-tag/secret-tag-service.ts @@ -2,10 +2,17 @@ import { ForbiddenError } from "@casl/ability"; import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service"; import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission"; -import { BadRequestError } from "@app/lib/errors"; +import { BadRequestError, NotFoundError } from "@app/lib/errors"; import { TSecretTagDALFactory } from "./secret-tag-dal"; -import { TCreateTagDTO, TDeleteTagDTO, TListProjectTagsDTO } from "./secret-tag-types"; +import { + TCreateTagDTO, + TDeleteTagDTO, + TGetTagByIdDTO, + TGetTagBySlugDTO, + TListProjectTagsDTO, + TUpdateTagDTO +} from "./secret-tag-types"; type TSecretTagServiceFactoryDep = { secretTagDAL: TSecretTagDALFactory; @@ -15,16 +22,7 @@ type TSecretTagServiceFactoryDep = { export type TSecretTagServiceFactory = ReturnType; export const secretTagServiceFactory = ({ secretTagDAL, permissionService }: TSecretTagServiceFactoryDep) => { - const createTag = async ({ - name, - slug, - actor, - color, - actorId, - actorOrgId, - actorAuthMethod, - projectId - }: TCreateTagDTO) => { + const createTag = async ({ slug, actor, color, actorId, actorOrgId, actorAuthMethod, projectId }: TCreateTagDTO) => { const { permission } = await permissionService.getProjectPermission( actor, actorId, @@ -39,17 +37,39 @@ export const secretTagServiceFactory = ({ secretTagDAL, permissionService }: TSe const newTag = await secretTagDAL.create({ projectId, - name, slug, color, - createdBy: actorId + createdBy: actorId, + createdByActorType: actor }); return newTag; }; + const updateTag = async ({ actorId, actor, actorOrgId, actorAuthMethod, id, color, slug }: TUpdateTagDTO) => { + const tag = await secretTagDAL.findById(id); + if (!tag) throw new NotFoundError({ message: `Tag with ID '${id}' not found` }); + + if (slug) { + const existingTag = await secretTagDAL.findOne({ slug, projectId: tag.projectId }); + if (existingTag && existingTag.id !== tag.id) throw new BadRequestError({ message: "Tag already exist" }); + } + + const { permission } = await permissionService.getProjectPermission( + actor, + actorId, + tag.projectId, + actorAuthMethod, + actorOrgId + ); + ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Edit, ProjectPermissionSub.Tags); + + const updatedTag = await secretTagDAL.updateById(tag.id, { color, slug }); + return updatedTag; + }; + const deleteTag = async ({ actorId, actor, actorOrgId, actorAuthMethod, id }: TDeleteTagDTO) => { const tag = await secretTagDAL.findById(id); - if (!tag) throw new BadRequestError({ message: "Tag doesn't exist" }); + if (!tag) throw 
new NotFoundError({ message: `Tag with ID '${id}' not found` }); const { permission } = await permissionService.getProjectPermission( actor, @@ -64,6 +84,38 @@ export const secretTagServiceFactory = ({ secretTagDAL, permissionService }: TSe return deletedTag; }; + const getTagById = async ({ actorId, actor, actorOrgId, actorAuthMethod, id }: TGetTagByIdDTO) => { + const tag = await secretTagDAL.findById(id); + if (!tag) throw new NotFoundError({ message: `Tag with ID '${id}' not found` }); + + const { permission } = await permissionService.getProjectPermission( + actor, + actorId, + tag.projectId, + actorAuthMethod, + actorOrgId + ); + ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Tags); + + return { ...tag, name: tag.slug }; + }; + + const getTagBySlug = async ({ actorId, actor, actorOrgId, actorAuthMethod, slug, projectId }: TGetTagBySlugDTO) => { + const tag = await secretTagDAL.findOne({ projectId, slug }); + if (!tag) throw new NotFoundError({ message: `Tag with slug '${slug}' not found` }); + + const { permission } = await permissionService.getProjectPermission( + actor, + actorId, + tag.projectId, + actorAuthMethod, + actorOrgId + ); + ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Tags); + + return { ...tag, name: tag.slug }; + }; + const getProjectTags = async ({ actor, actorId, actorOrgId, actorAuthMethod, projectId }: TListProjectTagsDTO) => { const { permission } = await permissionService.getProjectPermission( actor, @@ -78,5 +130,5 @@ export const secretTagServiceFactory = ({ secretTagDAL, permissionService }: TSe return tags; }; - return { createTag, deleteTag, getProjectTags }; + return { createTag, deleteTag, getProjectTags, getTagById, getTagBySlug, updateTag }; }; diff --git a/backend/src/services/secret-tag/secret-tag-types.ts b/backend/src/services/secret-tag/secret-tag-types.ts index d2f027153f..ede0c55a32 100644 --- a/backend/src/services/secret-tag/secret-tag-types.ts +++ b/backend/src/services/secret-tag/secret-tag-types.ts @@ -1,11 +1,24 @@ import { TProjectPermission } from "@app/lib/types"; export type TCreateTagDTO = { - name: string; color: string; slug: string; } & TProjectPermission; +export type TUpdateTagDTO = { + id: string; + slug?: string; + color?: string; +} & Omit; + +export type TGetTagByIdDTO = { + id: string; +} & Omit; + +export type TGetTagBySlugDTO = { + slug: string; +} & TProjectPermission; + export type TDeleteTagDTO = { id: string; } & Omit; diff --git a/backend/src/services/secret-v2-bridge/secret-v2-bridge-dal.ts b/backend/src/services/secret-v2-bridge/secret-v2-bridge-dal.ts new file mode 100644 index 0000000000..3bdd5783fa --- /dev/null +++ b/backend/src/services/secret-v2-bridge/secret-v2-bridge-dal.ts @@ -0,0 +1,593 @@ +import { Knex } from "knex"; +import { validate as uuidValidate } from "uuid"; + +import { TDbClient } from "@app/db"; +import { SecretsV2Schema, SecretType, TableName, TSecretsV2, TSecretsV2Update } from "@app/db/schemas"; +import { BadRequestError, DatabaseError, NotFoundError } from "@app/lib/errors"; +import { + buildFindFilter, + ormify, + selectAllTableCols, + sqlNestRelationships, + TFindFilter, + TFindOpt +} from "@app/lib/knex"; +import { OrderByDirection } from "@app/lib/types"; +import { SecretsOrderBy } from "@app/services/secret/secret-types"; +import { TFindSecretsByFolderIdsFilter } from "@app/services/secret-v2-bridge/secret-v2-bridge-types"; + +export type TSecretV2BridgeDALFactory = ReturnType; + 
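// Illustrative sketch, not part of this patch: the find/findOne queries in the factory
// below left-join the tag junction and tag tables, producing one row per (secret, tag)
// pair, and sqlNestRelationships folds those rows back into a single secret carrying a
// tags[] array. The same reshaping in plain TypeScript, with a hypothetical row type:
//
//   type Row = { id: string; key: string; tagId: string | null; tagSlug: string | null };
//   const nestTags = (rows: Row[]) => {
//     const byId = new Map<string, { id: string; key: string; tags: { id: string; slug: string }[] }>();
//     for (const row of rows) {
//       const doc = byId.get(row.id) ?? { id: row.id, key: row.key, tags: [] };
//       if (row.tagId && row.tagSlug) doc.tags.push({ id: row.tagId, slug: row.tagSlug });
//       byId.set(row.id, doc);
//     }
//     return [...byId.values()];
//   };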
+export const secretV2BridgeDALFactory = (db: TDbClient) => {
+  const secretOrm = ormify(db, TableName.SecretV2);
+
+  const findOne = async (filter: Partial<TSecretsV2>, tx?: Knex) => {
+    try {
+      const docs = await (tx || db)(TableName.SecretV2)
+        .where(filter)
+        .leftJoin(
+          TableName.SecretV2JnTag,
+          `${TableName.SecretV2}.id`,
+          `${TableName.SecretV2JnTag}.${TableName.SecretV2}Id`
+        )
+        .leftJoin(
+          TableName.SecretTag,
+          `${TableName.SecretV2JnTag}.${TableName.SecretTag}Id`,
+          `${TableName.SecretTag}.id`
+        )
+        .select(selectAllTableCols(TableName.SecretV2))
+        .select(db.ref("id").withSchema(TableName.SecretTag).as("tagId"))
+        .select(db.ref("color").withSchema(TableName.SecretTag).as("tagColor"))
+        .select(db.ref("slug").withSchema(TableName.SecretTag).as("tagSlug"));
+
+      const data = sqlNestRelationships({
+        data: docs,
+        key: "id",
+        parentMapper: (el) => ({ _id: el.id, ...SecretsV2Schema.parse(el) }),
+        childrenMapper: [
+          {
+            key: "tagId",
+            label: "tags" as const,
+            mapper: ({ tagId: id, tagColor: color, tagSlug: slug }) => ({
+              id,
+              color,
+              slug,
+              name: slug
+            })
+          }
+        ]
+      });
+      return data?.[0];
+    } catch (error) {
+      throw new DatabaseError({ error, name: `${TableName.SecretV2}: FindOne` });
+    }
+  };
+
+  const find = async (filter: TFindFilter<TSecretsV2>, { offset, limit, sort, tx }: TFindOpt<TSecretsV2> = {}) => {
+    try {
+      const query = (tx || db)(TableName.SecretV2)
+        // eslint-disable-next-line @typescript-eslint/no-misused-promises
+        .where(buildFindFilter(filter))
+        .leftJoin(
+          TableName.SecretV2JnTag,
+          `${TableName.SecretV2}.id`,
+          `${TableName.SecretV2JnTag}.${TableName.SecretV2}Id`
+        )
+        .leftJoin(
+          TableName.SecretTag,
+          `${TableName.SecretV2JnTag}.${TableName.SecretTag}Id`,
+          `${TableName.SecretTag}.id`
+        )
+        .select(selectAllTableCols(TableName.SecretV2))
+        .select(db.ref("id").withSchema(TableName.SecretTag).as("tagId"))
+        .select(db.ref("color").withSchema(TableName.SecretTag).as("tagColor"))
+        .select(db.ref("slug").withSchema(TableName.SecretTag).as("tagSlug"));
+      if (limit) void query.limit(limit);
+      if (offset) void query.offset(offset);
+      if (sort) {
+        void query.orderBy(sort.map(([column, order, nulls]) => ({ column: column as string, order, nulls })));
+      }
+
+      const docs = await query;
+      const data = sqlNestRelationships({
+        data: docs,
+        key: "id",
+        parentMapper: (el) => ({ _id: el.id, ...SecretsV2Schema.parse(el) }),
+        childrenMapper: [
+          {
+            key: "tagId",
+            label: "tags" as const,
+            mapper: ({ tagId: id, tagColor: color, tagSlug: slug }) => ({
+              id,
+              color,
+              slug,
+              name: slug
+            })
+          }
+        ]
+      });
+      return data;
+    } catch (error) {
+      throw new DatabaseError({ error, name: `${TableName.SecretV2}: Find` });
+    }
+  };
+
+  const update = async (filter: Partial<TSecretsV2>, data: Omit<TSecretsV2Update, "version">, tx?: Knex) => {
+    try {
+      const sec = await (tx || db)(TableName.SecretV2)
+        .where(filter)
+        .update(data)
+        .increment("version", 1)
+        .returning("*");
+      return sec;
+    } catch (error) {
+      throw new DatabaseError({ error, name: "update secret" });
+    }
+  };
+
+  const bulkUpdate = async (
+    data: Array<{ filter: Partial<TSecretsV2>; data: TSecretsV2Update }>,
+    tx?: Knex
+  ) => {
+    try {
+      const secs = await Promise.all(
+        data.map(async ({ filter, data: updateData }) => {
+          const [doc] = await (tx || db)(TableName.SecretV2)
+            .where(filter)
+            .update(updateData)
+            .increment("version", 1)
+            .returning("*");
+          if (!doc) throw new BadRequestError({ message: "Failed to update document" });
+          return doc;
+        })
+      );
+      return secs;
+    } catch (error) {
+      throw new DatabaseError({ error, name: "bulk update secret" });
+    }
+  };
+
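+  // Editor's note (hedged): the helper below implements a bulk update as a single
+  // INSERT ... ON CONFLICT ("id") DO UPDATE via knex's .onConflict("id").merge().
+  // Every incoming id is first verified to exist, so the insert can only take the
+  // conflict (merge) path; this updates N rows in one statement without bumping
+  // the `version` column, unlike `update`/`bulkUpdate` above.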
+  const bulkUpdateNoVersionIncrement = async (data: TSecretsV2[], tx?: Knex) => {
+    try {
+      const existingSecrets = await secretOrm.find(
+        {
+          $in: {
+            id: data.map((el) => el.id)
+          }
+        },
+        { tx }
+      );
+
+      if (existingSecrets.length !== data.length) {
+        throw new NotFoundError({ message: "One or more secrets were not found" });
+      }
+
+      if (data.length === 0) return [];
+
+      const updatedSecrets = await (tx || db)(TableName.SecretV2)
+        .insert(data)
+        .onConflict("id") // every id exists, so this always takes the conflict path
+        .merge() // merge the incoming data with the existing row
+        .returning("*");
+
+      return updatedSecrets;
+    } catch (error) {
+      throw new DatabaseError({ error, name: "bulk update secret" });
+    }
+  };
+
+  const deleteMany = async (
+    data: Array<{ key: string; type: SecretType }>,
+    folderId: string,
+    userId: string,
+    tx?: Knex
+  ) => {
+    try {
+      const deletedSecrets = await (tx || db)(TableName.SecretV2)
+        .where({ folderId })
+        .where((bd) => {
+          data.forEach((el) => {
+            void bd.orWhere({
+              key: el.key,
+              type: el.type,
+              ...(el.type === SecretType.Personal ? { userId } : {})
+            });
+            // if a shared secret is deleted, its personal overrides should be deleted too
+            if (el.type === SecretType.Shared) {
+              void bd.orWhere({
+                key: el.key,
+                type: SecretType.Personal
+              });
+            }
+          });
+        })
+        .delete()
+        .returning("*");
+      return deletedSecrets;
+    } catch (error) {
+      throw new DatabaseError({ error, name: "delete many secret" });
+    }
+  };
+
+  const findByFolderId = async (folderId: string, userId?: string, tx?: Knex) => {
+    try {
+      // if userId is not a valid UUID, treat it as unset (corner case: service token IDs are not UUIDs, kept for backwards compatibility with Mongo)
+      if (userId && !uuidValidate(userId)) {
+        // eslint-disable-next-line
+        userId = undefined;
+      }
+
+      const secs = await (tx || db.replicaNode())(TableName.SecretV2)
+        .where({ folderId })
+        .where((bd) => {
+          void bd.whereNull("userId").orWhere({ userId: userId || null });
+        })
+        .leftJoin(
+          TableName.SecretV2JnTag,
+          `${TableName.SecretV2}.id`,
+          `${TableName.SecretV2JnTag}.${TableName.SecretV2}Id`
+        )
+        .leftJoin(
+          TableName.SecretTag,
+          `${TableName.SecretV2JnTag}.${TableName.SecretTag}Id`,
+          `${TableName.SecretTag}.id`
+        )
+        .select(selectAllTableCols(TableName.SecretV2))
+        .select(db.ref("id").withSchema(TableName.SecretTag).as("tagId"))
+        .select(db.ref("color").withSchema(TableName.SecretTag).as("tagColor"))
+        .select(db.ref("slug").withSchema(TableName.SecretTag).as("tagSlug"))
+        .orderBy("id", "asc");
+
+      const data = sqlNestRelationships({
+        data: secs,
+        key: "id",
+        parentMapper: (el) => ({ _id: el.id, ...SecretsV2Schema.parse(el) }),
+        childrenMapper: [
+          {
+            key: "tagId",
+            label: "tags" as const,
+            mapper: ({ tagId: id, tagColor: color, tagSlug: slug }) => ({
+              id,
+              color,
+              slug,
+              name: slug
+            })
+          }
+        ]
+      });
+      return data;
+    } catch (error) {
+      throw new DatabaseError({ error, name: "get all secret" });
+    }
+  };
+
+  const getSecretTags = async (secretId: string, tx?: Knex) => {
+    try {
+      const tags = await (tx || db.replicaNode())(TableName.SecretV2JnTag)
+        .join(TableName.SecretTag, `${TableName.SecretV2JnTag}.${TableName.SecretTag}Id`, `${TableName.SecretTag}.id`)
+        .where({ [`${TableName.SecretV2}Id` as const]: secretId })
+        .select(db.ref("id").withSchema(TableName.SecretTag).as("tagId"))
+        .select(db.ref("color").withSchema(TableName.SecretTag).as("tagColor"))
+        .select(db.ref("slug").withSchema(TableName.SecretTag).as("tagSlug"));
+
+      return tags.map((el) => ({
+        id: el.tagId,
+        color: el.tagColor,
+        slug: el.tagSlug,
+        name: el.tagSlug
+      }));
+    } catch (error) {
+      throw new DatabaseError({ error, name: "get secret tags" });
+    }
+  };
+
+  // get unique secret count by folder IDs
+  const countByFolderIds = async (
+    folderIds: string[],
+    userId?: string,
+    tx?: Knex,
+    filters?: {
+      search?: string;
+      tagSlugs?: string[];
+    }
+  ) => {
+    try {
+      // if userId is not a valid UUID, treat it as unset (corner case: service token IDs are not UUIDs, kept for backwards compatibility with Mongo)
+      if (userId && !uuidValidate(userId)) {
+        // eslint-disable-next-line no-param-reassign
+        userId = undefined;
+      }
+
+      const query = (tx || db.replicaNode())(TableName.SecretV2)
+        .whereIn("folderId", folderIds)
+        .where((bd) => {
+          if (filters?.search) {
+            void bd.whereILike("key", `%${filters?.search}%`);
+          }
+        })
+        .where((bd) => {
+          void bd.whereNull("userId").orWhere({ userId: userId || null });
+        })
+        .countDistinct("key");
+
+      // only need to join tags if filtering by tag slugs
+      const slugs = filters?.tagSlugs?.filter(Boolean);
+      if (slugs && slugs.length > 0) {
+        void query
+          .leftJoin(
+            TableName.SecretV2JnTag,
+            `${TableName.SecretV2}.id`,
+            `${TableName.SecretV2JnTag}.${TableName.SecretV2}Id`
+          )
+          .leftJoin(
+            TableName.SecretTag,
+            `${TableName.SecretV2JnTag}.${TableName.SecretTag}Id`,
+            `${TableName.SecretTag}.id`
+          )
+          .whereIn("slug", slugs);
+      }
+
+      const secrets = await query;
+
+      return Number(secrets[0]?.count ?? 0);
+    } catch (error) {
+      throw new DatabaseError({ error, name: "get folder secret count" });
+    }
+  };
+
+  const findByFolderIds = async (
+    folderIds: string[],
+    userId?: string,
+    tx?: Knex,
+    filters?: TFindSecretsByFolderIdsFilter
+  ) => {
+    try {
+      // if userId is not a valid UUID, treat it as unset (corner case: service token IDs are not UUIDs, kept for backwards compatibility with Mongo)
+      if (userId && !uuidValidate(userId)) {
+        // eslint-disable-next-line no-param-reassign
+        userId = undefined;
+      }
+
+      const query = (tx || db.replicaNode())(TableName.SecretV2)
+        .whereIn(`${TableName.SecretV2}.folderId`, folderIds)
+        .where((bd) => {
+          if (filters?.search) {
+            if (filters?.includeTagsInSearch) {
+              void bd
+                .whereILike(`${TableName.SecretV2}.key`, `%${filters?.search}%`)
+                .orWhereILike(`${TableName.SecretTag}.slug`, `%${filters?.search}%`);
+            } else {
+              void bd.whereILike(`${TableName.SecretV2}.key`, `%${filters?.search}%`);
+            }
+          }
+
+          if (filters?.keys) {
+            void bd.whereIn(`${TableName.SecretV2}.key`, filters.keys);
+          }
+        })
+        .where((bd) => {
+          void bd.whereNull(`${TableName.SecretV2}.userId`).orWhere({ userId: userId || null });
+        })
+        .leftJoin(
+          TableName.SecretV2JnTag,
+          `${TableName.SecretV2}.id`,
+          `${TableName.SecretV2JnTag}.${TableName.SecretV2}Id`
+        )
+        .leftJoin(
+          TableName.SecretTag,
+          `${TableName.SecretV2JnTag}.${TableName.SecretTag}Id`,
+          `${TableName.SecretTag}.id`
+        )
+        .select(
+          selectAllTableCols(TableName.SecretV2),
+          db.raw(`DENSE_RANK() OVER (ORDER BY "key" ${filters?.orderDirection ?? OrderByDirection.ASC}) as rank`)
+        )
+        .select(db.ref("id").withSchema(TableName.SecretTag).as("tagId"))
+        .select(db.ref("color").withSchema(TableName.SecretTag).as("tagColor"))
+        .select(db.ref("slug").withSchema(TableName.SecretTag).as("tagSlug"))
+        .where((bd) => {
+          const slugs = filters?.tagSlugs?.filter(Boolean);
+          if (slugs && slugs.length > 0) {
+            void bd.whereIn(`${TableName.SecretTag}.slug`, slugs);
+          }
+        })
+        .orderBy(
+          filters?.orderBy === SecretsOrderBy.Name ? "key" : "id",
+          filters?.orderDirection ?? OrderByDirection.ASC
+        );
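+
+      // Editor's note (hedged): pagination below filters on the DENSE_RANK()
+      // window computed over `key` rather than using LIMIT/OFFSET on raw rows.
+      // The tag join multiplies rows and the same key can appear in several
+      // folders (multi-environment view), so ranking by key keeps one logical
+      // entry per secret name; e.g. rows (A,1) (A,1) (B,2) (C,3) with limit=2,
+      // offset=0 select ranks in [1, 3), i.e. all A and B rows.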
+
+      let secs: Awaited<typeof query>;
+
+      if (filters?.limit) {
+        const rankOffset = (filters?.offset ?? 0) + 1; // ranks start at 1
+        secs = await (tx || db)
+          .with("w", query)
+          .select("*")
+          .from<Awaited<typeof query>[number]>("w")
+          .where("w.rank", ">=", rankOffset)
+          .andWhere("w.rank", "<", rankOffset + filters.limit);
+      } else {
+        secs = await query;
+      }
+
+      const data = sqlNestRelationships({
+        data: secs,
+        key: "id",
+        parentMapper: (el) => ({ _id: el.id, ...SecretsV2Schema.parse(el) }),
+        childrenMapper: [
+          {
+            key: "tagId",
+            label: "tags" as const,
+            mapper: ({ tagId: id, tagColor: color, tagSlug: slug }) => ({
+              id,
+              color,
+              slug,
+              name: slug
+            })
+          }
+        ]
+      });
+      return data;
+    } catch (error) {
+      throw new DatabaseError({ error, name: "get all secret" });
+    }
+  };
+
+  const findBySecretKeys = async (
+    folderId: string,
+    query: Array<{ key: string; type: SecretType.Shared } | { key: string; type: SecretType.Personal; userId: string }>,
+    tx?: Knex
+  ) => {
+    if (!query.length) return [];
+    try {
+      const secrets = await (tx || db.replicaNode())(TableName.SecretV2)
+        .where({ folderId })
+        .where((bd) => {
+          query.forEach((el) => {
+            if (el.type === SecretType.Personal && !el.userId) {
+              throw new BadRequestError({ message: "Missing personal user id" });
+            }
+            void bd.orWhere({
+              key: el.key,
+              type: el.type,
+              userId: el.type === SecretType.Personal ? el.userId : null
+            });
+          });
+        });
+      return secrets;
+    } catch (error) {
+      throw new DatabaseError({ error, name: "find by secret keys" });
+    }
+  };
+
+  const upsertSecretReferences = async (
+    data: {
+      secretId: string;
+      references: Array<{ environment: string; secretPath: string; secretKey: string }>;
+    }[] = [],
+    tx?: Knex
+  ) => {
+    try {
+      if (!data.length) return;
+
+      await (tx || db)(TableName.SecretReferenceV2)
+        .whereIn(
+          "secretId",
+          data.map(({ secretId }) => secretId)
+        )
+        .delete();
+      const newSecretReferences = data
+        .filter(({ references }) => references.length)
+        .flatMap(({ secretId, references }) =>
+          references.map(({ environment, secretPath, secretKey }) => ({
+            secretPath,
+            secretId,
+            environment,
+            secretKey
+          }))
+        );
+      if (!newSecretReferences.length) return;
+      const secretReferences = await (tx || db).batchInsert(TableName.SecretReferenceV2, newSecretReferences);
+      return secretReferences;
+    } catch (error) {
+      throw new DatabaseError({ error, name: "UpsertSecretReference" });
+    }
+  };
+
+  const findReferencedSecretReferences = async (projectId: string, envSlug: string, secretPath: string, tx?: Knex) => {
+    try {
+      const docs = await (tx || db.replicaNode())(TableName.SecretReferenceV2)
+        .where({
+          secretPath,
+          environment: envSlug
+        })
+        .join(TableName.SecretV2, `${TableName.SecretV2}.id`, `${TableName.SecretReferenceV2}.secretId`)
+        .join(TableName.SecretFolder, `${TableName.SecretV2}.folderId`, `${TableName.SecretFolder}.id`)
+        .join(TableName.Environment, `${TableName.SecretFolder}.envId`, `${TableName.Environment}.id`)
+        .where("projectId", projectId)
+        .select(selectAllTableCols(TableName.SecretReferenceV2))
+        .select("folderId");
+
+      return docs;
+    } catch (error) {
+      throw new DatabaseError({ error, name: "FindReferencedSecretReferences" });
+    }
+  };
+
+  // special query to backfill secret value
+  const findAllProjectSecretValues = async (projectId: string, tx?: Knex) => {
+    try {
+      const docs = await (tx || db.replicaNode())(TableName.SecretV2)
+        .join(TableName.SecretFolder, `${TableName.SecretV2}.folderId`, `${TableName.SecretFolder}.id`)
+        .join(TableName.Environment, `${TableName.SecretFolder}.envId`, `${TableName.Environment}.id`)
+        .where("projectId", projectId)
+        // not empty
+        .whereNotNull("encryptedValue")
+        .select("encryptedValue", `${TableName.SecretV2}.id` as "id");
+      return docs;
+    } catch (error) {
+      throw new DatabaseError({ error, name: "FindAllProjectSecretValues" });
+    }
+  };
+
+  const findOneWithTags = async (filter: Partial<TSecretsV2>, tx?: Knex) => {
+    try {
+      const rawDocs = await (tx || db.replicaNode())(TableName.SecretV2)
+        .where(filter)
+        .leftJoin(
+          TableName.SecretV2JnTag,
+          `${TableName.SecretV2}.id`,
+          `${TableName.SecretV2JnTag}.${TableName.SecretV2}Id`
+        )
+        .leftJoin(
+          TableName.SecretTag,
+          `${TableName.SecretV2JnTag}.${TableName.SecretTag}Id`,
+          `${TableName.SecretTag}.id`
+        )
+        .select(selectAllTableCols(TableName.SecretV2))
+        .select(db.ref("id").withSchema(TableName.SecretTag).as("tagId"))
+        .select(db.ref("color").withSchema(TableName.SecretTag).as("tagColor"))
+        .select(db.ref("slug").withSchema(TableName.SecretTag).as("tagSlug"));
+      const docs = sqlNestRelationships({
+        data: rawDocs,
+        key: "id",
+        parentMapper: (el) => ({ _id: el.id, ...SecretsV2Schema.parse(el) }),
+        childrenMapper: [
+          {
+            key: "tagId",
+            label: "tags" as const,
+            mapper: ({ tagId: id, tagColor: color, tagSlug: slug }) => ({
+              id,
+              color,
+              slug,
+              name: slug
+            })
+          }
+        ]
+      });
+      return docs?.[0];
+    } catch (error) {
+      throw new DatabaseError({ error, name: "FindOneWithTags" });
+    }
+  };
+
+  return {
+    ...secretOrm,
+    update,
+    bulkUpdate,
+    deleteMany,
+    bulkUpdateNoVersionIncrement,
+    getSecretTags,
+    findOneWithTags,
+    findByFolderId,
+    findByFolderIds,
+    findBySecretKeys,
+    upsertSecretReferences,
+    findReferencedSecretReferences,
+    findAllProjectSecretValues,
+    countByFolderIds,
+    findOne,
+    find
+  };
+};
diff --git a/backend/src/services/secret-v2-bridge/secret-v2-bridge-fns.ts b/backend/src/services/secret-v2-bridge/secret-v2-bridge-fns.ts
new file mode 100644
index 0000000000..95d2cdbf41
--- /dev/null
+++ b/backend/src/services/secret-v2-bridge/secret-v2-bridge-fns.ts
@@ -0,0 +1,594 @@
+import path from "node:path";
+
+import { TableName, TSecretFolders, TSecretsV2 } from "@app/db/schemas";
+import { ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
+import { groupBy } from "@app/lib/fn";
+import { logger } from "@app/lib/logger";
+
+import { TProjectEnvDALFactory } from "../project-env/project-env-dal";
+import { TSecretFolderDALFactory } from "../secret-folder/secret-folder-dal";
+import { TSecretV2BridgeDALFactory } from "./secret-v2-bridge-dal";
+import { TFnSecretBulkDelete, TFnSecretBulkInsert, TFnSecretBulkUpdate } from "./secret-v2-bridge-types";
+
+const INTERPOLATION_SYNTAX_REG = /\${([a-zA-Z0-9-_.]+)}/g;
+// akhilmhdh: JS regexes with the `g` flag keep lastIndex state across .test() calls, so keep a non-global copy for repeated tests
+const INTERPOLATION_SYNTAX_REG_NON_GLOBAL = /\${([a-zA-Z0-9-_.]+)}/;
+
+export const shouldUseSecretV2Bridge = (version: number) => version === 3;
+
+/**
+ * Grabs and processes nested secret references from a string
+ *
+ * This function looks for patterns that match the interpolation syntax in the input string.
+ * It filters out references that include nested paths, splits them into environment and
+ * secret path parts, and then returns an array of objects with the environment and the
+ * joined secret path.
+ * @example
+ * const value = "Hello ${dev.someFolder.OtherFolder.SECRET_NAME} and ${prod.anotherFolder.SECRET_NAME}";
+ * const result = getAllSecretReferences(value);
+ * // result.nestedReferences will be:
+ * // [
+ * //   { environment: 'dev', secretPath: '/someFolder/OtherFolder', secretKey: 'SECRET_NAME' },
+ * //   { environment: 'prod', secretPath: '/anotherFolder', secretKey: 'SECRET_NAME' }
+ * // ]
+ */
+export const getAllSecretReferences = (maybeSecretReference: string) => {
+  const references = Array.from(maybeSecretReference.matchAll(INTERPOLATION_SYNTAX_REG), (m) => m[1]);
+
+  const nestedReferences = references
+    .filter((el) => el.includes("."))
+    .map((el) => {
+      const [environment, ...secretPathList] = el.split(".");
+      return {
+        environment,
+        secretPath: path.join("/", ...secretPathList.slice(0, -1)),
+        secretKey: secretPathList[secretPathList.length - 1]
+      };
+    });
+  const localReferences = references.filter((el) => !el.includes("."));
+  return { nestedReferences, localReferences };
+};
+
+// these are shared helper functions used by a couple of resources
+// (secret approval, rotation, or anywhere a secret needs to be modified)
+export const fnSecretBulkInsert = async ({
+  // TODO: Pick types here
+  folderId,
+  inputSecrets,
+  secretDAL,
+  secretVersionDAL,
+  secretTagDAL,
+  secretVersionTagDAL,
+  tx
+}: TFnSecretBulkInsert) => {
+  const sanitizedInputSecrets = inputSecrets.map(
+    ({
+      skipMultilineEncoding,
+      type,
+      key,
+      userId,
+      encryptedComment,
+      version,
+      metadata,
+      reminderNote,
+      encryptedValue,
+      reminderRepeatDays
+    }) => ({
+      skipMultilineEncoding,
+      type,
+      key,
+      userId,
+      encryptedComment,
+      version,
+      metadata,
+      reminderNote,
+      encryptedValue,
+      reminderRepeatDays
+    })
+  );
+
+  const newSecrets = await secretDAL.insertMany(
+    sanitizedInputSecrets.map((el) => ({ ...el, folderId })),
+    tx
+  );
+  const newSecretGroupedByKeyName = groupBy(newSecrets, (item) => item.key);
+  const newSecretTags = inputSecrets.flatMap(({ tagIds: secretTags = [], key }) =>
+    secretTags.map((tag) => ({
+      [`${TableName.SecretTag}Id` as const]: tag,
+      [`${TableName.SecretV2}Id` as const]: newSecretGroupedByKeyName[key][0].id
+    }))
+  );
+  const secretVersions = await secretVersionDAL.insertMany(
+    sanitizedInputSecrets.map((el) => ({
+      ...el,
+      folderId,
+      secretId: newSecretGroupedByKeyName[el.key][0].id
+    })),
+    tx
+  );
+  await secretDAL.upsertSecretReferences(
+    inputSecrets.map(({ references = [], key }) => ({
+      secretId: newSecretGroupedByKeyName[key][0].id,
+      references
+    })),
+    tx
+  );
+  if (newSecretTags.length) {
+    const secTags = await secretTagDAL.saveTagsToSecretV2(newSecretTags, tx);
+    const secVersionsGroupBySecId = groupBy(secretVersions, (i) => i.secretId);
+    const newSecretVersionTags = secTags.flatMap(({ secrets_v2Id, secret_tagsId }) => ({
+      [`${TableName.SecretVersionV2}Id` as const]: secVersionsGroupBySecId[secrets_v2Id][0].id,
+      [`${TableName.SecretTag}Id` as const]: secret_tagsId
+    }));
+    await secretVersionTagDAL.insertMany(newSecretVersionTags, tx);
+  }
+
+  return newSecrets.map((secret) => ({ ...secret, _id: secret.id }));
+};
+
+export const fnSecretBulkUpdate = async ({
+  tx,
+  inputSecrets,
+  folderId,
+  secretDAL,
+  secretVersionDAL,
+  secretTagDAL,
+  secretVersionTagDAL
+}: TFnSecretBulkUpdate) => {
+  const sanitizedInputSecrets = inputSecrets.map(
+    ({
+      filter,
+      data: {
+        skipMultilineEncoding,
+        type,
+        key,
+        encryptedValue,
+        userId,
+        encryptedComment,
+        metadata,
+        reminderNote,
+        reminderRepeatDays
+      }
+    }) => ({
+      filter: { ...filter, folderId },
+      data: {
+        skipMultilineEncoding,
+ type, + key, + userId, + encryptedComment, + metadata, + reminderNote, + encryptedValue, + reminderRepeatDays + } + }) + ); + + const newSecrets = await secretDAL.bulkUpdate(sanitizedInputSecrets, tx); + const secretVersions = await secretVersionDAL.insertMany( + newSecrets.map( + ({ + skipMultilineEncoding, + type, + key, + userId, + encryptedComment, + version, + metadata, + reminderNote, + encryptedValue, + reminderRepeatDays, + id: secretId + }) => ({ + skipMultilineEncoding, + type, + key, + userId, + encryptedComment, + version, + metadata, + reminderNote, + encryptedValue, + reminderRepeatDays, + folderId, + secretId + }) + ), + tx + ); + await secretDAL.upsertSecretReferences( + inputSecrets + .filter(({ data: { references } }) => Boolean(references)) + .map(({ data: { references = [] } }, i) => ({ + secretId: newSecrets[i].id, + references + })), + tx + ); + const secsUpdatedTag = inputSecrets.flatMap(({ data: { tags } }, i) => + tags !== undefined ? { tags, secretId: newSecrets[i].id } : [] + ); + if (secsUpdatedTag.length) { + await secretTagDAL.deleteTagsToSecretV2( + { $in: { secrets_v2Id: secsUpdatedTag.map(({ secretId }) => secretId) } }, + tx + ); + const newSecretTags = secsUpdatedTag.flatMap(({ tags: secretTags = [], secretId }) => + secretTags.map((tag) => ({ + [`${TableName.SecretTag}Id` as const]: tag, + [`${TableName.SecretV2}Id` as const]: secretId + })) + ); + if (newSecretTags.length) { + const secTags = await secretTagDAL.saveTagsToSecretV2(newSecretTags, tx); + const secVersionsGroupBySecId = groupBy(secretVersions, (i) => i.secretId); + const newSecretVersionTags = secTags.flatMap(({ secrets_v2Id, secret_tagsId }) => ({ + [`${TableName.SecretVersionV2}Id` as const]: secVersionsGroupBySecId[secrets_v2Id][0].id, + [`${TableName.SecretTag}Id` as const]: secret_tagsId + })); + await secretVersionTagDAL.insertMany(newSecretVersionTags, tx); + } + } + + return newSecrets.map((secret) => ({ ...secret, _id: secret.id })); +}; + +export const fnSecretBulkDelete = async ({ + folderId, + inputSecrets, + tx, + actorId, + secretDAL, + secretQueueService +}: TFnSecretBulkDelete) => { + const deletedSecrets = await secretDAL.deleteMany( + inputSecrets.map(({ type, secretKey }) => ({ + key: secretKey, + type + })), + folderId, + actorId, + tx + ); + + await Promise.allSettled( + deletedSecrets + .filter(({ reminderRepeatDays }) => Boolean(reminderRepeatDays)) + .map(({ id, reminderRepeatDays }) => + secretQueueService.removeSecretReminder({ secretId: id, repeatDays: reminderRepeatDays as number }) + ) + ); + + return deletedSecrets; +}; + +// Introduce a new interface for mapping parent IDs to their children +interface FolderMap { + [parentId: string]: TSecretFolders[]; +} +const buildHierarchy = (folders: TSecretFolders[]): FolderMap => { + const map: FolderMap = {}; + map.null = []; // Initialize mapping for root directory + + folders.forEach((folder) => { + const parentId = folder.parentId || "null"; + if (!map[parentId]) { + map[parentId] = []; + } + map[parentId].push(folder); + }); + + return map; +}; + +const generatePaths = ( + map: FolderMap, + parentId: string = "null", + basePath: string = "", + currentDepth: number = 0 +): { path: string; folderId: string }[] => { + const children = map[parentId || "null"] || []; + let paths: { path: string; folderId: string }[] = []; + + children.forEach((child) => { + // Determine if this is the root folder of the environment. 
If no parentId is present and the name is "root", it's the root folder.
+    const isRootFolder = child.name === "root" && !child.parentId;
+
+    // Form the current path based on the base path and the current child
+    // eslint-disable-next-line no-nested-ternary
+    const currPath = basePath === "" ? (isRootFolder ? "/" : `/${child.name}`) : `${basePath}/${child.name}`;
+
+    // Add the current path
+    paths.push({
+      path: currPath,
+      folderId: child.id
+    });
+
+    // Cap the recursion depth at 20 as a circuit breaker, so a corrupted or
+    // cyclic folder hierarchy cannot drive unbounded recursion and memory use.
+    if (currentDepth >= 20) {
+      logger.info(`generatePaths: Recursion depth exceeded 20, breaking out of recursion [map=${JSON.stringify(map)}]`);
+      return;
+    }
+    // Recursively generate paths for children, passing down the formatted path
+    const childPaths = generatePaths(map, child.id, currPath, currentDepth + 1);
+    paths = paths.concat(
+      childPaths.map((p) => ({
+        path: p.path,
+        folderId: p.folderId
+      }))
+    );
+  });
+
+  return paths;
+};
+
+type TRecursivelyFetchSecretsFromFoldersArg = {
+  folderDAL: Pick<TSecretFolderDALFactory, "find">;
+  projectEnvDAL: Pick<TProjectEnvDALFactory, "findOne">;
+  projectId: string;
+  environment: string;
+  currentPath: string;
+};
+
+export const recursivelyGetSecretPaths = async ({
+  folderDAL,
+  projectEnvDAL,
+  projectId,
+  environment,
+  currentPath
+}: TRecursivelyFetchSecretsFromFoldersArg) => {
+  const env = await projectEnvDAL.findOne({
+    projectId,
+    slug: environment
+  });
+
+  if (!env) {
+    throw new NotFoundError({
+      message: `Environment with slug '${environment}' in project with ID ${projectId} not found`
+    });
+  }
+
+  // Fetch all folders in the environment at once with a single query
+  const folders = await folderDAL.find({
+    envId: env.id,
+    isReserved: false
+  });
+
+  // Build the folder hierarchy map
+  const folderMap = buildHierarchy(folders);
+
+  // Generate the folder paths and normalize the root path to /
+  const paths = generatePaths(folderMap).map((p) => ({
+    path: p.path === "/" ? p.path : p.path.substring(1),
+    folderId: p.folderId
+  }));
+
+  const pathsInCurrentDirectory = paths.filter((folder) =>
+    folder.path.startsWith(currentPath === "/" ? "" : currentPath)
+  );
"" : currentPath) + ); + + return pathsInCurrentDirectory; +}; +// used to convert multi line ones to quotes ones with \n +const formatMultiValueEnv = (val?: string) => { + if (!val) return ""; + if (!val.match("\n")) return val; + return `"${val.replace(/\n/g, "\\n")}"`; +}; + +type TSecretReferenceTraceNode = { + key: string; + value?: string; + environment: string; + secretPath: string; + children: TSecretReferenceTraceNode[]; +}; +type TInterpolateSecretArg = { + projectId: string; + decryptSecretValue: (encryptedValue?: Buffer | null) => string | undefined; + secretDAL: Pick; + folderDAL: Pick; + canExpandValue: (environment: string, secretPath: string, secretName: string, secretTagSlugs: string[]) => boolean; +}; + +const MAX_SECRET_REFERENCE_DEPTH = 10; +export const expandSecretReferencesFactory = ({ + projectId, + decryptSecretValue: decryptSecret, + secretDAL, + folderDAL, + canExpandValue +}: TInterpolateSecretArg) => { + const secretCache: Record> = {}; + const getCacheUniqueKey = (environment: string, secretPath: string) => `${environment}-${secretPath}`; + + const fetchSecret = async (environment: string, secretPath: string, secretKey: string) => { + const cacheKey = getCacheUniqueKey(environment, secretPath); + + if (secretCache?.[cacheKey]) { + return secretCache[cacheKey][secretKey] || { value: "", tags: [] }; + } + + const folder = await folderDAL.findBySecretPath(projectId, environment, secretPath); + if (!folder) return { value: "", tags: [] }; + const secrets = await secretDAL.findByFolderId(folder.id); + + const decryptedSecret = secrets.reduce>((prev, secret) => { + // eslint-disable-next-line no-param-reassign + prev[secret.key] = { value: decryptSecret(secret.encryptedValue) || "", tags: secret.tags?.map((el) => el.slug) }; + return prev; + }, {}); + + secretCache[cacheKey] = decryptedSecret; + + return secretCache[cacheKey][secretKey] || { value: "", tags: [] }; + }; + + const recursivelyExpandSecret = async (dto: { + value?: string; + secretPath: string; + environment: string; + shouldStackTrace?: boolean; + }) => { + const stackTrace = { ...dto, key: "root", children: [] } as TSecretReferenceTraceNode; + + if (!dto.value) return { expandedValue: "", stackTrace }; + const stack = [{ ...dto, depth: 0, trace: stackTrace }]; + let expandedValue = dto.value; + + while (stack.length) { + const { value, secretPath, environment, depth, trace } = stack.pop()!; + + // eslint-disable-next-line no-continue + if (depth > MAX_SECRET_REFERENCE_DEPTH) continue; + const refs = value?.match(INTERPOLATION_SYNTAX_REG); + + if (refs) { + for (const interpolationSyntax of refs) { + const interpolationKey = interpolationSyntax.slice(2, interpolationSyntax.length - 1); + const entities = interpolationKey.trim().split("."); + + // eslint-disable-next-line no-continue + if (!entities.length) continue; + + let referencedSecretPath = ""; + let referencedSecretKey = ""; + let referencedSecretEnvironmentSlug = ""; + let referencedSecretValue = ""; + + if (entities.length === 1) { + const [secretKey] = entities; + + // eslint-disable-next-line no-continue,no-await-in-loop + const referredValue = await fetchSecret(environment, secretPath, secretKey); + if (!canExpandValue(environment, secretPath, secretKey, referredValue.tags)) + throw new ForbiddenRequestError({ + message: `You are attempting to reference secret named ${secretKey} from environment ${environment} in path ${secretPath} which you do not have access to.` + }); + + const cacheKey = getCacheUniqueKey(environment, secretPath); + 
secretCache[cacheKey][secretKey] = referredValue; + + referencedSecretValue = referredValue.value; + referencedSecretKey = secretKey; + referencedSecretPath = secretPath; + referencedSecretEnvironmentSlug = environment; + } else { + const secretReferenceEnvironment = entities[0]; + const secretReferencePath = path.join("/", ...entities.slice(1, entities.length - 1)); + const secretReferenceKey = entities[entities.length - 1]; + + // eslint-disable-next-line no-await-in-loop + const referedValue = await fetchSecret(secretReferenceEnvironment, secretReferencePath, secretReferenceKey); + if (!canExpandValue(secretReferenceEnvironment, secretReferencePath, secretReferenceKey, referedValue.tags)) + throw new ForbiddenRequestError({ + message: `You are attempting to reference secret named ${secretReferenceKey} from environment ${secretReferenceEnvironment} in path ${secretReferencePath} which you do not have access to.` + }); + + const cacheKey = getCacheUniqueKey(secretReferenceEnvironment, secretReferencePath); + secretCache[cacheKey][secretReferenceKey] = referedValue; + + referencedSecretValue = referedValue.value; + referencedSecretKey = secretReferenceKey; + referencedSecretPath = secretReferencePath; + referencedSecretEnvironmentSlug = secretReferenceEnvironment; + } + + const node = { + value: referencedSecretValue, + secretPath: referencedSecretPath, + environment: referencedSecretEnvironmentSlug, + depth: depth + 1, + trace + }; + + const shouldExpandMore = INTERPOLATION_SYNTAX_REG_NON_GLOBAL.test(referencedSecretValue); + if (dto.shouldStackTrace) { + const stackTraceNode = { ...node, children: [], key: referencedSecretKey, trace: null }; + trace?.children.push(stackTraceNode); + // if stack trace this would be child node + if (shouldExpandMore) { + stack.push({ ...node, trace: stackTraceNode }); + } + } else if (shouldExpandMore) { + // if no stack trace is needed we just keep going with root node + stack.push(node); + } + + if (referencedSecretValue) { + expandedValue = expandedValue.replaceAll( + interpolationSyntax, + () => referencedSecretValue // prevents special characters from triggering replacement patterns + ); + } + } + } + } + + return { expandedValue, stackTrace }; + }; + + const expandSecret = async (inputSecret: { + value?: string; + skipMultilineEncoding?: boolean | null; + secretPath: string; + environment: string; + }) => { + if (!inputSecret.value) return inputSecret.value; + + const shouldExpand = Boolean(inputSecret.value?.match(INTERPOLATION_SYNTAX_REG)); + if (!shouldExpand) return inputSecret.value; + + const { expandedValue } = await recursivelyExpandSecret(inputSecret); + + return inputSecret.skipMultilineEncoding ? 
formatMultiValueEnv(expandedValue) : expandedValue;
+  };
+
+  const getExpandedSecretStackTrace = async (inputSecret: {
+    value?: string;
+    secretPath: string;
+    environment: string;
+  }) => {
+    const { stackTrace, expandedValue } = await recursivelyExpandSecret({ ...inputSecret, shouldStackTrace: true });
+    return { stackTrace, expandedValue };
+  };
+
+  return { expandSecretReferences: expandSecret, getExpandedSecretStackTrace };
+};
+
+export const reshapeBridgeSecret = (
+  workspaceId: string,
+  environment: string,
+  secretPath: string,
+  secret: Omit<TSecretsV2, "encryptedValue" | "encryptedComment"> & {
+    value: string;
+    comment: string;
+    tags?: {
+      id: string;
+      slug: string;
+      color?: string | null;
+      name: string;
+    }[];
+  }
+) => ({
+  secretKey: secret.key,
+  secretPath,
+  workspace: workspaceId,
+  environment,
+  secretValue: secret.value || "",
+  secretComment: secret.comment || "",
+  version: secret.version,
+  type: secret.type,
+  _id: secret.id,
+  id: secret.id,
+  user: secret.userId,
+  tags: secret.tags,
+  skipMultilineEncoding: secret.skipMultilineEncoding,
+  secretReminderRepeatDays: secret.reminderRepeatDays,
+  secretReminderNote: secret.reminderNote,
+  metadata: secret.metadata,
+  createdAt: secret.createdAt,
+  updatedAt: secret.updatedAt
+});
diff --git a/backend/src/services/secret-v2-bridge/secret-v2-bridge-service.ts b/backend/src/services/secret-v2-bridge/secret-v2-bridge-service.ts
new file mode 100644
index 0000000000..d49a183aea
--- /dev/null
+++ b/backend/src/services/secret-v2-bridge/secret-v2-bridge-service.ts
@@ -0,0 +1,2057 @@
+import { ForbiddenError, PureAbility, subject } from "@casl/ability";
+import { z } from "zod";
+
+import { ProjectMembershipRole, SecretsV2Schema, SecretType, TableName } from "@app/db/schemas";
+import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
+import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
+import { TSecretApprovalPolicyServiceFactory } from "@app/ee/services/secret-approval-policy/secret-approval-policy-service";
+import { TSecretApprovalRequestDALFactory } from "@app/ee/services/secret-approval-request/secret-approval-request-dal";
+import { TSecretApprovalRequestSecretDALFactory } from "@app/ee/services/secret-approval-request/secret-approval-request-secret-dal";
+import { TSecretSnapshotServiceFactory } from "@app/ee/services/secret-snapshot/secret-snapshot-service";
+import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
+import { diff, groupBy } from "@app/lib/fn";
+import { setKnexStringValue } from "@app/lib/knex";
+import { logger } from "@app/lib/logger";
+import { alphaNumericNanoId } from "@app/lib/nanoid";
+
+import { ActorType } from "../auth/auth-type";
+import { TKmsServiceFactory } from "../kms/kms-service";
+import { KmsDataKey } from "../kms/kms-types";
+import { TProjectEnvDALFactory } from "../project-env/project-env-dal";
+import { TSecretQueueFactory } from "../secret/secret-queue";
+import { TSecretFolderDALFactory } from "../secret-folder/secret-folder-dal";
+import { TSecretImportDALFactory } from "../secret-import/secret-import-dal";
+import { fnSecretsV2FromImports } from "../secret-import/secret-import-fns";
+import { TSecretTagDALFactory } from "../secret-tag/secret-tag-dal";
+import { TSecretV2BridgeDALFactory } from "./secret-v2-bridge-dal";
+import {
+  expandSecretReferencesFactory,
+  fnSecretBulkDelete,
+  fnSecretBulkInsert,
+  fnSecretBulkUpdate,
+  getAllSecretReferences,
+  recursivelyGetSecretPaths,
reshapeBridgeSecret +} from "./secret-v2-bridge-fns"; +import { + SecretOperations, + TBackFillSecretReferencesDTO, + TCreateManySecretDTO, + TCreateSecretDTO, + TDeleteManySecretDTO, + TDeleteSecretDTO, + TGetASecretDTO, + TGetSecretReferencesTreeDTO, + TGetSecretsDTO, + TGetSecretsRawByFolderMappingsDTO, + TGetSecretVersionsDTO, + TMoveSecretsDTO, + TSecretReference, + TUpdateManySecretDTO, + TUpdateSecretDTO +} from "./secret-v2-bridge-types"; +import { TSecretVersionV2DALFactory } from "./secret-version-dal"; +import { TSecretVersionV2TagDALFactory } from "./secret-version-tag-dal"; + +type TSecretV2BridgeServiceFactoryDep = { + secretDAL: TSecretV2BridgeDALFactory; + secretVersionDAL: TSecretVersionV2DALFactory; + kmsService: Pick; + secretVersionTagDAL: Pick; + secretTagDAL: TSecretTagDALFactory; + permissionService: Pick; + projectEnvDAL: Pick; + folderDAL: Pick< + TSecretFolderDALFactory, + "findBySecretPath" | "updateById" | "findById" | "findByManySecretPath" | "find" | "findBySecretPathMultiEnv" + >; + secretImportDAL: Pick; + secretQueueService: Pick; + secretApprovalPolicyService: Pick; + secretApprovalRequestDAL: Pick; + secretApprovalRequestSecretDAL: Pick< + TSecretApprovalRequestSecretDALFactory, + "insertV2Bridge" | "insertApprovalSecretV2Tags" + >; + snapshotService: Pick; +}; + +export type TSecretV2BridgeServiceFactory = ReturnType; + +/* + * This service is a bridge from our old architecture towards the new architecture + */ +export const secretV2BridgeServiceFactory = ({ + secretDAL, + projectEnvDAL, + secretTagDAL, + secretVersionDAL, + folderDAL, + permissionService, + snapshotService, + secretQueueService, + secretImportDAL, + secretVersionTagDAL, + secretApprovalPolicyService, + secretApprovalRequestDAL, + secretApprovalRequestSecretDAL, + kmsService +}: TSecretV2BridgeServiceFactoryDep) => { + const $validateSecretReferences = async ( + projectId: string, + permission: PureAbility, + references: ReturnType["nestedReferences"] + ) => { + if (!references.length) return; + + const uniqueReferenceEnvironmentSlugs = Array.from(new Set(references.map((el) => el.environment))); + const referencesEnvironments = await projectEnvDAL.findBySlugs(projectId, uniqueReferenceEnvironmentSlugs); + if (referencesEnvironments.length !== uniqueReferenceEnvironmentSlugs.length) + throw new BadRequestError({ + message: `Referenced environment not found. 
Missing ${diff(
+          uniqueReferenceEnvironmentSlugs,
+          referencesEnvironments.map((el) => el.slug)
+        ).join(",")}`
+      });
+
+    const referencesEnvironmentGroupBySlug = groupBy(referencesEnvironments, (i) => i.slug);
+    const referredFolders = await folderDAL.findByManySecretPath(
+      references.map((el) => ({
+        secretPath: el.secretPath,
+        envId: referencesEnvironmentGroupBySlug[el.environment][0].id
+      }))
+    );
+    const referencesFolderGroupByPath = groupBy(referredFolders.filter(Boolean), (i) => `${i?.envId}-${i?.path}`);
+    const referredSecrets = await secretDAL.find({
+      $complex: {
+        operator: "or",
+        value: references.map((el) => {
+          const folderId =
+            referencesFolderGroupByPath[`${referencesEnvironmentGroupBySlug[el.environment][0].id}-${el.secretPath}`][0]
+              ?.id;
+          if (!folderId) throw new BadRequestError({ message: `Referenced path ${el.secretPath} doesn't exist` });
+
+          return {
+            operator: "and",
+            value: [
+              {
+                operator: "eq",
+                field: "folderId",
+                value: folderId
+              },
+              {
+                operator: "eq",
+                field: "key",
+                value: el.secretKey
+              }
+            ]
+          };
+        })
+      }
+    });
+
+    if (
+      referredSecrets.length !==
+      new Set(references.map(({ secretKey, secretPath, environment }) => `${secretKey}.${secretPath}.${environment}`))
+        .size // only count unique references
+    )
+      throw new BadRequestError({
+        message: `Referenced secret(s) not found: ${diff(
+          references.map((el) => el.secretKey),
+          referredSecrets.map((el) => el.key)
+        ).join(",")}`
+      });
+
+    const referredSecretsGroupBySecretKey = groupBy(referredSecrets, (i) => i.key);
+    references.forEach((el) => {
+      ForbiddenError.from(permission).throwUnlessCan(
+        ProjectPermissionActions.Read,
+        subject(ProjectPermissionSub.Secrets, {
+          environment: el.environment,
+          secretPath: el.secretPath,
+          secretName: el.secretKey,
+          tags: referredSecretsGroupBySecretKey[el.secretKey][0]?.tags?.map((i) => i.slug)
+        })
+      );
+    });
+
+    return referredSecrets;
+  };
+
+  const createSecret = async ({
+    actor,
+    actorId,
+    actorOrgId,
+    environment,
+    actorAuthMethod,
+    projectId,
+    secretPath,
+    ...inputSecret
+  }: TCreateSecretDTO) => {
+    const { permission } = await permissionService.getProjectPermission(
+      actor,
+      actorId,
+      projectId,
+      actorAuthMethod,
+      actorOrgId
+    );
+
+    const folder = await folderDAL.findBySecretPath(projectId, environment, secretPath);
+    if (!folder)
+      throw new NotFoundError({
+        message: `Folder with path '${secretPath}' in environment with slug '${environment}' not found`,
+        name: "CreateSecret"
+      });
+    const folderId = folder.id;
+
+    if (ActorType.USER !== actor && inputSecret.type === SecretType.Personal) {
+      throw new BadRequestError({ message: "Must be user to create personal secret" });
+    }
+
+    const doesSecretExist = await secretDAL.findOne({
+      key: inputSecret.secretName,
+      type: SecretType.Shared,
+      folderId
+    });
+    if (inputSecret.type === SecretType.Shared && doesSecretExist)
+      throw new BadRequestError({ message: "Secret already exists" });
+
+    // when creating a personal secret, check that its shared counterpart exists
+    if (inputSecret.type === SecretType.Personal && !doesSecretExist) {
+      throw new BadRequestError({
+        message: "Failed to create personal secret override for no corresponding shared secret"
+      });
+    }
+
+    // validate tags: fetch all requested tags; if the counts differ, at least one tag ID was invalid
+    const tags = inputSecret.tagIds ? await secretTagDAL.find({ projectId, $in: { id: inputSecret.tagIds } }) : [];
+    if ((inputSecret.tagIds || []).length !== tags.length)
+      throw new NotFoundError({ message: `Tag not found. 
Found ${tags.map((el) => el.slug).join(",")}` }); + + const { secretName, type, ...inputSecretData } = inputSecret; + + ForbiddenError.from(permission).throwUnlessCan( + ProjectPermissionActions.Create, + subject(ProjectPermissionSub.Secrets, { + environment, + secretPath, + secretName, + secretTags: tags?.map((el) => el.slug) + }) + ); + + const { nestedReferences, localReferences } = getAllSecretReferences(inputSecret.secretValue); + const allSecretReferences = nestedReferences.concat( + localReferences.map((el) => ({ secretKey: el, secretPath, environment })) + ); + await $validateSecretReferences(projectId, permission, allSecretReferences); + + const { encryptor: secretManagerEncryptor } = await kmsService.createCipherPairWithDataKey({ + type: KmsDataKey.SecretManager, + projectId + }); + const secret = await secretDAL.transaction((tx) => + fnSecretBulkInsert({ + folderId, + inputSecrets: [ + { + version: 1, + type, + reminderRepeatDays: inputSecretData.secretReminderRepeatDays, + encryptedComment: setKnexStringValue( + inputSecretData.secretComment, + (value) => secretManagerEncryptor({ plainText: Buffer.from(value) }).cipherTextBlob + ), + encryptedValue: inputSecretData.secretValue + ? secretManagerEncryptor({ plainText: Buffer.from(inputSecretData.secretValue) }).cipherTextBlob + : undefined, + reminderNote: inputSecretData.secretReminderNote, + skipMultilineEncoding: inputSecretData.skipMultilineEncoding, + key: secretName, + userId: inputSecret.type === SecretType.Personal ? actorId : null, + tagIds: inputSecret.tagIds, + references: nestedReferences + } + ], + secretDAL, + secretVersionDAL, + secretTagDAL, + secretVersionTagDAL, + tx + }) + ); + + if (inputSecret.type === SecretType.Shared) { + await snapshotService.performSnapshot(folderId); + await secretQueueService.syncSecrets({ + secretPath, + actorId, + actor, + projectId, + environmentSlug: folder.environment.slug + }); + } + + return reshapeBridgeSecret(projectId, environment, secretPath, { + ...secret[0], + value: inputSecret.secretValue, + comment: inputSecret.secretComment || "" + }); + }; + + const updateSecret = async ({ + actor, + actorId, + actorOrgId, + environment, + actorAuthMethod, + projectId, + secretPath, + ...inputSecret + }: TUpdateSecretDTO) => { + const { permission } = await permissionService.getProjectPermission( + actor, + actorId, + projectId, + actorAuthMethod, + actorOrgId + ); + + if (inputSecret.newSecretName === "") { + throw new BadRequestError({ message: "New secret name cannot be empty" }); + } + + const folder = await folderDAL.findBySecretPath(projectId, environment, secretPath); + if (!folder) + throw new NotFoundError({ + message: `Folder with path '${secretPath}' in environment with slug '${environment}' not found`, + name: "UpdateSecret" + }); + const folderId = folder.id; + + if (ActorType.USER !== actor && inputSecret.type === SecretType.Personal) { + throw new BadRequestError({ message: "Must be user to create personal secret" }); + } + + if (inputSecret.newSecretName && inputSecret.type === SecretType.Personal) { + throw new BadRequestError({ message: "Personal secret cannot change the key name" }); + } + + let secret; + let secretId: string; + if (inputSecret.type === SecretType.Personal) { + const personalSecretToModify = await secretDAL.findOne({ + key: inputSecret.secretName, + type: SecretType.Personal, + folderId, + userId: actorId + }); + if (!personalSecretToModify) + throw new NotFoundError({ message: `Personal secret with name ${inputSecret.secretName} not found` }); + 
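+    // Editor's note (hedged): personal secrets act as per-user overrides keyed by
+    // (key, folderId, userId) alongside the shared row that holds the team value,
+    // which is why the lookup above filters on `userId: actorId` while the shared
+    // branch below omits the user filter.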
secretId = personalSecretToModify.id;
+      secret = personalSecretToModify;
+    } else {
+      const sharedSecretToModify = await secretDAL.findOne({
+        key: inputSecret.secretName,
+        type: SecretType.Shared,
+        folderId
+      });
+      if (!sharedSecretToModify)
+        throw new NotFoundError({ message: `Secret with name ${inputSecret.secretName} not found` });
+      secretId = sharedSecretToModify.id;
+      secret = sharedSecretToModify;
+    }
+
+    ForbiddenError.from(permission).throwUnlessCan(
+      ProjectPermissionActions.Edit,
+      subject(ProjectPermissionSub.Secrets, {
+        environment,
+        secretPath,
+        secretName: inputSecret.secretName,
+        secretTags: secret.tags.map((el) => el.slug)
+      })
+    );
+
+    // validate tags: fetch all requested tags; if the counts differ, at least one tag ID was invalid
+    const tags = inputSecret.tagIds ? await secretTagDAL.find({ projectId, $in: { id: inputSecret.tagIds } }) : [];
+    if ((inputSecret.tagIds || []).length !== tags.length)
+      throw new NotFoundError({ message: `Tag not found. Found ${tags.map((el) => el.slug).join(",")}` });
+
+    // re-check permission against the new tag set
+    ForbiddenError.from(permission).throwUnlessCan(
+      ProjectPermissionActions.Edit,
+      subject(ProjectPermissionSub.Secrets, {
+        environment,
+        secretPath,
+        secretName: inputSecret.secretName,
+        secretTags: tags?.map((el) => el.slug)
+      })
+    );
+
+    if (inputSecret.newSecretName) {
+      const doesNewNameSecretExist = await secretDAL.findOne({
+        key: inputSecret.newSecretName,
+        type: SecretType.Shared,
+        folderId
+      });
+      if (doesNewNameSecretExist) throw new BadRequestError({ message: "Secret with the new name already exists" });
+      ForbiddenError.from(permission).throwUnlessCan(
+        ProjectPermissionActions.Edit,
+        subject(ProjectPermissionSub.Secrets, {
+          environment,
+          secretPath,
+          secretName: inputSecret.newSecretName,
+          secretTags: tags?.map((el) => el.slug)
+        })
+      );
+    }
+
+    const { secretName, secretValue } = inputSecret;
+
+    const { encryptor: secretManagerEncryptor } = await kmsService.createCipherPairWithDataKey({
+      type: KmsDataKey.SecretManager,
+      projectId
+    });
+    const encryptedValue = secretValue
+      ?
{ + encryptedValue: secretManagerEncryptor({ plainText: Buffer.from(secretValue) }).cipherTextBlob, + references: getAllSecretReferences(secretValue).nestedReferences + } + : {}; + + if (secretValue) { + const { nestedReferences, localReferences } = getAllSecretReferences(secretValue); + const allSecretReferences = nestedReferences.concat( + localReferences.map((el) => ({ secretKey: el, secretPath, environment })) + ); + await $validateSecretReferences(projectId, permission, allSecretReferences); + } + + const updatedSecret = await secretDAL.transaction(async (tx) => + fnSecretBulkUpdate({ + folderId, + inputSecrets: [ + { + filter: { id: secretId }, + data: { + reminderRepeatDays: inputSecret.secretReminderRepeatDays, + encryptedComment: setKnexStringValue( + inputSecret.secretComment, + (value) => secretManagerEncryptor({ plainText: Buffer.from(value) }).cipherTextBlob + ), + reminderNote: inputSecret.secretReminderNote, + skipMultilineEncoding: inputSecret.skipMultilineEncoding, + key: inputSecret.newSecretName || secretName, + tags: inputSecret.tagIds, + ...encryptedValue + } + } + ], + secretDAL, + secretVersionDAL, + secretTagDAL, + secretVersionTagDAL, + tx + }) + ); + await secretQueueService.handleSecretReminder({ + newSecret: { + id: updatedSecret[0].id, + ...inputSecret + }, + oldSecret: secret, + projectId + }); + + if (inputSecret.type === SecretType.Shared) { + await snapshotService.performSnapshot(folderId); + await secretQueueService.syncSecrets({ + secretPath, + actorId, + actor, + projectId, + environmentSlug: folder.environment.slug + }); + } + + return reshapeBridgeSecret(projectId, environment, secretPath, { + ...updatedSecret[0], + value: inputSecret.secretValue || "", + comment: inputSecret.secretComment || "" + }); + }; + + const deleteSecret = async ({ + actor, + actorId, + actorOrgId, + actorAuthMethod, + environment, + projectId, + secretPath, + ...inputSecret + }: TDeleteSecretDTO) => { + const { permission } = await permissionService.getProjectPermission( + actor, + actorId, + projectId, + actorAuthMethod, + actorOrgId + ); + + const folder = await folderDAL.findBySecretPath(projectId, environment, secretPath); + if (!folder) + throw new NotFoundError({ + message: `Folder with path '${secretPath}' in environment with slug '${environment}' not found`, + name: "DeleteSecret" + }); + const folderId = folder.id; + + if (ActorType.USER !== actor && inputSecret.type === SecretType.Personal) { + throw new BadRequestError({ message: "Must be user to delete personal secret" }); + } + + const secretToDelete = await secretDAL.findOne({ + key: inputSecret.secretName, + folderId, + ...(inputSecret.type === SecretType.Shared + ? 
{} + : { + type: SecretType.Personal, + userId: actorId + }) + }); + if (!secretToDelete) throw new NotFoundError({ message: "Secret not found" }); + ForbiddenError.from(permission).throwUnlessCan( + ProjectPermissionActions.Delete, + subject(ProjectPermissionSub.Secrets, { + environment, + secretPath, + secretName: secretToDelete.key, + secretTags: secretToDelete.tags?.map((el) => el.slug) + }) + ); + + const deletedSecret = await secretDAL.transaction(async (tx) => + fnSecretBulkDelete({ + projectId, + folderId, + actorId, + secretDAL, + secretQueueService, + inputSecrets: [ + { + type: inputSecret.type as SecretType, + secretKey: inputSecret.secretName + } + ], + tx + }) + ); + + if (inputSecret.type === SecretType.Shared) { + await snapshotService.performSnapshot(folderId); + await secretQueueService.syncSecrets({ + secretPath, + actorId, + actor, + projectId, + environmentSlug: folder.environment.slug + }); + } + + const { decryptor: secretManagerDecryptor } = await kmsService.createCipherPairWithDataKey({ + type: KmsDataKey.SecretManager, + projectId + }); + return reshapeBridgeSecret(projectId, environment, secretPath, { + ...deletedSecret[0], + value: deletedSecret[0].encryptedValue + ? secretManagerDecryptor({ cipherTextBlob: deletedSecret[0].encryptedValue }).toString() + : "", + comment: deletedSecret[0].encryptedComment + ? secretManagerDecryptor({ cipherTextBlob: deletedSecret[0].encryptedComment }).toString() + : "" + }); + }; + + // get unique secrets count for multiple envs + const getSecretsCountMultiEnv = async ({ + actorId, + path, + projectId, + actor, + actorOrgId, + actorAuthMethod, + environments, + isInternal, + ...params + }: Pick & { + environments: string[]; + isInternal?: boolean; + }) => { + if (!isInternal) { + const { permission } = await permissionService.getProjectPermission( + actor, + actorId, + projectId, + actorAuthMethod, + actorOrgId + ); + + ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Secrets); + } + + const folders = await folderDAL.findBySecretPathMultiEnv(projectId, environments, path); + if (!folders.length) return 0; + + const count = await secretDAL.countByFolderIds( + folders.map((folder) => folder.id), + actorId, + undefined, + params + ); + + return count; + }; + + // get secret count for individual env + const getSecretsCount = async ({ + actorId, + path, + environment, + projectId, + actor, + actorOrgId, + actorAuthMethod, + ...params + }: Pick< + TGetSecretsDTO, + | "actorId" + | "actor" + | "path" + | "projectId" + | "actorOrgId" + | "actorAuthMethod" + | "tagSlugs" + | "environment" + | "search" + >) => { + const { permission } = await permissionService.getProjectPermission( + actor, + actorId, + projectId, + actorAuthMethod, + actorOrgId + ); + + ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Secrets); + + const folder = await folderDAL.findBySecretPath(projectId, environment, path); + if (!folder) return 0; + + const count = await secretDAL.countByFolderIds([folder.id], actorId, undefined, params); + + return count; + }; + + const getSecretsByFolderMappings = async ( + { projectId, userId, filters, folderMappings }: TGetSecretsRawByFolderMappingsDTO, + projectPermission: Awaited>["permission"] + ) => { + const groupedFolderMappings = groupBy(folderMappings, (folderMapping) => folderMapping.folderId); + + const secrets = await secretDAL.findByFolderIds( + folderMappings.map((folderMapping) => folderMapping.folderId), + userId, + 
undefined, + filters + ); + + const { decryptor: secretManagerDecryptor } = await kmsService.createCipherPairWithDataKey({ + type: KmsDataKey.SecretManager, + projectId + }); + + const decryptedSecrets = secrets + .filter((el) => + projectPermission.can( + ProjectPermissionActions.Read, + subject(ProjectPermissionSub.Secrets, { + environment: groupedFolderMappings[el.folderId][0].environment, + secretPath: groupedFolderMappings[el.folderId][0].path, + secretName: el.key, + secretTags: el.tags.map((i) => i.slug) + }) + ) + ) + .map((secret) => + reshapeBridgeSecret( + projectId, + groupedFolderMappings[secret.folderId][0].environment, + groupedFolderMappings[secret.folderId][0].path, + { + ...secret, + value: secret.encryptedValue + ? secretManagerDecryptor({ cipherTextBlob: secret.encryptedValue }).toString() + : "", + comment: secret.encryptedComment + ? secretManagerDecryptor({ cipherTextBlob: secret.encryptedComment }).toString() + : "" + } + ) + ); + + return decryptedSecrets; + }; + + // get secrets for multiple envs + const getSecretsMultiEnv = async ({ + actorId, + path, + environments, + projectId, + actor, + actorOrgId, + actorAuthMethod, + isInternal, + ...params + }: Pick & { + environments: string[]; + isInternal?: boolean; + }) => { + const { permission } = await permissionService.getProjectPermission( + actor, + actorId, + projectId, + actorAuthMethod, + actorOrgId + ); + if (!isInternal) { + ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Secrets); + } + + const folders = await folderDAL.findBySecretPathMultiEnv(projectId, environments, path); + + if (!folders.length) { + return []; + } + + const folderMappings = folders.map((folder) => ({ + folderId: folder.id, + path, + environment: folder.environment.slug + })); + + const decryptedSecrets = await getSecretsByFolderMappings( + { + projectId, + folderMappings, + filters: params, + userId: actorId + }, + permission + ); + + return decryptedSecrets; + }; + + const getSecrets = async ({ + actorId, + path, + environment, + projectId, + actor, + actorOrgId, + actorAuthMethod, + includeImports, + recursive, + expandSecretReferences: shouldExpandSecretReferences, + ...params + }: TGetSecretsDTO) => { + const { permission } = await permissionService.getProjectPermission( + actor, + actorId, + projectId, + actorAuthMethod, + actorOrgId + ); + + ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Secrets); + + let paths: { folderId: string; path: string }[] = []; + + if (recursive) { + const deepPaths = await recursivelyGetSecretPaths({ + folderDAL, + projectEnvDAL, + projectId, + environment, + currentPath: path + }); + + if (!deepPaths) return { secrets: [], imports: [] }; + + paths = deepPaths.map(({ folderId, path: p }) => ({ folderId, path: p })); + } else { + const folder = await folderDAL.findBySecretPath(projectId, environment, path); + if (!folder) return { secrets: [], imports: [] }; + + paths = [{ folderId: folder.id, path }]; + } + + const groupedPaths = groupBy(paths, (p) => p.folderId); + + const secrets = await secretDAL.findByFolderIds( + paths.map((p) => p.folderId), + actorId, + undefined, + params + ); + + const { decryptor: secretManagerDecryptor } = await kmsService.createCipherPairWithDataKey({ + type: KmsDataKey.SecretManager, + projectId + }); + + const decryptedSecrets = secrets + .filter((el) => + permission.can( + ProjectPermissionActions.Read, + subject(ProjectPermissionSub.Secrets, { + environment, + 
secretPath: groupedPaths[el.folderId][0].path, + secretName: el.key, + secretTags: el.tags.map((i) => i.slug) + }) + ) + ) + .map((secret) => + reshapeBridgeSecret(projectId, environment, groupedPaths[secret.folderId][0].path, { + ...secret, + value: secret.encryptedValue + ? secretManagerDecryptor({ cipherTextBlob: secret.encryptedValue }).toString() + : "", + comment: secret.encryptedComment + ? secretManagerDecryptor({ cipherTextBlob: secret.encryptedComment }).toString() + : "" + }) + ); + + const { expandSecretReferences } = expandSecretReferencesFactory({ + projectId, + folderDAL, + secretDAL, + decryptSecretValue: (value) => (value ? secretManagerDecryptor({ cipherTextBlob: value }).toString() : undefined), + canExpandValue: (expandEnvironment, expandSecretPath, expandSecretKey, expandSecretTags) => + permission.can( + ProjectPermissionActions.Read, + subject(ProjectPermissionSub.Secrets, { + environment: expandEnvironment, + secretPath: expandSecretPath, + secretName: expandSecretKey, + secretTags: expandSecretTags + }) + ) + }); + + if (shouldExpandSecretReferences) { + const secretsGroupByPath = groupBy(decryptedSecrets, (i) => i.secretPath); + await Promise.allSettled( + Object.keys(secretsGroupByPath).map((groupedPath) => + Promise.allSettled( + secretsGroupByPath[groupedPath].map(async (decryptedSecret, index) => { + const expandedSecretValue = await expandSecretReferences({ + value: decryptedSecret.secretValue, + secretPath: groupedPath, + environment, + skipMultilineEncoding: decryptedSecret.skipMultilineEncoding + }); + // eslint-disable-next-line no-param-reassign + secretsGroupByPath[groupedPath][index].secretValue = expandedSecretValue || ""; + }) + ) + ) + ); + } + + if (!includeImports) { + return { + secrets: decryptedSecrets + }; + } + + const secretImports = await secretImportDAL.findByFolderIds(paths.map((p) => p.folderId)); + const allowedImports = secretImports.filter(({ isReplication }) => !isReplication); + const importedSecrets = await fnSecretsV2FromImports({ + secretImports: allowedImports, + secretDAL, + folderDAL, + secretImportDAL, + expandSecretReferences, + decryptor: (value) => (value ? 
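+        // imported secrets are decrypted with the same project-level secret-manager data key
+        // as local ones; a missing ciphertext becomes an empty string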
secretManagerDecryptor({ cipherTextBlob: value }).toString() : ""), + hasSecretAccess: (expandEnvironment, expandSecretPath, expandSecretKey, expandSecretTags) => + permission.can( + ProjectPermissionActions.Read, + subject(ProjectPermissionSub.Secrets, { + environment: expandEnvironment, + secretPath: expandSecretPath, + secretName: expandSecretKey, + secretTags: expandSecretTags + }) + ) + }); + + return { + secrets: decryptedSecrets, + imports: importedSecrets + }; + }; + + const getSecretByName = async ({ + actorId, + actor, + actorOrgId, + actorAuthMethod, + projectId, + environment, + path, + type, + secretName, + version, + includeImports, + expandSecretReferences: shouldExpandSecretReferences + }: TGetASecretDTO) => { + const { permission } = await permissionService.getProjectPermission( + actor, + actorId, + projectId, + actorAuthMethod, + actorOrgId + ); + + const folder = await folderDAL.findBySecretPath(projectId, environment, path); + if (!folder) + throw new NotFoundError({ + message: `Folder with path '${path}' in environment with slug '${environment}' not found`, + name: "GetSecretByName" + }); + const folderId = folder.id; + + let secretType = type; + if (actor === ActorType.SERVICE) { + logger.info( + `secretServiceFactory: overriding secret type for service token [projectId=${projectId}] [factoryFunctionName=getSecretByName]` + ); + secretType = SecretType.Shared; + } + + const { decryptor: secretManagerDecryptor } = await kmsService.createCipherPairWithDataKey({ + type: KmsDataKey.SecretManager, + projectId + }); + + const secret = await (version === undefined + ? secretDAL.findOneWithTags({ + folderId, + type: secretType, + key: secretName, + userId: secretType === SecretType.Personal ? actorId : null + }) + : secretVersionDAL + .findOne({ + folderId, + type: secretType, + userId: secretType === SecretType.Personal ? actorId : null, + key: secretName + }) + .then((el) => + SecretsV2Schema.extend({ + tags: z + .object({ slug: z.string(), name: z.string(), id: z.string(), color: z.string() }) + .array() + .default([]) + .optional() + }).parse({ + ...el, + id: el.secretId + }) + )); + + ForbiddenError.from(permission).throwUnlessCan( + ProjectPermissionActions.Read, + subject(ProjectPermissionSub.Secrets, { + environment, + secretPath: path, + secretName, + secretTags: (secret?.tags || []).map((el) => el.slug) + }) + ); + + const { expandSecretReferences } = expandSecretReferencesFactory({ + projectId, + folderDAL, + secretDAL, + decryptSecretValue: (value) => (value ? secretManagerDecryptor({ cipherTextBlob: value }).toString() : undefined), + canExpandValue: (expandEnvironment, expandSecretPath, expandSecretKey, expandSecretTags) => + permission.can( + ProjectPermissionActions.Read, + subject(ProjectPermissionSub.Secrets, { + environment: expandEnvironment, + secretPath: expandSecretPath, + secretName: expandSecretKey, + secretTags: expandSecretTags + }) + ) + }); + + // now if secret is not found + // then search for imported secrets + // here we consider the import order also thus starting from bottom + if (!secret && includeImports) { + const secretImports = await secretImportDAL.find({ folderId, isReplication: false }); + const importedSecrets = await fnSecretsV2FromImports({ + secretImports, + secretDAL, + folderDAL, + secretImportDAL, + decryptor: (value) => (value ? secretManagerDecryptor({ cipherTextBlob: value }).toString() : ""), + expandSecretReferences: shouldExpandSecretReferences ? 
expandSecretReferences : undefined, + hasSecretAccess: (expandEnvironment, expandSecretPath, expandSecretKey, expandSecretTags) => + permission.can( + ProjectPermissionActions.Read, + subject(ProjectPermissionSub.Secrets, { + environment: expandEnvironment, + secretPath: expandSecretPath, + secretName: expandSecretKey, + secretTags: expandSecretTags + }) + ) + }); + + for (let i = importedSecrets.length - 1; i >= 0; i -= 1) { + for (let j = 0; j < importedSecrets[i].secrets.length; j += 1) { + const importedSecret = importedSecrets[i].secrets[j]; + if (secretName === importedSecret.key) { + return reshapeBridgeSecret(projectId, importedSecrets[i].environment, importedSecrets[i].secretPath, { + ...importedSecret, + value: importedSecret.secretValue || "", + comment: importedSecret.secretComment || "" + }); + } + } + } + } + if (!secret) throw new NotFoundError({ message: `Secret with name '${secretName}' not found` }); + + let secretValue = secret.encryptedValue + ? secretManagerDecryptor({ cipherTextBlob: secret.encryptedValue }).toString() + : ""; + if (shouldExpandSecretReferences && secretValue) { + // eslint-disable-next-line + const expandedSecretValue = await expandSecretReferences({ + environment, + secretPath: path, + value: secretValue, + skipMultilineEncoding: secret.skipMultilineEncoding + }); + + secretValue = expandedSecretValue || ""; + } + + return reshapeBridgeSecret(projectId, environment, path, { + ...secret, + value: secretValue, + comment: secret.encryptedComment + ? secretManagerDecryptor({ cipherTextBlob: secret.encryptedComment }).toString() + : "" + }); + }; + + const createManySecret = async ({ + secretPath, + actor, + actorId, + actorAuthMethod, + actorOrgId, + environment, + projectId, + secrets: inputSecrets + }: TCreateManySecretDTO) => { + const { permission } = await permissionService.getProjectPermission( + actor, + actorId, + projectId, + actorAuthMethod, + actorOrgId + ); + + const folder = await folderDAL.findBySecretPath(projectId, environment, secretPath); + if (!folder) + throw new NotFoundError({ + message: `Folder with path '${secretPath}' in environment with slug '${environment}' not found`, + name: "CreateManySecret" + }); + const folderId = folder.id; + + const secrets = await secretDAL.find({ + folderId, + $complex: { + operator: "and", + value: [ + { + operator: "or", + value: inputSecrets.map((el) => ({ + operator: "and", + value: [ + { + operator: "eq", + field: "key", + value: el.secretKey + }, + { + operator: "eq", + field: "type", + value: SecretType.Shared + } + ] + })) + } + ] + } + }); + if (secrets.length) + throw new BadRequestError({ message: `Secret already exist: ${secrets.map((el) => el.key).join(",")}` }); + + // get all tags + const sanitizedTagIds = inputSecrets.flatMap(({ tagIds = [] }) => tagIds); + const tags = sanitizedTagIds.length ? await secretTagDAL.findManyTagsById(projectId, sanitizedTagIds) : []; + if (tags.length !== sanitizedTagIds.length) + throw new NotFoundError({ message: `Tag not found. 
Found ${tags.map((el) => el.slug).join(",")}` }); + const tagsGroupByID = groupBy(tags, (i) => i.id); + + inputSecrets.forEach((el) => { + ForbiddenError.from(permission).throwUnlessCan( + ProjectPermissionActions.Create, + subject(ProjectPermissionSub.Secrets, { + environment, + secretPath, + secretName: el.secretKey, + secretTags: (el.tagIds || []).map((i) => tagsGroupByID[i][0].slug) + }) + ); + }); + + // now get all secret references made and validate the permission + const secretReferencesGroupByInputSecretKey: Record> = {}; + const secretReferences: TSecretReference[] = []; + inputSecrets.forEach((el) => { + if (el.secretValue) { + const references = getAllSecretReferences(el.secretValue); + secretReferencesGroupByInputSecretKey[el.secretKey] = references; + secretReferences.push(...references.nestedReferences); + references.localReferences.forEach((localRefKey) => { + secretReferences.push({ secretKey: localRefKey, secretPath, environment }); + }); + } + }); + await $validateSecretReferences(projectId, permission, secretReferences); + + const { encryptor: secretManagerEncryptor, decryptor: secretManagerDecryptor } = + await kmsService.createCipherPairWithDataKey({ type: KmsDataKey.SecretManager, projectId }); + + const newSecrets = await secretDAL.transaction(async (tx) => + fnSecretBulkInsert({ + inputSecrets: inputSecrets.map((el) => { + const references = secretReferencesGroupByInputSecretKey[el.secretKey].nestedReferences; + + return { + version: 1, + encryptedComment: setKnexStringValue( + el.secretComment, + (value) => secretManagerEncryptor({ plainText: Buffer.from(value) }).cipherTextBlob + ), + encryptedValue: el.secretValue + ? secretManagerEncryptor({ plainText: Buffer.from(el.secretValue) }).cipherTextBlob + : undefined, + skipMultilineEncoding: el.skipMultilineEncoding, + key: el.secretKey, + tagIds: el.tagIds, + references, + type: SecretType.Shared + }; + }), + folderId, + secretDAL, + secretVersionDAL, + secretTagDAL, + secretVersionTagDAL, + tx + }) + ); + + await snapshotService.performSnapshot(folderId); + await secretQueueService.syncSecrets({ + actor, + actorId, + secretPath, + projectId, + environmentSlug: folder.environment.slug + }); + + return newSecrets.map((el) => + reshapeBridgeSecret(projectId, environment, secretPath, { + ...el, + value: el.encryptedValue ? secretManagerDecryptor({ cipherTextBlob: el.encryptedValue }).toString() : "", + comment: el.encryptedComment ? 
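+        // ciphertext columns are nullable: a secret created without a comment decrypts to ""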
secretManagerDecryptor({ cipherTextBlob: el.encryptedComment }).toString() : "" + }) + ); + }; + + const updateManySecret = async ({ + actor, + actorId, + actorOrgId, + actorAuthMethod, + environment, + projectId, + secretPath, + secrets: inputSecrets + }: TUpdateManySecretDTO) => { + const { permission } = await permissionService.getProjectPermission( + actor, + actorId, + projectId, + actorAuthMethod, + actorOrgId + ); + + const folder = await folderDAL.findBySecretPath(projectId, environment, secretPath); + if (!folder) + throw new NotFoundError({ + message: `Folder with path '${secretPath}' in environment with slug '${environment}' not found`, + name: "UpdateManySecret" + }); + const folderId = folder.id; + + const secretsToUpdate = await secretDAL.find({ + folderId, + $complex: { + operator: "and", + value: [ + { + operator: "or", + value: inputSecrets.map((el) => ({ + operator: "and", + value: [ + { + operator: "eq", + field: "key", + value: el.secretKey + }, + { + operator: "eq", + field: "type", + value: SecretType.Shared + } + ] + })) + } + ] + } + }); + if (secretsToUpdate.length !== inputSecrets.length) + throw new NotFoundError({ message: `Secret does not exist: ${secretsToUpdate.map((el) => el.key).join(",")}` }); + const secretsToUpdateInDBGroupedByKey = groupBy(secretsToUpdate, (i) => i.key); + + secretsToUpdate.forEach((el) => { + ForbiddenError.from(permission).throwUnlessCan( + ProjectPermissionActions.Edit, + subject(ProjectPermissionSub.Secrets, { + environment, + secretPath, + secretName: el.key, + secretTags: el.tags.map((i) => i.slug) + }) + ); + }); + + // get all tags + const sanitizedTagIds = inputSecrets.flatMap(({ tagIds = [] }) => tagIds); + const tags = sanitizedTagIds.length ? await secretTagDAL.findManyTagsById(projectId, sanitizedTagIds) : []; + if (tags.length !== sanitizedTagIds.length) throw new NotFoundError({ message: "Tag not found" }); + const tagsGroupByID = groupBy(tags, (i) => i.id); + + // check again to avoid non authorized tags are removed + inputSecrets.forEach((el) => { + ForbiddenError.from(permission).throwUnlessCan( + ProjectPermissionActions.Edit, + subject(ProjectPermissionSub.Secrets, { + environment, + secretPath, + secretName: el.secretKey, + secretTags: (el.tagIds || []).map((i) => tagsGroupByID[i][0].slug) + }) + ); + }); + + // now find any secret that needs to update its name + // same process as above + const secretsWithNewName = inputSecrets.filter(({ newSecretName }) => Boolean(newSecretName)); + if (secretsWithNewName.length) { + const secrets = await secretDAL.find({ + folderId, + $complex: { + operator: "and", + value: [ + { + operator: "or", + value: secretsWithNewName.map((el) => ({ + operator: "and", + value: [ + { + operator: "eq", + field: "key", + value: el.secretKey + }, + { + operator: "eq", + field: "type", + value: SecretType.Shared + } + ] + })) + } + ] + } + }); + if (secrets.length) + throw new BadRequestError({ + message: `Secret with new name already exists: ${secretsWithNewName.map((el) => el.newSecretName).join(",")}` + }); + + secretsWithNewName.forEach((el) => { + ForbiddenError.from(permission).throwUnlessCan( + ProjectPermissionActions.Create, + subject(ProjectPermissionSub.Secrets, { + environment, + secretPath, + secretName: el.newSecretName as string, + secretTags: (el.tagIds || []).map((i) => tagsGroupByID[i][0].slug) + }) + ); + }); + } + // now get all secret references made and validate the permission + const secretReferencesGroupByInputSecretKey: Record> = {}; + const secretReferences: 
TSecretReference[] = []; + inputSecrets.forEach((el) => { + if (el.secretValue) { + const references = getAllSecretReferences(el.secretValue); + secretReferencesGroupByInputSecretKey[el.secretKey] = references; + secretReferences.push(...references.nestedReferences); + references.localReferences.forEach((localRefKey) => { + secretReferences.push({ secretKey: localRefKey, secretPath, environment }); + }); + } + }); + await $validateSecretReferences(projectId, permission, secretReferences); + + const { encryptor: secretManagerEncryptor, decryptor: secretManagerDecryptor } = + await kmsService.createCipherPairWithDataKey({ type: KmsDataKey.SecretManager, projectId }); + + const secrets = await secretDAL.transaction(async (tx) => + fnSecretBulkUpdate({ + folderId, + tx, + inputSecrets: inputSecrets.map((el) => { + const originalSecret = secretsToUpdateInDBGroupedByKey[el.secretKey][0]; + const encryptedValue = + typeof el.secretValue !== "undefined" + ? { + encryptedValue: secretManagerEncryptor({ plainText: Buffer.from(el.secretValue) }).cipherTextBlob, + references: secretReferencesGroupByInputSecretKey[el.secretKey].nestedReferences + } + : {}; + + return { + filter: { id: originalSecret.id, type: SecretType.Shared }, + data: { + reminderRepeatDays: el.secretReminderRepeatDays, + encryptedComment: setKnexStringValue( + el.secretComment, + (value) => secretManagerEncryptor({ plainText: Buffer.from(value) }).cipherTextBlob + ), + reminderNote: el.secretReminderNote, + skipMultilineEncoding: el.skipMultilineEncoding, + key: el.newSecretName || el.secretKey, + tags: el.tagIds, + ...encryptedValue + } + }; + }), + secretDAL, + secretVersionDAL, + secretTagDAL, + secretVersionTagDAL + }) + ); + await snapshotService.performSnapshot(folderId); + await secretQueueService.syncSecrets({ + actor, + actorId, + secretPath, + projectId, + environmentSlug: folder.environment.slug + }); + + return secrets.map((el) => + reshapeBridgeSecret(projectId, environment, secretPath, { + ...el, + value: el.encryptedValue ? secretManagerDecryptor({ cipherTextBlob: el.encryptedValue }).toString() : "", + comment: el.encryptedComment ? 
secretManagerDecryptor({ cipherTextBlob: el.encryptedComment }).toString() : "" + }) + ); + }; + + const deleteManySecret = async ({ + secrets: inputSecrets, + secretPath, + environment, + projectId, + actor, + actorId, + actorAuthMethod, + actorOrgId + }: TDeleteManySecretDTO) => { + const { permission } = await permissionService.getProjectPermission( + actor, + actorId, + projectId, + actorAuthMethod, + actorOrgId + ); + + const folder = await folderDAL.findBySecretPath(projectId, environment, secretPath); + if (!folder) + throw new NotFoundError({ + message: `Folder with path '${secretPath}' in environment with slug '${environment}' not found`, + name: "DeleteManySecret" + }); + const folderId = folder.id; + + const secretsToDelete = await secretDAL.find({ + folderId, + $complex: { + operator: "and", + value: [ + { + operator: "or", + value: inputSecrets.map((el) => ({ + operator: "and", + value: [ + { + operator: "eq", + field: "key", + value: el.secretKey + }, + { + operator: "eq", + field: "type", + value: SecretType.Shared + } + ] + })) + } + ] + } + }); + if (secretsToDelete.length !== inputSecrets.length) + throw new NotFoundError({ + message: `One or more secrets does not exist: ${secretsToDelete.map((el) => el.key).join(", ")}` + }); + secretsToDelete.forEach((el) => { + ForbiddenError.from(permission).throwUnlessCan( + ProjectPermissionActions.Delete, + subject(ProjectPermissionSub.Secrets, { + environment, + secretPath, + secretName: el.key, + secretTags: el.tags?.map((i) => i.slug) + }) + ); + }); + + const secretsDeleted = await secretDAL.transaction(async (tx) => + fnSecretBulkDelete({ + secretDAL, + secretQueueService, + inputSecrets: inputSecrets.map(({ type, secretKey }) => ({ + secretKey, + type: type || SecretType.Shared + })), + projectId, + folderId, + actorId, + tx + }) + ); + + // await snapshotService.performSnapshot(folderId); + await secretQueueService.syncSecrets({ + actor, + actorId, + secretPath, + projectId, + environmentSlug: folder.environment.slug + }); + + const { decryptor: secretManagerDecryptor } = await kmsService.createCipherPairWithDataKey({ + type: KmsDataKey.SecretManager, + projectId + }); + return secretsDeleted.map((el) => + reshapeBridgeSecret(projectId, environment, secretPath, { + ...el, + value: el.encryptedValue ? secretManagerDecryptor({ cipherTextBlob: el.encryptedValue }).toString() : "", + comment: el.encryptedComment ? 
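+        // the deleted rows are decrypted and echoed back so callers can see exactly what was removed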
secretManagerDecryptor({ cipherTextBlob: el.encryptedComment }).toString() : "" + }) + ); + }; + + const getSecretVersions = async ({ + actorId, + actor, + actorOrgId, + actorAuthMethod, + limit = 20, + offset = 0, + secretId + }: TGetSecretVersionsDTO) => { + const secret = await secretDAL.findById(secretId); + if (!secret) throw new NotFoundError({ message: `Secret with ID '${secretId}' not found` }); + + const folder = await folderDAL.findById(secret.folderId); + if (!folder) throw new NotFoundError({ message: `Folder with ID '${secret.folderId}' not found` }); + + const { permission } = await permissionService.getProjectPermission( + actor, + actorId, + folder.projectId, + actorAuthMethod, + actorOrgId + ); + ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.SecretRollback); + const { decryptor: secretManagerDecryptor } = await kmsService.createCipherPairWithDataKey({ + type: KmsDataKey.SecretManager, + projectId: folder.projectId + }); + const secretVersions = await secretVersionDAL.find({ secretId }, { offset, limit, sort: [["createdAt", "desc"]] }); + return secretVersions.map((el) => + reshapeBridgeSecret(folder.projectId, folder.environment.envSlug, "/", { + ...el, + value: el.encryptedValue ? secretManagerDecryptor({ cipherTextBlob: el.encryptedValue }).toString() : "", + comment: el.encryptedComment ? secretManagerDecryptor({ cipherTextBlob: el.encryptedComment }).toString() : "" + }) + ); + }; + + // this is a backfilling API for secret references + // what it does is it will go through all the secret values and parse all references + // populate the secret reference to do sync integrations + const backfillSecretReferences = async ({ + projectId, + actor, + actorId, + actorOrgId, + actorAuthMethod + }: TBackFillSecretReferencesDTO) => { + const { hasRole } = await permissionService.getProjectPermission( + actor, + actorId, + projectId, + actorAuthMethod, + actorOrgId + ); + + if (!hasRole(ProjectMembershipRole.Admin)) + throw new ForbiddenRequestError({ message: "Only admins are allowed to take this action" }); + + const { decryptor: secretManagerDecryptor } = await kmsService.createCipherPairWithDataKey({ + type: KmsDataKey.SecretManager, + projectId + }); + await secretDAL.transaction(async (tx) => { + const secrets = await secretDAL.findAllProjectSecretValues(projectId, tx); + await secretDAL.upsertSecretReferences( + secrets + .filter((el) => Boolean(el.encryptedValue)) + .map(({ id, encryptedValue }) => ({ + secretId: id, + references: encryptedValue + ? 
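+              // decrypt each stored value and re-parse it for ${...} interpolation syntax;
+              // only nested (cross-secret) references are persisted for the sync integrations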
getAllSecretReferences(secretManagerDecryptor({ cipherTextBlob: encryptedValue }).toString()) + .nestedReferences + : [] + })), + tx + ); + }); + + return { message: "Successfully backfilled secret references" }; + }; + + const moveSecrets = async ({ + sourceEnvironment, + sourceSecretPath, + destinationEnvironment, + destinationSecretPath, + secretIds, + projectId, + shouldOverwrite, + actor, + actorId, + actorAuthMethod, + actorOrgId + }: TMoveSecretsDTO) => { + const { permission } = await permissionService.getProjectPermission( + actor, + actorId, + projectId, + actorAuthMethod, + actorOrgId + ); + + const sourceFolder = await folderDAL.findBySecretPath(projectId, sourceEnvironment, sourceSecretPath); + if (!sourceFolder) { + throw new NotFoundError({ + message: `Source folder with path '${sourceSecretPath}' in environment with slug '${sourceEnvironment}' not found` + }); + } + + const destinationFolder = await folderDAL.findBySecretPath( + projectId, + destinationEnvironment, + destinationSecretPath + ); + + if (!destinationFolder) { + throw new NotFoundError({ + message: `Destination folder with path '${destinationSecretPath}' in environment with slug '${destinationEnvironment}' not found` + }); + } + + const sourceSecrets = await secretDAL.find({ + type: SecretType.Shared, + $in: { + [`${TableName.SecretV2}.id` as "id"]: secretIds + } + }); + sourceSecrets.forEach((secret) => { + ForbiddenError.from(permission).throwUnlessCan( + ProjectPermissionActions.Delete, + subject(ProjectPermissionSub.Secrets, { + environment: sourceEnvironment, + secretPath: sourceSecretPath, + secretName: secret.key, + secretTags: secret.tags.map((el) => el.slug) + }) + ); + }); + + if (sourceSecrets.length !== secretIds.length) { + throw new BadRequestError({ + message: "Invalid secrets" + }); + } + + const { decryptor: secretManagerDecryptor } = await kmsService.createCipherPairWithDataKey({ + type: KmsDataKey.SecretManager, + projectId + }); + const decryptedSourceSecrets = sourceSecrets.map((secret) => ({ + ...secret, + value: secret.encryptedValue + ? secretManagerDecryptor({ cipherTextBlob: secret.encryptedValue }).toString() + : undefined + })); + + let isSourceUpdated = false; + let isDestinationUpdated = false; + + // Moving secrets is a two-step process. + await secretDAL.transaction(async (tx) => { + // First step is to create/update the secret in the destination: + const destinationSecretsFromDB = await secretDAL.find( + { + folderId: destinationFolder.id + }, + { tx } + ); + + const decryptedDestinationSecrets = destinationSecretsFromDB.map((secret) => { + return { + ...secret, + value: secret.encryptedValue + ? secretManagerDecryptor({ cipherTextBlob: secret.encryptedValue }).toString() + : undefined + }; + }); + + const destinationSecretsGroupedByKey = groupBy(decryptedDestinationSecrets, (i) => i.key); + + const locallyCreatedSecrets = decryptedSourceSecrets + .filter(({ key }) => !destinationSecretsGroupedByKey[key]?.[0]) + .map((el) => ({ ...el, operation: SecretOperations.Create })); + + const locallyUpdatedSecrets = decryptedSourceSecrets + .filter( + ({ key, value }) => + destinationSecretsGroupedByKey[key]?.[0] && destinationSecretsGroupedByKey[key]?.[0]?.value !== value + ) + .map((el) => ({ ...el, operation: SecretOperations.Update })); + + if (locallyUpdatedSecrets.length > 0 && !shouldOverwrite) { + const existingKeys = locallyUpdatedSecrets.map((s) => s.key); + + throw new BadRequestError({ + message: `Failed to move secrets. 
The following secrets already exist in the destination: ${existingKeys.join(
+          ","
+        )}`
+        });
+      }
+
+      const isEmpty = locallyCreatedSecrets.length + locallyUpdatedSecrets.length === 0;
+
+      if (isEmpty) {
+        throw new BadRequestError({
+          message: "Selected secrets already exist in the destination."
+        });
+      }
+
+      // check that the actor may create or edit the corresponding secrets in the destination folder
+      locallyCreatedSecrets.forEach((secret) => {
+        ForbiddenError.from(permission).throwUnlessCan(
+          ProjectPermissionActions.Create,
+          subject(ProjectPermissionSub.Secrets, {
+            environment: destinationEnvironment,
+            secretPath: destinationSecretPath,
+            secretName: secret.key,
+            secretTags: secret.tags.map((el) => el.slug)
+          })
+        );
+      });
+
+      locallyUpdatedSecrets.forEach((secret) => {
+        ForbiddenError.from(permission).throwUnlessCan(
+          ProjectPermissionActions.Edit,
+          subject(ProjectPermissionSub.Secrets, {
+            environment: destinationEnvironment,
+            secretPath: destinationSecretPath,
+            secretName: secret.key,
+            secretTags: secret.tags.map((el) => el.slug)
+          })
+        );
+      });
+
+      const destinationFolderPolicy = await secretApprovalPolicyService.getSecretApprovalPolicy(
+        projectId,
+        destinationFolder.environment.slug,
+        destinationFolder.path
+      );
+
+      if (destinationFolderPolicy && actor === ActorType.USER) {
+        // if a secret approval policy exists for the destination, we create a secret approval request
+        const localSecretsIds = decryptedDestinationSecrets.map(({ id }) => id);
+        const latestSecretVersions = await secretVersionDAL.findLatestVersionMany(
+          destinationFolder.id,
+          localSecretsIds,
+          tx
+        );
+
+        const approvalRequestDoc = await secretApprovalRequestDAL.create(
+          {
+            folderId: destinationFolder.id,
+            slug: alphaNumericNanoId(),
+            policyId: destinationFolderPolicy.id,
+            status: "open",
+            hasMerged: false,
+            committerUserId: actorId
+          },
+          tx
+        );
+
+        const commits = locallyCreatedSecrets.concat(locallyUpdatedSecrets).map((doc) => {
+          const { operation } = doc;
+          const localSecret = destinationSecretsGroupedByKey[doc.key]?.[0];
+
+          return {
+            op: operation,
+            requestId: approvalRequestDoc.id,
+            metadata: doc.metadata,
+            key: doc.key,
+            encryptedValue: doc.encryptedValue,
+            encryptedComment: doc.encryptedComment,
+            skipMultilineEncoding: doc.skipMultilineEncoding,
+            // operations other than create also need the secret id and the latest version id
+            ...(operation !== SecretOperations.Create
+              ? { secretId: localSecret.id, secretVersion: latestSecretVersions[localSecret.id].id }
+              : {})
+          };
+        });
+        await secretApprovalRequestSecretDAL.insertV2Bridge(commits, tx);
+      } else {
+        // apply changes directly
+        if (locallyCreatedSecrets.length) {
+          await fnSecretBulkInsert({
+            folderId: destinationFolder.id,
+            secretVersionDAL,
+            secretDAL,
+            tx,
+            secretTagDAL,
+            secretVersionTagDAL,
+            inputSecrets: locallyCreatedSecrets.map((doc) => {
+              return {
+                type: doc.type,
+                metadata: doc.metadata,
+                key: doc.key,
+                encryptedValue: doc.encryptedValue,
+                encryptedComment: doc.encryptedComment,
+                skipMultilineEncoding: doc.skipMultilineEncoding,
+                reminderNote: doc.reminderNote,
+                reminderRepeatDays: doc.reminderRepeatDays,
+                references: doc.value ?
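+                // re-parse the plaintext so the destination rows get their own reference entries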
getAllSecretReferences(doc.value).nestedReferences : [] + }; + }) + }); + } + if (locallyUpdatedSecrets.length) { + await fnSecretBulkUpdate({ + folderId: destinationFolder.id, + secretVersionDAL, + secretDAL, + tx, + secretTagDAL, + secretVersionTagDAL, + inputSecrets: locallyUpdatedSecrets.map((doc) => { + return { + filter: { + folderId: destinationFolder.id, + id: destinationSecretsGroupedByKey[doc.key][0].id + }, + data: { + metadata: doc.metadata, + key: doc.key, + encryptedComment: doc.encryptedComment, + skipMultilineEncoding: doc.skipMultilineEncoding, + reminderNote: doc.reminderNote, + reminderRepeatDays: doc.reminderRepeatDays, + ...(doc.encryptedValue + ? { + encryptedValue: doc.encryptedValue, + references: doc.value ? getAllSecretReferences(doc.value).nestedReferences : [] + } + : { + encryptedValue: undefined, + references: undefined + }) + } + }; + }) + }); + } + + isDestinationUpdated = true; + } + + // Next step is to delete the secrets from the source folder: + const sourceSecretsGroupByKey = groupBy(sourceSecrets, (i) => i.key); + const locallyDeletedSecrets = decryptedSourceSecrets.map((el) => ({ ...el, operation: SecretOperations.Delete })); + + const sourceFolderPolicy = await secretApprovalPolicyService.getSecretApprovalPolicy( + projectId, + sourceFolder.environment.slug, + sourceFolder.path + ); + + if (sourceFolderPolicy && actor === ActorType.USER) { + // if secret approval policy exists for source, we create the secret approval request + const localSecretsIds = decryptedSourceSecrets.map(({ id }) => id); + const latestSecretVersions = await secretVersionDAL.findLatestVersionMany(sourceFolder.id, localSecretsIds, tx); + const approvalRequestDoc = await secretApprovalRequestDAL.create( + { + folderId: sourceFolder.id, + slug: alphaNumericNanoId(), + policyId: sourceFolderPolicy.id, + status: "open", + hasMerged: false, + committerUserId: actorId + }, + tx + ); + + const commits = locallyDeletedSecrets.map((doc) => { + const { operation } = doc; + const localSecret = sourceSecretsGroupByKey[doc.key]?.[0]; + + return { + op: operation, + requestId: approvalRequestDoc.id, + metadata: doc.metadata, + key: doc.key, + encryptedComment: doc.encryptedComment, + encryptedValue: doc.encryptedValue, + skipMultilineEncoding: doc.skipMultilineEncoding, + secretId: localSecret.id, + secretVersion: latestSecretVersions[localSecret.id].id + }; + }); + + await secretApprovalRequestSecretDAL.insertV2Bridge(commits, tx); + } else { + // if no secret approval policy is present, we delete directly. 
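+        // the hard delete runs inside the same transaction as the destination writes, so a
+        // failure rolls back both halves of the move; source snapshot + queue sync happen
+        // after commit, gated by isSourceUpdated below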
+        await secretDAL.delete(
+          {
+            $in: {
+              id: locallyDeletedSecrets.map(({ id }) => id)
+            },
+            folderId: sourceFolder.id
+          },
+          tx
+        );
+
+        isSourceUpdated = true;
+      }
+    });
+
+    if (isDestinationUpdated) {
+      await snapshotService.performSnapshot(destinationFolder.id);
+      await secretQueueService.syncSecrets({
+        projectId,
+        secretPath: destinationFolder.path,
+        environmentSlug: destinationFolder.environment.slug,
+        actorId,
+        actor
+      });
+    }
+
+    if (isSourceUpdated) {
+      await snapshotService.performSnapshot(sourceFolder.id);
+      await secretQueueService.syncSecrets({
+        projectId,
+        secretPath: sourceFolder.path,
+        environmentSlug: sourceFolder.environment.slug,
+        actorId,
+        actor
+      });
+    }
+
+    return {
+      projectId,
+      isSourceUpdated,
+      isDestinationUpdated
+    };
+  };
+
+  const getSecretReferenceTree = async ({
+    environment,
+    secretPath,
+    projectId,
+    actor,
+    actorId,
+    actorOrgId,
+    secretName,
+    actorAuthMethod
+  }: TGetSecretReferencesTreeDTO) => {
+    const { permission } = await permissionService.getProjectPermission(
+      actor,
+      actorId,
+      projectId,
+      actorAuthMethod,
+      actorOrgId
+    );
+
+    ForbiddenError.from(permission).throwUnlessCan(
+      ProjectPermissionActions.Read,
+      subject(ProjectPermissionSub.Secrets, { environment, secretPath })
+    );
+
+    const folder = await folderDAL.findBySecretPath(projectId, environment, secretPath);
+    if (!folder)
+      throw new NotFoundError({
+        message: `Folder with path '${secretPath}' in environment with slug '${environment}' not found`,
+        name: "GetSecretReferenceTree"
+      });
+    const folderId = folder.id;
+
+    const { decryptor: secretManagerDecryptor } = await kmsService.createCipherPairWithDataKey({
+      type: KmsDataKey.SecretManager,
+      projectId
+    });
+
+    const secret = await secretDAL.findOne({
+      folderId,
+      key: secretName,
+      type: SecretType.Shared
+    });
+
+    ForbiddenError.from(permission).throwUnlessCan(
+      ProjectPermissionActions.Read,
+      subject(ProjectPermissionSub.Secrets, {
+        environment,
+        secretPath,
+        secretName,
+        secretTags: (secret?.tags || []).map((el) => el.slug)
+      })
+    );
+
+    if (!secret) throw new NotFoundError({ message: `Secret with name '${secretName}' not found` });
+
+    const secretValue = secret.encryptedValue
+      ? secretManagerDecryptor({ cipherTextBlob: secret.encryptedValue }).toString()
+      : "";
+
+    const { getExpandedSecretStackTrace } = expandSecretReferencesFactory({
+      projectId,
+      folderDAL,
+      secretDAL,
+      decryptSecretValue: (value) => (value ?
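+        // values are decrypted lazily during tree expansion; a missing ciphertext resolves to
+        // undefined, so a dangling reference such as ${db.prod.DB_PASSWORD} expands to nothing
+        // instead of throwing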
secretManagerDecryptor({ cipherTextBlob: value }).toString() : undefined), + canExpandValue: (expandEnvironment, expandSecretPath, expandSecretName, expandSecretTags) => + permission.can( + ProjectPermissionActions.Read, + subject(ProjectPermissionSub.Secrets, { + environment: expandEnvironment, + secretPath: expandSecretPath, + secretName: expandSecretName, + secretTags: expandSecretTags + }) + ) + }); + + const { expandedValue, stackTrace } = await getExpandedSecretStackTrace({ + environment, + secretPath, + value: secretValue + }); + + return { tree: stackTrace, value: expandedValue }; + }; + + return { + createSecret, + deleteSecret, + updateSecret, + createManySecret, + updateManySecret, + deleteManySecret, + getSecretByName, + getSecrets, + getSecretVersions, + backfillSecretReferences, + moveSecrets, + getSecretsCount, + getSecretsCountMultiEnv, + getSecretsMultiEnv, + getSecretReferenceTree, + getSecretsByFolderMappings + }; +}; diff --git a/backend/src/services/secret-v2-bridge/secret-v2-bridge-types.ts b/backend/src/services/secret-v2-bridge/secret-v2-bridge-types.ts new file mode 100644 index 0000000000..7216989ff8 --- /dev/null +++ b/backend/src/services/secret-v2-bridge/secret-v2-bridge-types.ts @@ -0,0 +1,306 @@ +import { Knex } from "knex"; + +import { SecretType, TSecretsV2, TSecretsV2Insert, TSecretsV2Update } from "@app/db/schemas"; +import { OrderByDirection, TProjectPermission } from "@app/lib/types"; +import { TProjectDALFactory } from "@app/services/project/project-dal"; +import { SecretsOrderBy } from "@app/services/secret/secret-types"; +import { TSecretFolderDALFactory } from "@app/services/secret-folder/secret-folder-dal"; +import { TSecretTagDALFactory } from "@app/services/secret-tag/secret-tag-dal"; + +import { TSecretV2BridgeDALFactory } from "./secret-v2-bridge-dal"; +import { TSecretVersionV2DALFactory } from "./secret-version-dal"; +import { TSecretVersionV2TagDALFactory } from "./secret-version-tag-dal"; + +type TPartialSecret = Pick; + +type TPartialInputSecret = Pick; + +export type TSecretReferenceDTO = { + environment: string; + secretPath: string; + secretKey: string; +}; + +export type TGetSecretsDTO = { + expandSecretReferences?: boolean; + path: string; + environment: string; + includeImports?: boolean; + recursive?: boolean; + tagSlugs?: string[]; + orderBy?: SecretsOrderBy; + orderDirection?: OrderByDirection; + offset?: number; + limit?: number; + search?: string; + keys?: string[]; +} & TProjectPermission; + +export type TGetASecretDTO = { + secretName: string; + path: string; + environment: string; + expandSecretReferences?: boolean; + type: "shared" | "personal"; + includeImports?: boolean; + version?: number; + projectId: string; +} & Omit; + +export type TCreateSecretDTO = TProjectPermission & { + secretName: string; + secretPath: string; + environment: string; + secretValue: string; + type: SecretType; + tagIds?: string[]; + secretComment?: string; + skipMultilineEncoding?: boolean; + secretReminderRepeatDays?: number | null; + secretReminderNote?: string | null; +}; + +export type TUpdateSecretDTO = TProjectPermission & { + secretPath: string; + environment: string; + secretName: string; + secretValue?: string; + newSecretName?: string; + secretComment?: string; + type: SecretType; + tagIds?: string[]; + skipMultilineEncoding?: boolean; + secretReminderRepeatDays?: number | null; + secretReminderNote?: string | null; + metadata?: { + source?: string; + }; +}; + +export type TDeleteSecretDTO = TProjectPermission & { + secretPath: string; + 
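+  // environment slug, not the display name; folder lookups resolve environments by slug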
environment: string; + secretName: string; + type: SecretType; +}; + +export type TCreateManySecretDTO = Omit & { + secretPath: string; + projectId: string; + environment: string; + secrets: { + secretKey: string; + secretValue: string; + secretComment?: string; + skipMultilineEncoding?: boolean; + tagIds?: string[]; + metadata?: { + source?: string; + }; + }[]; +}; + +export type TUpdateManySecretDTO = Omit & { + secretPath: string; + projectId: string; + environment: string; + secrets: { + secretKey: string; + newSecretName?: string; + secretValue: string; + secretComment?: string; + skipMultilineEncoding?: boolean; + tagIds?: string[]; + secretReminderRepeatDays?: number | null; + secretReminderNote?: string | null; + }[]; +}; + +export type TDeleteManySecretDTO = Omit & { + secretPath: string; + projectId: string; + environment: string; + secrets: { + secretKey: string; + type?: SecretType; + }[]; +}; + +export type TGetSecretVersionsDTO = Omit & { + limit?: number; + offset?: number; + secretId: string; +}; + +export type TSecretReference = { environment: string; secretPath: string; secretKey: string }; + +export type TFnSecretBulkInsert = { + folderId: string; + tx?: Knex; + inputSecrets: Array & { tagIds?: string[]; references: TSecretReference[] }>; + secretDAL: Pick; + secretVersionDAL: Pick; + secretTagDAL: Pick; + secretVersionTagDAL: Pick; +}; + +type TRequireReferenceIfValue = + | (Omit & { + encryptedValue: Buffer | null; + references: TSecretReference[]; + }) + | (Omit & { + encryptedValue?: never; + references?: never; + }); + +export type TFnSecretBulkUpdate = { + folderId: string; + inputSecrets: { + filter: Partial; + data: TRequireReferenceIfValue & { tags?: string[] }; + }[]; + secretDAL: Pick; + secretVersionDAL: Pick; + secretTagDAL: Pick; + secretVersionTagDAL: Pick; + tx?: Knex; +}; + +export type TFnSecretBulkDelete = { + folderId: string; + projectId: string; + inputSecrets: Array<{ type: SecretType; secretKey: string }>; + actorId: string; + tx?: Knex; + secretDAL: Pick; + secretQueueService: { + removeSecretReminder: (data: TRemoveSecretReminderDTO) => Promise; + }; +}; + +export type THandleReminderDTO = { + newSecret: TPartialInputSecret; + oldSecret: TPartialSecret; + projectId: string; +}; + +export type TCreateSecretReminderDTO = { + oldSecret: TPartialSecret; + newSecret: TPartialSecret; + projectId: string; +}; + +export type TRemoveSecretReminderDTO = { + secretId: string; + repeatDays: number; +}; + +export type TBackFillSecretReferencesDTO = TProjectPermission; + +export type TCreateManySecretsFnFactory = { + projectDAL: TProjectDALFactory; + secretDAL: TSecretV2BridgeDALFactory; + secretVersionDAL: TSecretVersionV2DALFactory; + secretTagDAL: TSecretTagDALFactory; + secretVersionTagDAL: TSecretVersionV2TagDALFactory; + folderDAL: TSecretFolderDALFactory; +}; + +export type TCreateManySecretsFn = { + projectId: string; + environment: string; + path: string; + secrets: { + secretName: string; + secretValue: string; + type: SecretType; + secretComment?: string; + skipMultilineEncoding?: boolean; + tags?: string[]; + metadata?: { + source?: string; + }; + }[]; + userId?: string; // only relevant for personal secret(s) +}; + +export type TUpdateManySecretsFnFactory = { + projectDAL: TProjectDALFactory; + secretDAL: TSecretV2BridgeDALFactory; + secretVersionDAL: TSecretVersionV2DALFactory; + secretTagDAL: TSecretTagDALFactory; + secretVersionTagDAL: TSecretVersionV2TagDALFactory; + folderDAL: TSecretFolderDALFactory; +}; + +export type TUpdateManySecretsFn 
= { + projectId: string; + environment: string; + path: string; + secrets: { + secretName: string; + newSecretName?: string; + secretValue: string; + type: SecretType; + secretComment?: string; + skipMultilineEncoding?: boolean; + secretReminderRepeatDays?: number | null; + secretReminderNote?: string | null; + tags?: string[]; + metadata?: { + source?: string; + }; + }[]; + userId?: string; +}; + +export enum SecretOperations { + Create = "create", + Update = "update", + Delete = "delete" +} + +export type TMoveSecretsDTO = { + projectId: string; + sourceEnvironment: string; + sourceSecretPath: string; + destinationEnvironment: string; + destinationSecretPath: string; + secretIds: string[]; + shouldOverwrite: boolean; +} & Omit; + +export type TAttachSecretTagsDTO = { + projectId: string; + secretName: string; + tagSlugs: string[]; + environment: string; + secretPath: string; + type: SecretType; +} & Omit; + +export type TGetSecretReferencesTreeDTO = { + projectId: string; + secretName: string; + environment: string; + secretPath: string; +} & Omit; + +export type TFindSecretsByFolderIdsFilter = { + limit?: number; + offset?: number; + orderBy?: SecretsOrderBy; + orderDirection?: OrderByDirection; + search?: string; + tagSlugs?: string[]; + includeTagsInSearch?: boolean; + keys?: string[]; +}; + +export type TGetSecretsRawByFolderMappingsDTO = { + projectId: string; + folderMappings: { folderId: string; path: string; environment: string }[]; + userId: string; + filters: TFindSecretsByFolderIdsFilter; +}; diff --git a/backend/src/services/secret-v2-bridge/secret-version-dal.ts b/backend/src/services/secret-v2-bridge/secret-version-dal.ts new file mode 100644 index 0000000000..a0bce5371b --- /dev/null +++ b/backend/src/services/secret-v2-bridge/secret-version-dal.ts @@ -0,0 +1,128 @@ +import { Knex } from "knex"; + +import { TDbClient } from "@app/db"; +import { TableName, TSecretVersionsV2, TSecretVersionsV2Update } from "@app/db/schemas"; +import { BadRequestError, DatabaseError } from "@app/lib/errors"; +import { ormify, selectAllTableCols } from "@app/lib/knex"; +import { logger } from "@app/lib/logger"; +import { QueueName } from "@app/queue"; + +export type TSecretVersionV2DALFactory = ReturnType; + +export const secretVersionV2BridgeDALFactory = (db: TDbClient) => { + const secretVersionV2Orm = ormify(db, TableName.SecretVersionV2); + + // This will fetch all latest secret versions from a folder + const findLatestVersionByFolderId = async (folderId: string, tx?: Knex) => { + try { + const docs = await (tx || db.replicaNode())(TableName.SecretVersionV2) + .where(`${TableName.SecretVersionV2}.folderId`, folderId) + .join(TableName.SecretV2, `${TableName.SecretV2}.id`, `${TableName.SecretVersionV2}.secretId`) + .join( + (tx || db)(TableName.SecretVersionV2) + .groupBy("folderId", "secretId") + .max("version") + .select("secretId") + .as("latestVersion"), + (bd) => { + bd.on(`${TableName.SecretVersionV2}.secretId`, "latestVersion.secretId").andOn( + `${TableName.SecretVersionV2}.version`, + "latestVersion.max" + ); + } + ) + .select(selectAllTableCols(TableName.SecretVersionV2)); + return docs; + } catch (error) { + throw new DatabaseError({ error, name: "FindLatestVersionByFolderId" }); + } + }; + + const bulkUpdate = async ( + data: Array<{ filter: Partial; data: TSecretVersionsV2Update }>, + tx?: Knex + ) => { + try { + const secs = await Promise.all( + data.map(async ({ filter, data: updateData }) => { + const [doc] = await (tx || db)(TableName.SecretVersionV2) + .where(filter) + 
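+            // each filter is expected to match exactly one version row; the update and the
+            // version bump below are applied through the same query-builder chain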
.update(updateData) + .increment("version", 1) // TODO: Is this really needed? + .returning("*"); + if (!doc) throw new BadRequestError({ message: "Failed to update document" }); + return doc; + }) + ); + return secs; + } catch (error) { + throw new DatabaseError({ error, name: "bulk update secret" }); + } + }; + + const findLatestVersionMany = async (folderId: string, secretIds: string[], tx?: Knex) => { + try { + if (!secretIds.length) return {}; + const docs: Array = await (tx || db.replicaNode())(TableName.SecretVersionV2) + .where("folderId", folderId) + .whereIn(`${TableName.SecretVersionV2}.secretId`, secretIds) + .join( + (tx || db)(TableName.SecretVersionV2) + .groupBy("secretId") + .max("version") + .select("secretId") + .as("latestVersion"), + (bd) => { + bd.on(`${TableName.SecretVersionV2}.secretId`, "latestVersion.secretId").andOn( + `${TableName.SecretVersionV2}.version`, + "latestVersion.max" + ); + } + ); + return docs.reduce>( + (prev, curr) => ({ ...prev, [curr.secretId || ""]: curr }), + {} + ); + } catch (error) { + throw new DatabaseError({ error, name: "FindLatestVersinMany" }); + } + }; + + const pruneExcessVersions = async () => { + logger.info(`${QueueName.DailyResourceCleanUp}: pruning secret version v2 started`); + try { + await db(TableName.SecretVersionV2) + .with("version_cte", (qb) => { + void qb + .from(TableName.SecretVersionV2) + .select( + "id", + "folderId", + db.raw( + `ROW_NUMBER() OVER (PARTITION BY ${TableName.SecretVersionV2}."secretId" ORDER BY ${TableName.SecretVersionV2}."createdAt" DESC) AS row_num` + ) + ); + }) + .join(TableName.SecretFolder, `${TableName.SecretFolder}.id`, `${TableName.SecretVersionV2}.folderId`) + .join(TableName.Environment, `${TableName.Environment}.id`, `${TableName.SecretFolder}.envId`) + .join(TableName.Project, `${TableName.Project}.id`, `${TableName.Environment}.projectId`) + .join("version_cte", "version_cte.id", `${TableName.SecretVersionV2}.id`) + .whereRaw(`version_cte.row_num > ${TableName.Project}."pitVersionLimit"`) + .delete(); + } catch (error) { + throw new DatabaseError({ + error, + name: "Secret Version Prune" + }); + } + logger.info(`${QueueName.DailyResourceCleanUp}: pruning secret version v2 completed`); + }; + + return { + ...secretVersionV2Orm, + pruneExcessVersions, + findLatestVersionMany, + bulkUpdate, + findLatestVersionByFolderId + }; +}; diff --git a/backend/src/services/secret-v2-bridge/secret-version-tag-dal.ts b/backend/src/services/secret-v2-bridge/secret-version-tag-dal.ts new file mode 100644 index 0000000000..f1a3531e96 --- /dev/null +++ b/backend/src/services/secret-v2-bridge/secret-version-tag-dal.ts @@ -0,0 +1,10 @@ +import { TDbClient } from "@app/db"; +import { TableName } from "@app/db/schemas"; +import { ormify } from "@app/lib/knex"; + +export type TSecretVersionV2TagDALFactory = ReturnType; + +export const secretVersionV2TagBridgeDALFactory = (db: TDbClient) => { + const secretVersionTagDAL = ormify(db, TableName.SecretVersionV2Tag); + return secretVersionTagDAL; +}; diff --git a/backend/src/services/secret/secret-dal.ts b/backend/src/services/secret/secret-dal.ts index 1a2e414dd9..0d4ae0cda8 100644 --- a/backend/src/services/secret/secret-dal.ts +++ b/backend/src/services/secret/secret-dal.ts @@ -3,7 +3,7 @@ import { validate as uuidValidate } from "uuid"; import { TDbClient } from "@app/db"; import { SecretsSchema, SecretType, TableName, TSecrets, TSecretsUpdate } from "@app/db/schemas"; -import { BadRequestError, DatabaseError } from "@app/lib/errors"; +import { 
BadRequestError, DatabaseError, NotFoundError } from "@app/lib/errors"; import { ormify, selectAllTableCols, sqlNestRelationships } from "@app/lib/knex"; export type TSecretDALFactory = ReturnType; @@ -20,8 +20,6 @@ export const secretDALFactory = (db: TDbClient) => { } }; - // the idea is to use postgres specific function - // insert with id this will cause a conflict then merge the data const bulkUpdate = async ( data: Array<{ filter: Partial; data: TSecretsUpdate }>, @@ -57,7 +55,7 @@ export const secretDALFactory = (db: TDbClient) => { ); if (existingSecrets.length !== data.length) { - throw new BadRequestError({ message: "Some of the secrets do not exist" }); + throw new NotFoundError({ message: "Some of the secrets do not exist" }); } if (data.length === 0) return []; @@ -114,7 +112,7 @@ export const secretDALFactory = (db: TDbClient) => { userId = undefined; } - const secs = await (tx || db)(TableName.Secret) + const secs = await (tx || db.replicaNode())(TableName.Secret) .where({ folderId }) .where((bd) => { void bd.whereNull("userId").orWhere({ userId: userId || null }); @@ -125,7 +123,6 @@ export const secretDALFactory = (db: TDbClient) => { .select(db.ref("id").withSchema(TableName.SecretTag).as("tagId")) .select(db.ref("color").withSchema(TableName.SecretTag).as("tagColor")) .select(db.ref("slug").withSchema(TableName.SecretTag).as("tagSlug")) - .select(db.ref("name").withSchema(TableName.SecretTag).as("tagName")) .orderBy("id", "asc"); const data = sqlNestRelationships({ data: secs, @@ -135,11 +132,11 @@ export const secretDALFactory = (db: TDbClient) => { { key: "tagId", label: "tags" as const, - mapper: ({ tagId: id, tagColor: color, tagSlug: slug, tagName: name }) => ({ + mapper: ({ tagId: id, tagColor: color, tagSlug: slug }) => ({ id, color, slug, - name + name: slug }) } ] @@ -152,19 +149,18 @@ export const secretDALFactory = (db: TDbClient) => { const getSecretTags = async (secretId: string, tx?: Knex) => { try { - const tags = await (tx || db)(TableName.JnSecretTag) + const tags = await (tx || db.replicaNode())(TableName.JnSecretTag) .join(TableName.SecretTag, `${TableName.JnSecretTag}.${TableName.SecretTag}Id`, `${TableName.SecretTag}.id`) .where({ [`${TableName.Secret}Id` as const]: secretId }) .select(db.ref("id").withSchema(TableName.SecretTag).as("tagId")) .select(db.ref("color").withSchema(TableName.SecretTag).as("tagColor")) - .select(db.ref("slug").withSchema(TableName.SecretTag).as("tagSlug")) - .select(db.ref("name").withSchema(TableName.SecretTag).as("tagName")); + .select(db.ref("slug").withSchema(TableName.SecretTag).as("tagSlug")); return tags.map((el) => ({ id: el.tagId, color: el.tagColor, slug: el.tagSlug, - name: el.tagName + name: el.tagSlug })); } catch (error) { throw new DatabaseError({ error, name: "get secret tags" }); @@ -179,7 +175,7 @@ export const secretDALFactory = (db: TDbClient) => { userId = undefined; } - const secs = await (tx || db)(TableName.Secret) + const secs = await (tx || db.replicaNode())(TableName.Secret) .whereIn("folderId", folderIds) .where((bd) => { void bd.whereNull("userId").orWhere({ userId: userId || null }); @@ -190,7 +186,6 @@ export const secretDALFactory = (db: TDbClient) => { .select(db.ref("id").withSchema(TableName.SecretTag).as("tagId")) .select(db.ref("color").withSchema(TableName.SecretTag).as("tagColor")) .select(db.ref("slug").withSchema(TableName.SecretTag).as("tagSlug")) - .select(db.ref("name").withSchema(TableName.SecretTag).as("tagName")) .orderBy("id", "asc"); const data = sqlNestRelationships({ data: 
secs, @@ -200,11 +195,11 @@ export const secretDALFactory = (db: TDbClient) => { { key: "tagId", label: "tags" as const, - mapper: ({ tagId: id, tagColor: color, tagSlug: slug, tagName: name }) => ({ + mapper: ({ tagId: id, tagColor: color, tagSlug: slug }) => ({ id, color, slug, - name + name: slug }) } ] @@ -223,7 +218,7 @@ export const secretDALFactory = (db: TDbClient) => { ) => { if (!blindIndexes.length) return []; try { - const secrets = await (tx || db)(TableName.Secret) + const secrets = await (tx || db.replicaNode())(TableName.Secret) .where({ folderId }) .where((bd) => { blindIndexes.forEach((el) => { @@ -278,7 +273,7 @@ export const secretDALFactory = (db: TDbClient) => { const findReferencedSecretReferences = async (projectId: string, envSlug: string, secretPath: string, tx?: Knex) => { try { - const docs = await (tx || db)(TableName.SecretReference) + const docs = await (tx || db.replicaNode())(TableName.SecretReference) .where({ secretPath, environment: envSlug @@ -298,7 +293,7 @@ export const secretDALFactory = (db: TDbClient) => { // special query to backfill secret value const findAllProjectSecretValues = async (projectId: string, tx?: Knex) => { try { - const docs = await (tx || db)(TableName.Secret) + const docs = await (tx || db.replicaNode())(TableName.Secret) .join(TableName.SecretFolder, `${TableName.Secret}.folderId`, `${TableName.SecretFolder}.id`) .join(TableName.Environment, `${TableName.SecretFolder}.envId`, `${TableName.Environment}.id`) .where("projectId", projectId) @@ -311,6 +306,39 @@ export const secretDALFactory = (db: TDbClient) => { } }; + const findOneWithTags = async (filter: Partial, tx?: Knex) => { + try { + const rawDocs = await (tx || db.replicaNode())(TableName.Secret) + .where(filter) + .leftJoin(TableName.JnSecretTag, `${TableName.Secret}.id`, `${TableName.JnSecretTag}.${TableName.Secret}Id`) + .leftJoin(TableName.SecretTag, `${TableName.JnSecretTag}.${TableName.SecretTag}Id`, `${TableName.SecretTag}.id`) + .select(selectAllTableCols(TableName.Secret)) + .select(db.ref("id").withSchema(TableName.SecretTag).as("tagId")) + .select(db.ref("color").withSchema(TableName.SecretTag).as("tagColor")) + .select(db.ref("slug").withSchema(TableName.SecretTag).as("tagSlug")); + const docs = sqlNestRelationships({ + data: rawDocs, + key: "id", + parentMapper: (el) => ({ _id: el.id, ...SecretsSchema.parse(el) }), + childrenMapper: [ + { + key: "tagId", + label: "tags" as const, + mapper: ({ tagId: id, tagColor: color, tagSlug: slug }) => ({ + id, + color, + slug, + name: slug + }) + } + ] + }); + return docs?.[0]; + } catch (error) { + throw new DatabaseError({ error, name: "FindOneWIthTags" }); + } + }; + return { ...secretOrm, update, @@ -318,6 +346,7 @@ export const secretDALFactory = (db: TDbClient) => { deleteMany, bulkUpdateNoVersionIncrement, getSecretTags, + findOneWithTags, findByFolderId, findByFolderIds, findByBlindIndexes, diff --git a/backend/src/services/secret/secret-fns.ts b/backend/src/services/secret/secret-fns.ts index 6758f48157..65691fcbb8 100644 --- a/backend/src/services/secret/secret-fns.ts +++ b/backend/src/services/secret/secret-fns.ts @@ -19,11 +19,17 @@ import { decryptSymmetric128BitHexKeyUTF8, encryptSymmetric128BitHexKeyUTF8 } from "@app/lib/crypto"; -import { BadRequestError } from "@app/lib/errors"; +import { BadRequestError, NotFoundError } from "@app/lib/errors"; import { groupBy, unique } from "@app/lib/fn"; import { logger } from "@app/lib/logger"; +import { + fnSecretBulkInsert as fnSecretV2BridgeBulkInsert, + 
fnSecretBulkUpdate as fnSecretV2BridgeBulkUpdate,
+  getAllSecretReferences
+} from "@app/services/secret-v2-bridge/secret-v2-bridge-fns";
 
 import { ActorAuthMethod, ActorType } from "../auth/auth-type";
+import { KmsDataKey } from "../kms/kms-types";
 import { getBotKeyFnFactory } from "../project-bot/project-bot-fns";
 import { TProjectEnvDALFactory } from "../project-env/project-env-dal";
 import { TSecretFolderDALFactory } from "../secret-folder/secret-folder-dal";
@@ -146,7 +152,9 @@
   });
 
   if (!env) {
-    throw new Error(`'${environment}' environment not found in project with ID ${projectId}`);
+    throw new NotFoundError({
+      message: `Environment with slug '${environment}' in project with ID '${projectId}' not found`
+    });
   }
 
   // Fetch all folders in env once with a single query
@@ -190,6 +198,13 @@
   return getPaths;
 };
 
+// converts a multi-line value into a quoted single-line value with \n escapes
+const formatMultiValueEnv = (val?: string) => {
+  if (!val) return "";
+  if (!val.match("\n")) return val;
+  return `"${val.replace(/\n/g, "\\n")}"`;
+};
+
 type TInterpolateSecretArg = {
   projectId: string;
   secretEncKey: string;
@@ -197,166 +212,141 @@
   secretDAL: Pick<TSecretDALFactory, "findByFolderId">;
   folderDAL: Pick<TSecretFolderDALFactory, "findBySecretPath">;
 };
 
+const MAX_SECRET_REFERENCE_DEPTH = 5;
 const INTERPOLATION_SYNTAX_REG = /\${([^}]+)}/g;
 
 export const interpolateSecrets = ({ projectId, secretEncKey, secretDAL, folderDAL }: TInterpolateSecretArg) => {
-  const fetchSecretsCrossEnv = () => {
-    const fetchCache: Record<string, Record<string, string>> = {};
+  const secretCache: Record<string, Record<string, string>> = {};
+  const getCacheUniqueKey = (environment: string, secretPath: string) => `${environment}-${secretPath}`;
 
-    return async (secRefEnv: string, secRefPath: string[], secRefKey: string) => {
-      const secRefPathUrl = path.join("/", ...secRefPath);
-      const uniqKey = `${secRefEnv}-${secRefPathUrl}`;
+  const fetchSecret = async (environment: string, secretPath: string, secretKey: string) => {
+    // the cache key must match the one used for write-backs in recursivelyExpandSecret below,
+    // so build it once via getCacheUniqueKey instead of prefixing the environment a second time
+    const uniqKey = getCacheUniqueKey(environment, secretPath);
 
-      if (fetchCache?.[uniqKey]) {
-        return fetchCache[uniqKey][secRefKey];
-      }
+    if (secretCache?.[uniqKey]) {
+      return secretCache[uniqKey][secretKey] || "";
+    }
 
-      const folder = await folderDAL.findBySecretPath(projectId, secRefEnv, secRefPathUrl);
-      if (!folder) return "";
-      const secrets = await secretDAL.findByFolderId(folder.id);
+    const folder = await folderDAL.findBySecretPath(projectId, environment, secretPath);
+    if (!folder) return "";
+    const secrets = await secretDAL.findByFolderId(folder.id);
 
-      const decryptedSec = secrets.reduce<Record<string, string>>((prev, secret) => {
-        const secretKey = decryptSymmetric128BitHexKeyUTF8({
-          ciphertext: secret.secretKeyCiphertext,
-          iv: secret.secretKeyIV,
-          tag: secret.secretKeyTag,
-          key: secretEncKey
-        });
-        const secretValue = decryptSymmetric128BitHexKeyUTF8({
-          ciphertext: secret.secretValueCiphertext,
-          iv: secret.secretValueIV,
-          tag: secret.secretValueTag,
-          key: secretEncKey
-        });
+    const decryptedSec = secrets.reduce<Record<string, string>>((prev, secret) => {
+      const decryptedSecretKey = decryptSymmetric128BitHexKeyUTF8({
+        ciphertext: secret.secretKeyCiphertext,
+        iv: secret.secretKeyIV,
+        tag: secret.secretKeyTag,
+        key: secretEncKey
+      });
+      const decryptedSecretValue = decryptSymmetric128BitHexKeyUTF8({
+        ciphertext: secret.secretValueCiphertext,
+        iv: secret.secretValueIV,
+        tag: secret.secretValueTag,
+        key: secretEncKey
+      });
 
-        // eslint-disable-next-line
-        prev[secretKey] = secretValue;
-        return prev;
-      }, {});
+      // 
eslint-disable-next-line + prev[decryptedSecretKey] = decryptedSecretValue; + return prev; + }, {}); - fetchCache[uniqKey] = decryptedSec; + secretCache[uniqKey] = decryptedSec; - return fetchCache[uniqKey][secRefKey]; - }; + return secretCache[uniqKey][secretKey] || ""; }; - const recursivelyExpandSecret = async ( - expandedSec: Record, - interpolatedSec: Record, - fetchCrossEnv: (env: string, secPath: string[], secKey: string) => Promise, - recursionChainBreaker: Record, - key: string - ) => { - if (expandedSec?.[key] !== undefined) { - return expandedSec[key]; - } - if (recursionChainBreaker?.[key]) { - return ""; - } - // eslint-disable-next-line - recursionChainBreaker[key] = true; + const recursivelyExpandSecret = async ({ + value, + secretPath, + environment, + depth = 0 + }: { + value?: string; + secretPath: string; + environment: string; + depth?: number; + }) => { + if (!value) return ""; + if (depth > MAX_SECRET_REFERENCE_DEPTH) return ""; - let interpolatedValue = interpolatedSec[key]; - if (!interpolatedValue) { - // eslint-disable-next-line no-console - console.error(`Couldn't find referenced value - ${key}`); - return ""; - } - - const refs = interpolatedValue.match(INTERPOLATION_SYNTAX_REG); + const refs = value.match(INTERPOLATION_SYNTAX_REG); + let expandedValue = value; if (refs) { for (const interpolationSyntax of refs) { const interpolationKey = interpolationSyntax.slice(2, interpolationSyntax.length - 1); const entities = interpolationKey.trim().split("."); if (entities.length === 1) { - const val = await recursivelyExpandSecret( - expandedSec, - interpolatedSec, - fetchCrossEnv, - recursionChainBreaker, - interpolationKey - ); - if (val) { - interpolatedValue = interpolatedValue.replaceAll(interpolationSyntax, val); - } + const [secretKey] = entities; // eslint-disable-next-line - continue; + let referenceValue = await fetchSecret(environment, secretPath, secretKey); + if (INTERPOLATION_SYNTAX_REG.test(referenceValue)) { + // eslint-disable-next-line + referenceValue = await recursivelyExpandSecret({ + environment, + secretPath, + value: referenceValue, + depth: depth + 1 + }); + } + const cacheKey = getCacheUniqueKey(environment, secretPath); + secretCache[cacheKey][secretKey] = referenceValue; + expandedValue = expandedValue.replaceAll(interpolationSyntax, referenceValue); } if (entities.length > 1) { - const secRefEnv = entities[0]; - const secRefPath = entities.slice(1, entities.length - 1); - const secRefKey = entities[entities.length - 1]; + const secretReferenceEnvironment = entities[0]; + const secretReferencePath = path.join("/", ...entities.slice(1, entities.length - 1)); + const secretReferenceKey = entities[entities.length - 1]; - const val = await fetchCrossEnv(secRefEnv, secRefPath, secRefKey); - if (val) { - interpolatedValue = interpolatedValue.replaceAll(interpolationSyntax, val); + // eslint-disable-next-line + let referenceValue = await fetchSecret(secretReferenceEnvironment, secretReferencePath, secretReferenceKey); + if (INTERPOLATION_SYNTAX_REG.test(referenceValue)) { + // eslint-disable-next-line + referenceValue = await recursivelyExpandSecret({ + environment: secretReferenceEnvironment, + secretPath: secretReferencePath, + value: referenceValue, + depth: depth + 1 + }); } + const cacheKey = getCacheUniqueKey(secretReferenceEnvironment, secretReferencePath); + secretCache[cacheKey][secretReferenceKey] = referenceValue; + expandedValue = expandedValue.replaceAll(interpolationSyntax, referenceValue); } } } - // eslint-disable-next-line - 
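// A hedged sketch of the reference grammar handled by the rewrite above: `${KEY}` resolves
// against the current environment/path, while `${env.nested.path.KEY}` crosses scopes.
// `lookup` is a hypothetical stand-in for fetchSecret; the depth counter mirrors
// MAX_SECRET_REFERENCE_DEPTH so cycles such as A -> B -> A terminate instead of looping.
const REFERENCE_REGEX = /\$\{([^}]+)\}/g;

const expandValue = async (
  value: string,
  lookup: (env: string, secretPath: string, key: string) => Promise<string>,
  env: string,
  secretPath: string,
  depth = 0
): Promise<string> => {
  if (depth > 5) return value;
  let expanded = value;
  for (const match of value.match(REFERENCE_REGEX) ?? []) {
    const entities = match.slice(2, -1).trim().split(".");
    // one entity: same env and path; several entities: env, folder segments, then the key
    const [refEnv, refPath, refKey] =
      entities.length === 1
        ? [env, secretPath, entities[0]]
        : [entities[0], `/${entities.slice(1, -1).join("/")}`, entities[entities.length - 1]];
    const referenced = await lookup(refEnv, refPath, refKey);
    expanded = expanded.replaceAll(match, await expandValue(referenced, lookup, refEnv, refPath, depth + 1));
  }
  return expanded;
};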
expandedSec[key] = interpolatedValue; - return interpolatedValue; + return expandedValue; }; - // used to convert multi line ones to quotes ones with \n - const formatMultiValueEnv = (val?: string) => { - if (!val) return ""; - if (!val.match("\n")) return val; - return `"${val.replace(/\n/g, "\\n")}"`; + const expandSecret = async (inputSecret: { + value?: string; + skipMultilineEncoding?: boolean | null; + secretPath: string; + environment: string; + }) => { + if (!inputSecret.value) return inputSecret.value; + + const shouldExpand = Boolean(inputSecret.value?.match(INTERPOLATION_SYNTAX_REG)); + if (!shouldExpand) return inputSecret.value; + + const expandedSecretValue = await recursivelyExpandSecret(inputSecret); + return inputSecret.skipMultilineEncoding ? formatMultiValueEnv(expandedSecretValue) : expandedSecretValue; }; - - const expandSecrets = async ( - secrets: Record - ) => { - const expandedSec: Record = {}; - const interpolatedSec: Record = {}; - - const crossSecEnvFetch = fetchSecretsCrossEnv(); - - Object.keys(secrets).forEach((key) => { - if (secrets[key].value.match(INTERPOLATION_SYNTAX_REG)) { - interpolatedSec[key] = secrets[key].value; - } else { - expandedSec[key] = secrets[key].value; - } - }); - - for (const key of Object.keys(secrets)) { - if (expandedSec?.[key]) { - // should not do multi line encoding if user has set it to skip - // eslint-disable-next-line - secrets[key].value = secrets[key].skipMultilineEncoding - ? formatMultiValueEnv(expandedSec[key]) - : expandedSec[key]; - // eslint-disable-next-line - continue; - } - - // this is to avoid recursion loop. So the graph should be direct graph rather than cyclic - // so for any recursion building if there is an entity two times same key meaning it will be looped - const recursionChainBreaker: Record = {}; - const expandedVal = await recursivelyExpandSecret( - expandedSec, - interpolatedSec, - crossSecEnvFetch, - recursionChainBreaker, - key - ); - - // eslint-disable-next-line - secrets[key].value = secrets[key].skipMultilineEncoding ? 
formatMultiValueEnv(expandedVal) : expandedVal; - } - - return secrets; - }; - return expandSecrets; + return expandSecret; }; export const decryptSecretRaw = ( - secret: TSecrets & { workspace: string; environment: string; secretPath: string }, + secret: TSecrets & { + workspace: string; + environment: string; + secretPath: string; + tags?: { + id: string; + slug: string; + color?: string | null; + }[]; + }, key: string ) => { const secretKey = decryptSymmetric128BitHexKeyUTF8({ @@ -396,7 +386,13 @@ export const decryptSecretRaw = ( _id: secret.id, id: secret.id, user: secret.userId, - skipMultilineEncoding: secret.skipMultilineEncoding + tags: secret.tags?.map((el) => ({ ...el, name: el.slug })), + skipMultilineEncoding: secret.skipMultilineEncoding, + secretReminderRepeatDays: secret.secretReminderRepeatDays, + secretReminderNote: secret.secretReminderNote, + metadata: secret.metadata, + createdAt: secret.createdAt, + updatedAt: secret.updatedAt }; }; @@ -504,7 +500,7 @@ export const fnSecretBlindIndexCheck = async ({ const hasUnknownSecretsProvided = secretKeysInDB.length !== inputSecrets.length; if (hasUnknownSecretsProvided) { const keysMissingInDB = Object.keys(keyName2BlindIndex).filter((key) => !secretKeysInDB.includes(key)); - throw new BadRequestError({ + throw new NotFoundError({ message: `Secret not found: blind index ${keysMissingInDB.join(",")}` }); } @@ -525,10 +521,51 @@ export const fnSecretBulkInsert = async ({ secretVersionTagDAL, tx }: TFnSecretBulkInsert) => { - const newSecrets = await secretDAL.insertMany( - inputSecrets.map(({ tags, references, ...el }) => ({ ...el, folderId })), - tx + const sanitizedInputSecrets = inputSecrets.map( + ({ + skipMultilineEncoding, + type, + userId, + version, + metadata, + algorithm, + secretKeyIV, + secretKeyTag, + secretValueIV, + keyEncoding, + secretValueTag, + secretCommentIV, + secretBlindIndex, + secretCommentTag, + secretKeyCiphertext, + secretReminderNote, + secretValueCiphertext, + secretCommentCiphertext, + secretReminderRepeatDays + }) => ({ + skipMultilineEncoding, + folderId, + type, + userId, + version, + metadata, + algorithm, + secretKeyIV, + secretKeyTag, + secretValueIV, + keyEncoding, + secretValueTag, + secretCommentIV, + secretBlindIndex, + secretCommentTag, + secretKeyCiphertext, + secretReminderNote, + secretValueCiphertext, + secretCommentCiphertext, + secretReminderRepeatDays + }) ); + const newSecrets = await secretDAL.insertMany(sanitizedInputSecrets, tx); const newSecretGroupByBlindIndex = groupBy(newSecrets, (item) => item.secretBlindIndex as string); const newSecretTags = inputSecrets.flatMap(({ tags: secretTags = [], secretBlindIndex }) => secretTags.map((tag) => ({ @@ -537,9 +574,8 @@ export const fnSecretBulkInsert = async ({ })) ); const secretVersions = await secretVersionDAL.insertMany( - inputSecrets.map(({ tags, references, ...el }) => ({ + sanitizedInputSecrets.map((el) => ({ ...el, - folderId, secretId: newSecretGroupByBlindIndex[el.secretBlindIndex as string][0].id })), tx @@ -574,13 +610,55 @@ export const fnSecretBulkUpdate = async ({ secretTagDAL, secretVersionTagDAL }: TFnSecretBulkUpdate) => { - const newSecrets = await secretDAL.bulkUpdate( - inputSecrets.map(({ filter, data: { tags, references, ...data } }) => ({ + const sanitizedInputSecrets = inputSecrets.map( + ({ + filter, + data: { + skipMultilineEncoding, + type, + userId, + metadata, + algorithm, + secretKeyIV, + secretKeyTag, + secretValueIV, + keyEncoding, + secretValueTag, + secretCommentIV, + secretBlindIndex, + 
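// A quick worked example of formatMultiValueEnv, used by both the old and new paths above:
// single-line values pass through untouched, while multi-line values are wrapped in quotes
// with literal \n escapes so they stay on one line in dotenv-style output.
const formatMultiValueEnvDemo = (val?: string) => {
  if (!val) return "";
  if (!val.match("\n")) return val;
  return `"${val.replace(/\n/g, "\\n")}"`;
};

// formatMultiValueEnvDemo("8080")                      -> 8080
// formatMultiValueEnvDemo("-----BEGIN KEY-----\nabc")  -> "-----BEGIN KEY-----\nabc" (quoted, one line)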
secretCommentTag, + secretKeyCiphertext, + secretReminderNote, + secretValueCiphertext, + secretCommentCiphertext, + secretReminderRepeatDays + } + }) => ({ filter: { ...filter, folderId }, - data - })), - tx + data: { + skipMultilineEncoding, + type, + userId, + metadata, + algorithm, + secretKeyIV, + secretKeyTag, + secretValueIV, + keyEncoding, + secretValueTag, + secretCommentIV, + secretBlindIndex, + secretCommentTag, + secretKeyCiphertext, + secretReminderNote, + secretValueCiphertext, + secretCommentCiphertext, + secretReminderRepeatDays + } + }) ); + + const newSecrets = await secretDAL.bulkUpdate(sanitizedInputSecrets, tx); const secretVersions = await secretVersionDAL.insertMany( newSecrets.map(({ id, createdAt, updatedAt, ...el }) => ({ ...el, @@ -663,7 +741,11 @@ export const createManySecretsRawFnFactory = ({ secretBlindIndexDAL, secretTagDAL, secretVersionTagDAL, - folderDAL + folderDAL, + secretVersionV2BridgeDAL, + secretV2BridgeDAL, + secretVersionTagV2BridgeDAL, + kmsService }: TCreateManySecretsRawFnFactory) => { const getBotKeyFn = getBotKeyFnFactory(projectBotDAL, projectDAL); const createManySecretsRawFn = async ({ @@ -673,21 +755,74 @@ export const createManySecretsRawFnFactory = ({ secrets, userId }: TCreateManySecretsRawFn) => { - const botKey = await getBotKeyFn(projectId); - if (!botKey) throw new BadRequestError({ message: "Project bot not found", name: "bot_not_found_error" }); - - await projectDAL.checkProjectUpgradeStatus(projectId); + const { botKey, shouldUseSecretV2Bridge } = await getBotKeyFn(projectId); const folder = await folderDAL.findBySecretPath(projectId, environment, secretPath); if (!folder) - throw new BadRequestError({ - message: "Folder not found for the given environment slug & secret path", + throw new NotFoundError({ + message: `Folder with path '${secretPath}' not found in environment with slug '${environment}'`, name: "Create secret" }); const folderId = folder.id; + if (shouldUseSecretV2Bridge) { + const { encryptor: secretManagerEncryptor } = await kmsService.createCipherPairWithDataKey({ + type: KmsDataKey.SecretManager, + projectId + }); + + const secretsStoredInDB = await secretV2BridgeDAL.findBySecretKeys( + folderId, + secrets.map((el) => ({ + key: el.secretName, + type: SecretType.Shared + })) + ); + if (secretsStoredInDB.length) + throw new BadRequestError({ + message: `Secret already exist: ${secretsStoredInDB.map((el) => el.key).join(",")}` + }); + + const inputSecrets = secrets.map((secret) => { + return { + type: secret.type, + userId: secret.type === SecretType.Personal ? userId : null, + key: secret.secretName, + encryptedValue: secretManagerEncryptor({ plainText: Buffer.from(secret.secretValue) }).cipherTextBlob, + encryptedComent: secret.secretComment + ? secretManagerEncryptor({ plainText: Buffer.from(secret.secretComment) }).cipherTextBlob + : null, + skipMultilineEncoding: secret.skipMultilineEncoding, + tags: secret.tags, + references: getAllSecretReferences(secret.secretValue).nestedReferences + }; + }); + + // get all tags + const tagIds = inputSecrets.flatMap(({ tags = [] }) => tags); + const tags = tagIds.length ? 
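// The long explicit destructuring in fnSecretBulkInsert/fnSecretBulkUpdate above acts as a
// column allow-list, so request-only fields such as `tags` and `references` can never leak
// into insertMany/bulkUpdate. A generic helper (an assumption for illustration, not code
// from this repo) expresses the same idea more compactly:
const pickColumns = <T extends object, K extends keyof T>(obj: T, keys: readonly K[]): Pick<T, K> => {
  const out = {} as Pick<T, K>;
  for (const key of keys) {
    if (key in obj) out[key] = obj[key];
  }
  return out;
};

// usage sketch: inputSecrets.map((s) => ({ ...pickColumns(s, SECRET_COLUMNS), folderId }))
// where SECRET_COLUMNS would be the allow-listed tuple of DB column names.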
await secretTagDAL.findManyTagsById(projectId, tagIds) : []; + if (tags.length !== tagIds.length) throw new NotFoundError({ message: "One or more tags not found" }); + + const newSecrets = await secretDAL.transaction(async (tx) => + fnSecretV2BridgeBulkInsert({ + inputSecrets: inputSecrets.map((el) => ({ + ...el, + version: 1, + tagIds: el.tags + })), + folderId, + secretDAL: secretV2BridgeDAL, + secretVersionDAL: secretVersionV2BridgeDAL, + secretTagDAL, + secretVersionTagDAL: secretVersionTagV2BridgeDAL, + tx + }) + ); + + return newSecrets; + } const blindIndexCfg = await secretBlindIndexDAL.findOne({ projectId }); - if (!blindIndexCfg) throw new BadRequestError({ message: "Blind index not found", name: "Create secret" }); + if (!blindIndexCfg) throw new NotFoundError({ message: "Blind index not found", name: "Create secret" }); // insert operation const { keyName2BlindIndex } = await fnSecretBlindIndexCheck({ @@ -699,6 +834,11 @@ export const createManySecretsRawFnFactory = ({ secretDAL }); + if (!botKey) + throw new NotFoundError({ + message: `Project bot not found for project with ID '${projectId}'. Please upgrade your project.`, + name: "bot_not_found_error" + }); const inputSecrets = secrets.map((secret) => { const secretKeyEncrypted = encryptSymmetric128BitHexKeyUTF8(secret.secretName, botKey); const secretValueEncrypted = encryptSymmetric128BitHexKeyUTF8(secret.secretValue || "", botKey); @@ -727,11 +867,11 @@ export const createManySecretsRawFnFactory = ({ // get all tags const tagIds = inputSecrets.flatMap(({ tags = [] }) => tags); const tags = tagIds.length ? await secretTagDAL.findManyTagsById(projectId, tagIds) : []; - if (tags.length !== tagIds.length) throw new BadRequestError({ message: "Tag not found" }); + if (tags.length !== tagIds.length) throw new NotFoundError({ message: "One or more tags not found" }); const newSecrets = await secretDAL.transaction(async (tx) => fnSecretBulkInsert({ - inputSecrets: inputSecrets.map(({ secretName, ...el }) => ({ + inputSecrets: inputSecrets.map(({ secretName, tags: _, ...el }) => ({ ...el, version: 0, secretBlindIndex: keyName2BlindIndex[secretName], @@ -761,7 +901,11 @@ export const updateManySecretsRawFnFactory = ({ secretBlindIndexDAL, secretTagDAL, secretVersionTagDAL, - folderDAL + folderDAL, + secretVersionTagV2BridgeDAL, + secretVersionV2BridgeDAL, + secretV2BridgeDAL, + kmsService }: TUpdateManySecretsRawFnFactory) => { const getBotKeyFn = getBotKeyFnFactory(projectBotDAL, projectDAL); const updateManySecretsRawFn = async ({ @@ -770,22 +914,98 @@ export const updateManySecretsRawFnFactory = ({ path: secretPath, secrets, // consider accepting instead ciphertext secrets userId - }: TUpdateManySecretsRawFn): Promise> => { - const botKey = await getBotKeyFn(projectId); - if (!botKey) throw new BadRequestError({ message: "Project bot not found", name: "bot_not_found_error" }); - - await projectDAL.checkProjectUpgradeStatus(projectId); + }: TUpdateManySecretsRawFn): Promise> => { + const { botKey, shouldUseSecretV2Bridge } = await getBotKeyFn(projectId); const folder = await folderDAL.findBySecretPath(projectId, environment, secretPath); if (!folder) - throw new BadRequestError({ - message: "Folder not found for the given environment slug & secret path", - name: "Update secret" + throw new NotFoundError({ + message: `Folder with path '${secretPath}' not found in environment with slug '${environment}'`, + name: "UpdateSecret" }); const folderId = folder.id; + if (shouldUseSecretV2Bridge) { + const { encryptor: 
secretManagerEncryptor } = await kmsService.createCipherPairWithDataKey({ + type: KmsDataKey.SecretManager, + projectId + }); + const secretsToUpdate = await secretV2BridgeDAL.findBySecretKeys( + folderId, + secrets.map((el) => ({ + key: el.secretName, + type: SecretType.Shared + })) + ); + if (secretsToUpdate.length !== secrets.length) + throw new NotFoundError({ message: `Secret does not exist: ${secretsToUpdate.map((el) => el.key).join(",")}` }); + + // now find any secret that needs to update its name + // same process as above + const secretsWithNewName = secrets.filter(({ newSecretName }) => Boolean(newSecretName)); + if (secretsWithNewName.length) { + const secretsWithNewNameInDB = await secretV2BridgeDAL.findBySecretKeys( + folderId, + secretsWithNewName.map((el) => ({ + key: el.newSecretName as string, + type: SecretType.Shared + })) + ); + if (secretsWithNewNameInDB.length) + throw new BadRequestError({ + message: `Secret with the new name already exists: ${secretsWithNewName.map((el) => el.newSecretName).join(",")}` + }); + } + + const secretsToUpdateInDBGroupedByKey = groupBy(secretsToUpdate, (i) => i.key); + const inputSecrets = secrets.map((secret) => { + if (secret.newSecretName === "") { + throw new BadRequestError({ message: "New secret name cannot be empty" }); + } + + return { + type: secret.type, + userId: secret.type === SecretType.Personal ? userId : null, + key: secret.newSecretName || secret.secretName, + encryptedValue: secretManagerEncryptor({ plainText: Buffer.from(secret.secretValue) }).cipherTextBlob, + encryptedComent: secret.secretComment + ? secretManagerEncryptor({ plainText: Buffer.from(secret.secretComment) }).cipherTextBlob + : null, + skipMultilineEncoding: secret.skipMultilineEncoding, + tags: secret.tags, + references: getAllSecretReferences(secret.secretValue).nestedReferences + }; + }); + + const tagIds = inputSecrets.flatMap(({ tags = [] }) => tags); + const tags = tagIds.length ? 
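// A minimal sketch of the groupBy used above (assumed to match the shape of the helper in
// @app/lib/fn): it indexes the stored rows by key so each incoming update can find its DB
// id without a per-secret query.
const groupByKey = <T>(items: T[], keyFn: (item: T) => string): Record<string, T[]> =>
  items.reduce<Record<string, T[]>>((acc, item) => {
    const k = keyFn(item);
    (acc[k] ??= []).push(item);
    return acc;
  }, {});

// const byKey = groupByKey(secretsToUpdate, (s) => s.key);
// byKey.DB_HOST?.[0]?.id  -> the stored id used in the bulk-update filter above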
await secretTagDAL.findManyTagsById(projectId, tagIds) : []; - if (tagIds.length !== tags.length) throw new BadRequestError({ message: "Tag not found" }); + if (tagIds.length !== tags.length) throw new NotFoundError({ message: "One or more tags not found" }); // now find any secret that needs to update its name // same process as above @@ -871,3 +1091,50 @@ export const updateManySecretsRawFnFactory = ({ return updateManySecretsRawFn; }; + +export const decryptSecretWithBot = ( + secret: Pick< + TSecrets, + | "secretKeyIV" + | "secretKeyTag" + | "secretKeyCiphertext" + | "secretValueIV" + | "secretValueTag" + | "secretValueCiphertext" + | "secretCommentIV" + | "secretCommentTag" + | "secretCommentCiphertext" + >, + key: string +) => { + const secretKey = decryptSymmetric128BitHexKeyUTF8({ + ciphertext: secret.secretKeyCiphertext, + iv: secret.secretKeyIV, + tag: secret.secretKeyTag, + key + }); + + const secretValue = decryptSymmetric128BitHexKeyUTF8({ + ciphertext: secret.secretValueCiphertext, + iv: secret.secretValueIV, + tag: secret.secretValueTag, + key + }); + + let secretComment = ""; + + if (secret.secretCommentCiphertext && secret.secretCommentIV && secret.secretCommentTag) { + secretComment = decryptSymmetric128BitHexKeyUTF8({ + ciphertext: secret.secretCommentCiphertext, + iv: secret.secretCommentIV, + tag: secret.secretCommentTag, + key + }); + } + + return { + secretKey, + secretValue, + secretComment + }; +}; diff --git a/backend/src/services/secret/secret-queue.ts b/backend/src/services/secret/secret-queue.ts index 42e13b4456..3d75fa4f8c 100644 --- a/backend/src/services/secret/secret-queue.ts +++ b/backend/src/services/secret/secret-queue.ts @@ -1,11 +1,26 @@ /* eslint-disable no-await-in-loop */ import { AxiosError } from "axios"; +import { + ProjectMembershipRole, + ProjectUpgradeStatus, + ProjectVersion, + TSecretSnapshotSecretsV2, + TSecretVersionsV2 +} from "@app/db/schemas"; +import { TAuditLogServiceFactory } from "@app/ee/services/audit-log/audit-log-service"; +import { Actor, EventType } from "@app/ee/services/audit-log/audit-log-types"; +import { TSecretApprovalRequestDALFactory } from "@app/ee/services/secret-approval-request/secret-approval-request-dal"; +import { TSecretRotationDALFactory } from "@app/ee/services/secret-rotation/secret-rotation-dal"; +import { TSnapshotDALFactory } from "@app/ee/services/secret-snapshot/snapshot-dal"; +import { TSnapshotSecretV2DALFactory } from "@app/ee/services/secret-snapshot/snapshot-secret-v2-dal"; +import { KeyStorePrefixes, KeyStoreTtls, TKeyStoreFactory } from "@app/keystore/keystore"; import { getConfig } from "@app/lib/config/env"; import { decryptSymmetric128BitHexKeyUTF8 } from "@app/lib/crypto"; +import { infisicalSymmetricEncypt } from "@app/lib/crypto/encryption"; import { daysToMillisecond, secondsToMillis } from "@app/lib/dates"; -import { BadRequestError } from "@app/lib/errors"; -import { groupBy, isSamePath, unique } from "@app/lib/fn"; +import { BadRequestError, NotFoundError } from "@app/lib/errors"; +import { getTimeDifferenceInSeconds, groupBy, isSamePath, unique } from "@app/lib/fn"; import { logger } from "@app/lib/logger"; import { QueueJobs, QueueName, TQueueServiceFactory } from "@app/queue"; import { TProjectBotDALFactory } from "@app/services/project-bot/project-bot-dal"; @@ -15,24 +30,40 @@ import { TSecretVersionTagDALFactory } from "@app/services/secret/secret-version import { TSecretBlindIndexDALFactory } from "@app/services/secret-blind-index/secret-blind-index-dal"; import { 
TSecretTagDALFactory } from "@app/services/secret-tag/secret-tag-dal"; +import { ActorType } from "../auth/auth-type"; import { TIntegrationDALFactory } from "../integration/integration-dal"; +import { TIntegrationAuthDALFactory } from "../integration-auth/integration-auth-dal"; import { TIntegrationAuthServiceFactory } from "../integration-auth/integration-auth-service"; import { syncIntegrationSecrets } from "../integration-auth/integration-sync-secret"; +import { TKmsServiceFactory } from "../kms/kms-service"; +import { KmsDataKey } from "../kms/kms-types"; import { TOrgDALFactory } from "../org/org-dal"; +import { TOrgServiceFactory } from "../org/org-service"; import { TProjectDALFactory } from "../project/project-dal"; +import { createProjectKey } from "../project/project-fns"; import { TProjectBotServiceFactory } from "../project-bot/project-bot-service"; import { TProjectEnvDALFactory } from "../project-env/project-env-dal"; +import { TProjectKeyDALFactory } from "../project-key/project-key-dal"; import { TProjectMembershipDALFactory } from "../project-membership/project-membership-dal"; +import { TProjectUserMembershipRoleDALFactory } from "../project-membership/project-user-membership-role-dal"; import { TSecretFolderDALFactory } from "../secret-folder/secret-folder-dal"; import { TSecretImportDALFactory } from "../secret-import/secret-import-dal"; +import { fnSecretsV2FromImports } from "../secret-import/secret-import-fns"; +import { TSecretV2BridgeDALFactory } from "../secret-v2-bridge/secret-v2-bridge-dal"; +import { expandSecretReferencesFactory, getAllSecretReferences } from "../secret-v2-bridge/secret-v2-bridge-fns"; +import { TSecretVersionV2DALFactory } from "../secret-v2-bridge/secret-version-dal"; +import { TSecretVersionV2TagDALFactory } from "../secret-v2-bridge/secret-version-tag-dal"; import { SmtpTemplates, TSmtpService } from "../smtp/smtp-service"; +import { TUserDALFactory } from "../user/user-dal"; import { TWebhookDALFactory } from "../webhook/webhook-dal"; import { fnTriggerWebhook } from "../webhook/webhook-fns"; import { TSecretDALFactory } from "./secret-dal"; import { interpolateSecrets } from "./secret-fns"; import { TCreateSecretReminderDTO, + TFailedIntegrationSyncEmailsPayload, THandleReminderDTO, + TIntegrationSyncPayload, TRemoveSecretReminderDTO, TSyncSecretsDTO } from "./secret-types"; @@ -41,22 +72,37 @@ export type TSecretQueueFactory = ReturnType; type TSecretQueueFactoryDep = { queueService: TQueueServiceFactory; integrationDAL: Pick; + integrationAuthDAL: Pick; projectBotService: Pick; integrationAuthService: Pick; folderDAL: TSecretFolderDALFactory; secretDAL: TSecretDALFactory; - secretImportDAL: Pick; + secretImportDAL: Pick; webhookDAL: Pick; - projectEnvDAL: Pick; + projectEnvDAL: Pick; projectDAL: TProjectDALFactory; projectBotDAL: TProjectBotDALFactory; - projectMembershipDAL: Pick; + projectKeyDAL: Pick; + projectMembershipDAL: Pick; smtpService: TSmtpService; orgDAL: Pick; secretVersionDAL: TSecretVersionDALFactory; secretBlindIndexDAL: TSecretBlindIndexDALFactory; secretTagDAL: TSecretTagDALFactory; + userDAL: Pick; secretVersionTagDAL: TSecretVersionTagDALFactory; + kmsService: TKmsServiceFactory; + secretV2BridgeDAL: TSecretV2BridgeDALFactory; + secretVersionV2BridgeDAL: Pick; + secretVersionTagV2BridgeDAL: Pick; + secretRotationDAL: Pick; + secretApprovalRequestDAL: Pick; + snapshotDAL: Pick; + snapshotSecretV2BridgeDAL: Pick; + keyStore: Pick; + auditLogService: Pick; + orgService: Pick; + projectUserMembershipRoleDAL: Pick; }; 
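// In the dependency block above, each DAL is narrowed with Pick to just the methods this
// factory calls, which keeps the dependency surface explicit and makes the factory easy to
// stub in tests. A minimal sketch of the pattern, using hypothetical names rather than the
// real DAL shapes:
type TExampleUserDAL = {
  findById: (id: string) => Promise<{ id: string; email: string } | undefined>;
  deleteById: (id: string) => Promise<void>;
};

type TExampleQueueDep = {
  // only findById is reachable inside the factory; deleteById is excluded by construction
  userDAL: Pick<TExampleUserDAL, "findById">;
};

const exampleQueueFactory = ({ userDAL }: TExampleQueueDep) => ({
  notify: async (userId: string) => (await userDAL.findById(userId))?.email ?? null
});

// a test can pass a one-method stub:
// exampleQueueFactory({ userDAL: { findById: async (id) => ({ id, email: "a@b.c" }) } })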
export type TGetSecrets = { @@ -66,6 +112,8 @@ export type TGetSecrets = { }; const MAX_SYNC_SECRET_DEPTH = 5; +const SYNC_SECRET_DEBOUNCE_INTERVAL_MS = 3000; + export const uniqueSecretQueueKey = (environment: string, secretPath: string) => `secret-queue-dedupe-${environment}-${secretPath}`; @@ -73,14 +121,18 @@ type TIntegrationSecret = Record< string, { value: string; comment?: string; skipMultilineEncoding?: boolean | null | undefined } >; + +// TODO(akhilmhdh): split this into multiple queue export const secretQueueFactory = ({ queueService, integrationDAL, + integrationAuthDAL, projectBotService, integrationAuthService, secretDAL, secretImportDAL, folderDAL, + userDAL, webhookDAL, projectEnvDAL, orgDAL, @@ -91,7 +143,20 @@ export const secretQueueFactory = ({ secretVersionDAL, secretBlindIndexDAL, secretTagDAL, - secretVersionTagDAL + secretVersionTagDAL, + secretV2BridgeDAL, + secretVersionV2BridgeDAL, + kmsService, + secretVersionTagV2BridgeDAL, + secretRotationDAL, + snapshotDAL, + snapshotSecretV2BridgeDAL, + secretApprovalRequestDAL, + keyStore, + auditLogService, + orgService, + projectUserMembershipRoleDAL, + projectKeyDAL }: TSecretQueueFactoryDep) => { const removeSecretReminder = async (dto: TRemoveSecretReminderDTO) => { const appCfg = getConfig(); @@ -106,6 +171,39 @@ export const secretQueueFactory = ({ ); }; + const $generateActor = async (actorId?: string, isManual?: boolean): Promise => { + if (isManual && actorId) { + const user = await userDAL.findById(actorId); + + if (!user) { + throw new Error("User not found"); + } + + return { + type: ActorType.USER, + metadata: { + email: user.email, + username: user.username, + userId: user.id + } + }; + } + + return { + type: ActorType.PLATFORM, + metadata: {} + }; + }; + + const $getJobKey = (projectId: string, environmentSlug: string, secretPath: string) => { + // the idea here is a timestamp based id which will be constant in a 3s interval + const timestampId = Math.floor(Date.now() / SYNC_SECRET_DEBOUNCE_INTERVAL_MS); + + return `secret-queue-sync_${projectId}_${environmentSlug}_${secretPath}_${timestampId}` + .replace("/", "-") + .replace(":", "-"); + }; + const addSecretReminder = async ({ oldSecret, newSecret, projectId }: TCreateSecretReminderDTO) => { try { const appCfg = getConfig(); @@ -195,7 +293,11 @@ export const secretQueueFactory = ({ secretBlindIndexDAL, secretTagDAL, secretVersionTagDAL, - folderDAL + folderDAL, + kmsService, + secretVersionV2BridgeDAL, + secretV2BridgeDAL, + secretVersionTagV2BridgeDAL }); const updateManySecretsRawFn = updateManySecretsRawFnFactory({ @@ -206,9 +308,94 @@ export const secretQueueFactory = ({ secretBlindIndexDAL, secretTagDAL, secretVersionTagDAL, - folderDAL + folderDAL, + kmsService, + secretVersionV2BridgeDAL, + secretV2BridgeDAL, + secretVersionTagV2BridgeDAL }); + /** + * Return the secrets in a given [folderId] including secrets from + * nested imported folders recursively. 
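// A sketch of the debounce implemented by $getJobKey above: Date.now() is bucketed into
// SYNC_SECRET_DEBOUNCE_INTERVAL_MS windows, so every sync requested for the same
// project/environment/path within one window yields the same job id, and a queue that
// de-duplicates on jobId (BullMQ-style behavior is assumed here) keeps one job per burst.
const DEBOUNCE_MS = 3000;

const debouncedJobId = (projectId: string, envSlug: string, secretPath: string, now = Date.now()) => {
  const bucket = Math.floor(now / DEBOUNCE_MS);
  return `secret-queue-sync_${projectId}_${envSlug}_${secretPath}_${bucket}`.replace("/", "-").replace(":", "-");
};

// debouncedJobId("p1", "dev", "/api", 9_000) === debouncedJobId("p1", "dev", "/api", 11_999) // same 3s bucket
// note: like the code above, replace("/", "-") only rewrites the first occurrence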
+ */ + const getIntegrationSecretsV2 = async (dto: { + projectId: string; + environment: string; + secretPath: string; + folderId: string; + depth: number; + decryptor: (value: Buffer | null | undefined) => string; + }) => { + const content: TIntegrationSecret = {}; + if (dto.depth > MAX_SYNC_SECRET_DEPTH) { + logger.info( + `getIntegrationSecrets: secret depth exceeded for [projectId=${dto.projectId}] [folderId=${dto.folderId}] [depth=${dto.depth}]` + ); + return content; + } + const { expandSecretReferences } = expandSecretReferencesFactory({ + decryptSecretValue: dto.decryptor, + secretDAL: secretV2BridgeDAL, + folderDAL, + projectId: dto.projectId, + // on integration expand all secrets + canExpandValue: () => true + }); + // process secrets in current folder + const secrets = await secretV2BridgeDAL.findByFolderId(dto.folderId); + + await Promise.allSettled( + secrets.map(async (secret) => { + const secretKey = secret.key; + const secretValue = dto.decryptor(secret.encryptedValue); + const expandedSecretValue = await expandSecretReferences({ + environment: dto.environment, + secretPath: dto.secretPath, + skipMultilineEncoding: secret.skipMultilineEncoding, + value: secretValue + }); + content[secretKey] = { value: expandedSecretValue || "" }; + + if (secret.encryptedComment) { + const commentValue = dto.decryptor(secret.encryptedComment); + content[secretKey].comment = commentValue; + } + + content[secretKey].skipMultilineEncoding = Boolean(secret.skipMultilineEncoding); + }) + ); + + // check if current folder has any imports from other folders + const secretImports = await secretImportDAL.find({ folderId: dto.folderId, isReplication: false }); + + // if no imports then return secrets in the current folder + if (!secretImports.length) return content; + const importedSecrets = await fnSecretsV2FromImports({ + decryptor: dto.decryptor, + folderDAL, + secretDAL: secretV2BridgeDAL, + expandSecretReferences, + secretImportDAL, + secretImports, + hasSecretAccess: () => true + }); + + for (let i = importedSecrets.length - 1; i >= 0; i -= 1) { + for (let j = 0; j < importedSecrets[i].secrets.length; j += 1) { + const importedSecret = importedSecrets[i].secrets[j]; + if (!content[importedSecret.key]) { + content[importedSecret.key] = { + skipMultilineEncoding: importedSecret.skipMultilineEncoding, + comment: importedSecret.secretComment, + value: importedSecret.secretValue || "" + }; + } + } + } + return content; + }; + /** * Return the secrets in a given [folderId] including secrets from * nested imported folders recursively. 
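// A sketch of the merge order implemented above: imported folders are walked in reverse and
// a key is only copied when not already present, so the current folder's own secrets win
// over imports, and the import listed last wins over those listed earlier.
type SecretMap = Record<string, { value: string }>;

const mergeImportedSecrets = (own: SecretMap, imports: SecretMap[]): SecretMap => {
  const merged: SecretMap = { ...own };
  for (let i = imports.length - 1; i >= 0; i -= 1) {
    for (const [key, secret] of Object.entries(imports[i])) {
      if (!merged[key]) merged[key] = secret; // first writer (highest precedence) keeps the key
    }
  }
  return merged;
};

// mergeImportedSecrets({ A: { value: "own" } }, [{ B: { value: "first" } }, { B: { value: "second" } }])
// -> A keeps "own"; B comes from the second (last-listed) import.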
@@ -216,6 +403,7 @@ export const secretQueueFactory = ({ const getIntegrationSecrets = async (dto: { projectId: string; environment: string; + secretPath: string; folderId: string; key: string; depth: number; @@ -228,46 +416,52 @@ export const secretQueueFactory = ({ return content; } - // process secrets in current folder - const secrets = await secretDAL.findByFolderId(dto.folderId); - secrets.forEach((secret) => { - const secretKey = decryptSymmetric128BitHexKeyUTF8({ - ciphertext: secret.secretKeyCiphertext, - iv: secret.secretKeyIV, - tag: secret.secretKeyTag, - key: dto.key - }); - - const secretValue = decryptSymmetric128BitHexKeyUTF8({ - ciphertext: secret.secretValueCiphertext, - iv: secret.secretValueIV, - tag: secret.secretValueTag, - key: dto.key - }); - - content[secretKey] = { value: secretValue }; - - if (secret.secretCommentCiphertext && secret.secretCommentIV && secret.secretCommentTag) { - const commentValue = decryptSymmetric128BitHexKeyUTF8({ - ciphertext: secret.secretCommentCiphertext, - iv: secret.secretCommentIV, - tag: secret.secretCommentTag, - key: dto.key - }); - content[secretKey].comment = commentValue; - } - - content[secretKey].skipMultilineEncoding = Boolean(secret.skipMultilineEncoding); - }); - - const expandSecrets = interpolateSecrets({ + const expandSecretReferences = interpolateSecrets({ projectId: dto.projectId, secretEncKey: dto.key, folderDAL, secretDAL }); - await expandSecrets(content); + // process secrets in current folder + const secrets = await secretDAL.findByFolderId(dto.folderId); + await Promise.allSettled( + secrets.map(async (secret) => { + const secretKey = decryptSymmetric128BitHexKeyUTF8({ + ciphertext: secret.secretKeyCiphertext, + iv: secret.secretKeyIV, + tag: secret.secretKeyTag, + key: dto.key + }); + + const secretValue = decryptSymmetric128BitHexKeyUTF8({ + ciphertext: secret.secretValueCiphertext, + iv: secret.secretValueIV, + tag: secret.secretValueTag, + key: dto.key + }); + const expandedSecretValue = await expandSecretReferences({ + environment: dto.environment, + secretPath: dto.secretPath, + skipMultilineEncoding: secret.skipMultilineEncoding, + value: secretValue + }); + + content[secretKey] = { value: expandedSecretValue || "" }; + + if (secret.secretCommentCiphertext && secret.secretCommentIV && secret.secretCommentTag) { + const commentValue = decryptSymmetric128BitHexKeyUTF8({ + ciphertext: secret.secretCommentCiphertext, + iv: secret.secretCommentIV, + tag: secret.secretCommentTag, + key: dto.key + }); + content[secretKey].comment = commentValue; + } + + content[secretKey].skipMultilineEncoding = Boolean(secret.skipMultilineEncoding); + }) + ); // check if current folder has any imports from other folders const secretImport = await secretImportDAL.find({ folderId: dto.folderId, isReplication: false }); @@ -291,7 +485,8 @@ export const secretQueueFactory = ({ projectId: dto.projectId, folderId: folder.id, key: dto.key, - depth: dto.depth + 1 + depth: dto.depth + 1, + secretPath: dto.secretPath }); // add the imported secrets to the current folder secrets @@ -302,9 +497,11 @@ export const secretQueueFactory = ({ return content; }; - const syncIntegrations = async (dto: TGetSecrets & { deDupeQueue?: Record }) => { + const syncIntegrations = async ( + dto: TGetSecrets & { isManual?: boolean; actorId?: string; deDupeQueue?: Record } + ) => { await queueService.queue(QueueName.IntegrationSync, QueueJobs.IntegrationSync, dto, { - attempts: 3, + attempts: 5, delay: 1000, backoff: { type: "exponential", @@ -317,10 +514,10 
@@ export const secretQueueFactory = ({ const replicateSecrets = async (dto: Omit) => { await queueService.queue(QueueName.SecretReplication, QueueJobs.SecretReplication, dto, { - attempts: 3, + attempts: 5, backoff: { type: "exponential", - delay: 2000 + delay: 3000 }, removeOnComplete: true, removeOnFail: true @@ -337,6 +534,7 @@ export const secretQueueFactory = ({ logger.info( `syncSecrets: syncing project secrets where [projectId=${dto.projectId}] [environment=${dto.environmentSlug}] [path=${dto.secretPath}]` ); + const deDuplicationKey = uniqueSecretQueueKey(dto.environmentSlug, dto.secretPath); if ( !dto.excludeReplication @@ -361,7 +559,8 @@ export const secretQueueFactory = ({ { removeOnFail: true, removeOnComplete: true, - delay: 1000, + jobId: $getJobKey(dto.projectId, dto.environmentSlug, dto.secretPath), + delay: SYNC_SECRET_DEBOUNCE_INTERVAL_MS, attempts: 5, backoff: { type: "exponential", @@ -370,6 +569,17 @@ export const secretQueueFactory = ({ } ); }; + const sendFailedIntegrationSyncEmails = async (payload: TFailedIntegrationSyncEmailsPayload) => { + const appCfg = getConfig(); + if (!appCfg.isSmtpConfigured) return; + + await queueService.queue(QueueName.IntegrationSync, QueueJobs.SendFailedIntegrationSyncEmails, payload, { + jobId: `send-failed-integration-sync-emails-${payload.projectId}-${payload.secretPath}-${payload.environmentSlug}`, + delay: 1_000 * 60, // 1 minute + removeOnFail: true, + removeOnComplete: true + }); + }; queueService.start(QueueName.SecretSync, async (job) => { const { @@ -400,7 +610,7 @@ export const secretQueueFactory = ({ } } ); - await syncIntegrations({ secretPath, projectId, environment, deDupeQueue }); + await syncIntegrations({ secretPath, projectId, environment, deDupeQueue, isManual: false }); if (!excludeReplication) { await replicateSecrets({ _deDupeReplicationQueue: deDupeReplicationQueue, @@ -416,17 +626,53 @@ export const secretQueueFactory = ({ }); queueService.start(QueueName.IntegrationSync, async (job) => { - const { environment, projectId, secretPath, depth = 1, deDupeQueue = {} } = job.data; - if (depth > MAX_SYNC_SECRET_DEPTH) return; + if (job.name === QueueJobs.SendFailedIntegrationSyncEmails) { + const appCfg = getConfig(); - const folder = await folderDAL.findBySecretPath(projectId, environment, secretPath); - if (!folder) { - logger.error(new Error("Secret path not found")); - return; + const jobPayload = job.data as TFailedIntegrationSyncEmailsPayload; + + const projectMembers = await projectMembershipDAL.findAllProjectMembers(jobPayload.projectId); + const project = await projectDAL.findById(jobPayload.projectId); + + // Only send emails to admins, and if it's a manual trigger, only send it to the person who triggered it (if the actor is an admin as well) + const filteredProjectMembers = projectMembers + .filter((member) => member.roles.some((role) => role.role === ProjectMembershipRole.Admin)) + .filter((member) => + jobPayload.manuallyTriggeredByUserId ? member.userId === jobPayload.manuallyTriggeredByUserId : true + ); + + await smtpService.sendMail({ + recipients: filteredProjectMembers.map((member) => member.user.email!), + template: SmtpTemplates.IntegrationSyncFailed, + subjectLine: `Integration Sync Failed`, + substitutions: { + syncMessage: jobPayload.count === 1 ? jobPayload.syncMessage : undefined, // We only display the sync message if it's a single integration, so we can just grab the first one in the array. 
+ secretPath: jobPayload.secretPath, + environment: jobPayload.environmentName, + count: jobPayload.count, + projectName: project.name, + integrationUrl: `${appCfg.SITE_URL}/integrations/${project.id}` + } + }); } - // start syncing all linked imports also - if (depth < MAX_SYNC_SECRET_DEPTH) { + if (job.name === QueueJobs.IntegrationSync) { + const { + environment, + actorId, + isManual, + projectId, + secretPath, + depth = 1, + deDupeQueue = {} + } = job.data as TIntegrationSyncPayload; + if (depth > MAX_SYNC_SECRET_DEPTH) return; + + const folder = await folderDAL.findBySecretPath(projectId, environment, secretPath); + if (!folder) { + throw new Error("Secret path not found"); + } + // find all imports made with the given environment and secret path const linkSourceDto = { projectId, @@ -442,7 +688,7 @@ export const secretQueueFactory = ({ const importedFolders = await folderDAL.findSecretPathByFolderIds(projectId, importedFolderIds); const foldersGroupedById = groupBy(importedFolders.filter(Boolean), (i) => i?.id as string); logger.info( - `getIntegrationSecrets: Syncing secret due to link change [jobId=${job.id}] [projectId=${job.data.projectId}] [environment=${job.data.environment}] [secretPath=${job.data.secretPath}] [depth=${depth}]` + `getIntegrationSecrets: Syncing secret due to link change [jobId=${job.id}] [projectId=${job.data.projectId}] [environment=${environment}] [secretPath=${job.data.secretPath}] [depth=${depth}]` ); await Promise.all( imports @@ -469,25 +715,39 @@ export const secretQueueFactory = ({ ) ); } - - const secretReferences = await secretDAL.findReferencedSecretReferences( - projectId, - folder.environment.slug, - secretPath - ); - if (secretReferences.length) { - const referencedFolderIds = unique(secretReferences, (i) => i.folderId).map(({ folderId }) => folderId); + const { shouldUseSecretV2Bridge, botKey } = await projectBotService.getBotKey(projectId); + const { decryptor: secretManagerDecryptor } = await kmsService.createCipherPairWithDataKey({ + type: KmsDataKey.SecretManager, + projectId + }); + let referencedFolderIds; + if (shouldUseSecretV2Bridge) { + const secretReferences = await secretV2BridgeDAL.findReferencedSecretReferences( + projectId, + folder.environment.slug, + secretPath + ); + referencedFolderIds = unique(secretReferences, (i) => i.folderId).map(({ folderId }) => folderId); + } else { + const secretReferences = await secretDAL.findReferencedSecretReferences( + projectId, + folder.environment.slug, + secretPath + ); + referencedFolderIds = unique(secretReferences, (i) => i.folderId).map(({ folderId }) => folderId); + } + if (referencedFolderIds.length) { const referencedFolders = await folderDAL.findSecretPathByFolderIds(projectId, referencedFolderIds); const referencedFoldersGroupedById = groupBy(referencedFolders.filter(Boolean), (i) => i?.id as string); logger.info( - `getIntegrationSecrets: Syncing secret due to reference change [jobId=${job.id}] [projectId=${job.data.projectId}] [environment=${job.data.environment}] [secretPath=${job.data.secretPath}] [depth=${depth}]` + `getIntegrationSecrets: Syncing secret due to reference change [jobId=${job.id}] [projectId=${job.data.projectId}] [environment=${environment}] [secretPath=${job.data.secretPath}] [depth=${depth}]` ); await Promise.all( - secretReferences - .filter(({ folderId }) => Boolean(referencedFoldersGroupedById[folderId][0]?.path)) + referencedFolderIds + .filter((folderId) => Boolean(referencedFoldersGroupedById[folderId][0]?.path)) // filter out already synced ones 
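// A hedged sketch of the fan-out guard used around syncSecrets above: a shared map marks
// environment/path pairs already queued in this cascade, and the depth counter mirrors
// MAX_SYNC_SECRET_DEPTH so chains of imports and references cannot recurse forever.
// `enqueue` is a hypothetical stand-in for syncSecrets.
const dedupeKeyFor = (env: string, secretPath: string) => `secret-queue-dedupe-${env}-${secretPath}`;

const fanOutSync = async (
  targets: { environmentSlug: string; secretPath: string }[],
  enqueue: (target: { environmentSlug: string; secretPath: string }, depth: number) => Promise<void>,
  deDupeQueue: Record<string, boolean>,
  depth: number
) => {
  if (depth > 5) return;
  await Promise.all(
    targets
      .filter((t) => !deDupeQueue[dedupeKeyFor(t.environmentSlug, t.secretPath)])
      .map((t) => {
        deDupeQueue[dedupeKeyFor(t.environmentSlug, t.secretPath)] = true;
        return enqueue(t, depth + 1);
      })
  );
};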
.filter( - ({ folderId }) => + (folderId) => !deDupeQueue[ uniqueSecretQueueKey( referencedFoldersGroupedById[folderId][0]?.environmentSlug as string, @@ -495,7 +755,7 @@ export const secretQueueFactory = ({ ) ] ) - .map(({ folderId }) => + .map((folderId) => syncSecrets({ projectId, secretPath: referencedFoldersGroupedById[folderId][0]?.path as string, @@ -507,85 +767,231 @@ export const secretQueueFactory = ({ ) ); } - } else { - logger.info(`getIntegrationSecrets: Secret depth exceeded for [projectId=${projectId}] [folderId=${folder.id}]`); - } - const integrations = await integrationDAL.findByProjectIdV2(projectId, environment); // note: returns array of integrations + integration auths in this environment - const toBeSyncedIntegrations = integrations.filter( - // note: sync only the integrations sourced from secretPath - ({ secretPath: integrationSecPath, isActive }) => isActive && isSamePath(secretPath, integrationSecPath) - ); + const lock = await keyStore.acquireLock( + [KeyStorePrefixes.SyncSecretIntegrationLock(projectId, environment, secretPath)], + 60000, + { + retryCount: 10, + retryDelay: 3000, + retryJitter: 500 + } + ); - if (!integrations.length) return; - logger.info( - `getIntegrationSecrets: secret integration sync started [jobId=${job.id}] [jobId=${job.id}] [projectId=${job.data.projectId}] [environment=${job.data.environment}] [secretPath=${job.data.secretPath}] [depth=${job.data.depth}]` - ); - for (const integration of toBeSyncedIntegrations) { - const integrationAuth = { - ...integration.integrationAuth, - createdAt: new Date(), - updatedAt: new Date(), - projectId: integration.projectId - }; - - const botKey = await projectBotService.getBotKey(projectId); - const { accessToken, accessId } = await integrationAuthService.getIntegrationAccessToken(integrationAuth, botKey); - const secrets = await getIntegrationSecrets({ - environment, - projectId, - folderId: folder.id, - key: botKey, - depth: 1 - }); - const suffixedSecrets: typeof secrets = {}; - const metadata = integration.metadata as Record; - if (metadata) { - Object.keys(secrets).forEach((key) => { - const prefix = metadata?.secretPrefix || ""; - const suffix = metadata?.secretSuffix || ""; - const newKey = prefix + key + suffix; - suffixedSecrets[newKey] = secrets[key]; - }); - } + const integrationsFailedToSync: { integrationId: string; syncMessage?: string }[] = []; + const lockAcquiredTime = new Date(); + // akhilmhdh: this try catch is for lock release try { - await syncIntegrationSecrets({ - createManySecretsRawFn, - updateManySecretsRawFn, - integrationDAL, - integration, - integrationAuth, - secrets: Object.keys(suffixedSecrets).length !== 0 ? 
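// A sketch of the serialize-and-cool-down pattern introduced above. The keystore API here
// (acquireLock/getItem) is an assumed simplification of TKeyStoreFactory, shown only to make
// the sequencing explicit: take a per-project/env/path lock (with retries and jitter as
// above), wait out a short cooldown since the last run, sync, then always release.
const INTEGRATION_COOLDOWN_MS = 2000;

const runIntegrationSyncExclusively = async (
  keyStore: {
    acquireLock: (keys: string[], ttlMs: number) => Promise<{ release: () => Promise<void> }>;
    getItem: (key: string) => Promise<string | null>;
  },
  lockKey: string,
  lastRunKey: string,
  sync: () => Promise<void>
) => {
  const lock = await keyStore.acquireLock([lockKey], 60_000);
  try {
    const lastRunIso = await keyStore.getItem(lastRunKey);
    if (lastRunIso) {
      const elapsed = Date.now() - new Date(lastRunIso).getTime();
      // give the downstream provider room to breathe between consecutive syncs
      if (elapsed < INTEGRATION_COOLDOWN_MS) {
        await new Promise((resolve) => {
          setTimeout(resolve, INTEGRATION_COOLDOWN_MS);
        });
      }
    }
    await sync();
  } finally {
    await lock.release(); // released even when the sync throws, as in the try/finally above
  }
};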
suffixedSecrets : secrets, - accessId: accessId as string, - accessToken, - appendices: { - prefix: metadata?.secretPrefix || "", - suffix: metadata?.secretSuffix || "" + const lastRunSyncIntegrationTimestamp = await keyStore.getItem( + KeyStorePrefixes.SyncSecretIntegrationLastRunTimestamp(projectId, environment, secretPath) + ); + + // check whether the integration should wait or not + if (lastRunSyncIntegrationTimestamp) { + const INTEGRATION_INTERVAL = 2000; + + const timeDifferenceWithLastIntegration = getTimeDifferenceInSeconds( + lockAcquiredTime.toISOString(), + lastRunSyncIntegrationTimestamp + ); + + // give some time for integration to breath + if (timeDifferenceWithLastIntegration < INTEGRATION_INTERVAL) + await new Promise((resolve) => { + setTimeout(resolve, INTEGRATION_INTERVAL); + }); + } + + const integrations = await integrationDAL.findByProjectIdV2(projectId, environment); // note: returns array of integrations + integration auths in this environment + const toBeSyncedIntegrations = integrations.filter( + // note: sync only the integrations sourced from secretPath + ({ secretPath: integrationSecPath, isActive }) => isActive && isSamePath(secretPath, integrationSecPath) + ); + + if (!integrations.length) return; + logger.info( + `getIntegrationSecrets: secret integration sync started [jobId=${job.id}] [jobId=${job.id}] [projectId=${job.data.projectId}] [environment=${environment}] [secretPath=${job.data.secretPath}] [depth=${depth}]` + ); + const secrets = shouldUseSecretV2Bridge + ? await getIntegrationSecretsV2({ + environment, + projectId, + folderId: folder.id, + depth: 1, + secretPath, + decryptor: (value) => (value ? secretManagerDecryptor({ cipherTextBlob: value }).toString() : "") + }) + : await getIntegrationSecrets({ + environment, + projectId, + folderId: folder.id, + key: botKey as string, + depth: 1, + secretPath + }); + + for (const integration of toBeSyncedIntegrations) { + const integrationAuth = { + ...integration.integrationAuth, + createdAt: new Date(), + updatedAt: new Date(), + projectId: integration.projectId + }; + + const { accessToken, accessId } = await integrationAuthService.getIntegrationAccessToken( + integrationAuth, + shouldUseSecretV2Bridge, + botKey + ); + let awsAssumeRoleArn = null; + if (shouldUseSecretV2Bridge) { + if (integrationAuth.encryptedAwsAssumeIamRoleArn) { + awsAssumeRoleArn = secretManagerDecryptor({ + cipherTextBlob: Buffer.from(integrationAuth.encryptedAwsAssumeIamRoleArn) + }).toString(); + } + } else if ( + integrationAuth.awsAssumeIamRoleArnTag && + integrationAuth.awsAssumeIamRoleArnIV && + integrationAuth.awsAssumeIamRoleArnCipherText + ) { + awsAssumeRoleArn = decryptSymmetric128BitHexKeyUTF8({ + ciphertext: integrationAuth.awsAssumeIamRoleArnCipherText, + iv: integrationAuth.awsAssumeIamRoleArnIV, + tag: integrationAuth.awsAssumeIamRoleArnTag, + key: botKey as string + }); } - }); - await integrationDAL.updateById(integration.id, { - lastSyncJobId: job.id, - lastUsed: new Date(), - syncMessage: "", - isSynced: true - }); - } catch (err: unknown) { - logger.info("Secret integration sync error: %o", err); - const message = - err instanceof AxiosError ? 
JSON.stringify((err as AxiosError)?.response?.data) : (err as Error)?.message; + const suffixedSecrets: typeof secrets = {}; + const metadata = integration.metadata as Record; + if (metadata) { + Object.keys(secrets).forEach((key) => { + const prefix = metadata?.secretPrefix || ""; + const suffix = metadata?.secretSuffix || ""; + const newKey = prefix + key + suffix; + suffixedSecrets[newKey] = secrets[key]; + }); + } - await integrationDAL.updateById(integration.id, { - lastSyncJobId: job.id, - lastUsed: new Date(), - syncMessage: message, - isSynced: false - }); + // akhilmhdh: this try catch is for catching integration error and saving it in db + try { + // akhilmhdh: this needs to changed later to be more easier to use + // at present this is not at all extendable like to add a new parameter for just one integration need to modify multiple places + const response = await syncIntegrationSecrets({ + createManySecretsRawFn, + updateManySecretsRawFn, + integrationDAL, + integration, + integrationAuth, + secrets: Object.keys(suffixedSecrets).length !== 0 ? suffixedSecrets : secrets, + accessId: accessId as string, + awsAssumeRoleArn, + accessToken, + projectId, + appendices: { + prefix: metadata?.secretPrefix || "", + suffix: metadata?.secretSuffix || "" + } + }); + + await auditLogService.createAuditLog({ + projectId, + actor: await $generateActor(actorId, isManual), + event: { + type: EventType.INTEGRATION_SYNCED, + metadata: { + integrationId: integration.id, + isSynced: response?.isSynced ?? true, + lastSyncJobId: job?.id ?? "", + lastUsed: new Date(), + syncMessage: response?.syncMessage ?? "" + } + } + }); + + await integrationDAL.updateById(integration.id, { + lastSyncJobId: job.id, + lastUsed: new Date(), + syncMessage: response?.syncMessage ?? "", + isSynced: response?.isSynced ?? true + }); + + // May be undefined, if it's undefined we assume the sync was successful, hence the strict equality type check. + if (response?.isSynced === false) { + integrationsFailedToSync.push({ + integrationId: integration.id, + syncMessage: response.syncMessage + }); + } + } catch (err) { + logger.error( + err, + `Secret integration sync error [projectId=${job.data.projectId}] [environment=${environment}] [secretPath=${job.data.secretPath}]` + ); + + const message = + (err instanceof AxiosError ? JSON.stringify(err?.response?.data) : (err as Error)?.message) || + "Unknown error occurred."; + + await auditLogService.createAuditLog({ + projectId, + actor: await $generateActor(actorId, isManual), + event: { + type: EventType.INTEGRATION_SYNCED, + metadata: { + integrationId: integration.id, + isSynced: false, + lastSyncJobId: job?.id ?? 
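import { AxiosError } from "axios";

// A sketch of the failure policy implemented below: extract a readable message (Axios
// errors carry the useful payload in response.data), then re-throw so the queue retries
// with backoff, except on the final attempt, where the failure is persisted and reported
// instead. The attemptsStarted/opts.attempts fields mirror BullMQ job metadata and are an
// assumption of this example.
const handleIntegrationSyncError = (err: unknown, job: { attemptsStarted: number; opts: { attempts?: number } }) => {
  const message =
    (err instanceof AxiosError ? JSON.stringify(err.response?.data) : (err as Error)?.message) ||
    "Unknown error occurred.";

  if (job.attemptsStarted !== job.opts.attempts) {
    throw err; // not the last attempt: let the queue back off and retry
  }
  return message; // last attempt: record the message and notify instead of retrying
};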
"", + lastUsed: new Date(), + syncMessage: message + } + } + }); + + // re-throw error to re-run job unless final attempt, then log and send fail email + if (job.attemptsStarted !== job.opts.attempts) { + throw err; + } + + await integrationDAL.updateById(integration.id, { + lastSyncJobId: job.id, + syncMessage: message, + isSynced: false + }); + + integrationsFailedToSync.push({ + integrationId: integration.id, + syncMessage: message + }); + } + } + } finally { + await lock.release(); + if (integrationsFailedToSync.length) { + await sendFailedIntegrationSyncEmails({ + count: integrationsFailedToSync.length, + environmentName: folder.environment.name, + environmentSlug: environment, + ...(isManual && + actorId && { + manuallyTriggeredByUserId: actorId + }), + projectId, + secretPath, + syncMessage: integrationsFailedToSync[0].syncMessage + }); + } } - } - logger.info("Secret integration sync ended: %s", job.id); + await keyStore.setItemWithExpiry( + KeyStorePrefixes.SyncSecretIntegrationLastRunTimestamp(projectId, environment, secretPath), + KeyStoreTtls.SetSyncSecretIntegrationLastRunTimestampInSeconds, + lockAcquiredTime.toISOString() + ); + logger.info("Secret integration sync ended: %s", job.id); + } }); queueService.start(QueueName.SecretReminder, async ({ data }) => { @@ -625,17 +1031,416 @@ export const secretQueueFactory = ({ }); }); + const startSecretV2Migration = async (projectId: string) => { + await queueService.queue( + QueueName.ProjectV3Migration, + QueueJobs.ProjectV3Migration, + { projectId }, + { + removeOnComplete: true, + removeOnFail: true + } + ); + }; + + queueService.start(QueueName.ProjectV3Migration, async (job) => { + const { projectId } = job.data; + const { + botKey, + shouldUseSecretV2Bridge: isProjectUpgradedToV3, + project, + bot + } = await projectBotService.getBotKey(projectId); + if (isProjectUpgradedToV3 || project.upgradeStatus === ProjectUpgradeStatus.InProgress) { + return; + } + + if (!botKey) throw new NotFoundError({ message: `Project bot not found for project ${projectId}` }); + await projectDAL.updateById(projectId, { upgradeStatus: ProjectUpgradeStatus.InProgress }); + + const { encryptor: secretManagerEncryptor } = await kmsService.createCipherPairWithDataKey({ + projectId, + type: KmsDataKey.SecretManager + }); + + const folders = await folderDAL.findByProjectId(projectId); + // except secret version and snapshot migrate rest of everything first in a transaction + await secretDAL.transaction(async (tx) => { + // if project v1 create the project ghost user + if (project.version === ProjectVersion.V1) { + const ghostUser = await orgService.addGhostUser(project.orgId, tx); + const projectMembership = await projectMembershipDAL.create( + { + userId: ghostUser.user.id, + projectId: project.id + }, + tx + ); + await projectUserMembershipRoleDAL.create( + { projectMembershipId: projectMembership.id, role: ProjectMembershipRole.Admin }, + tx + ); + + const { key: encryptedProjectKey, iv: encryptedProjectKeyIv } = createProjectKey({ + publicKey: ghostUser.keys.publicKey, + privateKey: ghostUser.keys.plainPrivateKey, + plainProjectKey: botKey + }); + + // 4. Save the project key for the ghost user. 
+ await projectKeyDAL.create( + { + projectId: project.id, + receiverId: ghostUser.user.id, + encryptedKey: encryptedProjectKey, + nonce: encryptedProjectKeyIv, + senderId: ghostUser.user.id + }, + tx + ); + const { iv, tag, ciphertext, encoding, algorithm } = infisicalSymmetricEncypt(ghostUser.keys.plainPrivateKey); + await projectBotDAL.updateById( + bot.id, + { + tag, + iv, + encryptedProjectKey, + encryptedProjectKeyNonce: encryptedProjectKeyIv, + encryptedPrivateKey: ciphertext, + isActive: true, + publicKey: ghostUser.keys.publicKey, + senderId: ghostUser.user.id, + algorithm, + keyEncoding: encoding + }, + tx + ); + } + + for (const folder of folders) { + const folderId = folder.id; + /* + * Secrets Migration + * */ + // eslint-disable-next-line no-await-in-loop + const projectV1Secrets = await secretDAL.find({ folderId }, { tx }); + if (projectV1Secrets.length) { + const secretReferences: { + secretId: string; + references: { environment: string; secretPath: string; secretKey: string }[]; + }[] = []; + await secretV2BridgeDAL.batchInsert( + projectV1Secrets.map((el) => { + const key = decryptSymmetric128BitHexKeyUTF8({ + ciphertext: el.secretKeyCiphertext, + iv: el.secretKeyIV, + tag: el.secretKeyTag, + key: botKey + }); + const value = decryptSymmetric128BitHexKeyUTF8({ + ciphertext: el.secretValueCiphertext, + iv: el.secretValueIV, + tag: el.secretValueTag, + key: botKey + }); + const comment = + el.secretCommentCiphertext && el.secretCommentTag && el.secretCommentIV + ? decryptSymmetric128BitHexKeyUTF8({ + ciphertext: el.secretCommentCiphertext, + iv: el.secretCommentIV, + tag: el.secretCommentTag, + key: botKey + }) + : ""; + const encryptedValue = secretManagerEncryptor({ plainText: Buffer.from(value) }).cipherTextBlob; + // create references + const references = getAllSecretReferences(value).nestedReferences; + secretReferences.push({ secretId: el.id, references }); + + const encryptedComment = comment + ? 
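// An assumed equivalent of the reference extraction used above (the real
// getAllSecretReferences lives in secret-v2-bridge-fns), shown for clarity: during
// migration every decrypted value is scanned for `${env.path.KEY}` references so the
// secret-reference rows can be rebuilt alongside the re-encrypted secrets.
const extractNestedReferences = (value: string) =>
  [...value.matchAll(/\$\{([^}]+)\}/g)]
    .map((match) => match[1].trim().split("."))
    .filter((entities) => entities.length > 1) // cross env/path references only
    .map((entities) => ({
      environment: entities[0],
      secretPath: `/${entities.slice(1, -1).join("/")}`,
      secretKey: entities[entities.length - 1]
    }));

// extractNestedReferences("postgres://${prod.db.HOST}:${PORT}")
// -> [{ environment: "prod", secretPath: "/db", secretKey: "HOST" }]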
secretManagerEncryptor({ plainText: Buffer.from(comment) }).cipherTextBlob + : null; + return { + id: el.id, + createdAt: el.createdAt, + updatedAt: el.updatedAt, + skipMultilineEncoding: el.skipMultilineEncoding, + encryptedComment, + encryptedValue, + key, + version: el.version, + type: el.type, + userId: el.userId, + folderId: el.folderId, + metadata: el.metadata, + reminderNote: el.secretReminderNote, + reminderRepeatDays: el.secretReminderRepeatDays + }; + }), + tx + ); + await secretV2BridgeDAL.upsertSecretReferences(secretReferences, tx); + } + + const SNAPSHOT_BATCH_SIZE = 10; + const snapshots = await snapshotDAL.findNSecretV1SnapshotByFolderId(folderId, SNAPSHOT_BATCH_SIZE, tx); + const projectV3SecretVersionsGroupById: Record = {}; + const projectV3SecretVersionTags: { secret_versions_v2Id: string; secret_tagsId: string }[] = []; + const projectV3SnapshotSecrets: Omit[] = []; + snapshots.forEach(({ secretVersions = [], ...snapshot }) => { + secretVersions.forEach((el) => { + projectV3SnapshotSecrets.push({ + secretVersionId: el.id, + snapshotId: snapshot.id, + createdAt: snapshot.createdAt, + updatedAt: snapshot.updatedAt, + envId: el.snapshotEnvId + }); + if (projectV3SecretVersionsGroupById[el.id]) return; + + const key = decryptSymmetric128BitHexKeyUTF8({ + ciphertext: el.secretKeyCiphertext, + iv: el.secretKeyIV, + tag: el.secretKeyTag, + key: botKey + }); + const value = decryptSymmetric128BitHexKeyUTF8({ + ciphertext: el.secretValueCiphertext, + iv: el.secretValueIV, + tag: el.secretValueTag, + key: botKey + }); + const comment = + el.secretCommentCiphertext && el.secretCommentTag && el.secretCommentIV + ? decryptSymmetric128BitHexKeyUTF8({ + ciphertext: el.secretCommentCiphertext, + iv: el.secretCommentIV, + tag: el.secretCommentTag, + key: botKey + }) + : ""; + const encryptedValue = secretManagerEncryptor({ plainText: Buffer.from(value) }).cipherTextBlob; + + const encryptedComment = comment + ? 
secretManagerEncryptor({ plainText: Buffer.from(comment) }).cipherTextBlob
+ : null;
+ projectV3SecretVersionsGroupById[el.id] = {
+ id: el.id,
+ createdAt: el.createdAt,
+ updatedAt: el.updatedAt,
+ skipMultilineEncoding: el.skipMultilineEncoding,
+ encryptedComment,
+ encryptedValue,
+ key,
+ version: el.version,
+ type: el.type,
+ userId: el.userId,
+ folderId: el.folderId,
+ metadata: el.metadata,
+ reminderNote: el.secretReminderNote,
+ reminderRepeatDays: el.secretReminderRepeatDays,
+ secretId: el.secretId,
+ envId: el.envId
+ };
+ el.tags.forEach(({ secretTagId }) => {
+ projectV3SecretVersionTags.push({ secret_tagsId: secretTagId, secret_versions_v2Id: el.id });
+ });
+ });
+ });
+ // Corner case: a snapshot may be missing the secret version of an existing secret.
+ // Example: some integrations pull values from a third party during a sync, but no snapshot is taken at that point,
+ // so no secret version exists for those secrets.
+ const latestSecretVersionByFolder = await secretVersionDAL.findLatestVersionMany(
+ folderId,
+ projectV1Secrets.map((el) => el.id),
+ tx
+ );
+ Object.values(latestSecretVersionByFolder).forEach((el) => {
+ if (projectV3SecretVersionsGroupById[el.id]) return;
+ const key = decryptSymmetric128BitHexKeyUTF8({
+ ciphertext: el.secretKeyCiphertext,
+ iv: el.secretKeyIV,
+ tag: el.secretKeyTag,
+ key: botKey
+ });
+ const value = decryptSymmetric128BitHexKeyUTF8({
+ ciphertext: el.secretValueCiphertext,
+ iv: el.secretValueIV,
+ tag: el.secretValueTag,
+ key: botKey
+ });
+ const comment =
+ el.secretCommentCiphertext && el.secretCommentTag && el.secretCommentIV
+ ? decryptSymmetric128BitHexKeyUTF8({
+ ciphertext: el.secretCommentCiphertext,
+ iv: el.secretCommentIV,
+ tag: el.secretCommentTag,
+ key: botKey
+ })
+ : "";
+ const encryptedValue = secretManagerEncryptor({ plainText: Buffer.from(value) }).cipherTextBlob;
+
+ const encryptedComment = comment
+ ?
secretManagerEncryptor({ plainText: Buffer.from(comment) }).cipherTextBlob
+ : null;
+ projectV3SecretVersionsGroupById[el.id] = {
+ id: el.id,
+ createdAt: el.createdAt,
+ updatedAt: el.updatedAt,
+ skipMultilineEncoding: el.skipMultilineEncoding,
+ encryptedComment,
+ encryptedValue,
+ key,
+ version: el.version,
+ type: el.type,
+ userId: el.userId,
+ folderId: el.folderId,
+ metadata: el.metadata,
+ reminderNote: el.secretReminderNote,
+ reminderRepeatDays: el.secretReminderRepeatDays,
+ secretId: el.secretId,
+ envId: el.envId
+ };
+ });
+
+ const projectV3SecretVersions = Object.values(projectV3SecretVersionsGroupById);
+ if (projectV3SecretVersions.length) {
+ await secretVersionV2BridgeDAL.batchInsert(projectV3SecretVersions, tx);
+ }
+ if (projectV3SecretVersionTags.length) {
+ await secretVersionTagV2BridgeDAL.batchInsert(projectV3SecretVersionTags, tx);
+ }
+
+ if (projectV3SnapshotSecrets.length) {
+ await snapshotSecretV2BridgeDAL.batchInsert(projectV3SnapshotSecrets, tx);
+ }
+ await snapshotDAL.deleteSnapshotsAboveLimit(folderId, SNAPSHOT_BATCH_SIZE, tx);
+ }
+ /*
+ * Secret Tag Migration
+ * */
+ // eslint-disable-next-line no-await-in-loop
+ const projectV1SecretTags = await secretTagDAL.findSecretTagsByProjectId(projectId, tx);
+ if (projectV1SecretTags.length) {
+ await secretTagDAL.saveTagsToSecretV2(
+ projectV1SecretTags.map((el) => ({
+ secrets_v2Id: el.secretsId,
+ secret_tagsId: el.secret_tagsId
+ })),
+ tx
+ );
+ }
+
+ /*
+ * Integration Auth Migration
+ * Saving the new encrypted column
+ * */
+ // eslint-disable-next-line no-await-in-loop
+ const projectV1IntegrationAuths = await integrationAuthDAL.find({ projectId }, { tx });
+ await integrationAuthDAL.upsert(
+ projectV1IntegrationAuths.map((el) => {
+ const accessToken =
+ el.accessIV && el.accessTag && el.accessCiphertext
+ ? decryptSymmetric128BitHexKeyUTF8({
+ ciphertext: el.accessCiphertext,
+ iv: el.accessIV,
+ tag: el.accessTag,
+ key: botKey
+ })
+ : undefined;
+ const accessId =
+ el.accessIdIV && el.accessIdTag && el.accessIdCiphertext
+ ? decryptSymmetric128BitHexKeyUTF8({
+ ciphertext: el.accessIdCiphertext,
+ iv: el.accessIdIV,
+ tag: el.accessIdTag,
+ key: botKey
+ })
+ : undefined;
+ const refreshToken =
+ el.refreshIV && el.refreshTag && el.refreshCiphertext
+ ? decryptSymmetric128BitHexKeyUTF8({
+ ciphertext: el.refreshCiphertext,
+ iv: el.refreshIV,
+ tag: el.refreshTag,
+ key: botKey
+ })
+ : undefined;
+ const awsAssumeRoleArn =
+ el.awsAssumeIamRoleArnCipherText && el.awsAssumeIamRoleArnIV && el.awsAssumeIamRoleArnTag
+ ? decryptSymmetric128BitHexKeyUTF8({
+ ciphertext: el.awsAssumeIamRoleArnCipherText,
+ iv: el.awsAssumeIamRoleArnIV,
+ tag: el.awsAssumeIamRoleArnTag,
+ key: botKey
+ })
+ : undefined;
+
+ const encryptedAccess = accessToken
+ ? secretManagerEncryptor({ plainText: Buffer.from(accessToken) }).cipherTextBlob
+ : null;
+ const encryptedAccessId = accessId
+ ? secretManagerEncryptor({ plainText: Buffer.from(accessId) }).cipherTextBlob
+ : null;
+ const encryptedRefresh = refreshToken
+ ? secretManagerEncryptor({ plainText: Buffer.from(refreshToken) }).cipherTextBlob
+ : null;
+ const encryptedAwsAssumeIamRoleArn = awsAssumeRoleArn
+ ?
secretManagerEncryptor({ plainText: Buffer.from(awsAssumeRoleArn) }).cipherTextBlob
+ : null;
+ return {
+ ...el,
+ encryptedAccess,
+ encryptedRefresh,
+ encryptedAccessId,
+ encryptedAwsAssumeIamRoleArn
+ };
+ }),
+ "id",
+ tx
+ );
+ /*
+ * Secret Rotation Secret Migration
+ * Saving the new encrypted column
+ * */
+ const projectV1SecretRotations = await secretRotationDAL.find({ projectId }, tx);
+ await secretRotationDAL.secretOutputV2InsertMany(
+ projectV1SecretRotations.flatMap((el) =>
+ el.outputs.map((output) => ({ rotationId: el.id, key: output.key, secretId: output.secret.id }))
+ ),
+ tx
+ );
+
+ /*
+ * Approvals: delete all approval requests, because some secret versions may not have been
+ * migrated yet, so keeping the remaining requests would leave them inconsistent.
+ * */
+ await secretApprovalRequestDAL.deleteByProjectId(projectId, tx);
+ await projectDAL.updateById(projectId, { upgradeStatus: null, version: ProjectVersion.V3 }, tx);
+ });
+ });
+
+ // eslint-disable-next-line
+ queueService.listen(QueueName.ProjectV3Migration, "failed", async (job, err) => {
+ if (job?.data) {
+ const { projectId } = job.data;
+ await projectDAL.updateById(projectId, { upgradeStatus: ProjectUpgradeStatus.Failed });
+ logger.error(err, `Failed to migrate project to v3: ${projectId}`);
+ }
+ });
 queueService.listen(QueueName.IntegrationSync, "failed", (job, err) => { logger.error(err, "Failed to sync integration %s", job?.id); });
 queueService.start(QueueName.SecretWebhook, async (job) => {
- await fnTriggerWebhook({ ...job.data, projectEnvDAL, webhookDAL });
+ await fnTriggerWebhook({ ...job.data, projectEnvDAL, webhookDAL, projectDAL });
 });
 return {
 // depth is internal only field thus no need to make it available outside
 syncSecrets,
+ startSecretV2Migration,
 syncIntegrations,
 addSecretReminder,
 removeSecretReminder,
diff --git a/backend/src/services/secret/secret-service.ts b/backend/src/services/secret/secret-service.ts
index d6682a2536..d62d09f7a1 100644
--- a/backend/src/services/secret/secret-service.ts
+++ b/backend/src/services/secret/secret-service.ts
@@ -4,6 +4,7 @@
 import { ForbiddenError, subject } from "@casl/ability";
 import {
 ProjectMembershipRole,
+ ProjectUpgradeStatus,
 SecretEncryptionAlgo,
 SecretKeyEncoding,
 SecretsSchema,
@@ -11,6 +12,10 @@
 } from "@app/db/schemas";
 import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
 import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
+import { TSecretApprovalPolicyServiceFactory } from "@app/ee/services/secret-approval-policy/secret-approval-policy-service";
+import { TSecretApprovalRequestDALFactory } from "@app/ee/services/secret-approval-request/secret-approval-request-dal";
+import { TSecretApprovalRequestSecretDALFactory } from "@app/ee/services/secret-approval-request/secret-approval-request-secret-dal";
+import { TSecretApprovalRequestServiceFactory } from "@app/ee/services/secret-approval-request/secret-approval-request-service";
 import { TSecretSnapshotServiceFactory } from "@app/ee/services/secret-snapshot/secret-snapshot-service";
 import { getConfig } from "@app/lib/config/env";
 import {
@@ -18,9 +23,12 @@
 decryptSymmetric128BitHexKeyUTF8,
 encryptSymmetric128BitHexKeyUTF8
 } from "@app/lib/crypto";
-import { BadRequestError } from "@app/lib/errors";
+import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
 import { groupBy, pick } from "@app/lib/fn";
 import { logger } from "@app/lib/logger";
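Note on the queue wiring above: a successful run clears `upgradeStatus` and stamps `ProjectVersion.V3` inside the transaction, while the `failed` listener records `ProjectUpgradeStatus.Failed`. A caller that triggered `startSecretV2Migration` could therefore poll those two columns; a minimal sketch (the DAL accessor shape and the status literals are assumptions):

```ts
// Hypothetical poll loop observing the status flags written by the migration queue.
type TProjectRow = { version: number; upgradeStatus: string | null };

const waitForSecretV2Migration = async (
  findProjectById: (id: string) => Promise<TProjectRow>,
  projectId: string
): Promise<void> => {
  for (;;) {
    const project = await findProjectById(projectId);
    // "failed" stands in for ProjectUpgradeStatus.Failed; the real enum value may differ
    if (project.upgradeStatus === "failed") throw new Error(`Project ${projectId} failed to migrate to v3`);
    if (project.version === 3) return; // ProjectVersion.V3
    await new Promise((resolve) => {
      setTimeout(resolve, 1000);
    });
  }
};
```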
+import { alphaNumericNanoId } from "@app/lib/nanoid"; +import { OrgServiceActor } from "@app/lib/types"; +import { TGetSecretsRawByFolderMappingsDTO } from "@app/services/secret-v2-bridge/secret-v2-bridge-types"; import { ActorType } from "../auth/auth-type"; import { TProjectDALFactory } from "../project/project-dal"; @@ -31,6 +39,8 @@ import { TSecretFolderDALFactory } from "../secret-folder/secret-folder-dal"; import { TSecretImportDALFactory } from "../secret-import/secret-import-dal"; import { fnSecretsFromImports } from "../secret-import/secret-import-fns"; import { TSecretTagDALFactory } from "../secret-tag/secret-tag-dal"; +import { TSecretV2BridgeServiceFactory } from "../secret-v2-bridge/secret-v2-bridge-service"; +import { TGetSecretReferencesTreeDTO } from "../secret-v2-bridge/secret-v2-bridge-types"; import { TSecretDALFactory } from "./secret-dal"; import { decryptSecretRaw, @@ -44,6 +54,8 @@ import { } from "./secret-fns"; import { TSecretQueueFactory } from "./secret-queue"; import { + SecretOperations, + SecretProtectionType, TAttachSecretTagsDTO, TBackFillSecretReferencesDTO, TCreateBulkSecretDTO, @@ -59,6 +71,8 @@ import { TGetSecretsDTO, TGetSecretsRawDTO, TGetSecretVersionsDTO, + TMoveSecretsDTO, + TStartSecretsV2MigrationDTO, TUpdateBulkSecretDTO, TUpdateManySecretRawDTO, TUpdateSecretDTO, @@ -77,13 +91,27 @@ type TSecretServiceFactoryDep = { TSecretFolderDALFactory, "findBySecretPath" | "updateById" | "findById" | "findByManySecretPath" | "find" >; + secretV2BridgeService: TSecretV2BridgeServiceFactory; secretBlindIndexDAL: TSecretBlindIndexDALFactory; permissionService: Pick; snapshotService: Pick; - secretQueueService: Pick; + secretQueueService: Pick< + TSecretQueueFactory, + "syncSecrets" | "handleSecretReminder" | "removeSecretReminder" | "startSecretV2Migration" + >; projectBotService: Pick; secretImportDAL: Pick; secretVersionTagDAL: Pick; + secretApprovalPolicyService: Pick; + secretApprovalRequestService: Pick< + TSecretApprovalRequestServiceFactory, + "generateSecretApprovalRequest" | "generateSecretApprovalRequestV2Bridge" + >; + secretApprovalRequestDAL: Pick; + secretApprovalRequestSecretDAL: Pick< + TSecretApprovalRequestSecretDALFactory, + "insertMany" | "insertApprovalSecretTags" + >; }; export type TSecretServiceFactory = ReturnType; @@ -100,19 +128,24 @@ export const secretServiceFactory = ({ projectDAL, projectBotService, secretImportDAL, - secretVersionTagDAL + secretVersionTagDAL, + secretApprovalPolicyService, + secretApprovalRequestDAL, + secretApprovalRequestSecretDAL, + secretV2BridgeService, + secretApprovalRequestService }: TSecretServiceFactoryDep) => { const getSecretReference = async (projectId: string) => { // if bot key missing means e2e still exist - const botKey = await projectBotService.getBotKey(projectId).catch(() => null); + const projectBot = await projectBotService.getBotKey(projectId).catch(() => null); return (el: { ciphertext?: string; iv: string; tag: string }) => - botKey + projectBot?.botKey ? 
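The hunk above is the pivot for the whole file: `projectBotService.getBotKey` now returns an object rather than a bare key, and every call site destructures it. Inferred from those call sites (the type name below is a stand-in, not the patch's):

```ts
// Result shape inferred from call sites such as:
//   const { botKey, shouldUseSecretV2Bridge } = await projectBotService.getBotKey(projectId);
type TBotKeyResult = {
  botKey?: string; // legacy client-side project key; absent once secrets live in the v2 bridge
  shouldUseSecretV2Bridge: boolean; // true for projects migrated to ProjectVersion.V3
};

// The dispatch pattern this file repeats on every read/write path:
const dispatchSketch = async (
  getBotKey: (projectId: string) => Promise<TBotKeyResult>,
  projectId: string
): Promise<"v2-bridge" | "legacy-blind-index"> => {
  const { botKey, shouldUseSecretV2Bridge } = await getBotKey(projectId);
  if (shouldUseSecretV2Bridge) return "v2-bridge"; // route to secretV2BridgeService
  if (!botKey) throw new Error(`Project bot for project with ID '${projectId}' not found`);
  return "legacy-blind-index"; // decrypt/encrypt with botKey as before
};
```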
getAllNestedSecretReferences( decryptSymmetric128BitHexKeyUTF8({ ciphertext: el.ciphertext || "", iv: el.iv, tag: el.tag, - key: botKey + key: projectBot.botKey }) ) : undefined; @@ -123,7 +156,12 @@ export const secretServiceFactory = ({ const appCfg = getConfig(); const secretBlindIndexDoc = await secretBlindIndexDAL.findOne({ projectId }); - if (!secretBlindIndexDoc) throw new BadRequestError({ message: "Blind index not found", name: "Create secret" }); + if (!secretBlindIndexDoc) { + throw new NotFoundError({ + message: `Blind index for project with ID '${projectId}' not found`, + name: "CreateSecret" + }); + } const secretBlindIndex = await buildSecretBlindIndexFromName({ secretName, @@ -134,7 +172,7 @@ export const secretServiceFactory = ({ ciphertext: secretBlindIndexDoc.encryptedSaltCipherText, iv: secretBlindIndexDoc.saltIV }); - if (!secretBlindIndex) throw new BadRequestError({ message: "Secret not found" }); + if (!secretBlindIndex) throw new NotFoundError({ message: `Secret with name '${secretName}' not found` }); return secretBlindIndex; }; @@ -164,14 +202,19 @@ export const secretServiceFactory = ({ const folder = await folderDAL.findBySecretPath(projectId, environment, path); if (!folder) - throw new BadRequestError({ - message: "Folder not found for the given environment slug & secret path", - name: "Create secret" + throw new NotFoundError({ + message: `Folder with path '${path}' in environment with slug '${environment}' not found`, + name: "CreateSecret" }); const folderId = folder.id; const blindIndexCfg = await secretBlindIndexDAL.findOne({ projectId }); - if (!blindIndexCfg) throw new BadRequestError({ message: "Blind index not found", name: "CreateSecret" }); + if (!blindIndexCfg) { + throw new NotFoundError({ + message: `Blind index for project with ID '${projectId}' not found`, + name: "CreateSecret" + }); + } if (ActorType.USER !== actor && inputSecret.type === SecretType.Personal) { throw new BadRequestError({ message: "Must be user to create personal secret" }); @@ -202,7 +245,8 @@ export const secretServiceFactory = ({ // validate tags // fetch all tags and if not same count throw error meaning one was invalid tags const tags = inputSecret.tags ? 
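Context for the blind-index errors being rewritten above: in the legacy storage model, a blind index is a deterministic keyed digest of the secret name, which lets the server look up rows without storing plaintext names. Conceptually it resembles the following, though the actual KDF is whatever `buildSecretBlindIndexFromName` in secret-fns implements, not necessarily an HMAC:

```ts
import crypto from "node:crypto";

// Illustrative only: deterministic keyed digest of a secret name. The real
// implementation derives this from the project's decrypted blind-index salt.
const blindIndexSketch = (secretName: string, salt: Buffer): string =>
  crypto.createHmac("sha512", salt).update(secretName).digest("base64");
```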
await secretTagDAL.findManyTagsById(projectId, inputSecret.tags) : []; - if ((inputSecret.tags || []).length !== tags.length) throw new BadRequestError({ message: "Tag not found" }); + if ((inputSecret.tags || []).length !== tags.length) + throw new NotFoundError({ message: "One or more tags not found" }); const { secretName, type, ...el } = inputSecret; const references = await getSecretReference(projectId); @@ -234,14 +278,16 @@ export const secretServiceFactory = ({ }) ); - await snapshotService.performSnapshot(folderId); - await secretQueueService.syncSecrets({ - secretPath: path, - actorId, - actor, - projectId, - environmentSlug: folder.environment.slug - }); + if (inputSecret.type === SecretType.Shared) { + await snapshotService.performSnapshot(folderId); + await secretQueueService.syncSecrets({ + secretPath: path, + actorId, + actor, + projectId, + environmentSlug: folder.environment.slug + }); + } return { ...secret[0], environment, workspace: projectId, tags, secretPath: path }; }; @@ -275,14 +321,18 @@ export const secretServiceFactory = ({ const folder = await folderDAL.findBySecretPath(projectId, environment, path); if (!folder) - throw new BadRequestError({ - message: "Folder not found for the given environment slug & secret path", - name: "Create secret" + throw new NotFoundError({ + message: `Folder with path '${path}' in environment with slug '${environment}' not found`, + name: "CreateSecret" }); const folderId = folder.id; const blindIndexCfg = await secretBlindIndexDAL.findOne({ projectId }); - if (!blindIndexCfg) throw new BadRequestError({ message: "Blind index not found", name: "CreateSecret" }); + if (!blindIndexCfg) + throw new NotFoundError({ + message: `Blind index for project with ID '${projectId}' not found`, + name: "CreateSecret" + }); if (ActorType.USER !== actor && inputSecret.type === SecretType.Personal) { throw new BadRequestError({ message: "Must be user to create personal secret" }); @@ -322,7 +372,8 @@ export const secretServiceFactory = ({ }); const tags = inputSecret.tags ? 
await secretTagDAL.findManyTagsById(projectId, inputSecret.tags) : []; - if ((inputSecret.tags || []).length !== tags.length) throw new BadRequestError({ message: "Tag not found" }); + if ((inputSecret.tags || []).length !== tags.length) + throw new NotFoundError({ message: "One or more tags not found" }); const { secretName, ...el } = inputSecret; @@ -369,14 +420,16 @@ export const secretServiceFactory = ({ }) ); - await snapshotService.performSnapshot(folderId); - await secretQueueService.syncSecrets({ - actor, - actorId, - secretPath: path, - projectId, - environmentSlug: folder.environment.slug - }); + if (inputSecret.type === SecretType.Shared) { + await snapshotService.performSnapshot(folderId); + await secretQueueService.syncSecrets({ + secretPath: path, + actorId, + actor, + projectId, + environmentSlug: folder.environment.slug + }); + } return { ...updatedSecret[0], workspace: projectId, environment, secretPath: path }; }; @@ -406,14 +459,18 @@ export const secretServiceFactory = ({ const folder = await folderDAL.findBySecretPath(projectId, environment, path); if (!folder) - throw new BadRequestError({ - message: "Folder not found for the given environment slug & secret path", - name: "Create secret" + throw new NotFoundError({ + message: `Folder with path '${path}' in environment with slug '${environment}' not found`, + name: "DeleteSecret" }); const folderId = folder.id; const blindIndexCfg = await secretBlindIndexDAL.findOne({ projectId }); - if (!blindIndexCfg) throw new BadRequestError({ message: "Blind index not found", name: "CreateSecret" }); + if (!blindIndexCfg) + throw new NotFoundError({ + message: `Blind index for project with ID '${projectId}' not found`, + name: "DeleteSecret" + }); if (ActorType.USER !== actor && inputSecret.type === SecretType.Personal) { throw new BadRequestError({ message: "Must be user to create personal secret" }); @@ -444,15 +501,17 @@ export const secretServiceFactory = ({ }) ); - await snapshotService.performSnapshot(folderId); - await secretQueueService.syncSecrets({ - actor, - actorId, - secretPath: path, - projectId, - environmentSlug: folder.environment.slug - }); - // TODO(akhilmhdh-pg): licence check, posthog service and snapshot + if (inputSecret.type === SecretType.Shared) { + await snapshotService.performSnapshot(folderId); + await secretQueueService.syncSecrets({ + secretPath: path, + actorId, + actor, + projectId, + environmentSlug: folder.environment.slug + }); + } + return { ...deletedSecret[0], _id: deletedSecret[0].id, workspace: projectId, environment, secretPath: path }; }; @@ -587,9 +646,9 @@ export const secretServiceFactory = ({ ); const folder = await folderDAL.findBySecretPath(projectId, environment, path); if (!folder) - throw new BadRequestError({ - message: "Folder not found for the given environment slug & secret path", - name: "Create secret" + throw new NotFoundError({ + message: `Folder with path '${path}' in environment with slug '${environment}' not found`, + name: "GetSecretByName" }); const folderId = folder.id; @@ -608,7 +667,7 @@ export const secretServiceFactory = ({ } const secret = await (version === undefined - ? secretDAL.findOne({ + ? secretDAL.findOneWithTags({ folderId, type: secretType, userId: secretType === SecretType.Personal ? 
actorId : null, @@ -658,7 +717,7 @@ export const secretServiceFactory = ({ } } } - if (!secret) throw new BadRequestError({ message: "Secret not found" }); + if (!secret) throw new NotFoundError({ message: `Secret with name '${secretName}' not found` }); return { ...secret, workspace: projectId, environment, secretPath: path }; }; @@ -689,14 +748,14 @@ export const secretServiceFactory = ({ const folder = await folderDAL.findBySecretPath(projectId, environment, path); if (!folder) - throw new BadRequestError({ - message: "Folder not found for the given environment slug & secret path", - name: "Create secret" + throw new NotFoundError({ + message: `Folder with path '${path}' in environment with slug '${environment}' not found`, + name: "CreateManySecret" }); const folderId = folder.id; const blindIndexCfg = await secretBlindIndexDAL.findOne({ projectId }); - if (!blindIndexCfg) throw new BadRequestError({ message: "Blind index not found", name: "Create secret" }); + if (!blindIndexCfg) throw new NotFoundError({ message: "Blind index not found", name: "Create secret" }); const { keyName2BlindIndex } = await fnSecretBlindIndexCheck({ inputSecrets, @@ -709,7 +768,7 @@ export const secretServiceFactory = ({ // get all tags const tagIds = inputSecrets.flatMap(({ tags = [] }) => tags); const tags = tagIds.length ? await secretTagDAL.findManyTagsById(projectId, tagIds) : []; - if (tags.length !== tagIds.length) throw new BadRequestError({ message: "Tag not found" }); + if (tags.length !== tagIds.length) throw new NotFoundError({ message: "One or more tags not found" }); const references = await getSecretReference(projectId); const newSecrets = await secretDAL.transaction(async (tx) => @@ -774,14 +833,14 @@ export const secretServiceFactory = ({ const folder = await folderDAL.findBySecretPath(projectId, environment, path); if (!folder) - throw new BadRequestError({ - message: "Folder not found for the given environment slug & secret path", - name: "Update secret" + throw new NotFoundError({ + message: `Folder with path '${path}' in environment with slug '${environment}' not found`, + name: "UpdateManySecret" }); const folderId = folder.id; const blindIndexCfg = await secretBlindIndexDAL.findOne({ projectId }); - if (!blindIndexCfg) throw new BadRequestError({ message: "Blind index not found", name: "Update secret" }); + if (!blindIndexCfg) throw new NotFoundError({ message: "Blind index not found", name: "Update secret" }); const { keyName2BlindIndex } = await fnSecretBlindIndexCheck({ inputSecrets, @@ -805,7 +864,7 @@ export const secretServiceFactory = ({ // get all tags const tagIds = inputSecrets.flatMap(({ tags = [] }) => tags); const tags = tagIds.length ? 
await secretTagDAL.findManyTagsById(projectId, tagIds) : []; - if (tagIds.length !== tags.length) throw new BadRequestError({ message: "Tag not found" }); + if (tagIds.length !== tags.length) throw new NotFoundError({ message: "One or more tags not found" }); const references = await getSecretReference(projectId); const secrets = await secretDAL.transaction(async (tx) => @@ -880,14 +939,18 @@ export const secretServiceFactory = ({ const folder = await folderDAL.findBySecretPath(projectId, environment, path); if (!folder) - throw new BadRequestError({ - message: "Folder not found for the given environment slug & secret path", - name: "Create secret" + throw new NotFoundError({ + message: `Folder with path '${path}' in environment with slug '${environment}' not found`, + name: "DeleteManySecret" }); const folderId = folder.id; const blindIndexCfg = await secretBlindIndexDAL.findOne({ projectId }); - if (!blindIndexCfg) throw new BadRequestError({ message: "Blind index not found", name: "Update secret" }); + if (!blindIndexCfg) + throw new NotFoundError({ + message: `Blind index for project with ID '${projectId}' not found`, + name: "DeleteManySecret" + }); const { keyName2BlindIndex } = await fnSecretBlindIndexCheck({ inputSecrets, @@ -924,6 +987,133 @@ export const secretServiceFactory = ({ return secretsDeleted; }; + const getSecretsCount = async ({ + projectId, + path, + actor, + actorId, + actorOrgId, + actorAuthMethod, + environment, + tagSlugs = [], + ...v2Params + }: Pick< + TGetSecretsRawDTO, + | "projectId" + | "path" + | "actor" + | "actorId" + | "actorOrgId" + | "actorAuthMethod" + | "environment" + | "tagSlugs" + | "search" + >) => { + const { shouldUseSecretV2Bridge } = await projectBotService.getBotKey(projectId); + + if (!shouldUseSecretV2Bridge) + throw new BadRequestError({ + message: "Project version does not support pagination", + name: "PaginationNotSupportedError" + }); + + const count = await secretV2BridgeService.getSecretsCount({ + projectId, + actorId, + actor, + actorOrgId, + environment, + path, + actorAuthMethod, + tagSlugs, + ...v2Params + }); + + return count; + }; + + const getSecretsCountMultiEnv = async ({ + projectId, + path, + actor, + actorId, + actorOrgId, + actorAuthMethod, + environments, + ...v2Params + }: Pick< + TGetSecretsRawDTO, + "projectId" | "path" | "actor" | "actorId" | "actorOrgId" | "actorAuthMethod" | "search" + > & { environments: string[]; isInternal?: boolean }) => { + const { shouldUseSecretV2Bridge } = await projectBotService.getBotKey(projectId); + + if (!shouldUseSecretV2Bridge) + throw new BadRequestError({ + message: "Project version does not support pagination", + name: "PaginationNotSupportedError" + }); + + const count = await secretV2BridgeService.getSecretsCountMultiEnv({ + projectId, + actorId, + actor, + actorOrgId, + environments, + path, + actorAuthMethod, + ...v2Params + }); + + return count; + }; + + const getSecretsRawMultiEnv = async ({ + projectId, + path, + actor, + actorId, + actorOrgId, + actorAuthMethod, + environments, + ...params + }: Omit & { + environments: string[]; + isInternal?: boolean; + }) => { + const { shouldUseSecretV2Bridge } = await projectBotService.getBotKey(projectId); + + if (!shouldUseSecretV2Bridge) + throw new BadRequestError({ + message: "Project version does not support pagination", + name: "PaginationNotSupportError" + }); + + const secrets = await secretV2BridgeService.getSecretsMultiEnv({ + projectId, + actorId, + actor, + actorOrgId, + environments, + path, + actorAuthMethod, + 
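The new `getSecretsCount`, `getSecretsCountMultiEnv`, and `getSecretsRawMultiEnv` helpers above are v2-bridge-only: server-side search and counting presumably cannot be done against the encrypted v1 rows, so legacy projects get a `BadRequestError`. A defensive caller might probe for that by error name (note the diff uses both the `PaginationNotSupportedError` and `PaginationNotSupportError` spellings, which the prefix match below papers over):

```ts
// Hypothetical caller: returns null when the project has not been migrated yet,
// so the route can fall back to the unpaginated listing path.
const tryGetSecretsCount = async (
  getSecretsCount: (params: { projectId: string }) => Promise<number>,
  projectId: string
): Promise<number | null> => {
  try {
    return await getSecretsCount({ projectId });
  } catch (err) {
    if ((err as { name?: string }).name?.startsWith("PaginationNotSupport")) return null;
    throw err;
  }
};
```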
...params + }); + + return secrets; + }; + + const getSecretReferenceTree = async (dto: TGetSecretReferencesTreeDTO) => { + const { shouldUseSecretV2Bridge } = await projectBotService.getBotKey(dto.projectId); + + if (!shouldUseSecretV2Bridge) + throw new BadRequestError({ + message: "Project version does not support secret reference tree", + name: "SecretReferenceTreeNotSupported" + }); + + return secretV2BridgeService.getSecretReferenceTree(dto); + }; + const getSecretsRaw = async ({ projectId, path, @@ -934,10 +1124,34 @@ export const secretServiceFactory = ({ environment, includeImports, expandSecretReferences, - recursive + recursive, + tagSlugs = [], + ...paramsV2 }: TGetSecretsRawDTO) => { - const botKey = await projectBotService.getBotKey(projectId); - if (!botKey) throw new BadRequestError({ message: "Project bot not found", name: "bot_not_found_error" }); + const { botKey, shouldUseSecretV2Bridge } = await projectBotService.getBotKey(projectId); + if (shouldUseSecretV2Bridge) { + const { secrets, imports } = await secretV2BridgeService.getSecrets({ + projectId, + expandSecretReferences, + actorId, + actor, + actorOrgId, + environment, + path, + recursive, + actorAuthMethod, + includeImports, + tagSlugs, + ...paramsV2 + }); + return { secrets, imports }; + } + + if (!botKey) + throw new NotFoundError({ + message: `Project bot for project with ID '${projectId}' not found. Please upgrade your project.`, + name: "bot_not_found_error" + }); const { secrets, imports } = await getSecrets({ actorId, @@ -952,6 +1166,9 @@ export const secretServiceFactory = ({ }); const decryptedSecrets = secrets.map((el) => decryptSecretRaw(el, botKey)); + const filteredSecrets = tagSlugs.length + ? decryptedSecrets.filter((secret) => Boolean(secret.tags?.find((el) => tagSlugs.includes(el.slug)))) + : decryptedSecrets; const processedImports = (imports || [])?.map(({ secrets: importedSecrets, ...el }) => { const decryptedImportSecrets = importedSecrets.map((sec) => decryptSecretRaw( @@ -996,78 +1213,51 @@ export const secretServiceFactory = ({ }; }); + const expandSecret = interpolateSecrets({ + folderDAL, + projectId, + secretDAL, + secretEncKey: botKey + }); + if (expandSecretReferences) { - const expandSecrets = interpolateSecrets({ - folderDAL, - projectId, - secretDAL, - secretEncKey: botKey - }); - - const batchSecretsExpand = async ( - secretBatch: { - secretKey: string; - secretValue: string; - secretComment?: string; - secretPath: string; - skipMultilineEncoding: boolean | null | undefined; - }[] - ) => { - // Group secrets by secretPath - const secretsByPath: Record< - string, - { - secretKey: string; - secretValue: string; - secretComment?: string; - skipMultilineEncoding: boolean | null | undefined; - }[] - > = {}; - - secretBatch.forEach((secret) => { - if (!secretsByPath[secret.secretPath]) { - secretsByPath[secret.secretPath] = []; - } - secretsByPath[secret.secretPath].push(secret); - }); - - // Expand secrets for each group - for (const secPath in secretsByPath) { - if (!Object.hasOwn(secretsByPath, path)) { - // eslint-disable-next-line no-continue - continue; - } - - const secretRecord: Record< - string, - { value: string; comment?: string; skipMultilineEncoding: boolean | null | undefined } - > = {}; - secretsByPath[secPath].forEach((decryptedSecret) => { - secretRecord[decryptedSecret.secretKey] = { - value: decryptedSecret.secretValue, - comment: decryptedSecret.secretComment, - skipMultilineEncoding: decryptedSecret.skipMultilineEncoding - }; - }); - - await 
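The per-path batch expander being deleted here is replaced, later in this hunk, by a pass that groups decrypted secrets by `secretPath` using `groupBy` from `@app/lib/fn` and expands each value concurrently under `Promise.allSettled`. For reference, `groupBy` is assumed to behave like:

```ts
// Assumed behaviour of groupBy from @app/lib/fn, as used by the new expansion code.
const groupBySketch = <T, K extends string>(items: T[], keyFn: (item: T) => K): Record<K, T[]> =>
  items.reduce(
    (acc, item) => {
      const key = keyFn(item);
      (acc[key] ||= []).push(item); // create the bucket on first sight, then append
      return acc;
    },
    {} as Record<K, T[]>
  );
```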
expandSecrets(secretRecord); - - secretsByPath[secPath].forEach((decryptedSecret) => { - // eslint-disable-next-line no-param-reassign - decryptedSecret.secretValue = secretRecord[decryptedSecret.secretKey].value; - }); - } - }; - - // expand secrets - await batchSecretsExpand(decryptedSecrets); - - // expand imports by batch - await Promise.all(processedImports.map((processedImport) => batchSecretsExpand(processedImport.secrets))); + const secretsGroupByPath = groupBy(filteredSecrets, (i) => i.secretPath); + await Promise.allSettled( + Object.keys(secretsGroupByPath).map((groupedPath) => + Promise.allSettled( + secretsGroupByPath[groupedPath].map(async (decryptedSecret, index) => { + const expandedSecretValue = await expandSecret({ + value: decryptedSecret.secretValue, + secretPath: groupedPath, + environment, + skipMultilineEncoding: decryptedSecret.skipMultilineEncoding + }); + // eslint-disable-next-line no-param-reassign + secretsGroupByPath[groupedPath][index].secretValue = expandedSecretValue || ""; + }) + ) + ) + ); + await Promise.allSettled( + processedImports.map((processedImport) => + Promise.allSettled( + processedImport.secrets.map(async (decryptedSecret, index) => { + const expandedSecretValue = await expandSecret({ + value: decryptedSecret.secretValue, + secretPath: path, + environment, + skipMultilineEncoding: decryptedSecret.skipMultilineEncoding + }); + // eslint-disable-next-line no-param-reassign + processedImport.secrets[index].secretValue = expandedSecretValue || ""; + }) + ) + ) + ); } return { - secrets: decryptedSecrets, + secrets: filteredSecrets, imports: processedImports }; }; @@ -1078,6 +1268,7 @@ export const secretServiceFactory = ({ actor, environment, projectId: workspaceId, + expandSecretReferences, projectSlug, actorId, actorOrgId, @@ -1087,11 +1278,26 @@ export const secretServiceFactory = ({ version }: TGetASecretRawDTO) => { const projectId = workspaceId || (await projectDAL.findProjectBySlug(projectSlug as string, actorOrgId)).id; + const { botKey, shouldUseSecretV2Bridge } = await projectBotService.getBotKey(projectId); + if (shouldUseSecretV2Bridge) { + const secret = await secretV2BridgeService.getSecretByName({ + environment, + projectId, + includeImports, + actorAuthMethod, + path, + actorOrgId, + actor, + actorId, + expandSecretReferences, + type, + secretName + }); - const botKey = await projectBotService.getBotKey(projectId); - if (!botKey) throw new BadRequestError({ message: "Project bot not found", name: "bot_not_found_error" }); + return secret; + } - const secret = await getSecretByName({ + const encryptedSecret = await getSecretByName({ actorId, projectId, actorAuthMethod, @@ -1105,7 +1311,30 @@ export const secretServiceFactory = ({ version }); - return decryptSecretRaw(secret, botKey); + if (!botKey) + throw new NotFoundError({ + message: `Project bot for project with ID '${projectId}' not found. 
Please upgrade your project.`, + name: "bot_not_found_error" + }); + const decryptedSecret = decryptSecretRaw(encryptedSecret, botKey); + + if (expandSecretReferences) { + const expandSecret = interpolateSecrets({ + folderDAL, + projectId, + secretDAL, + secretEncKey: botKey + }); + const expandedSecretValue = await expandSecret({ + environment, + secretPath: path, + value: decryptedSecret.secretValue, + skipMultilineEncoding: decryptedSecret.skipMultilineEncoding + }); + decryptedSecret.secretValue = expandedSecretValue || ""; + } + + return decryptedSecret; }; const createSecretRaw = async ({ @@ -1120,14 +1349,103 @@ export const secretServiceFactory = ({ secretPath, secretValue, secretComment, - skipMultilineEncoding + skipMultilineEncoding, + tagIds, + secretReminderNote, + secretReminderRepeatDays }: TCreateSecretRawDTO) => { - const botKey = await projectBotService.getBotKey(projectId); - if (!botKey) throw new BadRequestError({ message: "Project bot not found", name: "bot_not_found_error" }); + const { botKey, shouldUseSecretV2Bridge } = await projectBotService.getBotKey(projectId); + const policy = + actor === ActorType.USER && type === SecretType.Shared + ? await secretApprovalPolicyService.getSecretApprovalPolicy(projectId, environment, secretPath) + : undefined; + if (shouldUseSecretV2Bridge) { + if (policy) { + const approval = await secretApprovalRequestService.generateSecretApprovalRequestV2Bridge({ + policy, + secretPath, + environment, + projectId, + actor, + actorId, + actorOrgId, + actorAuthMethod, + data: { + [SecretOperations.Create]: [ + { + secretKey: secretName, + skipMultilineEncoding, + secretComment, + secretValue, + tagIds, + reminderNote: secretReminderNote, + reminderRepeatDays: secretReminderRepeatDays + } + ] + } + }); + return { type: SecretProtectionType.Approval as const, approval }; + } + const secret = await secretV2BridgeService.createSecret({ + secretName, + type, + actorId, + actor, + actorOrgId, + actorAuthMethod, + projectId, + environment, + secretPath, + secretComment, + secretValue, + tagIds, + secretReminderNote, + skipMultilineEncoding, + secretReminderRepeatDays + }); + return { secret, type: SecretProtectionType.Direct as const }; + } + + if (!botKey) + throw new NotFoundError({ + message: `Project bot for project with ID '${projectId}' not found. 
Please upgrade your project.`, + name: "bot_not_found_error" + }); const secretKeyEncrypted = encryptSymmetric128BitHexKeyUTF8(secretName, botKey); const secretValueEncrypted = encryptSymmetric128BitHexKeyUTF8(secretValue || "", botKey); const secretCommentEncrypted = encryptSymmetric128BitHexKeyUTF8(secretComment || "", botKey); + if (policy) { + const approval = await secretApprovalRequestService.generateSecretApprovalRequest({ + policy, + secretPath, + environment, + projectId, + actor, + actorId, + actorOrgId, + actorAuthMethod, + data: { + [SecretOperations.Create]: [ + { + secretName, + secretKeyCiphertext: secretKeyEncrypted.ciphertext, + secretKeyIV: secretKeyEncrypted.iv, + secretKeyTag: secretKeyEncrypted.tag, + secretValueCiphertext: secretValueEncrypted.ciphertext, + secretValueIV: secretValueEncrypted.iv, + secretValueTag: secretValueEncrypted.tag, + secretCommentCiphertext: secretCommentEncrypted.ciphertext, + secretCommentIV: secretCommentEncrypted.iv, + secretCommentTag: secretCommentEncrypted.tag, + skipMultilineEncoding, + tagIds + } + ] + } + }); + return { type: SecretProtectionType.Approval as const, approval }; + } const secret = await createSecret({ secretName, @@ -1148,10 +1466,13 @@ export const secretServiceFactory = ({ secretCommentCiphertext: secretCommentEncrypted.ciphertext, secretCommentIV: secretCommentEncrypted.iv, secretCommentTag: secretCommentEncrypted.tag, - skipMultilineEncoding + skipMultilineEncoding, + secretReminderRepeatDays, + secretReminderNote, + tags: tagIds }); - return decryptSecretRaw(secret, botKey); + return { type: SecretProtectionType.Direct as const, secret: decryptSecretRaw(secret, botKey) }; }; const updateSecretRaw = async ({ @@ -1165,12 +1486,113 @@ export const secretServiceFactory = ({ type, secretPath, secretValue, - skipMultilineEncoding + skipMultilineEncoding, + tagIds, + secretReminderNote, + secretReminderRepeatDays, + metadata, + secretComment, + newSecretName }: TUpdateSecretRawDTO) => { - const botKey = await projectBotService.getBotKey(projectId); - if (!botKey) throw new BadRequestError({ message: "Project bot not found", name: "bot_not_found_error" }); + const { botKey, shouldUseSecretV2Bridge } = await projectBotService.getBotKey(projectId); + const policy = + actor === ActorType.USER && type === SecretType.Shared + ? await secretApprovalPolicyService.getSecretApprovalPolicy(projectId, environment, secretPath) + : undefined; + if (shouldUseSecretV2Bridge) { + if (policy) { + const approval = await secretApprovalRequestService.generateSecretApprovalRequestV2Bridge({ + policy, + secretPath, + environment, + projectId, + actor, + actorId, + actorOrgId, + actorAuthMethod, + data: { + [SecretOperations.Update]: [ + { + secretKey: secretName, + newSecretName, + skipMultilineEncoding, + secretComment, + secretValue, + tagIds, + reminderNote: secretReminderNote, + reminderRepeatDays: secretReminderRepeatDays + } + ] + } + }); + return { type: SecretProtectionType.Approval as const, approval }; + } + const secret = await secretV2BridgeService.updateSecret({ + secretReminderRepeatDays, + skipMultilineEncoding, + secretReminderNote, + tagIds, + secretComment, + secretPath, + environment, + projectId, + actorAuthMethod, + actorOrgId, + actor, + actorId, + type, + secretName, + newSecretName, + metadata, + secretValue + }); + return { type: SecretProtectionType.Direct as const, secret }; + } + + if (!botKey) + throw new NotFoundError({ + message: `Project bot for project with ID '${projectId}' not found. 
Please upgrade your project.`, + name: "bot_not_found_error" + }); const secretValueEncrypted = encryptSymmetric128BitHexKeyUTF8(secretValue || "", botKey); + const secretCommentEncrypted = encryptSymmetric128BitHexKeyUTF8(secretComment || "", botKey); + const secretKeyEncrypted = encryptSymmetric128BitHexKeyUTF8(newSecretName || secretName, botKey); + + if (policy) { + const approval = await secretApprovalRequestService.generateSecretApprovalRequest({ + policy, + secretPath, + environment, + projectId, + actor, + actorId, + actorOrgId, + actorAuthMethod, + data: { + [SecretOperations.Update]: [ + { + secretName, + newSecretName, + skipMultilineEncoding, + secretKeyCiphertext: secretKeyEncrypted.ciphertext, + secretKeyIV: secretKeyEncrypted.iv, + secretKeyTag: secretKeyEncrypted.tag, + secretValueCiphertext: secretValueEncrypted.ciphertext, + secretValueIV: secretValueEncrypted.iv, + secretValueTag: secretValueEncrypted.tag, + secretCommentCiphertext: secretCommentEncrypted.ciphertext, + secretCommentIV: secretCommentEncrypted.iv, + secretCommentTag: secretCommentEncrypted.tag, + tagIds, + secretReminderNote, + secretReminderRepeatDays + } + ] + } + }); + return { approval, type: SecretProtectionType.Approval as const }; + } const secret = await updateSecret({ secretName, @@ -1185,11 +1607,22 @@ export const secretServiceFactory = ({ secretValueCiphertext: secretValueEncrypted.ciphertext, secretValueIV: secretValueEncrypted.iv, secretValueTag: secretValueEncrypted.tag, - skipMultilineEncoding + skipMultilineEncoding, + tags: tagIds, + metadata, + secretReminderRepeatDays, + secretReminderNote, + newSecretName, + secretKeyIV: secretKeyEncrypted.iv, + secretKeyTag: secretKeyEncrypted.tag, + secretKeyCiphertext: secretKeyEncrypted.ciphertext, + secretCommentIV: secretCommentEncrypted.iv, + secretCommentTag: secretCommentEncrypted.tag, + secretCommentCiphertext: secretCommentEncrypted.ciphertext }); await snapshotService.performSnapshot(secret.folderId); - return decryptSecretRaw(secret, botKey); + return { type: SecretProtectionType.Direct as const, secret: decryptSecretRaw(secret, botKey) }; }; const deleteSecretRaw = async ({ @@ -1203,9 +1636,70 @@ export const secretServiceFactory = ({ type, secretPath }: TDeleteSecretRawDTO) => { - const botKey = await projectBotService.getBotKey(projectId); - if (!botKey) throw new BadRequestError({ message: "Project bot not found", name: "bot_not_found_error" }); - + const { botKey, shouldUseSecretV2Bridge } = await projectBotService.getBotKey(projectId); + const policy = + actor === ActorType.USER && type === SecretType.Shared + ? await secretApprovalPolicyService.getSecretApprovalPolicy(projectId, environment, secretPath) + : undefined; + if (shouldUseSecretV2Bridge) { + if (policy) { + const approval = await secretApprovalRequestService.generateSecretApprovalRequestV2Bridge({ + policy, + actorAuthMethod, + actorOrgId, + actorId, + actor, + projectId, + environment, + secretPath, + data: { + [SecretOperations.Delete]: [ + { + secretKey: secretName + } + ] + } + }); + return { type: SecretProtectionType.Approval as const, approval }; + } + const secret = await secretV2BridgeService.deleteSecret({ + secretName, + type, + actorId, + actor, + actorOrgId, + actorAuthMethod, + projectId, + environment, + secretPath + }); + return { type: SecretProtectionType.Direct as const, secret }; + } + if (!botKey) + throw new NotFoundError({ + message: `Project bot for project with ID '${projectId}' not found. 
Please upgrade your project.`, + name: "bot_not_found_error" + }); + if (policy) { + const approval = await secretApprovalRequestService.generateSecretApprovalRequest({ + policy, + actorAuthMethod, + actorOrgId, + actorId, + actor, + projectId, + environment, + secretPath, + data: { + [SecretOperations.Delete]: [ + { + secretName + } + ] + } + }); + return { type: SecretProtectionType.Approval as const, approval }; + } const secret = await deleteSecret({ secretName, projectId, @@ -1218,12 +1712,13 @@ export const secretServiceFactory = ({ actorAuthMethod }); - return decryptSecretRaw(secret, botKey); + return { type: SecretProtectionType.Direct as const, secret: decryptSecretRaw(secret, botKey) }; }; const createManySecretsRaw = async ({ actorId, projectSlug, + projectId: optionalProjectId, environment, actor, actorOrgId, @@ -1231,22 +1726,66 @@ export const secretServiceFactory = ({ secretPath, secrets: inputSecrets = [] }: TCreateManySecretRawDTO) => { - const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId); - if (!project) throw new BadRequestError({ message: "Project not found" }); - const projectId = project.id; + if (!projectSlug && !optionalProjectId) + throw new BadRequestError({ message: "Must provide either project slug or projectId" }); - const botKey = await projectBotService.getBotKey(projectId); - if (!botKey) throw new BadRequestError({ message: "Project bot not found", name: "bot_not_found_error" }); + let projectId = optionalProjectId as string; + // pick either project slug or projectid + if (!optionalProjectId && projectSlug) { + const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId); + if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` }); + projectId = project.id; + } - const secrets = await createManySecret({ - projectId, - environment, - path: secretPath, - actor, - actorId, - actorOrgId, - actorAuthMethod, - secrets: inputSecrets.map(({ secretComment, secretKey, secretValue, skipMultilineEncoding }) => { + const { botKey, shouldUseSecretV2Bridge } = await projectBotService.getBotKey(projectId); + const policy = + actor === ActorType.USER + ? await secretApprovalPolicyService.getSecretApprovalPolicy(projectId, environment, secretPath) + : undefined; + if (shouldUseSecretV2Bridge) { + if (policy) { + const approval = await secretApprovalRequestService.generateSecretApprovalRequestV2Bridge({ + policy, + secretPath, + environment, + projectId, + actor, + actorId, + actorOrgId, + actorAuthMethod, + data: { + [SecretOperations.Create]: inputSecrets.map((el) => ({ + tagIds: el.tagIds, + secretValue: el.secretValue, + secretComment: el.secretComment, + metadata: el.metadata, + skipMultilineEncoding: el.skipMultilineEncoding, + secretKey: el.secretKey + })) + } + }); + return { type: SecretProtectionType.Approval as const, approval }; + } + const secrets = await secretV2BridgeService.createManySecret({ + secretPath, + environment, + projectId, + actorAuthMethod, + actorOrgId, + actor, + actorId, + secrets: inputSecrets + }); + return { secrets, type: SecretProtectionType.Direct as const }; + } + + if (!botKey) + throw new NotFoundError({ + message: `Project bot for project with ID '${projectId}' not found. 
Please upgrade your project.`, + name: "bot_not_found_error" + }); + const sanitizedSecrets = inputSecrets.map( + ({ secretComment, secretKey, metadata, tagIds, secretValue, skipMultilineEncoding }) => { const secretKeyEncrypted = encryptSymmetric128BitHexKeyUTF8(secretKey, botKey); const secretValueEncrypted = encryptSymmetric128BitHexKeyUTF8(secretValue || "", botKey); const secretCommentEncrypted = encryptSymmetric128BitHexKeyUTF8(secretComment || "", botKey); @@ -1261,34 +1800,31 @@ export const secretServiceFactory = ({ secretValueTag: secretValueEncrypted.tag, secretCommentCiphertext: secretCommentEncrypted.ciphertext, secretCommentIV: secretCommentEncrypted.iv, - secretCommentTag: secretCommentEncrypted.tag + secretCommentTag: secretCommentEncrypted.tag, + tags: tagIds, + tagIds, + metadata }; - }) - }); - - return secrets.map((secret) => - decryptSecretRaw({ ...secret, workspace: projectId, environment, secretPath }, botKey) + } ); - }; + if (policy) { + const approval = await secretApprovalRequestService.generateSecretApprovalRequest({ + policy, + secretPath, + environment, + projectId, + actor, + actorId, + actorOrgId, + actorAuthMethod, + data: { + [SecretOperations.Create]: sanitizedSecrets + } + }); + return { type: SecretProtectionType.Approval as const, approval }; + } - const updateManySecretsRaw = async ({ - actorId, - projectSlug, - environment, - actor, - actorOrgId, - actorAuthMethod, - secretPath, - secrets: inputSecrets = [] - }: TUpdateManySecretRawDTO) => { - const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId); - if (!project) throw new BadRequestError({ message: "Project not found" }); - const projectId = project.id; - - const botKey = await projectBotService.getBotKey(projectId); - if (!botKey) throw new BadRequestError({ message: "Project bot not found", name: "bot_not_found_error" }); - - const secrets = await updateManySecret({ + const secrets = await createManySecret({ projectId, environment, path: secretPath, @@ -1296,12 +1832,105 @@ export const secretServiceFactory = ({ actorId, actorOrgId, actorAuthMethod, - secrets: inputSecrets.map(({ secretComment, secretKey, secretValue, skipMultilineEncoding }) => { - const secretKeyEncrypted = encryptSymmetric128BitHexKeyUTF8(secretKey, botKey); + secrets: sanitizedSecrets + }); + + return { + type: SecretProtectionType.Direct as const, + secrets: secrets.map((secret) => + decryptSecretRaw({ ...secret, workspace: projectId, environment, secretPath }, botKey) + ) + }; + }; + + const updateManySecretsRaw = async ({ + actorId, + projectSlug, + projectId: optionalProjectId, + environment, + actor, + actorOrgId, + actorAuthMethod, + secretPath, + secrets: inputSecrets = [] + }: TUpdateManySecretRawDTO) => { + if (!projectSlug && !optionalProjectId) + throw new BadRequestError({ message: "Must provide either project slug or projectId" }); + + let projectId = optionalProjectId as string; + if (!optionalProjectId && projectSlug) { + const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId); + if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` }); + projectId = project.id; + } + + const { botKey, shouldUseSecretV2Bridge } = await projectBotService.getBotKey(projectId); + const policy = + actor === ActorType.USER + ? 
await secretApprovalPolicyService.getSecretApprovalPolicy(projectId, environment, secretPath) + : undefined; + if (shouldUseSecretV2Bridge) { + if (policy) { + const approval = await secretApprovalRequestService.generateSecretApprovalRequestV2Bridge({ + policy, + secretPath, + environment, + projectId, + actor, + actorId, + actorOrgId, + actorAuthMethod, + data: { + [SecretOperations.Update]: inputSecrets.map((el) => ({ + tagIds: el.tagIds, + secretValue: el.secretValue, + secretComment: el.secretComment, + skipMultilineEncoding: el.skipMultilineEncoding, + secretKey: el.secretKey + })) + } + }); + return { type: SecretProtectionType.Approval as const, approval }; + } + const secrets = await secretV2BridgeService.updateManySecret({ + secretPath, + environment, + projectId, + actorAuthMethod, + actorOrgId, + actor, + actorId, + secrets: inputSecrets + }); + return { type: SecretProtectionType.Direct as const, secrets }; + } + + if (!botKey) + throw new NotFoundError({ + message: `Project bot for project with ID '${projectId}' not found. Please upgrade your project.`, + name: "bot_not_found_error" + }); + const sanitizedSecrets = inputSecrets.map( + ({ + secretComment, + secretKey, + secretValue, + skipMultilineEncoding, + tagIds: tags, + newSecretName, + secretReminderNote, + secretReminderRepeatDays + }) => { + const secretKeyEncrypted = encryptSymmetric128BitHexKeyUTF8(newSecretName || secretKey, botKey); const secretValueEncrypted = encryptSymmetric128BitHexKeyUTF8(secretValue || "", botKey); const secretCommentEncrypted = encryptSymmetric128BitHexKeyUTF8(secretComment || "", botKey); return { secretName: secretKey, + newSecretName, + tags, + tagIds: tags, + secretReminderRepeatDays, + secretReminderNote, type: SecretType.Shared, skipMultilineEncoding, secretKeyCiphertext: secretKeyEncrypted.ciphertext, @@ -1314,17 +1943,48 @@ export const secretServiceFactory = ({ secretCommentIV: secretCommentEncrypted.iv, secretCommentTag: secretCommentEncrypted.tag }; - }) + } + ); + if (policy) { + const approval = await secretApprovalRequestService.generateSecretApprovalRequest({ + policy, + secretPath, + environment, + projectId, + actor, + actorId, + actorOrgId, + actorAuthMethod, + data: { + [SecretOperations.Update]: sanitizedSecrets + } + }); + + return { type: SecretProtectionType.Approval as const, approval }; + } + const secrets = await updateManySecret({ + projectId, + environment, + path: secretPath, + actor, + actorId, + actorOrgId, + actorAuthMethod, + secrets: sanitizedSecrets }); - return secrets.map((secret) => - decryptSecretRaw({ ...secret, workspace: projectId, environment, secretPath }, botKey) - ); + return { + type: SecretProtectionType.Direct as const, + secrets: secrets.map((secret) => + decryptSecretRaw({ ...secret, workspace: projectId, environment, secretPath }, botKey) + ) + }; }; const deleteManySecretsRaw = async ({ actorId, projectSlug, + projectId: optionalProjectId, environment, actor, actorOrgId, @@ -1332,13 +1992,73 @@ export const secretServiceFactory = ({ secretPath, secrets: inputSecrets = [] }: TDeleteManySecretRawDTO) => { - const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId); - if (!project) throw new BadRequestError({ message: "Project not found" }); - const projectId = project.id; + if (!projectSlug && !optionalProjectId) + throw new BadRequestError({ message: "Must provide either project slug or projectId" }); - const botKey = await projectBotService.getBotKey(projectId); - if (!botKey) throw new BadRequestError({ message: "Project 
bot not found", name: "bot_not_found_error" }); + let projectId = optionalProjectId as string; + if (!optionalProjectId && projectSlug) { + const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId); + if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` }); + projectId = project.id; + } + const { botKey, shouldUseSecretV2Bridge } = await projectBotService.getBotKey(projectId); + const policy = + actor === ActorType.USER + ? await secretApprovalPolicyService.getSecretApprovalPolicy(projectId, environment, secretPath) + : undefined; + if (shouldUseSecretV2Bridge) { + if (policy) { + const approval = await secretApprovalRequestService.generateSecretApprovalRequestV2Bridge({ + policy, + actorAuthMethod, + actorOrgId, + actorId, + actor, + projectId, + environment, + secretPath, + data: { + [SecretOperations.Delete]: inputSecrets + } + }); + return { type: SecretProtectionType.Approval as const, approval }; + } + const secrets = await secretV2BridgeService.deleteManySecret({ + secretPath, + environment, + projectId, + actorAuthMethod, + actorOrgId, + actor, + actorId, + secrets: inputSecrets + }); + return { type: SecretProtectionType.Direct as const, secrets }; + } + + if (!botKey) + throw new NotFoundError({ + message: `Project bot for project with ID '${projectId}' not found. Please upgrade your project.`, + name: "bot_not_found_error" + }); + + if (policy) { + const approval = await secretApprovalRequestService.generateSecretApprovalRequest({ + policy, + actorAuthMethod, + actorOrgId, + actorId, + actor, + projectId, + environment, + secretPath, + data: { + [SecretOperations.Delete]: inputSecrets.map((el) => ({ secretName: el.secretKey })) + } + }); + return { type: SecretProtectionType.Approval as const, approval }; + } const secrets = await deleteManySecret({ projectId, environment, @@ -1347,12 +2067,15 @@ export const secretServiceFactory = ({ actorId, actorOrgId, actorAuthMethod, - secrets: inputSecrets.map(({ secretKey }) => ({ secretName: secretKey, type: SecretType.Shared })) + secrets: inputSecrets.map(({ secretKey, type = SecretType.Shared }) => ({ secretName: secretKey, type })) }); - return secrets.map((secret) => - decryptSecretRaw({ ...secret, workspace: projectId, environment, secretPath }, botKey) - ); + return { + type: SecretProtectionType.Direct as const, + secrets: secrets.map((secret) => + decryptSecretRaw({ ...secret, workspace: projectId, environment, secretPath }, botKey) + ) + }; }; const getSecretVersions = async ({ @@ -1364,11 +2087,31 @@ export const secretServiceFactory = ({ offset = 0, secretId }: TGetSecretVersionsDTO) => { - const secret = await secretDAL.findById(secretId); - if (!secret) throw new BadRequestError({ message: "Failed to find secret" }); + const secretVersionV2 = await secretV2BridgeService + .getSecretVersions({ + actorId, + actor, + actorOrgId, + actorAuthMethod, + limit, + offset, + secretId + }) + .catch((err) => { + if ((err as Error).message === "BadRequest: Failed to find secret") { + return null; + } + }); + if (secretVersionV2) return secretVersionV2; + const secret = await secretDAL.findById(secretId); + if (!secret) throw new NotFoundError({ message: `Secret with ID '${secretId}' not found` }); const folder = await folderDAL.findById(secret.folderId); - if (!folder) throw new BadRequestError({ message: "Failed to find secret" }); + if (!folder) throw new NotFoundError({ message: `Folder with ID '${secret.folderId}' not found` }); + + const { botKey } = await 
projectBotService.getBotKey(folder.projectId); + if (!botKey) + throw new NotFoundError({ message: `Project bot for project with ID '${folder.projectId}' not found` }); const { permission } = await permissionService.getProjectPermission( actor, @@ -1378,9 +2121,18 @@ export const secretServiceFactory = ({ actorOrgId ); ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.SecretRollback); - const secretVersions = await secretVersionDAL.find({ secretId }, { offset, limit, sort: [["createdAt", "desc"]] }); - return secretVersions; + return secretVersions.map((el) => + decryptSecretRaw( + { + ...el, + workspace: folder.projectId, + environment: folder.environment.envSlug, + secretPath: "/" + }, + botKey + ) + ); }; const attachTags = async ({ @@ -1396,7 +2148,6 @@ export const secretServiceFactory = ({ actorId }: TAttachSecretTagsDTO) => { const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId); - const { permission } = await permissionService.getProjectPermission( actor, actorId, @@ -1425,12 +2176,14 @@ export const secretServiceFactory = ({ }); if (!secret) { - throw new BadRequestError({ message: "Secret not found" }); + throw new NotFoundError({ message: `Secret with name '${secretName}' not found` }); } const folder = await folderDAL.findBySecretPath(project.id, environment, secretPath); if (!folder) { - throw new BadRequestError({ message: "Folder not found" }); + throw new NotFoundError({ + message: `Folder with path '${secretPath}' in environment with slug '${environment}' not found` + }); } const tags = await secretTagDAL.find({ @@ -1441,7 +2194,7 @@ export const secretServiceFactory = ({ }); if (tags.length !== tagSlugs.length) { - throw new BadRequestError({ message: "One or more tags not found." }); + throw new NotFoundError({ message: "One or more tags not found." }); } const existingSecretTags = await secretDAL.getSecretTags(secret.id); @@ -1482,7 +2235,7 @@ export const secretServiceFactory = ({ return { ...updatedSecret[0], - tags: [...existingSecretTags, ...tags].map((t) => ({ id: t.id, slug: t.slug, name: t.name, color: t.color })) + tags: [...existingSecretTags, ...tags].map((t) => ({ id: t.id, slug: t.slug, name: t.slug, color: t.color })) }; }; @@ -1499,7 +2252,6 @@ export const secretServiceFactory = ({ actorId }: TAttachSecretTagsDTO) => { const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId); - const { permission } = await permissionService.getProjectPermission( actor, actorId, @@ -1528,12 +2280,14 @@ export const secretServiceFactory = ({ }); if (!secret) { - throw new BadRequestError({ message: "Secret not found" }); + throw new NotFoundError({ message: `Secret with name '${secretName}' not found` }); } const folder = await folderDAL.findBySecretPath(project.id, environment, secretPath); if (!folder) { - throw new BadRequestError({ message: "Folder not found" }); + throw new NotFoundError({ + message: `Folder with path '${secretPath}' in environment with slug '${environment}' not found` + }); } const tags = await secretTagDAL.find({ @@ -1544,7 +2298,7 @@ export const secretServiceFactory = ({ }); if (tags.length !== tagSlugs.length) { - throw new BadRequestError({ message: "One or more tags not found." }); + throw new NotFoundError({ message: "One or more tags not found." 
}); } const existingSecretTags = await secretDAL.getSecretTags(secret.id); @@ -1554,7 +2308,7 @@ export const secretServiceFactory = ({ const secretTagIds = existingSecretTags.map((tag) => tag.id); if (!tagIdsToRemove.every((el) => secretTagIds.includes(el))) { - throw new BadRequestError({ message: "One or more tags not found on the secret" }); + throw new NotFoundError({ message: "One or more tags not found on the secret" }); } const newTags = existingSecretTags.filter((tag) => !tagIdsToRemove.includes(tag.id)); @@ -1612,11 +2366,24 @@ export const secretServiceFactory = ({ ); if (!hasRole(ProjectMembershipRole.Admin)) - throw new BadRequestError({ message: "Only admins are allowed to take this action" }); + throw new ForbiddenRequestError({ message: "Only admins are allowed to take this action" }); + + const { botKey, shouldUseSecretV2Bridge } = await projectBotService.getBotKey(projectId); + if (shouldUseSecretV2Bridge) { + return secretV2BridgeService.backfillSecretReferences({ + projectId, + actor, + actorId, + actorOrgId, + actorAuthMethod + }); + } - const botKey = await projectBotService.getBotKey(projectId); if (!botKey) - throw new BadRequestError({ message: "Please upgrade your project first", name: "bot_not_found_error" }); + throw new NotFoundError({ + message: `Project bot for project with ID '${projectId}' not found. Please upgrade your project.`, + name: "bot_not_found_error" + }); await secretDAL.transaction(async (tx) => { const secrets = await secretDAL.findAllProjectSecretValues(projectId, tx); @@ -1639,6 +2406,468 @@ export const secretServiceFactory = ({ return { message: "Successfully backfilled secret references" }; }; + const moveSecrets = async ({ + sourceEnvironment, + sourceSecretPath, + destinationEnvironment, + destinationSecretPath, + secretIds, + projectSlug, + shouldOverwrite, + actor, + actorId, + actorAuthMethod, + actorOrgId + }: TMoveSecretsDTO) => { + const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId); + if (!project) { + throw new NotFoundError({ + message: `Project with slug '${projectSlug}' not found` + }); + } + if (project.version === 3) { + return secretV2BridgeService.moveSecrets({ + sourceEnvironment, + sourceSecretPath, + destinationEnvironment, + destinationSecretPath, + secretIds, + projectId: project.id, + shouldOverwrite, + actor, + actorId, + actorAuthMethod, + actorOrgId + }); + } + + const { permission } = await permissionService.getProjectPermission( + actor, + actorId, + project.id, + actorAuthMethod, + actorOrgId + ); + + ForbiddenError.from(permission).throwUnlessCan( + ProjectPermissionActions.Delete, + subject(ProjectPermissionSub.Secrets, { + environment: sourceEnvironment, + secretPath: sourceSecretPath + }) + ); + + ForbiddenError.from(permission).throwUnlessCan( + ProjectPermissionActions.Create, + subject(ProjectPermissionSub.Secrets, { + environment: destinationEnvironment, + secretPath: destinationSecretPath + }) + ); + + ForbiddenError.from(permission).throwUnlessCan( + ProjectPermissionActions.Edit, + subject(ProjectPermissionSub.Secrets, { + environment: destinationEnvironment, + secretPath: destinationSecretPath + }) + ); + + const { botKey } = await projectBotService.getBotKey(project.id); + if (!botKey) { + throw new NotFoundError({ + message: `Project bot for project with ID '${project.id}' not found. 
Please upgrade your project.`, + name: "bot_not_found_error" + }); + } + + const sourceFolder = await folderDAL.findBySecretPath(project.id, sourceEnvironment, sourceSecretPath); + if (!sourceFolder) { + throw new NotFoundError({ + message: `Source folder with path '${sourceSecretPath}' in environment with slug '${sourceEnvironment}' not found` + }); + } + + const destinationFolder = await folderDAL.findBySecretPath( + project.id, + destinationEnvironment, + destinationSecretPath + ); + + if (!destinationFolder) { + throw new NotFoundError({ + message: `Destination folder with path '${destinationSecretPath}' in environment with slug '${destinationEnvironment}' not found` + }); + } + + const sourceSecrets = await secretDAL.find({ + type: SecretType.Shared, + $in: { + id: secretIds + } + }); + + if (sourceSecrets.length !== secretIds.length) { + throw new BadRequestError({ + message: "Invalid secrets" + }); + } + + const decryptedSourceSecrets = sourceSecrets.map((secret) => ({ + ...secret, + secretKey: decryptSymmetric128BitHexKeyUTF8({ + ciphertext: secret.secretKeyCiphertext, + iv: secret.secretKeyIV, + tag: secret.secretKeyTag, + key: botKey + }), + secretValue: decryptSymmetric128BitHexKeyUTF8({ + ciphertext: secret.secretValueCiphertext, + iv: secret.secretValueIV, + tag: secret.secretValueTag, + key: botKey + }) + })); + + let isSourceUpdated = false; + let isDestinationUpdated = false; + + // Moving secrets is a two-step process. + await secretDAL.transaction(async (tx) => { + // First step is to create/update the secret in the destination: + const destinationSecretsFromDB = await secretDAL.find( + { + folderId: destinationFolder.id + }, + { tx } + ); + + const decryptedDestinationSecrets = destinationSecretsFromDB.map((secret) => { + return { + ...secret, + secretKey: decryptSymmetric128BitHexKeyUTF8({ + ciphertext: secret.secretKeyCiphertext, + iv: secret.secretKeyIV, + tag: secret.secretKeyTag, + key: botKey + }), + secretValue: decryptSymmetric128BitHexKeyUTF8({ + ciphertext: secret.secretValueCiphertext, + iv: secret.secretValueIV, + tag: secret.secretValueTag, + key: botKey + }) + }; + }); + + const destinationSecretsGroupedByBlindIndex = groupBy( + decryptedDestinationSecrets.filter(({ secretBlindIndex }) => Boolean(secretBlindIndex)), + (i) => i.secretBlindIndex as string + ); + + const locallyCreatedSecrets = decryptedSourceSecrets + .filter(({ secretBlindIndex }) => !destinationSecretsGroupedByBlindIndex[secretBlindIndex as string]?.[0]) + .map((el) => ({ ...el, operation: SecretOperations.Create })); + + const locallyUpdatedSecrets = decryptedSourceSecrets + .filter( + ({ secretBlindIndex, secretKey, secretValue }) => + destinationSecretsGroupedByBlindIndex[secretBlindIndex as string]?.[0] && + // if key or value changed + (destinationSecretsGroupedByBlindIndex[secretBlindIndex as string]?.[0]?.secretKey !== secretKey || + destinationSecretsGroupedByBlindIndex[secretBlindIndex as string]?.[0]?.secretValue !== secretValue) + ) + .map((el) => ({ ...el, operation: SecretOperations.Update })); + + if (locallyUpdatedSecrets.length > 0 && !shouldOverwrite) { + const existingKeys = locallyUpdatedSecrets.map((s) => s.secretKey); + + throw new BadRequestError({ + message: `Failed to move secrets. 
The following secrets already exist in the destination: ${existingKeys.join( + "," + )}` + }); + } + + const isEmpty = locallyCreatedSecrets.length + locallyUpdatedSecrets.length === 0; + + if (isEmpty) { + throw new BadRequestError({ + message: "Selected secrets already exist in the destination." + }); + } + const destinationFolderPolicy = await secretApprovalPolicyService.getSecretApprovalPolicy( + project.id, + destinationFolder.environment.slug, + destinationFolder.path + ); + + if (destinationFolderPolicy && actor === ActorType.USER) { + // if secret approval policy exists for destination, we create the secret approval request + const localSecretsIds = decryptedDestinationSecrets.map(({ id }) => id); + const latestSecretVersions = await secretVersionDAL.findLatestVersionMany( + destinationFolder.id, + localSecretsIds, + tx + ); + + const approvalRequestDoc = await secretApprovalRequestDAL.create( + { + folderId: destinationFolder.id, + slug: alphaNumericNanoId(), + policyId: destinationFolderPolicy.id, + status: "open", + hasMerged: false, + committerUserId: actorId + }, + tx + ); + + const commits = locallyCreatedSecrets.concat(locallyUpdatedSecrets).map((doc) => { + const { operation } = doc; + const localSecret = destinationSecretsGroupedByBlindIndex[doc.secretBlindIndex as string]?.[0]; + + return { + op: operation, + keyEncoding: doc.keyEncoding, + algorithm: doc.algorithm, + requestId: approvalRequestDoc.id, + metadata: doc.metadata, + secretKeyIV: doc.secretKeyIV, + secretKeyTag: doc.secretKeyTag, + secretKeyCiphertext: doc.secretKeyCiphertext, + secretValueIV: doc.secretValueIV, + secretValueTag: doc.secretValueTag, + secretValueCiphertext: doc.secretValueCiphertext, + secretBlindIndex: doc.secretBlindIndex, + secretCommentIV: doc.secretCommentIV, + secretCommentTag: doc.secretCommentTag, + secretCommentCiphertext: doc.secretCommentCiphertext, + skipMultilineEncoding: doc.skipMultilineEncoding, + // except create operation other two needs the secret id and version id + ...(operation !== SecretOperations.Create + ? 
{ secretId: localSecret.id, secretVersion: latestSecretVersions[localSecret.id].id } + : {}) + }; + }); + await secretApprovalRequestSecretDAL.insertMany(commits, tx); + } else { + // apply changes directly + if (locallyCreatedSecrets.length) { + await fnSecretBulkInsert({ + folderId: destinationFolder.id, + secretVersionDAL, + secretDAL, + tx, + secretTagDAL, + secretVersionTagDAL, + inputSecrets: locallyCreatedSecrets.map((doc) => { + return { + keyEncoding: doc.keyEncoding, + algorithm: doc.algorithm, + type: doc.type, + metadata: doc.metadata, + secretKeyIV: doc.secretKeyIV, + secretKeyTag: doc.secretKeyTag, + secretKeyCiphertext: doc.secretKeyCiphertext, + secretValueIV: doc.secretValueIV, + secretValueTag: doc.secretValueTag, + secretValueCiphertext: doc.secretValueCiphertext, + secretBlindIndex: doc.secretBlindIndex, + secretCommentIV: doc.secretCommentIV, + secretCommentTag: doc.secretCommentTag, + secretCommentCiphertext: doc.secretCommentCiphertext, + skipMultilineEncoding: doc.skipMultilineEncoding + }; + }) + }); + } + if (locallyUpdatedSecrets.length) { + await fnSecretBulkUpdate({ + projectId: project.id, + folderId: destinationFolder.id, + secretVersionDAL, + secretDAL, + tx, + secretTagDAL, + secretVersionTagDAL, + inputSecrets: locallyUpdatedSecrets.map((doc) => { + return { + filter: { + folderId: destinationFolder.id, + id: destinationSecretsGroupedByBlindIndex[doc.secretBlindIndex as string][0].id + }, + data: { + keyEncoding: doc.keyEncoding, + algorithm: doc.algorithm, + type: doc.type, + metadata: doc.metadata, + secretKeyIV: doc.secretKeyIV, + secretKeyTag: doc.secretKeyTag, + secretKeyCiphertext: doc.secretKeyCiphertext, + secretValueIV: doc.secretValueIV, + secretValueTag: doc.secretValueTag, + secretValueCiphertext: doc.secretValueCiphertext, + secretBlindIndex: doc.secretBlindIndex, + secretCommentIV: doc.secretCommentIV, + secretCommentTag: doc.secretCommentTag, + secretCommentCiphertext: doc.secretCommentCiphertext, + skipMultilineEncoding: doc.skipMultilineEncoding + } + }; + }) + }); + } + + isDestinationUpdated = true; + } + + // Next step is to delete the secrets from the source folder: + const sourceSecretsGroupByBlindIndex = groupBy(sourceSecrets, (i) => i.secretBlindIndex as string); + const locallyDeletedSecrets = decryptedSourceSecrets.map((el) => ({ ...el, operation: SecretOperations.Delete })); + + const sourceFolderPolicy = await secretApprovalPolicyService.getSecretApprovalPolicy( + project.id, + sourceFolder.environment.slug, + sourceFolder.path + ); + + if (sourceFolderPolicy && actor === ActorType.USER) { + // if secret approval policy exists for source, we create the secret approval request + const localSecretsIds = decryptedSourceSecrets.map(({ id }) => id); + const latestSecretVersions = await secretVersionDAL.findLatestVersionMany(sourceFolder.id, localSecretsIds, tx); + const approvalRequestDoc = await secretApprovalRequestDAL.create( + { + folderId: sourceFolder.id, + slug: alphaNumericNanoId(), + policyId: sourceFolderPolicy.id, + status: "open", + hasMerged: false, + committerUserId: actorId + }, + tx + ); + + const commits = locallyDeletedSecrets.map((doc) => { + const { operation } = doc; + const localSecret = sourceSecretsGroupByBlindIndex[doc.secretBlindIndex as string]?.[0]; + + return { + op: operation, + keyEncoding: doc.keyEncoding, + algorithm: doc.algorithm, + requestId: approvalRequestDoc.id, + metadata: doc.metadata, + secretKeyIV: doc.secretKeyIV, + secretKeyTag: doc.secretKeyTag, + secretKeyCiphertext: 
doc.secretKeyCiphertext, + secretValueIV: doc.secretValueIV, + secretValueTag: doc.secretValueTag, + secretValueCiphertext: doc.secretValueCiphertext, + secretBlindIndex: doc.secretBlindIndex, + secretCommentIV: doc.secretCommentIV, + secretCommentTag: doc.secretCommentTag, + secretCommentCiphertext: doc.secretCommentCiphertext, + skipMultilineEncoding: doc.skipMultilineEncoding, + secretId: localSecret.id, + secretVersion: latestSecretVersions[localSecret.id].id + }; + }); + + await secretApprovalRequestSecretDAL.insertMany(commits, tx); + } else { + // if no secret approval policy is present, we delete directly. + await secretDAL.delete( + { + $in: { + id: locallyDeletedSecrets.map(({ id }) => id) + }, + folderId: sourceFolder.id + }, + tx + ); + + isSourceUpdated = true; + } + }); + + if (isDestinationUpdated) { + await snapshotService.performSnapshot(destinationFolder.id); + await secretQueueService.syncSecrets({ + projectId: project.id, + secretPath: destinationFolder.path, + environmentSlug: destinationFolder.environment.slug, + actorId, + actor + }); + } + + if (isSourceUpdated) { + await snapshotService.performSnapshot(sourceFolder.id); + await secretQueueService.syncSecrets({ + projectId: project.id, + secretPath: sourceFolder.path, + environmentSlug: sourceFolder.environment.slug, + actorId, + actor + }); + } + + return { + projectId: project.id, + isSourceUpdated, + isDestinationUpdated + }; + }; + + const startSecretV2Migration = async ({ + projectId, + actor, + actorId, + actorOrgId, + actorAuthMethod + }: TStartSecretsV2MigrationDTO) => { + const { hasRole } = await permissionService.getProjectPermission( + actor, + actorId, + projectId, + actorAuthMethod, + actorOrgId + ); + + if (!hasRole(ProjectMembershipRole.Admin)) + throw new ForbiddenRequestError({ message: "Only admins are allowed to take this action" }); + + const { shouldUseSecretV2Bridge: isProjectV3, project } = await projectBotService.getBotKey(projectId); + if (isProjectV3) throw new BadRequestError({ message: "Project is already V3" }); + if (project.upgradeStatus === ProjectUpgradeStatus.InProgress) + throw new BadRequestError({ message: "Project is already being upgraded" }); + + await secretQueueService.startSecretV2Migration(projectId); + return { message: "Migrating project to new KMS architecture" }; + }; + + const getSecretsRawByFolderMappings = async ( + params: Omit, + actor: OrgServiceActor + ) => { + const { shouldUseSecretV2Bridge } = await projectBotService.getBotKey(params.projectId); + + if (!shouldUseSecretV2Bridge) throw new BadRequestError({ message: "Project version not supported" }); + + const { permission } = await permissionService.getProjectPermission( + actor.type, + actor.id, + params.projectId, + actor.authMethod, + actor.orgId + ); + + const secrets = secretV2BridgeService.getSecretsByFolderMappings({ ...params, userId: actor.id }, permission); + + return secrets; + }; + return { attachTags, detachTags, @@ -1659,6 +2888,13 @@ export const secretServiceFactory = ({ updateManySecretsRaw, deleteManySecretsRaw, getSecretVersions, - backfillSecretReferences + backfillSecretReferences, + moveSecrets, + startSecretV2Migration, + getSecretsCount, + getSecretsCountMultiEnv, + getSecretsRawMultiEnv, + getSecretReferenceTree, + getSecretsRawByFolderMappings }; }; diff --git a/backend/src/services/secret/secret-types.ts b/backend/src/services/secret/secret-types.ts index 18a0077fe9..ca5c5a74bb 100644 --- a/backend/src/services/secret/secret-types.ts +++ 
b/backend/src/services/secret/secret-types.ts @@ -1,7 +1,8 @@ import { Knex } from "knex"; +import { z } from "zod"; import { SecretType, TSecretBlindIndexes, TSecrets, TSecretsInsert, TSecretsUpdate } from "@app/db/schemas"; -import { TProjectPermission } from "@app/lib/types"; +import { OrderByDirection, TProjectPermission } from "@app/lib/types"; import { TProjectDALFactory } from "@app/services/project/project-dal"; import { TProjectBotDALFactory } from "@app/services/project-bot/project-bot-dal"; import { TSecretDALFactory } from "@app/services/secret/secret-dal"; @@ -12,11 +13,38 @@ import { TSecretFolderDALFactory } from "@app/services/secret-folder/secret-fold import { TSecretTagDALFactory } from "@app/services/secret-tag/secret-tag-dal"; import { ActorType } from "../auth/auth-type"; +import { TKmsServiceFactory } from "../kms/kms-service"; +import { TSecretV2BridgeDALFactory } from "../secret-v2-bridge/secret-v2-bridge-dal"; +import { TSecretVersionV2DALFactory } from "../secret-v2-bridge/secret-version-dal"; +import { TSecretVersionV2TagDALFactory } from "../secret-v2-bridge/secret-version-tag-dal"; type TPartialSecret = Pick; type TPartialInputSecret = Pick; +export const FailedIntegrationSyncEmailsPayloadSchema = z.object({ + projectId: z.string(), + secretPath: z.string(), + environmentName: z.string(), + environmentSlug: z.string(), + + count: z.number(), + syncMessage: z.string().optional(), + manuallyTriggeredByUserId: z.string().optional() +}); + +export type TFailedIntegrationSyncEmailsPayload = z.infer; + +export type TIntegrationSyncPayload = { + isManual?: boolean; + actorId?: string; + projectId: string; + environment: string; + secretPath: string; + depth?: number; + deDupeQueue?: Record; +}; + export type TCreateSecretDTO = { secretName: string; path: string; @@ -77,6 +105,8 @@ export type TGetSecretsDTO = { environment: string; includeImports?: boolean; recursive?: boolean; + limit?: number; + offset?: number; } & TProjectPermission; export type TGetASecretDTO = { @@ -139,18 +169,30 @@ export type TDeleteBulkSecretDTO = { }>; } & TProjectPermission; +export enum SecretsOrderBy { + Name = "name" // "key" for secrets but using name for use across resources +} + export type TGetSecretsRawDTO = { expandSecretReferences?: boolean; path: string; environment: string; includeImports?: boolean; recursive?: boolean; + tagSlugs?: string[]; + orderBy?: SecretsOrderBy; + orderDirection?: OrderByDirection; + offset?: number; + limit?: number; + search?: string; + keys?: string[]; } & TProjectPermission; export type TGetASecretRawDTO = { secretName: string; path: string; environment: string; + expandSecretReferences?: boolean; type: "shared" | "personal"; includeImports?: boolean; version?: number; @@ -159,13 +201,16 @@ export type TGetASecretRawDTO = { } & Omit; export type TCreateSecretRawDTO = TProjectPermission & { + secretName: string; secretPath: string; environment: string; - secretName: string; secretValue: string; type: SecretType; + tagIds?: string[]; secretComment?: string; skipMultilineEncoding?: boolean; + secretReminderRepeatDays?: number | null; + secretReminderNote?: string | null; }; export type TUpdateSecretRawDTO = TProjectPermission & { @@ -173,10 +218,16 @@ export type TUpdateSecretRawDTO = TProjectPermission & { environment: string; secretName: string; secretValue?: string; + newSecretName?: string; + secretComment?: string; type: SecretType; + tagIds?: string[]; skipMultilineEncoding?: boolean; secretReminderRepeatDays?: number | null; secretReminderNote?: 
string | null; + metadata?: { + source?: string; + }; }; export type TDeleteSecretRawDTO = TProjectPermission & { @@ -188,34 +239,46 @@ export type TDeleteSecretRawDTO = TProjectPermission & { export type TCreateManySecretRawDTO = Omit & { secretPath: string; - projectSlug: string; + projectId?: string; + projectSlug?: string; environment: string; secrets: { secretKey: string; secretValue: string; secretComment?: string; skipMultilineEncoding?: boolean; + tagIds?: string[]; + metadata?: { + source?: string; + }; }[]; }; export type TUpdateManySecretRawDTO = Omit & { secretPath: string; - projectSlug: string; + projectId?: string; + projectSlug?: string; environment: string; secrets: { secretKey: string; + newSecretName?: string; secretValue: string; secretComment?: string; skipMultilineEncoding?: boolean; + tagIds?: string[]; + secretReminderRepeatDays?: number | null; + secretReminderNote?: string | null; }[]; }; export type TDeleteManySecretRawDTO = Omit & { secretPath: string; - projectSlug: string; + projectId?: string; + projectSlug?: string; environment: string; secrets: { secretKey: string; + type?: SecretType; }[]; }; @@ -319,6 +382,13 @@ export type TCreateManySecretsRawFnFactory = { secretTagDAL: TSecretTagDALFactory; secretVersionTagDAL: TSecretVersionTagDALFactory; folderDAL: TSecretFolderDALFactory; + kmsService: Pick; + secretV2BridgeDAL: Pick< + TSecretV2BridgeDALFactory, + "insertMany" | "upsertSecretReferences" | "findBySecretKeys" | "bulkUpdate" | "deleteMany" + >; + secretVersionV2BridgeDAL: Pick; + secretVersionTagV2BridgeDAL: Pick; }; export type TCreateManySecretsRawFn = { @@ -348,6 +418,13 @@ export type TUpdateManySecretsRawFnFactory = { secretTagDAL: TSecretTagDALFactory; secretVersionTagDAL: TSecretVersionTagDALFactory; folderDAL: TSecretFolderDALFactory; + kmsService: Pick; + secretV2BridgeDAL: Pick< + TSecretV2BridgeDALFactory, + "insertMany" | "upsertSecretReferences" | "findBySecretKeys" | "bulkUpdate" | "deleteMany" + >; + secretVersionV2BridgeDAL: Pick; + secretVersionTagV2BridgeDAL: Pick; }; export type TUpdateManySecretsRawFn = { @@ -394,3 +471,20 @@ export type TSyncSecretsDTO = { // used for import creation to trigger replication pickOnlyImportIds?: string[]; }); + +export type TMoveSecretsDTO = { + projectSlug: string; + sourceEnvironment: string; + sourceSecretPath: string; + destinationEnvironment: string; + destinationSecretPath: string; + secretIds: string[]; + shouldOverwrite: boolean; +} & Omit; + +export enum SecretProtectionType { + Approval = "approval", + Direct = "direct" +} + +export type TStartSecretsV2MigrationDTO = TProjectPermission; diff --git a/backend/src/services/secret/secret-version-dal.ts b/backend/src/services/secret/secret-version-dal.ts index 203406e301..8e77858a53 100644 --- a/backend/src/services/secret/secret-version-dal.ts +++ b/backend/src/services/secret/secret-version-dal.ts @@ -2,8 +2,10 @@ import { Knex } from "knex"; import { TDbClient } from "@app/db"; import { TableName, TSecretVersions, TSecretVersionsUpdate } from "@app/db/schemas"; -import { BadRequestError, DatabaseError } from "@app/lib/errors"; +import { BadRequestError, DatabaseError, NotFoundError } from "@app/lib/errors"; import { ormify, selectAllTableCols } from "@app/lib/knex"; +import { logger } from "@app/lib/logger"; +import { QueueName } from "@app/queue"; export type TSecretVersionDALFactory = ReturnType; @@ -13,7 +15,7 @@ export const secretVersionDALFactory = (db: TDbClient) => { // This will fetch all latest secret versions from a folder const 
findLatestVersionByFolderId = async (folderId: string, tx?: Knex) => { try { - const docs = await (tx || db)(TableName.SecretVersion) + const docs = await (tx || db.replicaNode())(TableName.SecretVersion) .where(`${TableName.SecretVersion}.folderId`, folderId) .join(TableName.Secret, `${TableName.Secret}.id`, `${TableName.SecretVersion}.secretId`) .join( @@ -70,7 +72,7 @@ export const secretVersionDALFactory = (db: TDbClient) => { ); if (existingSecretVersions.length !== data.length) { - throw new BadRequestError({ message: "Some of the secret versions do not exist" }); + throw new NotFoundError({ message: "One or more secret versions not found" }); } if (data.length === 0) return []; @@ -90,7 +92,7 @@ export const secretVersionDALFactory = (db: TDbClient) => { const findLatestVersionMany = async (folderId: string, secretIds: string[], tx?: Knex) => { try { if (!secretIds.length) return {}; - const docs: Array = await (tx || db)(TableName.SecretVersion) + const docs: Array = await (tx || db.replicaNode())(TableName.SecretVersion) .where("folderId", folderId) .whereIn(`${TableName.SecretVersion}.secretId`, secretIds) .join( @@ -111,8 +113,39 @@ export const secretVersionDALFactory = (db: TDbClient) => { } }; + const pruneExcessVersions = async () => { + logger.info(`${QueueName.DailyResourceCleanUp}: pruning secret version v1 started`); + try { + await db(TableName.SecretVersion) + .with("version_cte", (qb) => { + void qb + .from(TableName.SecretVersion) + .select( + "id", + "folderId", + db.raw( + `ROW_NUMBER() OVER (PARTITION BY ${TableName.SecretVersion}."secretId" ORDER BY ${TableName.SecretVersion}."createdAt" DESC) AS row_num` + ) + ); + }) + .join(TableName.SecretFolder, `${TableName.SecretFolder}.id`, `${TableName.SecretVersion}.folderId`) + .join(TableName.Environment, `${TableName.Environment}.id`, `${TableName.SecretFolder}.envId`) + .join(TableName.Project, `${TableName.Project}.id`, `${TableName.Environment}.projectId`) + .join("version_cte", "version_cte.id", `${TableName.SecretVersion}.id`) + .whereRaw(`version_cte.row_num > ${TableName.Project}."pitVersionLimit"`) + .delete(); + } catch (error) { + throw new DatabaseError({ + error, + name: "Secret Version Prune" + }); + } + logger.info(`${QueueName.DailyResourceCleanUp}: pruning secret version v1 completed`); + }; + return { ...secretVersionOrm, + pruneExcessVersions, findLatestVersionMany, bulkUpdate, findLatestVersionByFolderId, diff --git a/backend/src/services/service-token/service-token-dal.ts b/backend/src/services/service-token/service-token-dal.ts index 5d3fcc5c80..ed9c5de7e9 100644 --- a/backend/src/services/service-token/service-token-dal.ts +++ b/backend/src/services/service-token/service-token-dal.ts @@ -12,7 +12,7 @@ export const serviceTokenDALFactory = (db: TDbClient) => { const findById = async (id: string, tx?: Knex) => { try { - const doc = await (tx || db)(TableName.ServiceToken) + const doc = await (tx || db.replicaNode())(TableName.ServiceToken) .leftJoin( TableName.Users, `${TableName.Users}.id`, diff --git a/backend/src/services/service-token/service-token-service.ts b/backend/src/services/service-token/service-token-service.ts index e434bd91fd..fe2c1c0d29 100644 --- a/backend/src/services/service-token/service-token-service.ts +++ b/backend/src/services/service-token/service-token-service.ts @@ -6,8 +6,9 @@ import bcrypt from "bcrypt"; import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service"; import { ProjectPermissionActions, ProjectPermissionSub } from 
"@app/ee/services/permission/project-permission"; import { getConfig } from "@app/lib/config/env"; -import { BadRequestError, UnauthorizedError } from "@app/lib/errors"; +import { ForbiddenRequestError, NotFoundError, UnauthorizedError } from "@app/lib/errors"; +import { TAccessTokenQueueServiceFactory } from "../access-token-queue/access-token-queue"; import { ActorType } from "../auth/auth-type"; import { TProjectDALFactory } from "../project/project-dal"; import { TProjectEnvDALFactory } from "../project-env/project-env-dal"; @@ -26,6 +27,7 @@ type TServiceTokenServiceFactoryDep = { permissionService: Pick; projectEnvDAL: Pick; projectDAL: Pick; + accessTokenQueue: Pick; }; export type TServiceTokenServiceFactory = ReturnType; @@ -35,7 +37,8 @@ export const serviceTokenServiceFactory = ({ userDAL, permissionService, projectEnvDAL, - projectDAL + projectDAL, + accessTokenQueue }: TServiceTokenServiceFactoryDep) => { const createServiceToken = async ({ iv, @@ -72,7 +75,8 @@ export const serviceTokenServiceFactory = ({ // validates env const scopeEnvs = [...new Set(scopes.map(({ environment }) => environment))]; const inputEnvs = await projectEnvDAL.findBySlugs(projectId, scopeEnvs); - if (inputEnvs.length !== scopeEnvs.length) throw new BadRequestError({ message: "Environment not found" }); + if (inputEnvs.length !== scopeEnvs.length) + throw new NotFoundError({ message: `One or more selected environments not found` }); const secret = crypto.randomBytes(16).toString("hex"); const secretHash = await bcrypt.hash(secret, appCfg.SALT_ROUNDS); @@ -103,7 +107,7 @@ export const serviceTokenServiceFactory = ({ const deleteServiceToken = async ({ actorId, actor, actorOrgId, actorAuthMethod, id }: TDeleteServiceTokenDTO) => { const serviceToken = await serviceTokenDAL.findById(id); - if (!serviceToken) throw new BadRequestError({ message: "Token not found" }); + if (!serviceToken) throw new NotFoundError({ message: `Service token with ID '${id}' not found` }); const { permission } = await permissionService.getProjectPermission( actor, @@ -119,13 +123,15 @@ export const serviceTokenServiceFactory = ({ }; const getServiceToken = async ({ actor, actorId }: TGetServiceTokenInfoDTO) => { - if (actor !== ActorType.SERVICE) throw new BadRequestError({ message: "Service token not found" }); + if (actor !== ActorType.SERVICE) + throw new NotFoundError({ message: `Service token with ID '${actorId}' not found` }); const serviceToken = await serviceTokenDAL.findById(actorId); - if (!serviceToken) throw new BadRequestError({ message: "Token not found" }); + if (!serviceToken) throw new NotFoundError({ message: `Service token with ID '${actorId}' not found` }); const serviceTokenUser = await userDAL.findById(serviceToken.createdBy); - if (!serviceTokenUser) throw new BadRequestError({ message: "Service token user not found" }); + if (!serviceTokenUser) + throw new NotFoundError({ message: `Service token with ID ${serviceToken.id} has no associated creator` }); return { serviceToken, user: serviceTokenUser }; }; @@ -151,26 +157,24 @@ export const serviceTokenServiceFactory = ({ }; const fnValidateServiceToken = async (token: string) => { - const [, TOKEN_IDENTIFIER, TOKEN_SECRET] = <[string, string, string]>token.split(".", 3); - const serviceToken = await serviceTokenDAL.findById(TOKEN_IDENTIFIER); + const [, tokenIdentifier, tokenSecret] = <[string, string, string]>token.split(".", 3); + const serviceToken = await serviceTokenDAL.findById(tokenIdentifier); - if (!serviceToken) throw new UnauthorizedError(); + 
if (!serviceToken) throw new NotFoundError({ message: `Service token with ID '${tokenIdentifier}' not found` }); const project = await projectDAL.findById(serviceToken.projectId); - if (!project) throw new UnauthorizedError({ message: "Service token project not found" }); + if (!project) throw new NotFoundError({ message: `Project with ID '${serviceToken.projectId}' not found` }); if (serviceToken.expiresAt && new Date(serviceToken.expiresAt) < new Date()) { await serviceTokenDAL.deleteById(serviceToken.id); - throw new UnauthorizedError({ message: "failed to authenticate expired service token" }); + throw new ForbiddenRequestError({ message: "Service token has expired" }); } - const isMatch = await bcrypt.compare(TOKEN_SECRET, serviceToken.secretHash); - if (!isMatch) throw new UnauthorizedError(); - const updatedToken = await serviceTokenDAL.updateById(serviceToken.id, { - lastUsed: new Date() - }); + const isMatch = await bcrypt.compare(tokenSecret, serviceToken.secretHash); + if (!isMatch) throw new UnauthorizedError({ message: "Invalid service token" }); + await accessTokenQueue.updateServiceTokenStatus(serviceToken.id); - return { ...serviceToken, lastUsed: updatedToken.lastUsed, orgId: project.orgId }; + return { ...serviceToken, lastUsed: new Date(), orgId: project.orgId }; }; return { diff --git a/backend/src/services/slack/project-slack-config-dal.ts b/backend/src/services/slack/project-slack-config-dal.ts new file mode 100644 index 0000000000..276442b1b8 --- /dev/null +++ b/backend/src/services/slack/project-slack-config-dal.ts @@ -0,0 +1,25 @@ +import { Knex } from "knex"; + +import { TDbClient } from "@app/db"; +import { TableName } from "@app/db/schemas"; +import { ormify, selectAllTableCols } from "@app/lib/knex"; + +export type TProjectSlackConfigDALFactory = ReturnType; + +export const projectSlackConfigDALFactory = (db: TDbClient) => { + const projectSlackConfigOrm = ormify(db, TableName.ProjectSlackConfigs); + + const getIntegrationDetailsByProject = (projectId: string, tx?: Knex) => { + return (tx || db.replicaNode())(TableName.ProjectSlackConfigs) + .join( + TableName.SlackIntegrations, + `${TableName.ProjectSlackConfigs}.slackIntegrationId`, + `${TableName.SlackIntegrations}.id` + ) + .where("projectId", "=", projectId) + .select(selectAllTableCols(TableName.ProjectSlackConfigs), selectAllTableCols(TableName.SlackIntegrations)) + .first(); + }; + + return { ...projectSlackConfigOrm, getIntegrationDetailsByProject }; +}; diff --git a/backend/src/services/slack/slack-auth-validators.ts b/backend/src/services/slack/slack-auth-validators.ts new file mode 100644 index 0000000000..c88ba49eaf --- /dev/null +++ b/backend/src/services/slack/slack-auth-validators.ts @@ -0,0 +1,16 @@ +import z from "zod"; + +export const validateSlackChannelsField = z + .string() + .trim() + .default("") + .transform((data) => { + if (data === "") return ""; + return data + .split(",") + .map((id) => id.trim()) + .join(", "); + }) + .refine((data) => data.split(",").length <= 20, { + message: "You can only select up to 20 slack channels" + }); diff --git a/backend/src/services/slack/slack-fns.ts b/backend/src/services/slack/slack-fns.ts new file mode 100644 index 0000000000..919f9de054 --- /dev/null +++ b/backend/src/services/slack/slack-fns.ts @@ -0,0 +1,177 @@ +import { WebClient } from "@slack/web-api"; + +import { getConfig } from "@app/lib/config/env"; +import { BadRequestError } from "@app/lib/errors"; +import { logger } from "@app/lib/logger"; + +import { TKmsServiceFactory } from 
"../kms/kms-service"; +import { KmsDataKey } from "../kms/kms-types"; +import { TProjectDALFactory } from "../project/project-dal"; +import { TProjectSlackConfigDALFactory } from "./project-slack-config-dal"; +import { SlackTriggerFeature, TSlackNotification } from "./slack-types"; + +export const fetchSlackChannels = async (botKey: string) => { + const slackChannels: { + name: string; + id: string; + }[] = []; + + const slackWebClient = new WebClient(botKey); + let cursor; + + do { + // eslint-disable-next-line no-await-in-loop + const response = await slackWebClient.conversations.list({ + cursor, + limit: 1000, + types: "public_channel,private_channel" + }); + + response.channels?.forEach((channel) => + slackChannels.push({ + name: channel.name_normalized as string, + id: channel.id as string + }) + ); + + // Set the cursor for the next page + cursor = response.response_metadata?.next_cursor; + } while (cursor); // Continue while there is a cursor + + return slackChannels; +}; + +const buildSlackPayload = (notification: TSlackNotification) => { + const appCfg = getConfig(); + + switch (notification.type) { + case SlackTriggerFeature.SECRET_APPROVAL: { + const { payload } = notification; + const messageBody = `A secret approval request has been opened by ${payload.userEmail}. +*Environment*: ${payload.environment} +*Secret path*: ${payload.secretPath || "/"} + +View the complete details <${appCfg.SITE_URL}/project/${payload.projectId}/approval?requestId=${ + payload.requestId + }|here>.`; + + const payloadBlocks = [ + { + type: "header", + text: { + type: "plain_text", + text: "Secret approval request", + emoji: true + } + }, + { + type: "section", + text: { + type: "mrkdwn", + text: messageBody + } + } + ]; + + return { + payloadMessage: messageBody, + payloadBlocks + }; + } + case SlackTriggerFeature.ACCESS_REQUEST: { + const { payload } = notification; + const messageBody = `${payload.requesterFullName} (${payload.requesterEmail}) has requested ${ + payload.isTemporary ? "temporary" : "permanent" + } access to ${payload.secretPath} in the ${payload.environment} environment of ${payload.projectName}. + +The following permissions are requested: ${payload.permissions.join(", ")} + +View the request and approve or deny it <${payload.approvalUrl}|here>.`; + + const payloadBlocks = [ + { + type: "header", + text: { + type: "plain_text", + text: "New access approval request pending for review", + emoji: true + } + }, + { + type: "section", + text: { + type: "mrkdwn", + text: messageBody + } + } + ]; + + return { + payloadMessage: messageBody, + payloadBlocks + }; + } + default: { + throw new BadRequestError({ + message: "Slack notification type not supported." 
+ }); + } + } +}; + +export const triggerSlackNotification = async ({ + projectId, + notification, + projectSlackConfigDAL, + projectDAL, + kmsService +}: { + projectId: string; + notification: TSlackNotification; + projectSlackConfigDAL: Pick; + projectDAL: Pick; + kmsService: Pick; +}) => { + const { payloadMessage, payloadBlocks } = buildSlackPayload(notification); + const project = await projectDAL.findById(projectId); + const slackIntegration = await projectSlackConfigDAL.getIntegrationDetailsByProject(project.id); + + if (!slackIntegration) { + return; + } + + let targetChannelIds: string[] = []; + if (notification.type === SlackTriggerFeature.ACCESS_REQUEST) { + targetChannelIds = slackIntegration.accessRequestChannels?.split(", ") || []; + if (!targetChannelIds.length || !slackIntegration.isAccessRequestNotificationEnabled) { + return; + } + } else if (notification.type === SlackTriggerFeature.SECRET_APPROVAL) { + targetChannelIds = slackIntegration.secretRequestChannels?.split(", ") || []; + if (!targetChannelIds.length || !slackIntegration.isSecretRequestNotificationEnabled) { + return; + } + } + + const { decryptor: orgDataKeyDecryptor } = await kmsService.createCipherPairWithDataKey({ + type: KmsDataKey.Organization, + orgId: project.orgId + }); + + const botKey = orgDataKeyDecryptor({ + cipherTextBlob: slackIntegration.encryptedBotAccessToken + }).toString("utf8"); + + const slackWebClient = new WebClient(botKey); + + for await (const conversationId of targetChannelIds) { + // we send both text and blocks for compatibility with barebone clients + await slackWebClient.chat + .postMessage({ + channel: conversationId, + text: payloadMessage, + blocks: payloadBlocks + }) + .catch((err) => logger.error(err)); + } +}; diff --git a/backend/src/services/slack/slack-integration-dal.ts b/backend/src/services/slack/slack-integration-dal.ts new file mode 100644 index 0000000000..d52c099613 --- /dev/null +++ b/backend/src/services/slack/slack-integration-dal.ts @@ -0,0 +1,56 @@ +import { Knex } from "knex"; + +import { TDbClient } from "@app/db"; +import { TableName, TSlackIntegrations, TWorkflowIntegrations } from "@app/db/schemas"; +import { DatabaseError } from "@app/lib/errors"; +import { ormify, selectAllTableCols } from "@app/lib/knex"; + +export type TSlackIntegrationDALFactory = ReturnType; + +export const slackIntegrationDALFactory = (db: TDbClient) => { + const slackIntegrationOrm = ormify(db, TableName.SlackIntegrations); + + const findByIdWithWorkflowIntegrationDetails = async (id: string, tx?: Knex) => { + try { + return await (tx || db.replicaNode())(TableName.SlackIntegrations) + .join( + TableName.WorkflowIntegrations, + `${TableName.SlackIntegrations}.id`, + `${TableName.WorkflowIntegrations}.id` + ) + .select(selectAllTableCols(TableName.SlackIntegrations)) + .select(db.ref("orgId").withSchema(TableName.WorkflowIntegrations)) + .select(db.ref("description").withSchema(TableName.WorkflowIntegrations)) + .select(db.ref("integration").withSchema(TableName.WorkflowIntegrations)) + .select(db.ref("slug").withSchema(TableName.WorkflowIntegrations)) + .where(`${TableName.WorkflowIntegrations}.id`, "=", id) + .first(); + } catch (error) { + throw new DatabaseError({ error, name: "Find by ID with Workflow integration details" }); + } + }; + + const findWithWorkflowIntegrationDetails = async ( + filter: Partial & Partial, + tx?: Knex + ) => { + try { + return await (tx || db.replicaNode())(TableName.SlackIntegrations) + .join( + TableName.WorkflowIntegrations, + 
`${TableName.SlackIntegrations}.id`, + `${TableName.WorkflowIntegrations}.id` + ) + .select(selectAllTableCols(TableName.SlackIntegrations)) + .select(db.ref("orgId").withSchema(TableName.WorkflowIntegrations)) + .select(db.ref("description").withSchema(TableName.WorkflowIntegrations)) + .select(db.ref("integration").withSchema(TableName.WorkflowIntegrations)) + .select(db.ref("slug").withSchema(TableName.WorkflowIntegrations)) + .where(filter); + } catch (error) { + throw new DatabaseError({ error, name: "Find with Workflow integration details" }); + } + }; + + return { ...slackIntegrationOrm, findByIdWithWorkflowIntegrationDetails, findWithWorkflowIntegrationDetails }; +}; diff --git a/backend/src/services/slack/slack-service.ts b/backend/src/services/slack/slack-service.ts new file mode 100644 index 0000000000..9c1460c37b --- /dev/null +++ b/backend/src/services/slack/slack-service.ts @@ -0,0 +1,461 @@ +import { ForbiddenError } from "@casl/ability"; +import { InstallProvider } from "@slack/oauth"; + +import { OrgPermissionActions, OrgPermissionSubjects } from "@app/ee/services/permission/org-permission"; +import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service"; +import { getConfig } from "@app/lib/config/env"; +import { BadRequestError, NotFoundError } from "@app/lib/errors"; + +import { TKmsServiceFactory } from "../kms/kms-service"; +import { KmsDataKey } from "../kms/kms-types"; +import { getServerCfg } from "../super-admin/super-admin-service"; +import { TWorkflowIntegrationDALFactory } from "../workflow-integration/workflow-integration-dal"; +import { WorkflowIntegration } from "../workflow-integration/workflow-integration-types"; +import { fetchSlackChannels } from "./slack-fns"; +import { TSlackIntegrationDALFactory } from "./slack-integration-dal"; +import { + TCompleteSlackIntegrationDTO, + TDeleteSlackIntegrationDTO, + TGetReinstallUrlDTO, + TGetSlackInstallUrlDTO, + TGetSlackIntegrationByIdDTO, + TGetSlackIntegrationByOrgDTO, + TGetSlackIntegrationChannelsDTO, + TReinstallSlackIntegrationDTO, + TUpdateSlackIntegrationDTO +} from "./slack-types"; + +type TSlackServiceFactoryDep = { + slackIntegrationDAL: Pick< + TSlackIntegrationDALFactory, + | "deleteById" + | "updateById" + | "create" + | "findByIdWithWorkflowIntegrationDetails" + | "findWithWorkflowIntegrationDetails" + >; + permissionService: Pick; + kmsService: Pick; + workflowIntegrationDAL: Pick; +}; + +export type TSlackServiceFactory = ReturnType; + +export const slackServiceFactory = ({ + permissionService, + slackIntegrationDAL, + kmsService, + workflowIntegrationDAL +}: TSlackServiceFactoryDep) => { + const completeSlackIntegration = async ({ + orgId, + slug, + description, + teamId, + teamName, + slackUserId, + slackAppId, + botAccessToken, + slackBotId, + slackBotUserId + }: TCompleteSlackIntegrationDTO) => { + const { encryptor: orgDataKeyEncryptor } = await kmsService.createCipherPairWithDataKey({ + orgId, + type: KmsDataKey.Organization + }); + + const { cipherTextBlob: encryptedBotAccessToken } = orgDataKeyEncryptor({ + plainText: Buffer.from(botAccessToken, "utf8") + }); + + await workflowIntegrationDAL.transaction(async (tx) => { + const workflowIntegration = await workflowIntegrationDAL.create( + { + description, + orgId, + slug, + integration: WorkflowIntegration.SLACK + }, + tx + ); + + await slackIntegrationDAL.create( + { + // @ts-expect-error id is kept as fixed because it is always equal to the workflow integration ID + id: workflowIntegration.id, + teamId, + 
teamName, + slackUserId, + slackAppId, + slackBotId, + slackBotUserId, + encryptedBotAccessToken + }, + tx + ); + }); + }; + + const reinstallSlackIntegration = async ({ + id, + teamId, + teamName, + slackUserId, + slackAppId, + botAccessToken, + slackBotId, + slackBotUserId + }: TReinstallSlackIntegrationDTO) => { + const slackIntegration = await slackIntegrationDAL.findByIdWithWorkflowIntegrationDetails(id); + + if (!slackIntegration) { + throw new NotFoundError({ + message: `Slack integration with ID ${id} not found` + }); + } + + const { encryptor: orgDataKeyEncryptor } = await kmsService.createCipherPairWithDataKey({ + orgId: slackIntegration.orgId, + type: KmsDataKey.Organization + }); + + const { cipherTextBlob: encryptedBotAccessToken } = orgDataKeyEncryptor({ + plainText: Buffer.from(botAccessToken, "utf8") + }); + + await slackIntegrationDAL.updateById(id, { + teamId, + teamName, + slackUserId, + slackAppId, + slackBotId, + slackBotUserId, + encryptedBotAccessToken + }); + }; + + const getSlackInstaller = async () => { + const appCfg = getConfig(); + const serverCfg = await getServerCfg(); + + let slackClientId = appCfg.WORKFLOW_SLACK_CLIENT_ID as string; + let slackClientSecret = appCfg.WORKFLOW_SLACK_CLIENT_SECRET as string; + + const decrypt = kmsService.decryptWithRootKey(); + + if (serverCfg.encryptedSlackClientId) { + slackClientId = decrypt(Buffer.from(serverCfg.encryptedSlackClientId)).toString(); + } + + if (serverCfg.encryptedSlackClientSecret) { + slackClientSecret = decrypt(Buffer.from(serverCfg.encryptedSlackClientSecret)).toString(); + } + + if (!slackClientId || !slackClientSecret) { + throw new BadRequestError({ + message: `Invalid Slack configuration. ${ + appCfg.isCloud + ? "Please contact the Infisical team." + : "Contact your instance admin to setup Slack integration in the Admin settings. Your configuration is missing Slack client ID and secret." 
+ }` + }); + } + + return new InstallProvider({ + clientId: slackClientId, + clientSecret: slackClientSecret, + stateSecret: appCfg.AUTH_SECRET, + legacyStateVerification: true, + installationStore: { + storeInstallation: async (installation) => { + if (installation.isEnterpriseInstall && installation.enterprise?.id) { + throw new BadRequestError({ + message: "Enterprise not yet supported" + }); + } + + const metadata = JSON.parse(installation.metadata || "") as { + id?: string; + orgId: string; + slug: string; + description?: string; + }; + + if (metadata.id) { + return reinstallSlackIntegration({ + id: metadata.id, + teamId: installation.team?.id || "", + teamName: installation.team?.name || "", + slackUserId: installation.user.id, + slackAppId: installation.appId || "", + botAccessToken: installation.bot?.token || "", + slackBotId: installation.bot?.id || "", + slackBotUserId: installation.bot?.userId || "" + }); + } + + return completeSlackIntegration({ + orgId: metadata.orgId, + slug: metadata.slug, + description: metadata.description, + teamId: installation.team?.id || "", + teamName: installation.team?.name || "", + slackUserId: installation.user.id, + slackAppId: installation.appId || "", + botAccessToken: installation.bot?.token || "", + slackBotId: installation.bot?.id || "", + slackBotUserId: installation.bot?.userId || "" + }); + }, + // for our use-case we don't need to implement this because this will only be used + // when listening for events from slack + fetchInstallation: () => { + return {} as never; + }, + // for our use-case we don't need to implement this yet + deleteInstallation: () => { + return {} as never; + } + } + }); + }; + + const getInstallUrl = async ({ + actorId, + actor, + actorOrgId, + actorAuthMethod, + slug, + description + }: TGetSlackInstallUrlDTO) => { + const appCfg = getConfig(); + + const { permission } = await permissionService.getOrgPermission( + actor, + actorId, + actorOrgId, + actorAuthMethod, + actorOrgId + ); + + ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Create, OrgPermissionSubjects.Settings); + + const installer = await getSlackInstaller(); + const url = await installer.generateInstallUrl({ + scopes: ["chat:write.public", "chat:write", "channels:read", "groups:read"], + metadata: JSON.stringify({ + slug, + description, + orgId: actorOrgId + }), + redirectUri: `${appCfg.SITE_URL}/api/v1/workflow-integrations/slack/oauth_redirect` + }); + + return url; + }; + + const getReinstallUrl = async ({ actorId, actor, actorOrgId, actorAuthMethod, id }: TGetReinstallUrlDTO) => { + const appCfg = getConfig(); + const slackIntegration = await slackIntegrationDAL.findByIdWithWorkflowIntegrationDetails(id); + + if (!slackIntegration) { + throw new NotFoundError({ + message: `Slack integration with ID ${id} not found` + }); + } + + const { permission } = await permissionService.getOrgPermission( + actor, + actorId, + slackIntegration.orgId, + actorAuthMethod, + actorOrgId + ); + + ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Create, OrgPermissionSubjects.Settings); + + const installer = await getSlackInstaller(); + const url = await installer.generateInstallUrl({ + scopes: ["chat:write.public", "chat:write", "channels:read", "groups:read"], + metadata: JSON.stringify({ + id, + orgId: slackIntegration.orgId + }), + redirectUri: `${appCfg.SITE_URL}/api/v1/workflow-integrations/slack/oauth_redirect` + }); + + return url; + }; + + const getSlackIntegrationsByOrg = async ({ + actorId, + actor, + actorOrgId, + 
actorAuthMethod + }: TGetSlackIntegrationByOrgDTO) => { + const { permission } = await permissionService.getOrgPermission( + actor, + actorId, + actorOrgId, + actorAuthMethod, + actorOrgId + ); + + ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Create, OrgPermissionSubjects.Settings); + + const slackIntegrations = await slackIntegrationDAL.findWithWorkflowIntegrationDetails({ + orgId: actorOrgId + }); + + return slackIntegrations; + }; + + const getSlackIntegrationById = async ({ + actorId, + actor, + actorOrgId, + actorAuthMethod, + id + }: TGetSlackIntegrationByIdDTO) => { + const slackIntegration = await slackIntegrationDAL.findByIdWithWorkflowIntegrationDetails(id); + if (!slackIntegration) { + throw new NotFoundError({ + message: "Slack integration not found." + }); + } + + const { permission } = await permissionService.getOrgPermission( + actor, + actorId, + slackIntegration.orgId, + actorAuthMethod, + actorOrgId + ); + + ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Read, OrgPermissionSubjects.Settings); + + return slackIntegration; + }; + + const getSlackIntegrationChannels = async ({ + actorId, + actor, + actorOrgId, + actorAuthMethod, + id + }: TGetSlackIntegrationChannelsDTO) => { + const slackIntegration = await slackIntegrationDAL.findByIdWithWorkflowIntegrationDetails(id); + if (!slackIntegration) { + throw new NotFoundError({ + message: `Slack integration with ID ${id} not found` + }); + } + + const { permission } = await permissionService.getOrgPermission( + actor, + actorId, + slackIntegration.orgId, + actorAuthMethod, + actorOrgId + ); + + ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Read, OrgPermissionSubjects.Settings); + + const { decryptor: orgDataKeyDecryptor } = await kmsService.createCipherPairWithDataKey({ + orgId: slackIntegration.orgId, + type: KmsDataKey.Organization + }); + + const botKey = orgDataKeyDecryptor({ + cipherTextBlob: slackIntegration.encryptedBotAccessToken + }).toString("utf8"); + + return fetchSlackChannels(botKey); + }; + + const updateSlackIntegration = async ({ + actorId, + actor, + actorOrgId, + actorAuthMethod, + id, + slug, + description + }: TUpdateSlackIntegrationDTO) => { + const slackIntegration = await slackIntegrationDAL.findByIdWithWorkflowIntegrationDetails(id); + if (!slackIntegration) { + throw new NotFoundError({ + message: `Slack integration with ID ${id} not found` + }); + } + + const { permission } = await permissionService.getOrgPermission( + actor, + actorId, + slackIntegration.orgId, + actorAuthMethod, + actorOrgId + ); + + ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Edit, OrgPermissionSubjects.Settings); + + return workflowIntegrationDAL.transaction(async (tx) => { + await workflowIntegrationDAL.updateById( + slackIntegration.id, + { + slug, + description + }, + tx + ); + + const updatedIntegration = await slackIntegrationDAL.findByIdWithWorkflowIntegrationDetails( + slackIntegration.id, + tx + ); + + return updatedIntegration!; + }); + }; + + const deleteSlackIntegration = async ({ + actorId, + actor, + actorOrgId, + actorAuthMethod, + id + }: TDeleteSlackIntegrationDTO) => { + const slackIntegration = await slackIntegrationDAL.findByIdWithWorkflowIntegrationDetails(id); + if (!slackIntegration) { + throw new NotFoundError({ + message: `Slack integration with ID ${id} not found` + }); + } + + const { permission } = await permissionService.getOrgPermission( + actor, + actorId, + slackIntegration.orgId, + actorAuthMethod, + actorOrgId 
+ ); + + ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Delete, OrgPermissionSubjects.Settings); + + await workflowIntegrationDAL.deleteById(id); + + return slackIntegration; + }; + + return { + getInstallUrl, + getReinstallUrl, + getSlackIntegrationsByOrg, + getSlackIntegrationById, + completeSlackIntegration, + getSlackInstaller, + updateSlackIntegration, + deleteSlackIntegration, + getSlackIntegrationChannels + }; +}; diff --git a/backend/src/services/slack/slack-types.ts b/backend/src/services/slack/slack-types.ts new file mode 100644 index 0000000000..a1914eee2e --- /dev/null +++ b/backend/src/services/slack/slack-types.ts @@ -0,0 +1,79 @@ +import { TOrgPermission } from "@app/lib/types"; + +export type TGetSlackInstallUrlDTO = { + slug: string; + description?: string; +} & Omit; + +export type TGetReinstallUrlDTO = { + id: string; +} & Omit; + +export type TGetSlackIntegrationByOrgDTO = Omit; + +export type TGetSlackIntegrationByIdDTO = { id: string } & Omit; + +export type TGetSlackIntegrationChannelsDTO = { id: string } & Omit; + +export type TUpdateSlackIntegrationDTO = { id: string; slug?: string; description?: string } & Omit< + TOrgPermission, + "orgId" +>; + +export type TDeleteSlackIntegrationDTO = { + id: string; +} & Omit; + +export type TCompleteSlackIntegrationDTO = { + orgId: string; + slug: string; + description?: string; + teamId: string; + teamName: string; + slackUserId: string; + slackAppId: string; + botAccessToken: string; + slackBotId: string; + slackBotUserId: string; +}; + +export type TReinstallSlackIntegrationDTO = { + id: string; + teamId: string; + teamName: string; + slackUserId: string; + slackAppId: string; + botAccessToken: string; + slackBotId: string; + slackBotUserId: string; +}; + +export enum SlackTriggerFeature { + SECRET_APPROVAL = "secret-approval", + ACCESS_REQUEST = "access-request" +} + +export type TSlackNotification = + | { + type: SlackTriggerFeature.SECRET_APPROVAL; + payload: { + userEmail: string; + environment: string; + secretPath: string; + requestId: string; + projectId: string; + }; + } + | { + type: SlackTriggerFeature.ACCESS_REQUEST; + payload: { + requesterFullName: string; + requesterEmail: string; + isTemporary: boolean; + secretPath: string; + environment: string; + projectName: string; + permissions: string[]; + approvalUrl: string; + }; + }; diff --git a/backend/src/services/smtp/smtp-service.ts b/backend/src/services/smtp/smtp-service.ts index 1fb89c5537..1f38babb37 100644 --- a/backend/src/services/smtp/smtp-service.ts +++ b/backend/src/services/smtp/smtp-service.ts @@ -5,6 +5,7 @@ import handlebars from "handlebars"; import { createTransport } from "nodemailer"; import SMTPTransport from "nodemailer/lib/smtp-transport"; +import { getConfig } from "@app/lib/config/env"; import { logger } from "@app/lib/logger"; export type TSmtpConfig = SMTPTransport.Options; @@ -12,7 +13,7 @@ export type TSmtpSendMail = { template: SmtpTemplates; subjectLine: string; recipients: string[]; - substitutions: unknown; + substitutions: object; }; export type TSmtpService = ReturnType; @@ -23,13 +24,20 @@ export enum SmtpTemplates { EmailMfa = "emailMfa.handlebars", UnlockAccount = "unlockAccount.handlebars", AccessApprovalRequest = "accessApprovalRequest.handlebars", + AccessSecretRequestBypassed = "accessSecretRequestBypassed.handlebars", + SecretApprovalRequestNeedsReview = "secretApprovalRequestNeedsReview.handlebars", HistoricalSecretList = "historicalSecretLeakIncident.handlebars", NewDeviceJoin = 
"newDevice.handlebars", OrgInvite = "organizationInvitation.handlebars", ResetPassword = "passwordReset.handlebars", SecretLeakIncident = "secretLeakIncident.handlebars", WorkspaceInvite = "workspaceInvitation.handlebars", - ScimUserProvisioned = "scimUserProvisioned.handlebars" + ScimUserProvisioned = "scimUserProvisioned.handlebars", + PkiExpirationAlert = "pkiExpirationAlert.handlebars", + IntegrationSyncFailed = "integrationSyncFailed.handlebars", + ExternalImportSuccessful = "externalImportSuccessful.handlebars", + ExternalImportFailed = "externalImportFailed.handlebars", + ExternalImportStarted = "externalImportStarted.handlebars" } export enum SmtpHost { @@ -46,9 +54,11 @@ export const smtpServiceFactory = (cfg: TSmtpConfig) => { const isSmtpOn = Boolean(cfg.host); const sendMail = async ({ substitutions, recipients, template, subjectLine }: TSmtpSendMail) => { + const appCfg = getConfig(); const html = await fs.readFile(path.resolve(__dirname, "./templates/", template), "utf8"); const temp = handlebars.compile(html); - const htmlToSend = temp(substitutions); + const htmlToSend = temp({ isCloud: appCfg.isCloud, siteUrl: appCfg.SITE_URL, ...substitutions }); + if (isSmtpOn) { await smtp.sendMail({ from: cfg.from, diff --git a/backend/src/services/smtp/templates/accessSecretRequestBypassed.handlebars b/backend/src/services/smtp/templates/accessSecretRequestBypassed.handlebars new file mode 100644 index 0000000000..3313d352f3 --- /dev/null +++ b/backend/src/services/smtp/templates/accessSecretRequestBypassed.handlebars @@ -0,0 +1,28 @@ + + + + + Secret Approval Request Policy Bypassed + + + +

+ Infisical
+ Secret Approval Request Bypassed
+ A secret approval request has been bypassed in the project "{{projectName}}".
+ {{requesterFullName}} ({{requesterEmail}}) has merged a secret to environment {{environment}} at secret path {{secretPath}} without obtaining the required approvals.
+ The following reason was provided for bypassing the policy: {{bypassReason}}
+ To review this action, please visit the request panel here.
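For orientation, a minimal sketch of how a caller might dispatch this template through the smtpService built by smtpServiceFactory above. The recipient and substitution values are illustrative, smtpService and SmtpTemplates are assumed to be in scope, and note that sendMail now merges isCloud and siteUrl into every template's context automatically:

```ts
// Hypothetical dispatch of the bypass notice; all values are illustrative only.
// sendMail (smtp-service.ts above) merges { isCloud, siteUrl } in automatically.
await smtpService.sendMail({
  template: SmtpTemplates.AccessSecretRequestBypassed,
  subjectLine: "Secret approval request bypassed",
  recipients: ["approver@example.com"], // hypothetical recipient
  substitutions: {
    projectName: "my-project",
    requesterFullName: "Jane Doe",
    requesterEmail: "jane@example.com",
    environment: "production",
    secretPath: "/backend",
    bypassReason: "Emergency hotfix before release"
  }
});
```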
+ + \ No newline at end of file diff --git a/backend/src/services/smtp/templates/emailMfa.handlebars b/backend/src/services/smtp/templates/emailMfa.handlebars index 489c9dd309..936195c340 100644 --- a/backend/src/services/smtp/templates/emailMfa.handlebars +++ b/backend/src/services/smtp/templates/emailMfa.handlebars @@ -1,19 +1,19 @@ - - - + + + MFA Code - + - +

Infisical
Sign in attempt requires further verification
Your MFA code is below — enter it where you started signing in to Infisical.
{{code}}
The MFA code will be valid for 2 minutes.
- Not you? Contact Infisical or your administrator immediately.
+ Not you? Contact {{#if isCloud}}Infisical{{else}}your administrator{{/if}} immediately.
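The {{#if isCloud}} branch above works because sendMail now spreads { isCloud, siteUrl } into every template's context before rendering (see the smtp-service hunk earlier). A self-contained sketch of that merge, with an illustrative siteUrl value:

```ts
import handlebars from "handlebars";

// The same merge sendMail performs: globals first, per-call substitutions after,
// so a caller could still override them if it ever needed to.
const template = handlebars.compile(
  "Not you? Contact {{#if isCloud}}Infisical{{else}}your administrator{{/if}} immediately."
);
const substitutions = {}; // this template needs no per-call values
console.log(template({ isCloud: false, siteUrl: "https://infisical.example.com", ...substitutions }));
// -> "Not you? Contact your administrator immediately."
```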
+ \ No newline at end of file diff --git a/backend/src/services/smtp/templates/externalImportFailed.handlebars b/backend/src/services/smtp/templates/externalImportFailed.handlebars new file mode 100644 index 0000000000..c7869af27a --- /dev/null +++ b/backend/src/services/smtp/templates/externalImportFailed.handlebars @@ -0,0 +1,21 @@ + + + + + + Import failed + + + +

+ An import from {{provider}} to Infisical has failed
+ An import from {{provider}} to Infisical has failed due to unforeseen circumstances. Please retry your import, and if the issue persists, you can contact the Infisical team at team@infisical.com.
+ Error: {{error}}
+ + + + \ No newline at end of file diff --git a/backend/src/services/smtp/templates/externalImportStarted.handlebars b/backend/src/services/smtp/templates/externalImportStarted.handlebars new file mode 100644 index 0000000000..551f972cc5 --- /dev/null +++ b/backend/src/services/smtp/templates/externalImportStarted.handlebars @@ -0,0 +1,17 @@ + + + + + + Import in progress + + + +

+ An import from {{provider}} to Infisical is in progress
+ An import from {{provider}} to Infisical is in progress. The import process may take up to 30 minutes, and you will receive a notification once the import has finished or if it fails.
+ + + \ No newline at end of file diff --git a/backend/src/services/smtp/templates/externalImportSuccessful.handlebars b/backend/src/services/smtp/templates/externalImportSuccessful.handlebars new file mode 100644 index 0000000000..51a1c465e1 --- /dev/null +++ b/backend/src/services/smtp/templates/externalImportSuccessful.handlebars @@ -0,0 +1,14 @@ + + + + + + Import successful + + + +

+ An import from {{provider}} to Infisical was successful
+ An import from {{provider}} was successful. Your data is now available in Infisical.
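These three templates pair with the ExternalImportStarted, ExternalImportSuccessful, and ExternalImportFailed members added to SmtpTemplates earlier. A sketch of the lifecycle a job runner might wrap around them; the wrapper function itself is hypothetical, and smtpService is assumed to be in scope:

```ts
// Hypothetical wrapper; only the template names and sendMail shape come from the diff.
async function notifyExternalImport(provider: string, recipients: string[], doImport: () => Promise<void>) {
  await smtpService.sendMail({
    template: SmtpTemplates.ExternalImportStarted,
    subjectLine: `Import from ${provider} in progress`,
    recipients,
    substitutions: { provider }
  });
  try {
    await doImport();
    await smtpService.sendMail({
      template: SmtpTemplates.ExternalImportSuccessful,
      subjectLine: `Import from ${provider} was successful`,
      recipients,
      substitutions: { provider }
    });
  } catch (err) {
    await smtpService.sendMail({
      template: SmtpTemplates.ExternalImportFailed,
      subjectLine: `Import from ${provider} has failed`,
      recipients,
      substitutions: { provider, error: (err as Error).message } // fills {{error}} above
    });
  }
}
```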
+ + + \ No newline at end of file diff --git a/backend/src/services/smtp/templates/historicalSecretLeakIncident.handlebars b/backend/src/services/smtp/templates/historicalSecretLeakIncident.handlebars index 3cb517a573..0798538fbd 100644 --- a/backend/src/services/smtp/templates/historicalSecretLeakIncident.handlebars +++ b/backend/src/services/smtp/templates/historicalSecretLeakIncident.handlebars @@ -9,12 +9,12 @@

Infisical has uncovered {{numberOfSecrets}} secret(s) from historical commits to your repo
- View leaked secrets
+ View leaked secrets
If these are production secrets, please rotate them immediately.
Once you have taken action, be sure to update the status of the risk in your Infisical
+ href="{{siteUrl}}">Infisical
dashboard.
diff --git a/backend/src/services/smtp/templates/integrationSyncFailed.handlebars b/backend/src/services/smtp/templates/integrationSyncFailed.handlebars new file mode 100644 index 0000000000..5c5d766938 --- /dev/null +++ b/backend/src/services/smtp/templates/integrationSyncFailed.handlebars @@ -0,0 +1,31 @@ + + + + + + Integration Sync Failed + + + +

+ Infisical
+ {{count}} integration(s) failed to sync.
+ View your project integrations.
+ Project: {{projectName}}
+ Environment: {{environment}}
+ Secret Path: {{secretPath}}
+ {{#if syncMessage}}
+ Reason: {{syncMessage}}
+ {{/if}} + + + \ No newline at end of file diff --git a/backend/src/services/smtp/templates/newDevice.handlebars b/backend/src/services/smtp/templates/newDevice.handlebars index 654bb1ba3d..6c7f2e9f62 100644 --- a/backend/src/services/smtp/templates/newDevice.handlebars +++ b/backend/src/services/smtp/templates/newDevice.handlebars @@ -1,19 +1,19 @@ - - - + + + Successful login for {{email}} from new device - + - +

Infisical
We're verifying a recent login for {{email}}:
Timestamp: {{timestamp}}
IP address: {{ip}}
User agent: {{userAgent}}
- If you believe that this login is suspicious, please contact Infisical or reset your password immediately.
+ If you believe that this login is suspicious, please contact {{#if isCloud}}Infisical{{else}}your administrator{{/if}} or reset your password immediately.
+ \ No newline at end of file diff --git a/backend/src/services/smtp/templates/organizationInvitation.handlebars b/backend/src/services/smtp/templates/organizationInvitation.handlebars index 024fca1321..3ee16ee374 100644 --- a/backend/src/services/smtp/templates/organizationInvitation.handlebars +++ b/backend/src/services/smtp/templates/organizationInvitation.handlebars @@ -9,7 +9,7 @@

Join your organization on Infisical
{{inviterFirstName}} ({{inviterUsername}}) has invited you to their Infisical organization — {{organizationName}}
- Join now
+ Join now
What is Infisical?
Infisical is an easy-to-use end-to-end encrypted tool that enables developers to sync and manage their secrets and configs.
diff --git a/backend/src/services/smtp/templates/passwordReset.handlebars b/backend/src/services/smtp/templates/passwordReset.handlebars index 3b136e859e..6499a629c4 100644 --- a/backend/src/services/smtp/templates/passwordReset.handlebars +++ b/backend/src/services/smtp/templates/passwordReset.handlebars @@ -9,6 +9,6 @@

Reset your password
Someone requested a password reset.
Reset password
- If you didn't initiate this request, please contact us immediately at team@infisical.com
+ If you didn't initiate this request, please contact {{#if isCloud}}us immediately at team@infisical.com.{{else}}your administrator immediately.{{/if}}
\ No newline at end of file diff --git a/backend/src/services/smtp/templates/pkiExpirationAlert.handlebars b/backend/src/services/smtp/templates/pkiExpirationAlert.handlebars new file mode 100644 index 0000000000..77d2543aec --- /dev/null +++ b/backend/src/services/smtp/templates/pkiExpirationAlert.handlebars @@ -0,0 +1,31 @@ + + + + + Infisical CA/Certificate expiration notice + + +

+ Hello,
+ This is an automated alert for "{{alertName}}" triggered for CAs/Certificates expiring in {{alertBeforeDays}} days.
+ Expiring Items:
+ {{#each items}}
+ • {{type}}: {{friendlyName}}
+   Serial Number: {{serialNumber}}
+   Expires On: {{expiryDate}}
+ {{/each}}
+ Please take necessary actions to renew these items before they expire.
+ For more details, please log in to your Infisical account and check your PKI management section.
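The {{#each items}} loop above implies an array of expiring entries in the substitutions. A sketch of that payload, with field names taken from the template placeholders and every value invented for illustration:

```ts
// Shape inferred from {{#each items}} above; all values below are made up.
const substitutions = {
  alertName: "prod-ca-expiry",
  alertBeforeDays: 30,
  items: [
    {
      type: "CA", // presumably "Certificate" for leaf certs, per "CAs/Certificates" above
      friendlyName: "Internal Root CA",
      serialNumber: "4f:9c:1a:...",
      expiryDate: "2025-01-31"
    }
  ]
};
```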
+ + \ No newline at end of file diff --git a/backend/src/services/smtp/templates/secretApprovalRequestNeedsReview.handlebars b/backend/src/services/smtp/templates/secretApprovalRequestNeedsReview.handlebars new file mode 100644 index 0000000000..9dd6fe7470 --- /dev/null +++ b/backend/src/services/smtp/templates/secretApprovalRequestNeedsReview.handlebars @@ -0,0 +1,22 @@ + + + + + + Secret Change Approval Request + + + +

+ Hi {{firstName}},
+ New secret change requests are pending review.
+ You have a secret change request pending your review in project "{{projectName}}", in the "{{organizationName}}" organization.
+ View the request and approve or deny it here.
+ + + \ No newline at end of file diff --git a/backend/src/services/smtp/templates/secretLeakIncident.handlebars b/backend/src/services/smtp/templates/secretLeakIncident.handlebars index 1bf2d3175e..c3c5f353a6 100644 --- a/backend/src/services/smtp/templates/secretLeakIncident.handlebars +++ b/backend/src/services/smtp/templates/secretLeakIncident.handlebars @@ -9,7 +9,7 @@

Infisical has uncovered {{numberOfSecrets}} secret(s) from your recent push
- View leaked secrets
+ View leaked secrets
You are receiving this notification because one or more secret leaks have been detected in a recent commit pushed by {{pusher_name}} ({{pusher_email}}). If these are test secrets, please add `infisical-scan:ignore` as a comment at the end of the line containing the secret
@@ -18,7 +18,7 @@
If these are production secrets, please rotate them immediately.
Once you have taken action, be sure to update the status of the risk in your Infisical
+ href="{{siteUrl}}">Infisical
dashboard.
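As the copy above instructs, a known test secret can be excluded from scanning by appending the marker as a trailing comment on the offending line, for example:

```ts
// A fake test credential, marked so Infisical's secret scanner skips this line:
const TEST_API_KEY = "sk_test_not_a_real_key"; // infisical-scan:ignore
```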
diff --git a/backend/src/services/smtp/templates/signupEmailVerification.handlebars b/backend/src/services/smtp/templates/signupEmailVerification.handlebars index fc738d2023..3ba18619f3 100644 --- a/backend/src/services/smtp/templates/signupEmailVerification.handlebars +++ b/backend/src/services/smtp/templates/signupEmailVerification.handlebars @@ -11,7 +11,7 @@

Confirm your email address
Your confirmation code is below — enter it in the browser window where you've started signing up for Infisical.
{{code}}
- Questions about setting up Infisical? Email us at support@infisical.com
+ Questions about setting up Infisical? {{#if isCloud}}Email us at support@infisical.com{{else}}Contact your administrator{{/if}}.
\ No newline at end of file diff --git a/backend/src/services/super-admin/super-admin-dal.ts b/backend/src/services/super-admin/super-admin-dal.ts index 64133ed7e1..d7d11a5d26 100644 --- a/backend/src/services/super-admin/super-admin-dal.ts +++ b/backend/src/services/super-admin/super-admin-dal.ts @@ -1,7 +1,83 @@ +import { Knex } from "knex"; + import { TDbClient } from "@app/db"; -import { TableName } from "@app/db/schemas"; +import { TableName, TSuperAdmin, TSuperAdminUpdate } from "@app/db/schemas"; +import { DatabaseError } from "@app/lib/errors"; import { ormify } from "@app/lib/knex"; export type TSuperAdminDALFactory = ReturnType; -export const superAdminDALFactory = (db: TDbClient) => ormify(db, TableName.SuperAdmin, {}); +export const superAdminDALFactory = (db: TDbClient) => { + const superAdminOrm = ormify(db, TableName.SuperAdmin); + + const findById = async (id: string, tx?: Knex) => { + const config = await (tx || db)(TableName.SuperAdmin) + .where(`${TableName.SuperAdmin}.id`, id) + .leftJoin(TableName.Organization, `${TableName.SuperAdmin}.defaultAuthOrgId`, `${TableName.Organization}.id`) + .leftJoin(TableName.SamlConfig, (qb) => { + qb.on(`${TableName.SamlConfig}.orgId`, "=", `${TableName.Organization}.id`).andOn( + `${TableName.SamlConfig}.isActive`, + "=", + db.raw("true") + ); + }) + .leftJoin(TableName.OidcConfig, (qb) => { + qb.on(`${TableName.OidcConfig}.orgId`, "=", `${TableName.Organization}.id`).andOn( + `${TableName.OidcConfig}.isActive`, + "=", + db.raw("true") + ); + }) + .select( + db.ref("*").withSchema(TableName.SuperAdmin) as unknown as keyof TSuperAdmin, + db.ref("slug").withSchema(TableName.Organization).as("defaultAuthOrgSlug"), + db.ref("authEnforced").withSchema(TableName.Organization).as("defaultAuthOrgAuthEnforced"), + db.raw(` + CASE + WHEN ${TableName.SamlConfig}."orgId" IS NOT NULL THEN 'saml' + WHEN ${TableName.OidcConfig}."orgId" IS NOT NULL THEN 'oidc' + ELSE NULL + END as "defaultAuthOrgAuthMethod" + `) + ) + .first(); + + if (!config) { + return null; + } + + return { + ...config, + defaultAuthOrgSlug: config?.defaultAuthOrgSlug || null + } as TSuperAdmin & { + defaultAuthOrgSlug: string | null; + defaultAuthOrgAuthEnforced?: boolean | null; + defaultAuthOrgAuthMethod?: string | null; + }; + }; + + const updateById = async (id: string, data: TSuperAdminUpdate, tx?: Knex) => { + const updatedConfig = await (superAdminOrm || tx).transaction(async (trx: Knex) => { + await superAdminOrm.updateById(id, data, trx); + const config = await findById(id, trx); + + if (!config) { + throw new DatabaseError({ + error: "Failed to find updated super admin config", + message: "Failed to update super admin config", + name: "UpdateById" + }); + } + + return config; + }); + + return updatedConfig; + }; + + return { + ...superAdminOrm, + findById, + updateById + }; +}; diff --git a/backend/src/services/super-admin/super-admin-service.ts b/backend/src/services/super-admin/super-admin-service.ts index bec8f3f377..8ef998ac3c 100644 --- a/backend/src/services/super-admin/super-admin-service.ts +++ b/backend/src/services/super-admin/super-admin-service.ts @@ -1,27 +1,45 @@ +import bcrypt from "bcrypt"; + import { TSuperAdmin, TSuperAdminUpdate } from "@app/db/schemas"; +import { TLicenseServiceFactory } from "@app/ee/services/license/license-service"; import { TKeyStoreFactory } from "@app/keystore/keystore"; import { getConfig } from "@app/lib/config/env"; -import { BadRequestError } from "@app/lib/errors"; +import { infisicalSymmetricEncypt } from 
"@app/lib/crypto/encryption"; +import { getUserPrivateKey } from "@app/lib/crypto/srp"; +import { BadRequestError, NotFoundError } from "@app/lib/errors"; import { TAuthLoginFactory } from "../auth/auth-login-service"; import { AuthMethod } from "../auth/auth-type"; +import { KMS_ROOT_CONFIG_UUID } from "../kms/kms-fns"; +import { TKmsRootConfigDALFactory } from "../kms/kms-root-config-dal"; +import { TKmsServiceFactory } from "../kms/kms-service"; +import { RootKeyEncryptionStrategy } from "../kms/kms-types"; import { TOrgServiceFactory } from "../org/org-service"; import { TUserDALFactory } from "../user/user-dal"; import { TSuperAdminDALFactory } from "./super-admin-dal"; -import { TAdminSignUpDTO } from "./super-admin-types"; +import { LoginMethod, TAdminGetUsersDTO, TAdminSignUpDTO } from "./super-admin-types"; type TSuperAdminServiceFactoryDep = { serverCfgDAL: TSuperAdminDALFactory; userDAL: TUserDALFactory; authService: Pick; + kmsService: Pick; + kmsRootConfigDAL: TKmsRootConfigDALFactory; orgService: Pick; keyStore: Pick; + licenseService: Pick; }; export type TSuperAdminServiceFactory = ReturnType; // eslint-disable-next-line -export let getServerCfg: () => Promise; +export let getServerCfg: () => Promise< + TSuperAdmin & { + defaultAuthOrgSlug: string | null; + defaultAuthOrgAuthEnforced?: boolean | null; + defaultAuthOrgAuthMethod?: string | null; + } +>; const ADMIN_CONFIG_KEY = "infisical-admin-cfg"; const ADMIN_CONFIG_KEY_EXP = 60; // 60s @@ -32,22 +50,29 @@ export const superAdminServiceFactory = ({ userDAL, authService, orgService, - keyStore + keyStore, + kmsRootConfigDAL, + kmsService, + licenseService }: TSuperAdminServiceFactoryDep) => { const initServerCfg = async () => { // TODO(akhilmhdh): bad pattern time less change this later to me itself getServerCfg = async () => { const config = await keyStore.getItem(ADMIN_CONFIG_KEY); + // missing in keystore means fetch from db if (!config) { const serverCfg = await serverCfgDAL.findById(ADMIN_CONFIG_DB_UUID); - if (serverCfg) { - await keyStore.setItemWithExpiry(ADMIN_CONFIG_KEY, ADMIN_CONFIG_KEY_EXP, JSON.stringify(serverCfg)); // insert it back to keystore + + if (!serverCfg) { + throw new NotFoundError({ message: "Admin config not found" }); } + + await keyStore.setItemWithExpiry(ADMIN_CONFIG_KEY, ADMIN_CONFIG_KEY_EXP, JSON.stringify(serverCfg)); // insert it back to keystore return serverCfg; } - const keyStoreServerCfg = JSON.parse(config) as TSuperAdmin; + const keyStoreServerCfg = JSON.parse(config) as TSuperAdmin & { defaultAuthOrgSlug: string | null }; return { ...keyStoreServerCfg, // this is to allow admin router to work @@ -61,14 +86,72 @@ export const superAdminServiceFactory = ({ const serverCfg = await serverCfgDAL.findById(ADMIN_CONFIG_DB_UUID); if (serverCfg) return; - // @ts-expect-error id is kept as fixed for idempotence and to avoid race condition - const newCfg = await serverCfgDAL.create({ initialized: false, allowSignUp: true, id: ADMIN_CONFIG_DB_UUID }); + const newCfg = await serverCfgDAL.create({ + // @ts-expect-error id is kept as fixed for idempotence and to avoid race condition + id: ADMIN_CONFIG_DB_UUID, + initialized: false, + allowSignUp: true, + defaultAuthOrgId: null + }); return newCfg; }; - const updateServerCfg = async (data: TSuperAdminUpdate) => { - const updatedServerCfg = await serverCfgDAL.updateById(ADMIN_CONFIG_DB_UUID, data); + const updateServerCfg = async ( + data: TSuperAdminUpdate & { slackClientId?: string; slackClientSecret?: string }, + userId: string + ) => { + const 
updatedData = data; + + if (data.enabledLoginMethods) { + const superAdminUser = await userDAL.findById(userId); + const loginMethodToAuthMethod = { + [LoginMethod.EMAIL]: [AuthMethod.EMAIL], + [LoginMethod.GOOGLE]: [AuthMethod.GOOGLE], + [LoginMethod.GITLAB]: [AuthMethod.GITLAB], + [LoginMethod.GITHUB]: [AuthMethod.GITHUB], + [LoginMethod.LDAP]: [AuthMethod.LDAP], + [LoginMethod.OIDC]: [AuthMethod.OIDC], + [LoginMethod.SAML]: [ + AuthMethod.AZURE_SAML, + AuthMethod.GOOGLE_SAML, + AuthMethod.JUMPCLOUD_SAML, + AuthMethod.KEYCLOAK_SAML, + AuthMethod.OKTA_SAML + ] + }; + + if ( + !data.enabledLoginMethods.some((loginMethod) => + loginMethodToAuthMethod[loginMethod as LoginMethod].some( + (authMethod) => superAdminUser.authMethods?.includes(authMethod) + ) + ) + ) { + throw new BadRequestError({ + message: "You must configure at least one auth method to prevent account lockout" + }); + } + } + + const encryptWithRoot = kmsService.encryptWithRootKey(); + if (data.slackClientId) { + const encryptedClientId = encryptWithRoot(Buffer.from(data.slackClientId)); + + updatedData.encryptedSlackClientId = encryptedClientId; + updatedData.slackClientId = undefined; + } + + if (data.slackClientSecret) { + const encryptedClientSecret = encryptWithRoot(Buffer.from(data.slackClientSecret)); + + updatedData.encryptedSlackClientSecret = encryptedClientSecret; + updatedData.slackClientSecret = undefined; + } + + const updatedServerCfg = await serverCfgDAL.updateById(ADMIN_CONFIG_DB_UUID, updatedData); + await keyStore.setItemWithExpiry(ADMIN_CONFIG_KEY, ADMIN_CONFIG_KEY_EXP, JSON.stringify(updatedServerCfg)); + return updatedServerCfg; }; @@ -77,6 +160,7 @@ export const superAdminServiceFactory = ({ firstName, salt, email, + password, verifier, publicKey, protectedKey, @@ -90,8 +174,20 @@ export const superAdminServiceFactory = ({ }: TAdminSignUpDTO) => { const appCfg = getConfig(); const existingUser = await userDAL.findOne({ email }); - if (existingUser) throw new BadRequestError({ name: "Admin sign up", message: "User already exist" }); + if (existingUser) throw new BadRequestError({ name: "Admin sign up", message: "User already exists" }); + const privateKey = await getUserPrivateKey(password, { + encryptionVersion: 2, + salt, + protectedKey, + protectedKeyIV, + protectedKeyTag, + encryptedPrivateKey, + iv: encryptedPrivateKeyIV, + tag: encryptedPrivateKeyTag + }); + const hashedPassword = await bcrypt.hash(password, appCfg.BCRYPT_SALT_ROUND); + const { iv, tag, ciphertext, encoding } = infisicalSymmetricEncypt(privateKey); const userInfo = await userDAL.transaction(async (tx) => { const newUser = await userDAL.create( { @@ -119,7 +215,12 @@ export const superAdminServiceFactory = ({ iv: encryptedPrivateKeyIV, tag: encryptedPrivateKeyTag, verifier, - userId: newUser.id + userId: newUser.id, + hashedPassword, + serverEncryptedPrivateKey: ciphertext, + serverEncryptedPrivateKeyIV: iv, + serverEncryptedPrivateKeyTag: tag, + serverEncryptedPrivateKeyEncoding: encoding }, tx ); @@ -134,7 +235,7 @@ export const superAdminServiceFactory = ({ orgName: initialOrganizationName }); - await updateServerCfg({ initialized: true }); + await updateServerCfg({ initialized: true }, userInfo.user.id); const token = await authService.generateUserTokens({ user: userInfo.user, authMethod: AuthMethod.EMAIL, @@ -146,9 +247,116 @@ export const superAdminServiceFactory = ({ return { token, user: userInfo, organization }; }; + const getUsers = ({ offset, limit, searchTerm }: TAdminGetUsersDTO) => { + return userDAL.getUsersByFilter({ 
+ limit, + offset, + searchTerm, + sortBy: "username" + }); + }; + + const deleteUser = async (userId: string) => { + if (!licenseService.onPremFeatures?.instanceUserManagement) { + throw new BadRequestError({ + message: "Failed to delete user due to plan restriction. Upgrade to Infisical's Pro plan." + }); + } + + const user = await userDAL.deleteById(userId); + return user; + }; + + const getAdminSlackConfig = async () => { + const serverCfg = await serverCfgDAL.findById(ADMIN_CONFIG_DB_UUID); + + if (!serverCfg) { + throw new NotFoundError({ name: "AdminConfig", message: "Admin config not found" }); + } + + let clientId = ""; + let clientSecret = ""; + + const decrypt = kmsService.decryptWithRootKey(); + + if (serverCfg.encryptedSlackClientId) { + clientId = decrypt(serverCfg.encryptedSlackClientId).toString(); + } + + if (serverCfg.encryptedSlackClientSecret) { + clientSecret = decrypt(serverCfg.encryptedSlackClientSecret).toString(); + } + + return { + clientId, + clientSecret + }; + }; + + const getConfiguredEncryptionStrategies = async () => { + const appCfg = getConfig(); + + const kmsRootCfg = await kmsRootConfigDAL.findById(KMS_ROOT_CONFIG_UUID); + + if (!kmsRootCfg) { + throw new NotFoundError({ name: "KmsRootConfig", message: "KMS root configuration not found" }); + } + + const selectedStrategy = kmsRootCfg.encryptionStrategy; + const enabledStrategies: { enabled: boolean; strategy: RootKeyEncryptionStrategy }[] = []; + + if (appCfg.ROOT_ENCRYPTION_KEY || appCfg.ENCRYPTION_KEY) { + const basicStrategy = RootKeyEncryptionStrategy.Software; + + enabledStrategies.push({ + enabled: selectedStrategy === basicStrategy, + strategy: basicStrategy + }); + } + if (appCfg.isHsmConfigured) { + const hsmStrategy = RootKeyEncryptionStrategy.HSM; + + enabledStrategies.push({ + enabled: selectedStrategy === hsmStrategy, + strategy: hsmStrategy + }); + } + + return { + strategies: enabledStrategies + }; + }; + + const updateRootEncryptionStrategy = async (strategy: RootKeyEncryptionStrategy) => { + if (!licenseService.onPremFeatures.hsm) { + throw new BadRequestError({ + message: "Failed to update encryption strategy due to plan restriction. Upgrade to Infisical's Enterprise plan." 
+ }); + } + + const configuredStrategies = await getConfiguredEncryptionStrategies(); + + const foundStrategy = configuredStrategies.strategies.find((s) => s.strategy === strategy); + + if (!foundStrategy) { + throw new BadRequestError({ message: "Invalid encryption strategy" }); + } + + if (foundStrategy.enabled) { + throw new BadRequestError({ message: "The selected encryption strategy is already enabled" }); + } + + await kmsService.updateEncryptionStrategy(strategy); + }; + return { initServerCfg, updateServerCfg, - adminSignUp + adminSignUp, + getUsers, + deleteUser, + getAdminSlackConfig, + updateRootEncryptionStrategy, + getConfiguredEncryptionStrategies }; }; diff --git a/backend/src/services/super-admin/super-admin-types.ts b/backend/src/services/super-admin/super-admin-types.ts index e586946f2d..2d10941b48 100644 --- a/backend/src/services/super-admin/super-admin-types.ts +++ b/backend/src/services/super-admin/super-admin-types.ts @@ -1,5 +1,6 @@ export type TAdminSignUpDTO = { email: string; + password: string; publicKey: string; salt: string; lastName?: string; @@ -14,3 +15,19 @@ export type TAdminSignUpDTO = { ip: string; userAgent: string; }; + +export type TAdminGetUsersDTO = { + offset: number; + limit: number; + searchTerm: string; +}; + +export enum LoginMethod { + EMAIL = "email", + GOOGLE = "google", + GITHUB = "github", + GITLAB = "gitlab", + SAML = "saml", + LDAP = "ldap", + OIDC = "oidc" +} diff --git a/backend/src/services/telemetry/telemetry-queue.ts b/backend/src/services/telemetry/telemetry-queue.ts index 02e906fe69..c3e5471b08 100644 --- a/backend/src/services/telemetry/telemetry-queue.ts +++ b/backend/src/services/telemetry/telemetry-queue.ts @@ -48,7 +48,7 @@ export const telemetryQueueServiceFactory = ({ await keyStore.deleteItem(TELEMETRY_SECRET_OPERATIONS_KEY); }); - // every day at midnight a telemetry job executes on self hosted + // every day at midnight a telemetry job executes on self-hosted instances // this sends some telemetry information like instance id secrets operated etc const startTelemetryCheck = async () => { // this is a fast way to check its cloud or not diff --git a/backend/src/services/telemetry/telemetry-service.ts b/backend/src/services/telemetry/telemetry-service.ts index 9912e0101c..e02cff8a49 100644 --- a/backend/src/services/telemetry/telemetry-service.ts +++ b/backend/src/services/telemetry/telemetry-service.ts @@ -62,7 +62,7 @@ To opt into telemetry, you can set "TELEMETRY_ENABLED=true" within the environme const sendPostHogEvents = async (event: TPostHogEvent) => { if (postHog) { const instanceType = licenseService.getInstanceType(); - // capture posthog only when its cloud or signup event happens in self hosted + // capture posthog only when its cloud or signup event happens in self-hosted if (instanceType === InstanceType.Cloud || event.event === PostHogEventTypes.UserSignedUp) { postHog.capture({ event: event.event, diff --git a/backend/src/services/telemetry/telemetry-types.ts b/backend/src/services/telemetry/telemetry-types.ts index b168fe5d73..ddeb24211e 100644 --- a/backend/src/services/telemetry/telemetry-types.ts +++ b/backend/src/services/telemetry/telemetry-types.ts @@ -100,7 +100,9 @@ export type TIntegrationCreatedEvent = { export type TUserOrgInvitedEvent = { event: PostHogEventTypes.UserOrgInvitation; properties: { - inviteeEmail: string; + inviteeEmails: string[]; + projectIds?: string[]; + organizationRoleSlug?: string; }; }; diff --git a/backend/src/services/totp/totp-config-dal.ts 
b/backend/src/services/totp/totp-config-dal.ts new file mode 100644 index 0000000000..15abb729a2 --- /dev/null +++ b/backend/src/services/totp/totp-config-dal.ts @@ -0,0 +1,11 @@ +import { TDbClient } from "@app/db"; +import { TableName } from "@app/db/schemas"; +import { ormify } from "@app/lib/knex"; + +export type TTotpConfigDALFactory = ReturnType; + +export const totpConfigDALFactory = (db: TDbClient) => { + const totpConfigDal = ormify(db, TableName.TotpConfig); + + return totpConfigDal; +}; diff --git a/backend/src/services/totp/totp-fns.ts b/backend/src/services/totp/totp-fns.ts new file mode 100644 index 0000000000..9e9aae52c5 --- /dev/null +++ b/backend/src/services/totp/totp-fns.ts @@ -0,0 +1,3 @@ +import crypto from "node:crypto"; + +export const generateRecoveryCode = () => String(crypto.randomInt(10 ** 7, 10 ** 8 - 1)); diff --git a/backend/src/services/totp/totp-service.ts b/backend/src/services/totp/totp-service.ts new file mode 100644 index 0000000000..591a66ed6a --- /dev/null +++ b/backend/src/services/totp/totp-service.ts @@ -0,0 +1,270 @@ +import { authenticator } from "otplib"; + +import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors"; + +import { TKmsServiceFactory } from "../kms/kms-service"; +import { TUserDALFactory } from "../user/user-dal"; +import { TTotpConfigDALFactory } from "./totp-config-dal"; +import { generateRecoveryCode } from "./totp-fns"; +import { + TCreateUserTotpRecoveryCodesDTO, + TDeleteUserTotpConfigDTO, + TGetUserTotpConfigDTO, + TRegisterUserTotpDTO, + TVerifyUserTotpConfigDTO, + TVerifyUserTotpDTO, + TVerifyWithUserRecoveryCodeDTO +} from "./totp-types"; + +type TTotpServiceFactoryDep = { + userDAL: TUserDALFactory; + totpConfigDAL: TTotpConfigDALFactory; + kmsService: TKmsServiceFactory; +}; + +export type TTotpServiceFactory = ReturnType; + +const MAX_RECOVERY_CODE_LIMIT = 10; + +export const totpServiceFactory = ({ totpConfigDAL, kmsService, userDAL }: TTotpServiceFactoryDep) => { + const getUserTotpConfig = async ({ userId }: TGetUserTotpConfigDTO) => { + const totpConfig = await totpConfigDAL.findOne({ + userId + }); + + if (!totpConfig) { + throw new NotFoundError({ + message: "TOTP configuration not found" + }); + } + + if (!totpConfig.isVerified) { + throw new BadRequestError({ + message: "TOTP configuration has not been verified" + }); + } + + const decryptWithRoot = kmsService.decryptWithRootKey(); + const recoveryCodes = decryptWithRoot(totpConfig.encryptedRecoveryCodes).toString().split(","); + + return { + isVerified: totpConfig.isVerified, + recoveryCodes + }; + }; + + const registerUserTotp = async ({ userId }: TRegisterUserTotpDTO) => { + const totpConfig = await totpConfigDAL.transaction(async (tx) => { + const verifiedTotpConfig = await totpConfigDAL.findOne( + { + userId, + isVerified: true + }, + tx + ); + + if (verifiedTotpConfig) { + throw new BadRequestError({ + message: "TOTP configuration for user already exists" + }); + } + + const unverifiedTotpConfig = await totpConfigDAL.findOne({ + userId, + isVerified: false + }); + + if (unverifiedTotpConfig) { + return unverifiedTotpConfig; + } + + const encryptWithRoot = kmsService.encryptWithRootKey(); + + // create new TOTP configuration + const secret = authenticator.generateSecret(); + const encryptedSecret = encryptWithRoot(Buffer.from(secret)); + const recoveryCodes = Array.from({ length: MAX_RECOVERY_CODE_LIMIT }).map(generateRecoveryCode); + const encryptedRecoveryCodes = encryptWithRoot(Buffer.from(recoveryCodes.join(","))); + const 
newTotpConfig = await totpConfigDAL.create({ + userId, + encryptedRecoveryCodes, + encryptedSecret + }); + + return newTotpConfig; + }); + + const user = await userDAL.findById(userId); + const decryptWithRoot = kmsService.decryptWithRootKey(); + + const secret = decryptWithRoot(totpConfig.encryptedSecret).toString(); + const recoveryCodes = decryptWithRoot(totpConfig.encryptedRecoveryCodes).toString().split(","); + const otpUrl = authenticator.keyuri(user.username, "Infisical", secret); + + return { + otpUrl, + recoveryCodes + }; + }; + + const verifyUserTotpConfig = async ({ userId, totp }: TVerifyUserTotpConfigDTO) => { + const totpConfig = await totpConfigDAL.findOne({ + userId + }); + + if (!totpConfig) { + throw new NotFoundError({ + message: "TOTP configuration not found" + }); + } + + if (totpConfig.isVerified) { + throw new BadRequestError({ + message: "TOTP configuration has already been verified" + }); + } + + const decryptWithRoot = kmsService.decryptWithRootKey(); + const secret = decryptWithRoot(totpConfig.encryptedSecret).toString(); + const isValid = authenticator.verify({ + token: totp, + secret + }); + + if (isValid) { + await totpConfigDAL.updateById(totpConfig.id, { + isVerified: true + }); + } else { + throw new BadRequestError({ + message: "Invalid TOTP token" + }); + } + }; + + const verifyUserTotp = async ({ userId, totp }: TVerifyUserTotpDTO) => { + const totpConfig = await totpConfigDAL.findOne({ + userId + }); + + if (!totpConfig) { + throw new NotFoundError({ + message: "TOTP configuration not found" + }); + } + + if (!totpConfig.isVerified) { + throw new BadRequestError({ + message: "TOTP configuration has not been verified" + }); + } + + const decryptWithRoot = kmsService.decryptWithRootKey(); + const secret = decryptWithRoot(totpConfig.encryptedSecret).toString(); + const isValid = authenticator.verify({ + token: totp, + secret + }); + + if (!isValid) { + throw new ForbiddenRequestError({ + message: "Invalid TOTP" + }); + } + }; + + const verifyWithUserRecoveryCode = async ({ userId, recoveryCode }: TVerifyWithUserRecoveryCodeDTO) => { + const totpConfig = await totpConfigDAL.findOne({ + userId + }); + + if (!totpConfig) { + throw new NotFoundError({ + message: "TOTP configuration not found" + }); + } + + if (!totpConfig.isVerified) { + throw new BadRequestError({ + message: "TOTP configuration has not been verified" + }); + } + + const decryptWithRoot = kmsService.decryptWithRootKey(); + const encryptWithRoot = kmsService.encryptWithRootKey(); + + const recoveryCodes = decryptWithRoot(totpConfig.encryptedRecoveryCodes).toString().split(","); + const matchingCode = recoveryCodes.find((code) => recoveryCode === code); + if (!matchingCode) { + throw new ForbiddenRequestError({ + message: "Invalid TOTP recovery code" + }); + } + + const updatedRecoveryCodes = recoveryCodes.filter((code) => code !== matchingCode); + const encryptedRecoveryCodes = encryptWithRoot(Buffer.from(updatedRecoveryCodes.join(","))); + await totpConfigDAL.updateById(totpConfig.id, { + encryptedRecoveryCodes + }); + }; + + const deleteUserTotpConfig = async ({ userId }: TDeleteUserTotpConfigDTO) => { + const totpConfig = await totpConfigDAL.findOne({ + userId + }); + + if (!totpConfig) { + throw new NotFoundError({ + message: "TOTP configuration not found" + }); + } + + await totpConfigDAL.deleteById(totpConfig.id); + }; + + const createUserTotpRecoveryCodes = async ({ userId }: TCreateUserTotpRecoveryCodesDTO) => { + const decryptWithRoot = kmsService.decryptWithRootKey(); + const 
encryptWithRoot = kmsService.encryptWithRootKey(); + + return totpConfigDAL.transaction(async (tx) => { + const totpConfig = await totpConfigDAL.findOne( + { + userId, + isVerified: true + }, + tx + ); + + if (!totpConfig) { + throw new NotFoundError({ + message: "Valid TOTP configuration not found" + }); + } + + const recoveryCodes = decryptWithRoot(totpConfig.encryptedRecoveryCodes).toString().split(","); + if (recoveryCodes.length >= MAX_RECOVERY_CODE_LIMIT) { + throw new BadRequestError({ + message: `Cannot have more than ${MAX_RECOVERY_CODE_LIMIT} recovery codes at a time` + }); + } + + const toGenerateCount = MAX_RECOVERY_CODE_LIMIT - recoveryCodes.length; + const newRecoveryCodes = Array.from({ length: toGenerateCount }).map(generateRecoveryCode); + const encryptedRecoveryCodes = encryptWithRoot(Buffer.from([...recoveryCodes, ...newRecoveryCodes].join(","))); + + await totpConfigDAL.updateById(totpConfig.id, { + encryptedRecoveryCodes + }); + }); + }; + + return { + registerUserTotp, + verifyUserTotpConfig, + getUserTotpConfig, + verifyUserTotp, + verifyWithUserRecoveryCode, + deleteUserTotpConfig, + createUserTotpRecoveryCodes + }; +}; diff --git a/backend/src/services/totp/totp-types.ts b/backend/src/services/totp/totp-types.ts new file mode 100644 index 0000000000..15c0156197 --- /dev/null +++ b/backend/src/services/totp/totp-types.ts @@ -0,0 +1,30 @@ +export type TRegisterUserTotpDTO = { + userId: string; +}; + +export type TVerifyUserTotpConfigDTO = { + userId: string; + totp: string; +}; + +export type TGetUserTotpConfigDTO = { + userId: string; +}; + +export type TVerifyUserTotpDTO = { + userId: string; + totp: string; +}; + +export type TVerifyWithUserRecoveryCodeDTO = { + userId: string; + recoveryCode: string; +}; + +export type TDeleteUserTotpConfigDTO = { + userId: string; +}; + +export type TCreateUserTotpRecoveryCodesDTO = { + userId: string; +}; diff --git a/backend/src/services/user-alias/user-alias-types.ts b/backend/src/services/user-alias/user-alias-types.ts index 09204644f4..7207e8acf5 100644 --- a/backend/src/services/user-alias/user-alias-types.ts +++ b/backend/src/services/user-alias/user-alias-types.ts @@ -1,4 +1,5 @@ export enum UserAliasType { LDAP = "ldap", - SAML = "saml" + SAML = "saml", + OIDC = "oidc" } diff --git a/backend/src/services/user-engagement/user-engagement-service.ts b/backend/src/services/user-engagement/user-engagement-service.ts new file mode 100644 index 0000000000..5d7b549299 --- /dev/null +++ b/backend/src/services/user-engagement/user-engagement-service.ts @@ -0,0 +1,89 @@ +import { PlainClient } from "@team-plain/typescript-sdk"; + +import { getConfig } from "@app/lib/config/env"; +import { InternalServerError } from "@app/lib/errors"; + +import { TUserDALFactory } from "../user/user-dal"; + +type TUserEngagementServiceFactoryDep = { + userDAL: Pick; +}; + +export type TUserEngagementServiceFactory = ReturnType; + +export const userEngagementServiceFactory = ({ userDAL }: TUserEngagementServiceFactoryDep) => { + const createUserWish = async (userId: string, text: string) => { + const user = await userDAL.findById(userId); + const appCfg = getConfig(); + + if (!appCfg.PLAIN_API_KEY) { + throw new InternalServerError({ + message: "Plain is not configured." 
+ }); + } + + const client = new PlainClient({ + apiKey: appCfg.PLAIN_API_KEY + }); + + const customerUpsertRes = await client.upsertCustomer({ + identifier: { + emailAddress: user.email + }, + onCreate: { + fullName: `${user.firstName} ${user.lastName}`, + shortName: user.firstName, + email: { + email: user.email as string, + isVerified: user.isEmailVerified as boolean + }, + + externalId: user.id + }, + + onUpdate: { + fullName: { + value: `${user.firstName} ${user.lastName}` + }, + shortName: { + value: user.firstName + }, + email: { + email: user.email as string, + isVerified: user.isEmailVerified as boolean + }, + externalId: { + value: user.id + } + } + }); + + if (customerUpsertRes.error) { + throw new InternalServerError({ message: customerUpsertRes.error.message }); + } + + const createThreadRes = await client.createThread({ + title: "Wish", + customerIdentifier: { + externalId: customerUpsertRes.data.customer.externalId + }, + components: [ + { + componentText: { + text + } + } + ], + labelTypeIds: appCfg.PLAIN_WISH_LABEL_IDS?.split(",") + }); + + if (createThreadRes.error) { + throw new InternalServerError({ + message: createThreadRes.error.message + }); + } + }; + return { + createUserWish + }; +}; diff --git a/backend/src/services/user/user-dal.ts b/backend/src/services/user/user-dal.ts index f2da0df0e2..99f403e841 100644 --- a/backend/src/services/user/user-dal.ts +++ b/backend/src/services/user/user-dal.ts @@ -7,10 +7,11 @@ import { TUserActionsUpdate, TUserEncryptionKeys, TUserEncryptionKeysInsert, - TUserEncryptionKeysUpdate + TUserEncryptionKeysUpdate, + TUsers } from "@app/db/schemas"; import { DatabaseError } from "@app/lib/errors"; -import { ormify } from "@app/lib/knex"; +import { ormify, selectAllTableCols } from "@app/lib/knex"; export type TUserDALFactory = ReturnType; @@ -18,11 +19,45 @@ export const userDALFactory = (db: TDbClient) => { const userOrm = ormify(db, TableName.Users); const findUserByUsername = async (username: string, tx?: Knex) => userOrm.findOne({ username }, tx); + const getUsersByFilter = async ({ + limit, + offset, + searchTerm, + sortBy + }: { + limit: number; + offset: number; + searchTerm: string; + sortBy?: keyof TUsers; + }) => { + try { + let query = db.replicaNode()(TableName.Users).where("isGhost", "=", false); + if (searchTerm) { + query = query.where((qb) => { + void qb + .whereILike("email", `%${searchTerm}%`) + .orWhereILike("firstName", `%${searchTerm}%`) + .orWhereILike("lastName", `%${searchTerm}%`) + .orWhereLike("username", `%${searchTerm}%`); + }); + } + + if (sortBy) { + query = query.orderBy(sortBy); + } + + return await query.limit(limit).offset(offset).select(selectAllTableCols(TableName.Users)); + } catch (error) { + throw new DatabaseError({ error, name: "Get users by filter" }); + } + }; + // USER ENCRYPTION FUNCTIONS // ------------------------- const findUserEncKeyByUsername = async ({ username }: { username: string }) => { try { - return await db(TableName.Users) + return await db + .replicaNode()(TableName.Users) .where({ username, isGhost: false @@ -36,7 +71,7 @@ export const userDALFactory = (db: TDbClient) => { const findUserEncKeyByUserIdsBatch = async ({ userIds }: { userIds: string[] }, tx?: Knex) => { try { - return await (tx || db)(TableName.Users) + return await (tx || db.replicaNode())(TableName.Users) .where({ isGhost: false }) @@ -47,9 +82,9 @@ export const userDALFactory = (db: TDbClient) => { } }; - const findUserEncKeyByUserId = async (userId: string) => { + const findUserEncKeyByUserId = async 
(userId: string, tx?: Knex) => { try { - const user = await db(TableName.Users) + const user = await (tx || db.replicaNode())(TableName.Users) .where(`${TableName.Users}.id`, userId) .join(TableName.UserEncryptionKey, `${TableName.Users}.id`, `${TableName.UserEncryptionKey}.userId`) .first(); @@ -65,7 +100,8 @@ export const userDALFactory = (db: TDbClient) => { const findUserByProjectMembershipId = async (projectMembershipId: string) => { try { - return await db(TableName.ProjectMembership) + return await db + .replicaNode()(TableName.ProjectMembership) .where({ [`${TableName.ProjectMembership}.id` as "id"]: projectMembershipId }) .join(TableName.Users, `${TableName.ProjectMembership}.userId`, `${TableName.Users}.id`) .first(); @@ -76,7 +112,8 @@ export const userDALFactory = (db: TDbClient) => { const findUsersByProjectMembershipIds = async (projectMembershipIds: string[]) => { try { - return await db(TableName.ProjectMembership) + return await db + .replicaNode()(TableName.ProjectMembership) .whereIn(`${TableName.ProjectMembership}.id`, projectMembershipIds) .join(TableName.Users, `${TableName.ProjectMembership}.userId`, `${TableName.Users}.id`) .select("*"); @@ -128,7 +165,7 @@ export const userDALFactory = (db: TDbClient) => { // --------------------- const findOneUserAction = (filter: TUserActionsUpdate, tx?: Knex) => { try { - return (tx || db)(TableName.UserAction).where(filter).first("*"); + return (tx || db.replicaNode())(TableName.UserAction).where(filter).first("*"); } catch (error) { throw new DatabaseError({ error, name: "Find one user action" }); } @@ -155,6 +192,7 @@ export const userDALFactory = (db: TDbClient) => { upsertUserEncryptionKey, createUserEncryption, findOneUserAction, - createUserAction + createUserAction, + getUsersByFilter }; }; diff --git a/backend/src/services/user/user-fns.ts b/backend/src/services/user/user-fns.ts index 639320e243..23aff77a4d 100644 --- a/backend/src/services/user/user-fns.ts +++ b/backend/src/services/user/user-fns.ts @@ -4,18 +4,14 @@ import { alphaNumericNanoId } from "@app/lib/nanoid"; import { TUserDALFactory } from "@app/services/user/user-dal"; export const normalizeUsername = async (username: string, userDAL: Pick) => { - let attempt = slugify(`${username}-${alphaNumericNanoId(4)}`); + let attempt: string; + let user; - let user = await userDAL.findOne({ username: attempt }); - if (!user) return attempt; - - while (true) { + do { attempt = slugify(`${username}-${alphaNumericNanoId(4)}`); // eslint-disable-next-line no-await-in-loop user = await userDAL.findOne({ username: attempt }); + } while (user); - if (!user) { - return attempt; - } - } + return attempt; }; diff --git a/backend/src/services/user/user-service.ts b/backend/src/services/user/user-service.ts index a82259db6f..5da5d493c8 100644 --- a/backend/src/services/user/user-service.ts +++ b/backend/src/services/user/user-service.ts @@ -1,4 +1,10 @@ -import { BadRequestError } from "@app/lib/errors"; +import { ForbiddenError } from "@casl/ability"; + +import { SecretKeyEncoding } from "@app/db/schemas"; +import { OrgPermissionActions, OrgPermissionSubjects } from "@app/ee/services/permission/org-permission"; +import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service"; +import { infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption"; +import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors"; import { TAuthTokenServiceFactory } from "@app/services/auth-token/auth-token-service"; import { TokenType } from 
"@app/services/auth-token/auth-token-types"; import { TOrgMembershipDALFactory } from "@app/services/org-membership/org-membership-dal"; @@ -6,7 +12,10 @@ import { SmtpTemplates, TSmtpService } from "@app/services/smtp/smtp-service"; import { TUserAliasDALFactory } from "@app/services/user-alias/user-alias-dal"; import { AuthMethod } from "../auth/auth-type"; +import { TGroupProjectDALFactory } from "../group-project/group-project-dal"; +import { TProjectMembershipDALFactory } from "../project-membership/project-membership-dal"; import { TUserDALFactory } from "./user-dal"; +import { TListUserGroupsDTO, TUpdateUserMfaDTO } from "./user-types"; type TUserServiceFactoryDep = { userDAL: Pick< @@ -21,11 +30,15 @@ type TUserServiceFactoryDep = { | "findOneUserAction" | "createUserAction" | "findUserEncKeyByUserId" + | "delete" >; userAliasDAL: Pick; - orgMembershipDAL: Pick; + groupProjectDAL: Pick; + orgMembershipDAL: Pick; tokenService: Pick; + projectMembershipDAL: Pick; smtpService: Pick; + permissionService: TPermissionServiceFactory; }; export type TUserServiceFactory = ReturnType; @@ -34,12 +47,15 @@ export const userServiceFactory = ({ userDAL, userAliasDAL, orgMembershipDAL, + projectMembershipDAL, + groupProjectDAL, tokenService, - smtpService + smtpService, + permissionService }: TUserServiceFactoryDep) => { const sendEmailVerificationCode = async (username: string) => { const user = await userDAL.findOne({ username }); - if (!user) throw new BadRequestError({ name: "Failed to find user" }); + if (!user) throw new NotFoundError({ name: `User with username '${username}' not found` }); if (!user.email) throw new BadRequestError({ name: "Failed to send email verification code due to no email on user" }); if (user.isEmailVerified) @@ -62,7 +78,7 @@ export const userServiceFactory = ({ const verifyEmailVerificationCode = async (username: string, code: string) => { const user = await userDAL.findOne({ username }); - if (!user) throw new BadRequestError({ name: "Failed to find user" }); + if (!user) throw new NotFoundError({ name: `User with username '${username}' not found` }); if (!user.email) throw new BadRequestError({ name: "Failed to verify email verification code due to no email on user" }); if (user.isEmailVerified) @@ -85,7 +101,7 @@ export const userServiceFactory = ({ tx ); - // check if there are users with the same email. + // check if there are verified users with the same email. 
const users = await userDAL.find( { email, @@ -97,7 +113,7 @@ export const userServiceFactory = ({ if (users.length > 1) { // merge users const mergeUser = users.find((u) => u.id !== user.id); - if (!mergeUser) throw new BadRequestError({ name: "Failed to find merge user" }); + if (!mergeUser) throw new NotFoundError({ name: "Failed to find merge user" }); const mergeUserOrgMembershipSet = new Set( (await orgMembershipDAL.find({ userId: mergeUser.id }, { tx })).map((m) => m.orgId) @@ -134,6 +150,15 @@ export const userServiceFactory = ({ ); } } else { + await userDAL.delete( + { + email, + isAccepted: false, + isEmailVerified: false + }, + tx + ); + // update current user's username to [email] await userDAL.updateById( user.id, @@ -146,15 +171,24 @@ export const userServiceFactory = ({ }); }; - const toggleUserMfa = async (userId: string, isMfaEnabled: boolean) => { + const updateUserMfa = async ({ userId, isMfaEnabled, selectedMfaMethod }: TUpdateUserMfaDTO) => { const user = await userDAL.findById(userId); if (!user || !user.email) throw new BadRequestError({ name: "Failed to toggle MFA" }); + let mfaMethods; + if (isMfaEnabled === undefined) { + mfaMethods = undefined; + } else { + mfaMethods = isMfaEnabled ? ["email"] : []; + } + const updatedUser = await userDAL.updateById(userId, { isMfaEnabled, - mfaMethods: isMfaEnabled ? ["email"] : [] + mfaMethods, + selectedMfaMethod }); + return updatedUser; }; @@ -168,13 +202,11 @@ export const userServiceFactory = ({ const updateAuthMethods = async (userId: string, authMethods: AuthMethod[]) => { const user = await userDAL.findById(userId); - if (!user) throw new BadRequestError({ name: "Update auth methods" }); + if (!user) throw new NotFoundError({ message: `User with ID '${userId}' not found`, name: "UpdateAuthMethods" }); - if (user.authMethods?.includes(AuthMethod.LDAP)) - throw new BadRequestError({ message: "LDAP auth method cannot be updated", name: "Update auth methods" }); - - if (authMethods.includes(AuthMethod.LDAP)) - throw new BadRequestError({ message: "LDAP auth method cannot be updated", name: "Update auth methods" }); + if (user.authMethods?.includes(AuthMethod.LDAP) || authMethods.includes(AuthMethod.LDAP)) { + throw new BadRequestError({ message: "LDAP auth method cannot be updated", name: "UpdateAuthMethods" }); + } const updatedUser = await userDAL.updateById(userId, { authMethods }); return updatedUser; @@ -182,11 +214,11 @@ export const userServiceFactory = ({ const getMe = async (userId: string) => { const user = await userDAL.findUserEncKeyByUserId(userId); - if (!user) throw new BadRequestError({ message: "user not found", name: "Get Me" }); + if (!user) throw new NotFoundError({ message: `User with ID '${userId}' not found`, name: "GetMe" }); return user; }; - const deleteMe = async (userId: string) => { + const deleteUser = async (userId: string) => { const user = await userDAL.deleteById(userId); return user; }; @@ -220,16 +252,101 @@ export const userServiceFactory = ({ ); }; + const getUserPrivateKey = async (userId: string) => { + const user = await userDAL.findUserEncKeyByUserId(userId); + if (!user?.serverEncryptedPrivateKey || !user.serverEncryptedPrivateKeyIV || !user.serverEncryptedPrivateKeyTag) { + throw new NotFoundError({ message: `Private key for user with ID '${userId}' not found` }); + } + const privateKey = infisicalSymmetricDecrypt({ + ciphertext: user.serverEncryptedPrivateKey, + tag: user.serverEncryptedPrivateKeyTag, + iv: user.serverEncryptedPrivateKeyIV, + keyEncoding: 
user.serverEncryptedPrivateKeyEncoding as SecretKeyEncoding + }); + + return privateKey; + }; + + const getUserProjectFavorites = async (userId: string, orgId: string) => { + const orgMembership = await orgMembershipDAL.findOne({ + userId, + orgId + }); + + if (!orgMembership) { + throw new ForbiddenRequestError({ + message: "User does not belong in the organization." + }); + } + + return { projectFavorites: orgMembership.projectFavorites || [] }; + }; + + const updateUserProjectFavorites = async (userId: string, orgId: string, projectIds: string[]) => { + const orgMembership = await orgMembershipDAL.findOne({ + userId, + orgId + }); + + if (!orgMembership) { + throw new ForbiddenRequestError({ + message: "User does not belong in the organization." + }); + } + + const matchingUserProjectMemberships = await projectMembershipDAL.find({ + userId, + $in: { + projectId: projectIds + } + }); + + const memberProjectFavorites = matchingUserProjectMemberships.map( + (projectMembership) => projectMembership.projectId + ); + + const updatedOrgMembership = await orgMembershipDAL.updateById(orgMembership.id, { + projectFavorites: memberProjectFavorites + }); + + return updatedOrgMembership.projectFavorites; + }; + + const listUserGroups = async ({ username, actorOrgId, actor, actorId, actorAuthMethod }: TListUserGroupsDTO) => { + const user = await userDAL.findOne({ + username + }); + + // This makes it so the user can always read information about themselves, but no one else if they don't have the Members Read permission. + if (user.id !== actorId) { + const { permission } = await permissionService.getOrgPermission( + actor, + actorId, + actorOrgId, + actorAuthMethod, + actorOrgId + ); + ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Read, OrgPermissionSubjects.Member); + } + + const memberships = await groupProjectDAL.findByUserId(user.id, actorOrgId); + return memberships; + }; + return { sendEmailVerificationCode, verifyEmailVerificationCode, - toggleUserMfa, + updateUserMfa, updateUserName, updateAuthMethods, - deleteMe, + deleteUser, getMe, createUserAction, + listUserGroups, getUserAction, - unlockUser + unlockUser, + getUserPrivateKey, + getUserProjectFavorites, + updateUserProjectFavorites }; }; diff --git a/backend/src/services/user/user-types.ts b/backend/src/services/user/user-types.ts index e69de29bb2..cef13f27a7 100644 --- a/backend/src/services/user/user-types.ts +++ b/backend/src/services/user/user-types.ts @@ -0,0 +1,18 @@ +import { TOrgPermission } from "@app/lib/types"; + +import { MfaMethod } from "../auth/auth-type"; + +export type TListUserGroupsDTO = { + username: string; +} & Omit; + +export enum UserEncryption { + V1 = 1, + V2 = 2 +} + +export type TUpdateUserMfaDTO = { + userId: string; + isMfaEnabled?: boolean; + selectedMfaMethod?: MfaMethod; +}; diff --git a/backend/src/services/webhook/webhook-dal.ts b/backend/src/services/webhook/webhook-dal.ts index c33d79fdb1..ba27457376 100644 --- a/backend/src/services/webhook/webhook-dal.ts +++ b/backend/src/services/webhook/webhook-dal.ts @@ -20,9 +20,9 @@ export const webhookDALFactory = (db: TDbClient) => { .select(tx.ref("projectId").withSchema(TableName.Environment)) .select(selectAllTableCols(TableName.Webhook)); - const find = async (filter: Partial, tx?: Knex) => { + const find = async (filter: Partial, tx?: Knex) => { try { - const docs = await webhookFindQuery(tx || db, filter); + const docs = await webhookFindQuery(tx || db.replicaNode(), filter); return docs.map(({ envId, envSlug, envName, ...el }) => 
({ ...el, envId, @@ -39,7 +39,7 @@ export const webhookDALFactory = (db: TDbClient) => { const findOne = async (filter: Partial, tx?: Knex) => { try { - const doc = await webhookFindQuery(tx || db, filter).first(); + const doc = await webhookFindQuery(tx || db.replicaNode(), filter).first(); if (!doc) return; const { envName: name, envSlug: slug, envId: id, ...el } = doc; @@ -51,7 +51,7 @@ export const webhookDALFactory = (db: TDbClient) => { const findById = async (id: string, tx?: Knex) => { try { - const doc = await webhookFindQuery(tx || db, { + const doc = await webhookFindQuery(tx || db.replicaNode(), { [`${TableName.Webhook}.id` as "id"]: id }).first(); if (!doc) return; @@ -65,7 +65,7 @@ export const webhookDALFactory = (db: TDbClient) => { const findAllWebhooks = async (projectId: string, environment?: string, secretPath?: string, tx?: Knex) => { try { - const webhooks = await (tx || db)(TableName.Webhook) + const webhooks = await (tx || db.replicaNode())(TableName.Webhook) .where(`${TableName.Environment}.projectId`, projectId) .where((qb) => { if (environment) { diff --git a/backend/src/services/webhook/webhook-fns.ts b/backend/src/services/webhook/webhook-fns.ts index 35d2ba7fcf..ffa4b4a046 100644 --- a/backend/src/services/webhook/webhook-fns.ts +++ b/backend/src/services/webhook/webhook-fns.ts @@ -4,71 +4,118 @@ import { AxiosError } from "axios"; import picomatch from "picomatch"; import { SecretKeyEncoding, TWebhooks } from "@app/db/schemas"; -import { getConfig } from "@app/lib/config/env"; import { request } from "@app/lib/config/request"; -import { decryptSymmetric, decryptSymmetric128BitHexKeyUTF8 } from "@app/lib/crypto"; -import { BadRequestError } from "@app/lib/errors"; +import { infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption"; +import { NotFoundError } from "@app/lib/errors"; import { logger } from "@app/lib/logger"; +import { TProjectDALFactory } from "../project/project-dal"; import { TProjectEnvDALFactory } from "../project-env/project-env-dal"; import { TWebhookDALFactory } from "./webhook-dal"; +import { WebhookType } from "./webhook-types"; const WEBHOOK_TRIGGER_TIMEOUT = 15 * 1000; -export const triggerWebhookRequest = async ( - { url, encryptedSecretKey, iv, tag, keyEncoding }: TWebhooks, - data: Record -) => { - const headers: Record = {}; - const payload = { ...data, timestamp: Date.now() }; - const appCfg = getConfig(); + +export const decryptWebhookDetails = (webhook: TWebhooks) => { + const { keyEncoding, iv, encryptedSecretKey, tag, urlCipherText, urlIV, urlTag, url } = webhook; + + let decryptedSecretKey = ""; + let decryptedUrl = url; if (encryptedSecretKey) { - const encryptionKey = appCfg.ENCRYPTION_KEY; - const rootEncryptionKey = appCfg.ROOT_ENCRYPTION_KEY; - let secretKey; - if (rootEncryptionKey && keyEncoding === SecretKeyEncoding.BASE64) { - // case: encoding scheme is base64 - secretKey = decryptSymmetric({ - ciphertext: encryptedSecretKey, - iv: iv as string, - tag: tag as string, - key: rootEncryptionKey - }); - } else if (encryptionKey && keyEncoding === SecretKeyEncoding.UTF8) { - // case: encoding scheme is utf8 - secretKey = decryptSymmetric128BitHexKeyUTF8({ - ciphertext: encryptedSecretKey, - iv: iv as string, - tag: tag as string, - key: encryptionKey - }); - } - if (secretKey) { - const webhookSign = crypto.createHmac("sha256", secretKey).update(JSON.stringify(payload)).digest("hex"); - headers["x-infisical-signature"] = `t=${payload.timestamp};${webhookSign}`; - } + decryptedSecretKey = infisicalSymmetricDecrypt({ + 
keyEncoding: keyEncoding as SecretKeyEncoding, + ciphertext: encryptedSecretKey, + iv: iv as string, + tag: tag as string + }); } + + if (urlCipherText) { + decryptedUrl = infisicalSymmetricDecrypt({ + keyEncoding: keyEncoding as SecretKeyEncoding, + ciphertext: urlCipherText, + iv: urlIV as string, + tag: urlTag as string + }); + } + + return { + secretKey: decryptedSecretKey, + url: decryptedUrl + }; +}; + +export const triggerWebhookRequest = async (webhook: TWebhooks, data: Record) => { + const headers: Record = {}; + const payload = { ...data, timestamp: Date.now() }; + const { secretKey, url } = decryptWebhookDetails(webhook); + + if (secretKey) { + const webhookSign = crypto.createHmac("sha256", secretKey).update(JSON.stringify(payload)).digest("hex"); + headers["x-infisical-signature"] = `t=${payload.timestamp};${webhookSign}`; + } + const req = await request.post(url, payload, { headers, timeout: WEBHOOK_TRIGGER_TIMEOUT, signal: AbortSignal.timeout(WEBHOOK_TRIGGER_TIMEOUT) }); + return req; }; export const getWebhookPayload = ( eventName: string, - workspaceId: string, - environment: string, - secretPath?: string -) => ({ - event: eventName, - project: { - workspaceId, - environment, - secretPath + details: { + workspaceName: string; + workspaceId: string; + environment: string; + secretPath?: string; + type?: string | null; } -}); +) => { + const { workspaceName, workspaceId, environment, secretPath, type } = details; + + switch (type) { + case WebhookType.SLACK: + return { + text: "A secret value has been added or modified.", + attachments: [ + { + color: "#E7F256", + fields: [ + { + title: "Project", + value: workspaceName, + short: false + }, + { + title: "Environment", + value: environment, + short: false + }, + { + title: "Secret Path", + value: secretPath, + short: false + } + ] + } + ] + }; + case WebhookType.GENERAL: + default: + return { + event: eventName, + project: { + workspaceId, + environment, + secretPath + } + }; + } +}; export type TFnTriggerWebhookDTO = { projectId: string; @@ -76,7 +123,9 @@ export type TFnTriggerWebhookDTO = { environment: string; webhookDAL: Pick; projectEnvDAL: Pick; + projectDAL: Pick; }; + // this is reusable function // used in secret queue to trigger webhook and update status when secrets changes export const fnTriggerWebhook = async ({ @@ -84,7 +133,8 @@ export const fnTriggerWebhook = async ({ secretPath, projectId, webhookDAL, - projectEnvDAL + projectEnvDAL, + projectDAL }: TFnTriggerWebhookDTO) => { const webhooks = await webhookDAL.findAllWebhooks(projectId, environment); const toBeTriggeredHooks = webhooks.filter( @@ -93,11 +143,22 @@ export const fnTriggerWebhook = async ({ ); if (!toBeTriggeredHooks.length) return; logger.info("Secret webhook job started", { environment, secretPath, projectId }); + const project = await projectDAL.findById(projectId); const webhooksTriggered = await Promise.allSettled( toBeTriggeredHooks.map((hook) => - triggerWebhookRequest(hook, getWebhookPayload("secrets.modified", projectId, environment, secretPath)) + triggerWebhookRequest( + hook, + getWebhookPayload("secrets.modified", { + workspaceName: project.name, + workspaceId: projectId, + environment, + secretPath, + type: hook.type + }) + ) ) ); + // filter hooks by status const successWebhooks = webhooksTriggered .filter(({ status }) => status === "fulfilled") @@ -111,7 +172,11 @@ export const fnTriggerWebhook = async ({ await webhookDAL.transaction(async (tx) => { const env = await projectEnvDAL.findOne({ projectId, slug: environment }, tx); - 
 export const getWebhookPayload = (
   eventName: string,
-  workspaceId: string,
-  environment: string,
-  secretPath?: string
-) => ({
-  event: eventName,
-  project: {
-    workspaceId,
-    environment,
-    secretPath
+  details: {
+    workspaceName: string;
+    workspaceId: string;
+    environment: string;
+    secretPath?: string;
+    type?: string | null;
   }
-});
+) => {
+  const { workspaceName, workspaceId, environment, secretPath, type } = details;
+
+  switch (type) {
+    case WebhookType.SLACK:
+      return {
+        text: "A secret value has been added or modified.",
+        attachments: [
+          {
+            color: "#E7F256",
+            fields: [
+              {
+                title: "Project",
+                value: workspaceName,
+                short: false
+              },
+              {
+                title: "Environment",
+                value: environment,
+                short: false
+              },
+              {
+                title: "Secret Path",
+                value: secretPath,
+                short: false
+              }
+            ]
+          }
+        ]
+      };
+    case WebhookType.GENERAL:
+    default:
+      return {
+        event: eventName,
+        project: {
+          workspaceId,
+          environment,
+          secretPath
+        }
+      };
+  }
+};
 
 export type TFnTriggerWebhookDTO = {
   projectId: string;
@@ -76,7 +123,9 @@ export type TFnTriggerWebhookDTO = {
   environment: string;
   webhookDAL: Pick<TWebhookDALFactory, "findAllWebhooks" | "transaction" | "update">;
   projectEnvDAL: Pick<TProjectEnvDALFactory, "findOne">;
+  projectDAL: Pick<TProjectDALFactory, "findById">;
 };
+
 // this is reusable function
 // used in secret queue to trigger webhook and update status when secrets changes
 export const fnTriggerWebhook = async ({
@@ -84,7 +133,8 @@ export const fnTriggerWebhook = async ({
   environment,
   secretPath,
   projectId,
   webhookDAL,
-  projectEnvDAL
+  projectEnvDAL,
+  projectDAL
 }: TFnTriggerWebhookDTO) => {
   const webhooks = await webhookDAL.findAllWebhooks(projectId, environment);
   const toBeTriggeredHooks = webhooks.filter(
@@ -93,11 +143,22 @@
   );
   if (!toBeTriggeredHooks.length) return;
   logger.info("Secret webhook job started", { environment, secretPath, projectId });
+  const project = await projectDAL.findById(projectId);
   const webhooksTriggered = await Promise.allSettled(
     toBeTriggeredHooks.map((hook) =>
-      triggerWebhookRequest(hook, getWebhookPayload("secrets.modified", projectId, environment, secretPath))
+      triggerWebhookRequest(
+        hook,
+        getWebhookPayload("secrets.modified", {
+          workspaceName: project.name,
+          workspaceId: projectId,
+          environment,
+          secretPath,
+          type: hook.type
+        })
+      )
     )
   );
+
   // filter hooks by status
   const successWebhooks = webhooksTriggered
     .filter(({ status }) => status === "fulfilled")
@@ -111,7 +172,11 @@
 
     await webhookDAL.transaction(async (tx) => {
       const env = await projectEnvDAL.findOne({ projectId, slug: environment }, tx);
-      if (!env) throw new BadRequestError({ message: "Env not found" });
+      if (!env) {
+        throw new NotFoundError({
+          message: `Environment with slug '${environment}' in project with ID '${projectId}' not found`
+        });
+      }
 
       if (successWebhooks.length) {
         await webhookDAL.update(
           { envId: env.id, $in: { id: successWebhooks } },
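
getWebhookPayload is now shape-polymorphic on the webhook type. A quick sketch of the two return shapes it can produce (values are illustrative):

import { getWebhookPayload } from "./webhook-fns";
import { WebhookType } from "./webhook-types";

const details = {
  workspaceName: "acme",
  workspaceId: "project-id",
  environment: "dev",
  secretPath: "/"
};

// GENERAL (also the default for legacy rows with no type): a machine-readable envelope
// -> { event: "secrets.modified", project: { workspaceId, environment, secretPath } }
const general = getWebhookPayload("secrets.modified", { ...details, type: WebhookType.GENERAL });

// SLACK: a Slack incoming-webhook message body
// -> { text: "A secret value has been added or modified.", attachments: [{ color: "#E7F256", fields: [...] }] }
const slack = getWebhookPayload("secrets.modified", { ...details, type: WebhookType.SLACK });
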
diff --git a/backend/src/services/webhook/webhook-service.ts b/backend/src/services/webhook/webhook-service.ts
index 4a05ad219e..a959d904c1 100644
--- a/backend/src/services/webhook/webhook-service.ts
+++ b/backend/src/services/webhook/webhook-service.ts
@@ -1,15 +1,15 @@
 import { ForbiddenError } from "@casl/ability";
 
-import { SecretEncryptionAlgo, SecretKeyEncoding, TWebhooksInsert } from "@app/db/schemas";
+import { TWebhooksInsert } from "@app/db/schemas";
 import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
 import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
-import { getConfig } from "@app/lib/config/env";
-import { encryptSymmetric, encryptSymmetric128BitHexKeyUTF8 } from "@app/lib/crypto";
-import { BadRequestError } from "@app/lib/errors";
+import { infisicalSymmetricEncypt } from "@app/lib/crypto/encryption";
+import { NotFoundError } from "@app/lib/errors";
 
+import { TProjectDALFactory } from "../project/project-dal";
 import { TProjectEnvDALFactory } from "../project-env/project-env-dal";
 import { TWebhookDALFactory } from "./webhook-dal";
-import { getWebhookPayload, triggerWebhookRequest } from "./webhook-fns";
+import { decryptWebhookDetails, getWebhookPayload, triggerWebhookRequest } from "./webhook-fns";
 import {
   TCreateWebhookDTO,
   TDeleteWebhookDTO,
@@ -21,12 +21,18 @@ import {
 type TWebhookServiceFactoryDep = {
   webhookDAL: TWebhookDALFactory;
   projectEnvDAL: TProjectEnvDALFactory;
+  projectDAL: Pick<TProjectDALFactory, "findById">;
   permissionService: Pick<TPermissionServiceFactory, "getProjectPermission">;
 };
 
 export type TWebhookServiceFactory = ReturnType<typeof webhookServiceFactory>;
 
-export const webhookServiceFactory = ({ webhookDAL, projectEnvDAL, permissionService }: TWebhookServiceFactoryDep) => {
+export const webhookServiceFactory = ({
+  webhookDAL,
+  projectEnvDAL,
+  permissionService,
+  projectDAL
+}: TWebhookServiceFactoryDep) => {
   const createWebhook = async ({
     actor,
     actorId,
@@ -36,7 +42,8 @@ export const webhookServiceFactory = ({ webhookDAL, projectEnvDAL, permissionSer
     webhookUrl,
     environment,
     secretPath,
-    webhookSecretKey
+    webhookSecretKey,
+    type
   }: TCreateWebhookDTO) => {
     const { permission } = await permissionService.getProjectPermission(
       actor,
@@ -47,33 +54,35 @@
     );
     ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Create, ProjectPermissionSub.Webhooks);
     const env = await projectEnvDAL.findOne({ projectId, slug: environment });
-    if (!env) throw new BadRequestError({ message: "Env not found" });
+    if (!env)
+      throw new NotFoundError({
+        message: `Environment with slug '${environment}' in project with ID '${projectId}' not found`
+      });
 
     const insertDoc: TWebhooksInsert = {
-      url: webhookUrl,
+      url: "", // deprecated - we are moving away from plaintext URLs
      envId: env.id,
       isDisabled: false,
-      secretPath: secretPath || "/"
+      secretPath: secretPath || "/",
+      type
     };
+
     if (webhookSecretKey) {
-      const appCfg = getConfig();
-      const encryptionKey = appCfg.ENCRYPTION_KEY;
-      const rootEncryptionKey = appCfg.ROOT_ENCRYPTION_KEY;
-      if (rootEncryptionKey) {
-        const { ciphertext, iv, tag } = encryptSymmetric(webhookSecretKey, rootEncryptionKey);
-        insertDoc.encryptedSecretKey = ciphertext;
-        insertDoc.iv = iv;
-        insertDoc.tag = tag;
-        insertDoc.algorithm = SecretEncryptionAlgo.AES_256_GCM;
-        insertDoc.keyEncoding = SecretKeyEncoding.BASE64;
-      } else if (encryptionKey) {
-        const { ciphertext, iv, tag } = encryptSymmetric128BitHexKeyUTF8(webhookSecretKey, encryptionKey);
-        insertDoc.encryptedSecretKey = ciphertext;
-        insertDoc.iv = iv;
-        insertDoc.tag = tag;
-        insertDoc.algorithm = SecretEncryptionAlgo.AES_256_GCM;
-        insertDoc.keyEncoding = SecretKeyEncoding.UTF8;
-      }
+      const { ciphertext, iv, tag, algorithm, encoding } = infisicalSymmetricEncypt(webhookSecretKey);
+      insertDoc.encryptedSecretKey = ciphertext;
+      insertDoc.iv = iv;
+      insertDoc.tag = tag;
+      insertDoc.algorithm = algorithm;
+      insertDoc.keyEncoding = encoding;
+    }
+
+    if (webhookUrl) {
+      const { ciphertext, iv, tag, algorithm, encoding } = infisicalSymmetricEncypt(webhookUrl);
+      insertDoc.urlCipherText = ciphertext;
+      insertDoc.urlIV = iv;
+      insertDoc.urlTag = tag;
+      insertDoc.algorithm = algorithm;
+      insertDoc.keyEncoding = encoding;
     }
 
     const webhook = await webhookDAL.create(insertDoc);
@@ -82,7 +91,7 @@ export const webhookServiceFactory = ({ webhookDAL, projectEnvDAL, permissionSer
   const updateWebhook = async ({ actorId, actor, actorOrgId, actorAuthMethod, id, isDisabled }: TUpdateWebhookDTO) => {
     const webhook = await webhookDAL.findById(id);
-    if (!webhook) throw new BadRequestError({ message: "Webhook not found" });
+    if (!webhook) throw new NotFoundError({ message: `Webhook with ID '${id}' not found` });
 
     const { permission } = await permissionService.getProjectPermission(
       actor,
@@ -99,7 +108,7 @@ export const webhookServiceFactory = ({ webhookDAL, projectEnvDAL, permissionSer
   const deleteWebhook = async ({ id, actor, actorId, actorAuthMethod, actorOrgId }: TDeleteWebhookDTO) => {
     const webhook = await webhookDAL.findById(id);
-    if (!webhook) throw new BadRequestError({ message: "Webhook not found" });
+    if (!webhook) throw new NotFoundError({ message: `Webhook with ID '${id}' not found` });
 
     const { permission } = await permissionService.getProjectPermission(
       actor,
@@ -116,7 +125,7 @@ export const webhookServiceFactory = ({ webhookDAL, projectEnvDAL, permissionSer
   const testWebhook = async ({ id, actor, actorId, actorAuthMethod, actorOrgId }: TTestWebhookDTO) => {
     const webhook = await webhookDAL.findById(id);
-    if (!webhook) throw new BadRequestError({ message: "Webhook not found" });
+    if (!webhook) throw new NotFoundError({ message: `Webhook with ID '${id}' not found` });
 
     const { permission } = await permissionService.getProjectPermission(
       actor,
@@ -125,13 +134,21 @@ export const webhookServiceFactory = ({ webhookDAL, projectEnvDAL, permissionSer
       actorAuthMethod,
       actorOrgId
     );
-    ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Webhooks);
+    const project = await projectDAL.findById(webhook.projectId);
+
+    ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Webhooks);
 
     let webhookError: string | undefined;
     try {
       await triggerWebhookRequest(
         webhook,
-        getWebhookPayload("test", webhook.projectId, webhook.environment.slug, webhook.secretPath)
+        getWebhookPayload("test", {
+          workspaceName: project.name,
+          workspaceId: webhook.projectId,
+          environment: webhook.environment.slug,
+          secretPath: webhook.secretPath,
+          type: webhook.type
+        })
       );
     } catch (err) {
       webhookError = (err as Error).message;
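
Both branches of the old hand-rolled key handling collapse into the shared helper pair, which selects ROOT_ENCRYPTION_KEY or ENCRYPTION_KEY itself and reports back the algorithm and key encoding it used. The round-trip invariant that decryptWebhookDetails relies on, as a sketch:

import { infisicalSymmetricDecrypt, infisicalSymmetricEncypt } from "@app/lib/crypto/encryption";

// Encrypt at write time (createWebhook persists ciphertext, iv, tag, algorithm, keyEncoding)...
const { ciphertext, iv, tag, encoding } = infisicalSymmetricEncypt("https://example.com/webhook");

// ...and decrypt at read time (decryptWebhookDetails / listWebhooks).
const url = infisicalSymmetricDecrypt({ keyEncoding: encoding, ciphertext, iv, tag });
// url === "https://example.com/webhook"
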
@@ -162,7 +179,14 @@ export const webhookServiceFactory = ({ webhookDAL, projectEnvDAL, permissionSer
     );
     ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Webhooks);
 
-    return webhookDAL.findAllWebhooks(projectId, environment, secretPath);
+    const webhooks = await webhookDAL.findAllWebhooks(projectId, environment, secretPath);
+    return webhooks.map((w) => {
+      const { url } = decryptWebhookDetails(w);
+      return {
+        ...w,
+        url
+      };
+    });
   };
 
   return {
diff --git a/backend/src/services/webhook/webhook-types.ts b/backend/src/services/webhook/webhook-types.ts
index 7a6e92c80a..40dacb42ab 100644
--- a/backend/src/services/webhook/webhook-types.ts
+++ b/backend/src/services/webhook/webhook-types.ts
@@ -5,6 +5,7 @@ export type TCreateWebhookDTO = {
   secretPath?: string;
   webhookUrl: string;
   webhookSecretKey?: string;
+  type: string;
 } & TProjectPermission;
 
 export type TUpdateWebhookDTO = {
@@ -24,3 +25,8 @@ export type TListWebhookDTO = {
   environment?: string;
   secretPath?: string;
 } & TProjectPermission;
+
+export enum WebhookType {
+  GENERAL = "general",
+  SLACK = "slack"
+}
diff --git a/backend/src/services/workflow-integration/workflow-integration-dal.ts b/backend/src/services/workflow-integration/workflow-integration-dal.ts
new file mode 100644
index 0000000000..44fb441678
--- /dev/null
+++ b/backend/src/services/workflow-integration/workflow-integration-dal.ts
@@ -0,0 +1,11 @@
+import { TDbClient } from "@app/db";
+import { TableName } from "@app/db/schemas";
+import { ormify } from "@app/lib/knex";
+
+export type TWorkflowIntegrationDALFactory = ReturnType<typeof workflowIntegrationDALFactory>;
+
+export const workflowIntegrationDALFactory = (db: TDbClient) => {
+  const workflowIntegrationOrm = ormify(db, TableName.WorkflowIntegrations);
+
+  return workflowIntegrationOrm;
+};
diff --git a/backend/src/services/workflow-integration/workflow-integration-service.ts b/backend/src/services/workflow-integration/workflow-integration-service.ts
new file mode 100644
index 0000000000..41419061b0
--- /dev/null
+++ b/backend/src/services/workflow-integration/workflow-integration-service.ts
@@ -0,0 +1,43 @@
+import { ForbiddenError } from "@casl/ability";
+
+import { OrgPermissionActions, OrgPermissionSubjects } from "@app/ee/services/permission/org-permission";
+import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
+
+import { TWorkflowIntegrationDALFactory } from "./workflow-integration-dal";
+import { TGetWorkflowIntegrationsByOrg } from "./workflow-integration-types";
+
+type TWorkflowIntegrationServiceFactoryDep = {
+  workflowIntegrationDAL: Pick<TWorkflowIntegrationDALFactory, "find">;
+  permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
+};
+
+export type TWorkflowIntegrationServiceFactory = ReturnType<typeof workflowIntegrationServiceFactory>;
+
+export const workflowIntegrationServiceFactory = ({
+  workflowIntegrationDAL,
+  permissionService
+}: TWorkflowIntegrationServiceFactoryDep) => {
+  const getIntegrationsByOrg = async ({
+    actorId,
+    actor,
+    actorOrgId,
+    actorAuthMethod
+  }: TGetWorkflowIntegrationsByOrg) => {
+    const { permission } = await permissionService.getOrgPermission(
+      actor,
+      actorId,
+      actorOrgId,
+      actorAuthMethod,
+      actorOrgId
+    );
+
+    ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Read, OrgPermissionSubjects.Settings);
+
+    return workflowIntegrationDAL.find({
+      orgId: actorOrgId
+    });
+  };
+  return {
+    getIntegrationsByOrg
+  };
+};
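
The DAL above is a single ormify call because that helper derives a generic query surface from the table name; find is the only method this service needs. A usage sketch (the wrapper function here is illustrative, not part of the change):

import { TDbClient } from "@app/db";
import { workflowIntegrationDALFactory } from "./workflow-integration-dal";

// Illustrative wrapper: list an org's workflow integrations straight from the DAL.
export const listOrgWorkflowIntegrations = async (db: TDbClient, orgId: string) => {
  const workflowIntegrationDAL = workflowIntegrationDALFactory(db);
  // ormify keys `find` on the table's columns, so this filters on WorkflowIntegrations.orgId
  return workflowIntegrationDAL.find({ orgId });
};
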
diff --git a/backend/src/services/workflow-integration/workflow-integration-types.ts b/backend/src/services/workflow-integration/workflow-integration-types.ts
new file mode 100644
index 0000000000..9ae56b8405
--- /dev/null
+++ b/backend/src/services/workflow-integration/workflow-integration-types.ts
@@ -0,0 +1,7 @@
+import { TOrgPermission } from "@app/lib/types";
+
+export enum WorkflowIntegration {
+  SLACK = "slack"
+}
+
+export type TGetWorkflowIntegrationsByOrg = Omit<TOrgPermission, "orgId">;
diff --git a/backend/vitest.e2e.config.ts b/backend/vitest.e2e.config.ts
index c660fed14e..684a4dc425 100644
--- a/backend/vitest.e2e.config.ts
+++ b/backend/vitest.e2e.config.ts
@@ -1,4 +1,4 @@
-import tsconfigPaths from "vite-tsconfig-paths"; // only if you are using custom tsconfig paths
+import path from "path";
 import { defineConfig } from "vitest/config";
 
 export default defineConfig({
@@ -15,7 +15,14 @@ export default defineConfig({
       useAtomics: true,
       isolate: false
     }
+    },
+    alias: {
+      "./license-fns": path.resolve(__dirname, "./src/ee/services/license/__mocks__/license-fns")
     }
   },
-  plugins: [tsconfigPaths()] // only if you are using custom tsconfig paths,
+  resolve: {
+    alias: {
+      "@app": path.resolve(__dirname, "./src")
+    }
+  }
 });
diff --git a/cli/.gitignore b/cli/.gitignore
index 5fa3e39c55..8eb54d72b9 100644
--- a/cli/.gitignore
+++ b/cli/.gitignore
@@ -1,3 +1,4 @@
 .infisical.json
 dist/
 agent-config.test.yaml
+.test.env
\ No newline at end of file
diff --git a/cli/agent-config.yaml b/cli/agent-config.yaml
index e767fdca52..210c21413a 100644
--- a/cli/agent-config.yaml
+++ b/cli/agent-config.yaml
@@ -11,12 +11,25 @@ sinks:
       config:
         path: "access-token"
 templates:
-  - source-path: my-dot-ev-secret-template
+  - template-content: |
+      {{- with secret "202f04d7-e4cb-43d4-a292-e893712d61fc" "dev" "/" }}
+      {{- range . }}
+      {{ .Key }}={{ .Value }}
+      {{- end }}
+      {{- end }}
+    destination-path: my-dot-env-0.env
+    config:
+      polling-interval: 60s
+      execute:
+        command: docker-compose -f docker-compose.prod.yml down && docker-compose -f docker-compose.prod.yml up -d
+
+  - base64-template-content: e3stIHdpdGggc2VjcmV0ICIyMDJmMDRkNy1lNGNiLTQzZDQtYTI5Mi1lODkzNzEyZDYxZmMiICJkZXYiICIvIiB9fQp7ey0gcmFuZ2UgLiB9fQp7eyAuS2V5IH19PXt7IC5WYWx1ZSB9fQp7ey0gZW5kIH19Cnt7LSBlbmQgfX0=
     destination-path: my-dot-env.env
     config:
       polling-interval: 60s
       execute:
         command: docker-compose -f docker-compose.prod.yml down && docker-compose -f docker-compose.prod.yml up -d
+
   - source-path: my-dot-ev-secret-template1
     destination-path: my-dot-env-1.env
     config:
diff --git a/cli/go.mod b/cli/go.mod
index 833745effc..36277ac374 100644
--- a/cli/go.mod
+++ b/cli/go.mod
@@ -3,51 +3,78 @@ module github.com/Infisical/infisical-merge
 
 go 1.21
 
 require (
-	github.com/charmbracelet/lipgloss v0.5.0
+	github.com/bradleyjkemp/cupaloy/v2 v2.8.0
+	github.com/charmbracelet/lipgloss v0.9.1
+	github.com/creack/pty v1.1.21
 	github.com/denisbrodbeck/machineid v1.0.1
 	github.com/fatih/semgroup v1.2.0
 	github.com/gitleaks/go-gitdiff v0.8.0
 	github.com/h2non/filetype v1.1.3
-	github.com/mattn/go-isatty v0.0.14
+	github.com/infisical/go-sdk v0.3.8
+	github.com/mattn/go-isatty v0.0.20
 	github.com/muesli/ansi v0.0.0-20221106050444-61f0cd9a192a
 	github.com/muesli/mango-cobra v1.2.0
 	github.com/muesli/reflow v0.3.0
 	github.com/muesli/roff v0.1.0
 	github.com/petar-dambovaliev/aho-corasick v0.0.0-20211021192214-5ab2d9280aa9
-	github.com/pkg/browser v0.0.0-20210911075715-681adbf594b8
 	github.com/posthog/posthog-go v0.0.0-20221221115252-24dfed35d71a
-	github.com/rs/cors v1.9.0
+	github.com/rs/cors v1.11.0
 	github.com/rs/zerolog v1.26.1
 	github.com/spf13/cobra v1.6.1
 	github.com/spf13/viper v1.8.1
-	github.com/stretchr/testify v1.8.1
-	golang.org/x/crypto v0.14.0
-	golang.org/x/term v0.13.0
+	github.com/stretchr/testify v1.9.0
+
golang.org/x/crypto v0.25.0 + golang.org/x/term v0.22.0 gopkg.in/yaml.v2 v2.4.0 ) require ( + cloud.google.com/go/auth v0.7.0 // indirect + cloud.google.com/go/auth/oauth2adapt v0.2.2 // indirect + cloud.google.com/go/compute/metadata v0.4.0 // indirect + cloud.google.com/go/iam v1.1.11 // indirect github.com/alessio/shellescape v1.4.1 // indirect github.com/asaskevich/govalidator v0.0.0-20200907205600-7a23bdc65eef // indirect - github.com/bradleyjkemp/cupaloy/v2 v2.8.0 // indirect + github.com/aws/aws-sdk-go-v2 v1.27.2 // indirect + github.com/aws/aws-sdk-go-v2/config v1.27.18 // indirect + github.com/aws/aws-sdk-go-v2/credentials v1.17.18 // indirect + github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.5 // indirect + github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.9 // indirect + github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.9 // indirect + github.com/aws/aws-sdk-go-v2/internal/ini v1.8.0 // indirect + github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.11.2 // indirect + github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.11.11 // indirect + github.com/aws/aws-sdk-go-v2/service/sso v1.20.11 // indirect + github.com/aws/aws-sdk-go-v2/service/ssooidc v1.24.5 // indirect + github.com/aws/aws-sdk-go-v2/service/sts v1.28.12 // indirect + github.com/aws/smithy-go v1.20.2 // indirect + github.com/aymanbagabas/go-osc52/v2 v2.0.1 // indirect github.com/chzyer/readline v1.5.1 // indirect github.com/danieljoos/wincred v1.2.0 // indirect github.com/davecgh/go-spew v1.1.1 // indirect - github.com/dvsekhvalnov/jose2go v1.5.0 // indirect + github.com/dvsekhvalnov/jose2go v1.6.0 // indirect + github.com/felixge/httpsnoop v1.0.4 // indirect github.com/fsnotify/fsnotify v1.4.9 // indirect + github.com/go-logr/logr v1.4.1 // indirect + github.com/go-logr/stdr v1.2.2 // indirect github.com/go-openapi/errors v0.20.2 // indirect github.com/go-openapi/strfmt v0.21.3 // indirect github.com/godbus/dbus/v5 v5.1.0 // indirect + github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect + github.com/golang/protobuf v1.5.4 // indirect + github.com/google/s2a-go v0.1.7 // indirect + github.com/googleapis/enterprise-certificate-proxy v0.3.2 // indirect + github.com/googleapis/gax-go/v2 v2.12.5 // indirect github.com/hashicorp/hcl v1.0.0 // indirect github.com/lucasb-eyer/go-colorful v1.2.0 // indirect github.com/magiconair/properties v1.8.5 // indirect - github.com/mattn/go-colorable v0.1.9 // indirect - github.com/mattn/go-runewidth v0.0.14 // indirect + github.com/mattn/go-colorable v0.1.13 // indirect + github.com/mattn/go-runewidth v0.0.15 // indirect github.com/mitchellh/mapstructure v1.4.1 // indirect github.com/mtibben/percent v0.2.1 // indirect github.com/muesli/mango v0.1.0 // indirect github.com/muesli/mango-pflag v0.1.0 // indirect - github.com/muesli/termenv v0.11.1-0.20220204035834-5ac8409525e0 // indirect + github.com/muesli/termenv v0.15.2 // indirect github.com/oklog/ulid v1.3.1 // indirect github.com/pelletier/go-toml v1.9.3 // indirect github.com/pmezard/go-difflib v1.0.0 // indirect @@ -58,17 +85,30 @@ require ( github.com/subosito/gotenv v1.2.0 // indirect github.com/xtgo/uuid v0.0.0-20140804021211-a0b114877d4c // indirect go.mongodb.org/mongo-driver v1.10.0 // indirect - golang.org/x/net v0.17.0 // indirect - golang.org/x/sync v0.1.0 // indirect - golang.org/x/sys v0.13.0 // indirect - golang.org/x/text v0.13.0 // indirect + go.opencensus.io v0.24.0 // indirect + 
go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.49.0 // indirect + go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.49.0 // indirect + go.opentelemetry.io/otel v1.24.0 // indirect + go.opentelemetry.io/otel/metric v1.24.0 // indirect + go.opentelemetry.io/otel/trace v1.24.0 // indirect + golang.org/x/net v0.27.0 // indirect + golang.org/x/oauth2 v0.21.0 // indirect + golang.org/x/sync v0.7.0 // indirect + golang.org/x/sys v0.22.0 // indirect + golang.org/x/text v0.16.0 // indirect + golang.org/x/time v0.5.0 // indirect + google.golang.org/api v0.188.0 // indirect + google.golang.org/genproto/googleapis/api v0.0.0-20240701130421-f6361c86f094 // indirect + google.golang.org/genproto/googleapis/rpc v0.0.0-20240708141625-4ad9e859172b // indirect + google.golang.org/grpc v1.64.1 // indirect + google.golang.org/protobuf v1.34.2 // indirect gopkg.in/ini.v1 v1.62.0 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect ) require ( - github.com/fatih/color v1.13.0 - github.com/go-resty/resty/v2 v2.10.0 + github.com/fatih/color v1.17.0 + github.com/go-resty/resty/v2 v2.13.1 github.com/inconshreveable/mousetrap v1.0.1 // indirect github.com/jedib0t/go-pretty v4.3.0+incompatible github.com/manifoldco/promptui v0.9.0 diff --git a/cli/go.sum b/cli/go.sum index 3535791366..733a7b93de 100644 --- a/cli/go.sum +++ b/cli/go.sum @@ -18,15 +18,23 @@ cloud.google.com/go v0.74.0/go.mod h1:VV1xSbzvo+9QJOxLDaJfTjx5e+MePCpCWwvftOeQmW cloud.google.com/go v0.78.0/go.mod h1:QjdrLG0uq+YwhjoVOLsS1t7TW8fs36kLs4XO5R5ECHg= cloud.google.com/go v0.79.0/go.mod h1:3bzgcEeQlzbuEAYu4mrWhKqWjmpprinYgKJLgKHnbb8= cloud.google.com/go v0.81.0/go.mod h1:mk/AM35KwGk/Nm2YSeZbxXdrNK3KZOYHmLkOqC2V6E0= +cloud.google.com/go/auth v0.7.0 h1:kf/x9B3WTbBUHkC+1VS8wwwli9TzhSt0vSTVBmMR8Ts= +cloud.google.com/go/auth v0.7.0/go.mod h1:D+WqdrpcjmiCgWrXmLLxOVq1GACoE36chW6KXoEvuIw= +cloud.google.com/go/auth/oauth2adapt v0.2.2 h1:+TTV8aXpjeChS9M+aTtN/TjdQnzJvmzKFt//oWu7HX4= +cloud.google.com/go/auth/oauth2adapt v0.2.2/go.mod h1:wcYjgpZI9+Yu7LyYBg4pqSiaRkfEK3GQcpb7C/uyF1Q= cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o= cloud.google.com/go/bigquery v1.3.0/go.mod h1:PjpwJnslEMmckchkHFfq+HTD2DmtT67aNFKH1/VBDHE= cloud.google.com/go/bigquery v1.4.0/go.mod h1:S8dzgnTigyfTmLBfrtrhyYhwRxG72rYxvftPBK2Dvzc= cloud.google.com/go/bigquery v1.5.0/go.mod h1:snEHRnqQbz117VIFhE8bmtwIDY80NLUZUMb4Nv6dBIg= cloud.google.com/go/bigquery v1.7.0/go.mod h1://okPTzCYNXSlb24MZs83e2Do+h+VXtc4gLoIoXIAPc= cloud.google.com/go/bigquery v1.8.0/go.mod h1:J5hqkt3O0uAFnINi6JXValWIb1v0goeZM77hZzJN/fQ= +cloud.google.com/go/compute/metadata v0.4.0 h1:vHzJCWaM4g8XIcm8kopr3XmDA4Gy/lblD3EhhSux05c= +cloud.google.com/go/compute/metadata v0.4.0/go.mod h1:SIQh1Kkb4ZJ8zJ874fqVkslA29PRXuleyj6vOzlbK7M= cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE= cloud.google.com/go/datastore v1.1.0/go.mod h1:umbIZjpQpHh4hmRpGhH4tLFup+FVzqBi1b3c64qFpCk= cloud.google.com/go/firestore v1.1.0/go.mod h1:ulACoGHTpvq5r8rxGJ4ddJZBZqakUQqClKRT5SZwBmk= +cloud.google.com/go/iam v1.1.11 h1:0mQ8UKSfdHLut6pH9FM3bI55KWR46ketn0PuXleDyxw= +cloud.google.com/go/iam v1.1.11/go.mod h1:biXoiLWYIKntto2joP+62sd9uW5EpkZmKIvfNcTWlnQ= cloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I= cloud.google.com/go/pubsub v1.1.0/go.mod h1:EwwdRX2sKPjnvnqCa270oGRyludottCI76h+R3AArQw= cloud.google.com/go/pubsub v1.2.0/go.mod h1:jhfEVHT8odbXTkndysNHCcx0awwzvfOlguIAii9o8iA= @@ -49,13 +57,41 @@ 
github.com/armon/go-metrics v0.0.0-20180917152333-f0300d1749da/go.mod h1:Q73ZrmV github.com/armon/go-radix v0.0.0-20180808171621-7fddfc383310/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8= github.com/asaskevich/govalidator v0.0.0-20200907205600-7a23bdc65eef h1:46PFijGLmAjMPwCCCo7Jf0W6f9slllCkkv7vyc1yOSg= github.com/asaskevich/govalidator v0.0.0-20200907205600-7a23bdc65eef/go.mod h1:WaHUgvxTVq04UNunO+XhnAqY/wQc+bxr74GqbsZ/Jqw= +github.com/aws/aws-sdk-go-v2 v1.27.2 h1:pLsTXqX93rimAOZG2FIYraDQstZaaGVVN4tNw65v0h8= +github.com/aws/aws-sdk-go-v2 v1.27.2/go.mod h1:ffIFB97e2yNsv4aTSGkqtHnppsIJzw7G7BReUZ3jCXM= +github.com/aws/aws-sdk-go-v2/config v1.27.18 h1:wFvAnwOKKe7QAyIxziwSKjmer9JBMH1vzIL6W+fYuKk= +github.com/aws/aws-sdk-go-v2/config v1.27.18/go.mod h1:0xz6cgdX55+kmppvPm2IaKzIXOheGJhAufacPJaXZ7c= +github.com/aws/aws-sdk-go-v2/credentials v1.17.18 h1:D/ALDWqK4JdY3OFgA2thcPO1c9aYTT5STS/CvnkqY1c= +github.com/aws/aws-sdk-go-v2/credentials v1.17.18/go.mod h1:JuitCWq+F5QGUrmMPsk945rop6bB57jdscu+Glozdnc= +github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.5 h1:dDgptDO9dxeFkXy+tEgVkzSClHZje/6JkPW5aZyEvrQ= +github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.5/go.mod h1:gjvE2KBUgUQhcv89jqxrIxH9GaKs1JbZzWejj/DaHGA= +github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.9 h1:cy8ahBJuhtM8GTTSyOkfy6WVPV1IE+SS5/wfXUYuulw= +github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.9/go.mod h1:CZBXGLaJnEZI6EVNcPd7a6B5IC5cA/GkRWtu9fp3S6Y= +github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.9 h1:A4SYk07ef04+vxZToz9LWvAXl9LW0NClpPpMsi31cz0= +github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.9/go.mod h1:5jJcHuwDagxN+ErjQ3PU3ocf6Ylc/p9x+BLO/+X4iXw= +github.com/aws/aws-sdk-go-v2/internal/ini v1.8.0 h1:hT8rVHwugYE2lEfdFE0QWVo81lF7jMrYJVDWI+f+VxU= +github.com/aws/aws-sdk-go-v2/internal/ini v1.8.0/go.mod h1:8tu/lYfQfFe6IGnaOdrpVgEL2IrrDOf6/m9RQum4NkY= +github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.11.2 h1:Ji0DY1xUsUr3I8cHps0G+XM3WWU16lP6yG8qu1GAZAs= +github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.11.2/go.mod h1:5CsjAbs3NlGQyZNFACh+zztPDI7fU6eW9QsxjfnuBKg= +github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.11.11 h1:o4T+fKxA3gTMcluBNZZXE9DNaMkJuUL1O3mffCUjoJo= +github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.11.11/go.mod h1:84oZdJ+VjuJKs9v1UTC9NaodRZRseOXCTgku+vQJWR8= +github.com/aws/aws-sdk-go-v2/service/sso v1.20.11 h1:gEYM2GSpr4YNWc6hCd5nod4+d4kd9vWIAWrmGuLdlMw= +github.com/aws/aws-sdk-go-v2/service/sso v1.20.11/go.mod h1:gVvwPdPNYehHSP9Rs7q27U1EU+3Or2ZpXvzAYJNh63w= +github.com/aws/aws-sdk-go-v2/service/ssooidc v1.24.5 h1:iXjh3uaH3vsVcnyZX7MqCoCfcyxIrVE9iOQruRaWPrQ= +github.com/aws/aws-sdk-go-v2/service/ssooidc v1.24.5/go.mod h1:5ZXesEuy/QcO0WUnt+4sDkxhdXRHTu2yG0uCSH8B6os= +github.com/aws/aws-sdk-go-v2/service/sts v1.28.12 h1:M/1u4HBpwLuMtjlxuI2y6HoVLzF5e2mfxHCg7ZVMYmk= +github.com/aws/aws-sdk-go-v2/service/sts v1.28.12/go.mod h1:kcfd+eTdEi/40FIbLq4Hif3XMXnl5b/+t/KTfLt9xIk= +github.com/aws/smithy-go v1.20.2 h1:tbp628ireGtzcHDDmLT/6ADHidqnwgF57XOXZe6tp4Q= +github.com/aws/smithy-go v1.20.2/go.mod h1:krry+ya/rV9RDcV/Q16kpu6ypI4K2czasz0NC3qS14E= +github.com/aymanbagabas/go-osc52/v2 v2.0.1 h1:HwpRHbFMcZLEVr42D4p7XBqjyuxQH5SMiErDT4WkJ2k= +github.com/aymanbagabas/go-osc52/v2 v2.0.1/go.mod h1:uYgXzlJ7ZpABp8OJ+exZzJJhRNQ2ASbcXHWsFqH8hp8= github.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs= github.com/bketelsen/crypt v0.0.4/go.mod h1:aI6NrJ0pMGgvZKL1iVgXLnfIFJtfV+bKCoqOes/6LfM= 
github.com/bradleyjkemp/cupaloy/v2 v2.8.0 h1:any4BmKE+jGIaMpnU8YgH/I2LPiLBufr6oMMlVBbn9M= github.com/bradleyjkemp/cupaloy/v2 v2.8.0/go.mod h1:bm7JXdkRd4BHJk9HpwqAI8BoAY1lps46Enkdqw6aRX0= github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= -github.com/charmbracelet/lipgloss v0.5.0 h1:lulQHuVeodSgDez+3rGiuxlPVXSnhth442DATR2/8t8= -github.com/charmbracelet/lipgloss v0.5.0/go.mod h1:EZLha/HbzEt7cYqdFPovlqy5FZPj0xFhg5SaqxScmgs= +github.com/charmbracelet/lipgloss v0.9.1 h1:PNyd3jvaJbg4jRHKWXnCj1akQm4rh8dbEzN1p/u1KWg= +github.com/charmbracelet/lipgloss v0.9.1/go.mod h1:1mPmG4cxScwUQALAAnacHaigiiHB9Pmr+v1VEawJl6I= github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI= github.com/chzyer/logex v1.2.1 h1:XHDu3E6q+gdHgsdTPH6ImJMIp436vR6MPtH8gP05QzM= github.com/chzyer/logex v1.2.1/go.mod h1:JLbx6lG2kDbNRFnfkgvh4eRJRPX1QCoOIWomwysCBrQ= @@ -74,6 +110,8 @@ github.com/coreos/go-systemd/v22 v22.3.2/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSV github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= github.com/cpuguy83/go-md2man/v2 v2.0.2/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= +github.com/creack/pty v1.1.21 h1:1/QdRyBaHHJP61QkWMXlOIBfsgdDeeKfK8SYVUWJKf0= +github.com/creack/pty v1.1.21/go.mod h1:MOBLtS5ELjhRRrroQr9kyvTxUAFNvYEK993ew/Vr4O4= github.com/danieljoos/wincred v1.2.0 h1:ozqKHaLK0W/ii4KVbbvluM91W2H3Sh0BncbUNPS7jLE= github.com/danieljoos/wincred v1.2.0/go.mod h1:FzQLLMKBFdvu+osBrnFODiv32YGwCfx0SkRa/eYHgec= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= @@ -81,8 +119,8 @@ github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/denisbrodbeck/machineid v1.0.1 h1:geKr9qtkB876mXguW2X6TU4ZynleN6ezuMSRhl4D7AQ= github.com/denisbrodbeck/machineid v1.0.1/go.mod h1:dJUwb7PTidGDeYyUBmXZ2GphQBbjJCrnectwCyxcUSI= -github.com/dvsekhvalnov/jose2go v1.5.0 h1:3j8ya4Z4kMCwT5nXIKFSV84YS+HdqSSO0VsTQxaLAeM= -github.com/dvsekhvalnov/jose2go v1.5.0/go.mod h1:QsHjhyTlD/lAVqn/NSbVZmSCGeDehTB/mPZadG+mhXU= +github.com/dvsekhvalnov/jose2go v1.6.0 h1:Y9gnSnP4qEI0+/uQkHvFXeD2PLPJeXEL+ySMEA2EjTY= +github.com/dvsekhvalnov/jose2go v1.6.0/go.mod h1:QsHjhyTlD/lAVqn/NSbVZmSCGeDehTB/mPZadG+mhXU= github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98= @@ -91,10 +129,12 @@ github.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad/go.m github.com/envoyproxy/go-control-plane v0.9.9-0.20210217033140-668b12f5399d/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4= -github.com/fatih/color v1.13.0 h1:8LOYc1KYPPmyKMuN8QV2DNRWNbLo6LZ0iLs8+mlH53w= -github.com/fatih/color v1.13.0/go.mod h1:kLAiJbzzSOZDVNGyDpeOxJ47H46qBXwg5ILebYFFOfk= +github.com/fatih/color v1.17.0 h1:GlRw1BRJxkpqUCBKzKOw098ed57fEsKeNjpTe3cSjK4= +github.com/fatih/color v1.17.0/go.mod 
h1:YZ7TlrGPkiz6ku9fK3TLD/pl3CpsiFyu8N92HLgmosI= github.com/fatih/semgroup v1.2.0 h1:h/OLXwEM+3NNyAdZEpMiH1OzfplU09i2qXPVThGZvyg= github.com/fatih/semgroup v1.2.0/go.mod h1:1KAD4iIYfXjE4U13B48VM4z9QUwV5Tt8O4rS879kgm8= +github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg= +github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U= github.com/fsnotify/fsnotify v1.4.9 h1:hsms1Qyu0jgnwNXIxa+/V/PDsU6CfLf6CNO8H7IWoS4= github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ= github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= @@ -103,12 +143,17 @@ github.com/gitleaks/go-gitdiff v0.8.0/go.mod h1:pKz0X4YzCKZs30BL+weqBIG7mx0jl4tF github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= +github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= +github.com/go-logr/logr v1.4.1 h1:pKouT5E8xu9zeFC39JXRDukb6JFQPXM5p5I91188VAQ= +github.com/go-logr/logr v1.4.1/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= +github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag= +github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= github.com/go-openapi/errors v0.20.2 h1:dxy7PGTqEh94zj2E3h1cUmQQWiM1+aeCROfAr02EmK8= github.com/go-openapi/errors v0.20.2/go.mod h1:cM//ZKUKyO06HSwqAelJ5NsEMMcpa6VpXe8DOa1Mi1M= github.com/go-openapi/strfmt v0.21.3 h1:xwhj5X6CjXEZZHMWy1zKJxvW9AfHC9pkyUjLvHtKG7o= github.com/go-openapi/strfmt v0.21.3/go.mod h1:k+RzNO0Da+k3FrrynSNN8F7n/peCmQQqbbXjtDfvmGg= -github.com/go-resty/resty/v2 v2.10.0 h1:Qla4W/+TMmv0fOeeRqzEpXPLfTUnR5HZ1+lGs+CkiCo= -github.com/go-resty/resty/v2 v2.10.0/go.mod h1:iiP/OpA0CkcL3IGt1O0+/SIItFUbkkyw5BGXiVdTu+A= +github.com/go-resty/resty/v2 v2.13.1 h1:x+LHXBI2nMB1vqndymf26quycC4aggYJ7DECYbiz03g= +github.com/go-resty/resty/v2 v2.13.1/go.mod h1:GznXlLxkq6Nh4sU59rPmUw3VtgpO3aS96ORAI6Q7d+0= github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA= github.com/godbus/dbus/v5 v5.1.0 h1:4KLkAxT3aOY8Li4FRJe/KvhoNFFxo0m6fNuFUO8QJUk= github.com/godbus/dbus/v5 v5.1.0/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA= @@ -117,6 +162,8 @@ github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfU github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da h1:oI5xCqsCo564l8iNU+DwB5epxmsaqB+rhGL0m5jtYqE= +github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= github.com/golang/mock v1.3.1/go.mod h1:sBzyDLLjw3U8JLTeZvSv8jJB+tU5PVekmnlKIyFUx0Y= @@ -142,6 +189,8 @@ github.com/golang/protobuf v1.4.3/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw 
github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= github.com/golang/protobuf v1.5.1/go.mod h1:DopwsBzvsk0Fs44TXzsVbJyPhcCPeIwnvohx4u74HPM= github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= +github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek= +github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps= github.com/golang/snappy v0.0.1/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= @@ -155,8 +204,9 @@ github.com/google/go-cmp v0.5.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/ github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.3/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.5 h1:Khx7svrCpmxxtHBq5j2mp/xVjsi8hQMfNLvJFAlrGgU= github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= +github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs= github.com/google/martian/v3 v3.0.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= @@ -173,11 +223,18 @@ github.com/google/pprof v0.0.0-20201203190320-1bf35d6f28c2/go.mod h1:kpwsk12EmLe github.com/google/pprof v0.0.0-20210122040257-d980be63207e/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/pprof v0.0.0-20210226084205-cbba55b83ad5/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= +github.com/google/s2a-go v0.1.7 h1:60BLSyTrOV4/haCDW4zb1guZItoSq8foHCXrAnjBo/o= +github.com/google/s2a-go v0.1.7/go.mod h1:50CgR4k1jNlWBu4UfS4AcfhVe1r6pdZPygJ3R8F0Qdw= github.com/google/uuid v1.1.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= -github.com/google/uuid v1.1.2 h1:EVhdT+1Kseyi1/pUmXKaFxYsDNy9RQYkMWRH68J/W7Y= github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= +github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/googleapis/enterprise-certificate-proxy v0.3.2 h1:Vie5ybvEvT75RniqhfFxPRy3Bf7vr3h0cechB90XaQs= +github.com/googleapis/enterprise-certificate-proxy v0.3.2/go.mod h1:VLSiSSBs/ksPL8kq3OBOQ6WRI2QnaFynd1DCjZ62+V0= github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= +github.com/googleapis/gax-go/v2 v2.12.5 h1:8gw9KZK8TiVKB6q3zHY3SBzLnrGp6HQjyfYBYGmXdxA= +github.com/googleapis/gax-go/v2 v2.12.5/go.mod h1:BUDKcWo+RaKq5SC9vVYL0wLADa3VcfswbOMMRmB9H3E= github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1 h1:EGx4pi6eqNxGaHF6qqu48+N2wcFQ5qg5FXgOdqsJ5d8= github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= 
github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFbcEtjT1g+wF4CSlocrBnw= @@ -208,6 +265,8 @@ github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1: github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= github.com/inconshreveable/mousetrap v1.0.1 h1:U3uMjPSQEBMNp1lFxmllqCPM6P5u/Xq7Pgzkat/bFNc= github.com/inconshreveable/mousetrap v1.0.1/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw= +github.com/infisical/go-sdk v0.3.8 h1:0dGOhF3cwt0q5QzpnUs4lxwBiEza+DQYOyvEn7AfrM0= +github.com/infisical/go-sdk v0.3.8/go.mod h1:HHW7DgUqoolyQIUw/9HdpkZ3bDLwWyZ0HEtYiVaDKQw= github.com/jedib0t/go-pretty v4.3.0+incompatible h1:CGs8AVhEKg/n9YbUenWmNStRW2PHJzaeDodcfvRAbIo= github.com/jedib0t/go-pretty v4.3.0+incompatible/go.mod h1:XemHduiw8R651AF9Pt4FwCTKeG3oo7hrHJAoznj9nag= github.com/json-iterator/go v1.1.11/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= @@ -231,17 +290,15 @@ github.com/magiconair/properties v1.8.5/go.mod h1:y3VJvCyxH9uVvJTWEGAELF3aiYNyPK github.com/manifoldco/promptui v0.9.0 h1:3V4HzJk1TtXW1MTZMP7mdlwbBpIinw3HztaIlYthEiA= github.com/manifoldco/promptui v0.9.0/go.mod h1:ka04sppxSGFAtxX0qhlYQjISsg9mR4GWtQEhdbn6Pgg= github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU= -github.com/mattn/go-colorable v0.1.9 h1:sqDoxXbdeALODt0DAeJCVp38ps9ZogZEAXjus69YV3U= -github.com/mattn/go-colorable v0.1.9/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= +github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA= +github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg= github.com/mattn/go-isatty v0.0.3/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4= -github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU= -github.com/mattn/go-isatty v0.0.14 h1:yVuAays6BHfxijgZPzw+3Zlu5yQgKGP2/hcQbHb7S9Y= -github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94= -github.com/mattn/go-runewidth v0.0.10/go.mod h1:RAqKPSqVFrSLVXbA8x7dzmKdmGzieGRCM46jaSJTDAk= +github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= +github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= +github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= github.com/mattn/go-runewidth v0.0.12/go.mod h1:RAqKPSqVFrSLVXbA8x7dzmKdmGzieGRCM46jaSJTDAk= -github.com/mattn/go-runewidth v0.0.13/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w= -github.com/mattn/go-runewidth v0.0.14 h1:+xnbZSEeDbOIg5/mE6JF0w6n9duR1l3/WmbinWVwUuU= -github.com/mattn/go-runewidth v0.0.14/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w= +github.com/mattn/go-runewidth v0.0.15 h1:UNAjwbU9l54TA3KzvqLGxwWjHmMgBUVhBiTjelZgg3U= +github.com/mattn/go-runewidth v0.0.15/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w= github.com/miekg/dns v1.0.14/go.mod h1:W1PPwlIAgtquWBMBEV9nkV9Cazfe8ScdGz/Lj7v3Nrg= github.com/mitchellh/cli v1.0.0/go.mod h1:hNIlj7HEI86fIcpObd7a0FcrxTWetlwJDGcceTlRvqc= github.com/mitchellh/go-homedir v1.0.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= @@ -267,13 +324,12 @@ github.com/muesli/mango-cobra v1.2.0 h1:DQvjzAM0PMZr85Iv9LIMaYISpTOliMEg+uMFtNbY github.com/muesli/mango-cobra v1.2.0/go.mod h1:vMJL54QytZAJhCT13LPVDfkvCUJ5/4jNUKF/8NC2UjA= github.com/muesli/mango-pflag v0.1.0 h1:UADqbYgpUyRoBja3g6LUL+3LErjpsOwaC9ywvBWe7Sg= 
github.com/muesli/mango-pflag v0.1.0/go.mod h1:YEQomTxaCUp8PrbhFh10UfbhbQrM/xJ4i2PB8VTLLW0= -github.com/muesli/reflow v0.2.1-0.20210115123740-9e1d0d53df68/go.mod h1:Xk+z4oIWdQqJzsxyjgl3P22oYZnHdZ8FFTHAQQt5BMQ= github.com/muesli/reflow v0.3.0 h1:IFsN6K9NfGtjeggFP+68I4chLZV2yIKsXJFNZ+eWh6s= github.com/muesli/reflow v0.3.0/go.mod h1:pbwTDkVPibjO2kyvBQRBxTWEEGDGq0FlB1BIKtnHY/8= github.com/muesli/roff v0.1.0 h1:YD0lalCotmYuF5HhZliKWlIx7IEhiXeSfq7hNjFqGF8= github.com/muesli/roff v0.1.0/go.mod h1:pjAHQM9hdUUwm/krAfrLGgJkXJ+YuhtsfZ42kieB2Ig= -github.com/muesli/termenv v0.11.1-0.20220204035834-5ac8409525e0 h1:STjmj0uFfRryL9fzRA/OupNppeAID6QJYPMavTL7jtY= -github.com/muesli/termenv v0.11.1-0.20220204035834-5ac8409525e0/go.mod h1:Bd5NYQ7pd+SrtBSrSNoBBmXlcY8+Xj4BMJgh8qcZrvs= +github.com/muesli/termenv v0.15.2 h1:GohcuySI0QmI3wN8Ok9PtKGkgkFIk7y6Vpb5PvrY+Wo= +github.com/muesli/termenv v0.15.2/go.mod h1:Epx+iuz8sNs7mNKhxzH4fWXGNpZwUaJKRS1noLXviQ8= github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e h1:fD57ERR4JtEqsWbfPhv4DMiApHyliiK5xCTNVSPiaAs= github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno= github.com/oklog/ulid v1.3.1 h1:EGfNDEx6MqHz8B3uNV6QAib1UR2Lm97sHi3ocA6ESJ4= @@ -283,8 +339,6 @@ github.com/pelletier/go-toml v1.9.3 h1:zeC5b1GviRUyKYd6OJPvBU/mcVDVoL1OhT17FCt5d github.com/pelletier/go-toml v1.9.3/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c= github.com/petar-dambovaliev/aho-corasick v0.0.0-20211021192214-5ab2d9280aa9 h1:lL+y4Xv20pVlCGyLzNHRC0I0rIHhIL1lTvHizoS/dU8= github.com/petar-dambovaliev/aho-corasick v0.0.0-20211021192214-5ab2d9280aa9/go.mod h1:EHPiTAKtiFmrMldLUNswFwfZ2eJIYBHktdaUTZxYWRw= -github.com/pkg/browser v0.0.0-20210911075715-681adbf594b8 h1:KoWmjvw+nsYOo29YJK9vDA65RGE3NrOnUtO7a+RF9HU= -github.com/pkg/browser v0.0.0-20210911075715-681adbf594b8/go.mod h1:HKlIX3XHQyzLZPlr7++PzdhaXEj94dEiJgZDTsxEqUI= github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/sftp v1.10.1/go.mod h1:lYOWFsE0bwd1+KfKJaKeuokY15vzFx25BLbzYYoAxZI= @@ -299,8 +353,8 @@ github.com/rivo/uniseg v0.2.0 h1:S1pD9weZBuJdFmowNwbpi7BJ8TNftyUImj/0WQi72jY= github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc= github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ= github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= -github.com/rs/cors v1.9.0 h1:l9HGsTsHJcvW14Nk7J9KFz8bzeAWXn3CG6bgt7LsrAE= -github.com/rs/cors v1.9.0/go.mod h1:XyqrcTp5zjWr1wsJ8PIRZssZ8b/WMcMf71DJnit4EMU= +github.com/rs/cors v1.11.0 h1:0B9GE/r9Bc2UxRMMtymBkHTenPkHDv0CW4Y98GBY+po= +github.com/rs/cors v1.11.0/go.mod h1:XyqrcTp5zjWr1wsJ8PIRZssZ8b/WMcMf71DJnit4EMU= github.com/rs/xid v1.3.0/go.mod h1:trrq9SKmegXys3aeAKXMUTdJsYXVwGY3RLcfgqegfbg= github.com/rs/zerolog v1.26.1 h1:/ihwxqH+4z8UxyI70wM1z9yCvkWcfz/a3mj48k/Zngc= github.com/rs/zerolog v1.26.1/go.mod h1:/wSSJWX7lVrsOwlbyTRSOJvqRlc+WjWlfes+CiJ+tmc= @@ -328,8 +382,9 @@ github.com/spf13/viper v1.8.1/go.mod h1:o0Pch8wJ9BVSWGQMbra6iw0oQ5oktSIBaujf1rJH github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= -github.com/stretchr/objx v0.5.0 h1:1zr/of2m5FGMsad5YfcqgdqdWrIhu+EBEJRhR1U7z/c= 
github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= +github.com/stretchr/objx v0.5.2 h1:xuMeJ0Sdp5ZMRXx/aWO6RZxdr3beISkG5/G/aIRr3pY= +github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA= github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= @@ -338,8 +393,9 @@ github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/ github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= -github.com/stretchr/testify v1.8.1 h1:w7B6lhMri9wdJUVmEZPGGhZzrYTPvgJArz7wNPgYKsk= github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= +github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg= +github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= github.com/subosito/gotenv v1.2.0 h1:Slr1R9HxAlEKefgq5jn9U+DnETlIUa6HfgEzj0g5d7s= github.com/subosito/gotenv v1.2.0/go.mod h1:N0PQaV/YGNqwC0u51sEeR/aUtSLEXKX9iv69rRypqCw= github.com/tidwall/pretty v1.0.0 h1:HsD+QiTn7sK6flMKIvNmpqz1qrpP3Ps6jOKIKMooyg4= @@ -370,6 +426,18 @@ go.opencensus.io v0.22.3/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= go.opencensus.io v0.22.4/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= go.opencensus.io v0.22.5/go.mod h1:5pWMHQbX5EPX2/62yrJeAkowc+lfs/XD7Uxpq3pI6kk= go.opencensus.io v0.23.0/go.mod h1:XItmlyltB5F7CS4xOC1DcqMoFqwtC6OG2xF7mCv7P7E= +go.opencensus.io v0.24.0 h1:y73uSU6J157QMP2kn2r30vwW1A2W2WFwSCGnAVxeaD0= +go.opencensus.io v0.24.0/go.mod h1:vNK8G9p7aAivkbmorf4v+7Hgx+Zs0yY+0fOtgBfjQKo= +go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.49.0 h1:4Pp6oUg3+e/6M4C0A/3kJ2VYa++dsWVTtGgLVj5xtHg= +go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.49.0/go.mod h1:Mjt1i1INqiaoZOMGR1RIUJN+i3ChKoFRqzrRQhlkbs0= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.49.0 h1:jq9TW8u3so/bN+JPT166wjOI6/vQPF6Xe7nMNIltagk= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.49.0/go.mod h1:p8pYQP+m5XfbZm9fxtSKAbM6oIllS7s2AfxrChvc7iw= +go.opentelemetry.io/otel v1.24.0 h1:0LAOdjNmQeSTzGBzduGe/rU4tZhMwL5rWgtp9Ku5Jfo= +go.opentelemetry.io/otel v1.24.0/go.mod h1:W7b9Ozg4nkF5tWI5zsXkaKKDjdVjpD4oAt9Qi/MArHo= +go.opentelemetry.io/otel/metric v1.24.0 h1:6EhoGWWK28x1fbpA4tYTOWBkPefTDQnb8WSGXlc88kI= +go.opentelemetry.io/otel/metric v1.24.0/go.mod h1:VYhLe1rFfxuTXLgj4CBiyz+9WYBA8pNGJgDcSFRKBco= +go.opentelemetry.io/otel/trace v1.24.0 h1:CsKnnL4dUAr/0llH9FKuc698G04IrpWV0MQA/Y1YELI= +go.opentelemetry.io/otel/trace v1.24.0/go.mod h1:HPc3Xr/cOApsBI154IU0OI0HJexz+aw5uPdbs3UCjNU= go.uber.org/atomic v1.7.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc= go.uber.org/multierr v1.6.0/go.mod h1:cdWPpRnG4AhwMwsgIHip0KRBQjJy5kYEpYjJxpXp9iU= go.uber.org/zap v1.17.0/go.mod h1:MXVU+bhUf/A7Xi2HNOnopQOrmycQ5Ih87HtOu4q5SSo= @@ -383,8 +451,10 @@ golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPh golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/crypto 
v0.0.0-20211215165025-cf75a172585e/go.mod h1:P+XmwS30IXTQdn5tA2iutPOUgjI07+tq3H3K9MVA1s8= golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= -golang.org/x/crypto v0.14.0 h1:wBqGXzWJW6m1XrIKlAH0Hs1JJ7+9KBwnIO8v66Q9cHc= -golang.org/x/crypto v0.14.0/go.mod h1:MVFd36DqK4CsrnJYDkBA3VC4m2GkXAM0PvzMCn4JQf4= +golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU= +golang.org/x/crypto v0.23.0/go.mod h1:CKFgDieR+mRhux2Lsu27y0fO304Db0wZe70UKqHu0v8= +golang.org/x/crypto v0.25.0 h1:ypSNr+bnYL2YhwoMt2zPxHFmbAN1KZs/njMG3hxUp30= +golang.org/x/crypto v0.25.0/go.mod h1:T+wALwcMOSE0kXgUAnPAHqTLW+XHgcELELW8VaDgm/M= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= @@ -463,8 +533,10 @@ golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qx golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg= -golang.org/x/net v0.17.0 h1:pVaXccu2ozPjCXewfr1S7xza/zcXTity9cCdXQYSjIM= -golang.org/x/net v0.17.0/go.mod h1:NxSsAGuq816PNPmqtQdLE42eU2Fs7NoRIZrHJAlaCOE= +golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44= +golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM= +golang.org/x/net v0.27.0 h1:5K3Njcw06/l2y9vpGCSdcxWOYHOUk3dVNGDXN+FvAys= +golang.org/x/net v0.27.0/go.mod h1:dDi0PyhWNoiUOrAS8uXv/vnScO4wnHQO4mj9fn/RytE= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= @@ -477,6 +549,8 @@ golang.org/x/oauth2 v0.0.0-20210218202405-ba52d332ba99/go.mod h1:KelEdhl1UZF7XfJ golang.org/x/oauth2 v0.0.0-20210220000619-9bb904979d93/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/oauth2 v0.0.0-20210313182246-cd4f82c27b84/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/oauth2 v0.0.0-20210402161424-2e8d93401602/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.21.0 h1:tsimM75w1tF/uws5rbeHzIWxEqElMehnc+iW793zsZs= +golang.org/x/oauth2 v0.21.0/go.mod h1:XYTD2NtWslqkgxebSiOHnXEap4TF09sJSc7H1sXbhtI= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= @@ -489,8 +563,9 @@ golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJ golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.1.0 
h1:wsuoTGHzEhffawBOhz5CYhcrV4IdKZbEyZjBMuTp12o= golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.7.0 h1:YsImfSBoP9QPYL0xyKJPq0gcaJdG3rInoqxTWbfQu9M= +golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20181026203630-95b1ffbd15a5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= @@ -508,7 +583,6 @@ golang.org/x/sys v0.0.0-20191005200804-aed5e4c7ecf9/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191228213918-04cbcbbfeed8/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200113162924-86b910548bc1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200122134326-e047566fdf82/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200202164722-d101bd2416d5/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200212091648-12a6c2dcc1e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= @@ -536,22 +610,26 @@ golang.org/x/sys v0.0.0-20210403161142-5e06dd20ab57/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210616045830-e2b7044e8c71/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210809222454-d867a43fc93e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220310020820-b874c991c1a5/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.13.0 h1:Af8nKPmuFypiUBjVoU9V20FiaFXOcuZI21p0ycVYYGE= -golang.org/x/sys v0.13.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.22.0 h1:RI27ohtqKCnwULzJLqkv897zojh5/DwS/ENaMzUOaWI= +golang.org/x/sys v0.22.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod 
h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k= golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo= -golang.org/x/term v0.13.0 h1:bb+I9cTfFazGW51MZqBVmZy7+JEJMouUHTUSKVQLBek= -golang.org/x/term v0.13.0/go.mod h1:LTmsnFJwVN6bCy1rVCoS+qHT1HhALEFxKncY3WNNh4U= +golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk= +golang.org/x/term v0.20.0/go.mod h1:8UkIAJTvZgivsXaD6/pH6U9ecQzZ45awqEOzuCvwpFY= +golang.org/x/term v0.22.0 h1:BbsgPEJULsl2fV/AT3v15Mjva5yXKQDyKf+TbDz7QJk= +golang.org/x/term v0.22.0/go.mod h1:F3qCibpT5AMpCRfhfT53vVJwhLtIVHhB9XDjfFvnMI4= golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= @@ -563,13 +641,15 @@ golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= -golang.org/x/text v0.13.0 h1:ablQoSUd0tRdKxZewP80B+BaqeKJuVhuRxj/dkrun3k= -golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= +golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= +golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= +golang.org/x/text v0.16.0 h1:a94ExnEXNtEwYLGJSIUxnWoxoRz/ZcCsV63ROupILh4= +golang.org/x/text v0.16.0/go.mod h1:GhwF1Be+LQoKShO3cGOHzqOgRrGaYc9AvblQOmPVHnI= golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= -golang.org/x/time v0.3.0 h1:rg5rLMjNzMS1RkNLzCG38eapWhnYLFYXDXj2gOlr8j4= -golang.org/x/time v0.3.0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.5.0 h1:o7cqy6amK/52YcAKIPlM3a+Fpj35zvRj2TP+e1xFSfk= +golang.org/x/time v0.5.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= @@ -650,6 +730,8 @@ google.golang.org/api v0.40.0/go.mod h1:fYKFpnQN0DsDSKRVRcQSDQNtqWPfM9i+zNPxepjR google.golang.org/api v0.41.0/go.mod h1:RkxM5lITDfTzmyKFPt+wGrCJbVfniCr2ool8kTBzRTU= google.golang.org/api v0.43.0/go.mod h1:nQsDGjRXMo4lvh5hP0TKqF244gqhGcr/YSIykhUk/94= google.golang.org/api v0.44.0/go.mod h1:EBOGZqzyhtvMDoxwS97ctnh0zUmYY6CxqXsc1AvkYD8= +google.golang.org/api v0.188.0 h1:51y8fJ/b1AaaBRJr4yWm96fPcuxSo0JcegXE3DaHQHw= +google.golang.org/api v0.188.0/go.mod h1:VR0d+2SIiWOYG3r/jdm7adPW9hI2aRv9ETOSCQ9Beag= google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= google.golang.org/appengine v1.5.0/go.mod 
h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= @@ -698,6 +780,10 @@ google.golang.org/genproto v0.0.0-20210310155132-4ce2db91004e/go.mod h1:FWY/as6D google.golang.org/genproto v0.0.0-20210319143718-93e7006c17a6/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20210402141018-6c239bbf2bb1/go.mod h1:9lPAdzaEmUacj36I+k7YKbEc5CXzPIeORRgDAUOu28A= google.golang.org/genproto v0.0.0-20210602131652-f16073e35f0c/go.mod h1:UODoCrxHCcBojKKwX1terBiRUaqAsFqJiF615XL43r0= +google.golang.org/genproto/googleapis/api v0.0.0-20240701130421-f6361c86f094 h1:0+ozOGcrp+Y8Aq8TLNN2Aliibms5LEzsq99ZZmAGYm0= +google.golang.org/genproto/googleapis/api v0.0.0-20240701130421-f6361c86f094/go.mod h1:fJ/e3If/Q67Mj99hin0hMhiNyCRmt6BQ2aWIJshUSJw= +google.golang.org/genproto/googleapis/rpc v0.0.0-20240708141625-4ad9e859172b h1:04+jVzTs2XBnOZcPsLnmrTGqltqJbZQ1Ey26hjYdQQ0= +google.golang.org/genproto/googleapis/rpc v0.0.0-20240708141625-4ad9e859172b/go.mod h1:Ue6ibwXGpU+dqIcODieyLOcgj7z8+IcskoNIgZxtrFY= google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= @@ -718,6 +804,8 @@ google.golang.org/grpc v1.35.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAG google.golang.org/grpc v1.36.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= google.golang.org/grpc v1.36.1/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= google.golang.org/grpc v1.38.0/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM= +google.golang.org/grpc v1.64.1 h1:LKtvyfbX3UGVPFcGqJ9ItpVWW6oN/2XqTxfAnwRRXiA= +google.golang.org/grpc v1.64.1/go.mod h1:hiQF4LFZelK2WKaP6W0L92zGHtiQdZxk8CrSdvyjeP0= google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= @@ -730,6 +818,8 @@ google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGj google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c= google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= +google.golang.org/protobuf v1.34.2 h1:6xV6lTsCfpGD21XK49h7MhtcApnLqkfYgPcdHftf6hg= +google.golang.org/protobuf v1.34.2/go.mod h1:qYOHts0dSfpeUzUFpOMr/WGzszTmLH+DiWniOlNbLDw= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= diff --git a/cli/packages/api/api.go b/cli/packages/api/api.go index 01f29a03aa..35767cd3f1 100644 --- a/cli/packages/api/api.go +++ b/cli/packages/api/api.go @@ -225,25 +225,6 @@ func CallIsAuthenticated(httpClient *resty.Client) bool { return true } -func CallGetAccessibleEnvironments(httpClient *resty.Client, request GetAccessibleEnvironmentsRequest) (GetAccessibleEnvironmentsResponse, error) { - var accessibleEnvironmentsResponse GetAccessibleEnvironmentsResponse - response, err := httpClient. - R(). 
- SetResult(&accessibleEnvironmentsResponse). - SetHeader("User-Agent", USER_AGENT). - Get(fmt.Sprintf("%v/v2/workspace/%s/environments", config.INFISICAL_URL, request.WorkspaceId)) - - if err != nil { - return GetAccessibleEnvironmentsResponse{}, err - } - - if response.IsError() { - return GetAccessibleEnvironmentsResponse{}, fmt.Errorf("CallGetAccessibleEnvironments: Unsuccessful response: [response=%v] [response-code=%v] [url=%s]", response, response.StatusCode(), response.Request.URL) - } - - return accessibleEnvironmentsResponse, nil -} - func CallGetNewAccessTokenWithRefreshToken(httpClient *resty.Client, refreshToken string) (GetNewAccessTokenWithRefreshTokenResponse, error) { var newAccessToken GetNewAccessTokenWithRefreshTokenResponse response, err := httpClient. @@ -267,45 +248,6 @@ func CallGetNewAccessTokenWithRefreshToken(httpClient *resty.Client, refreshToke return newAccessToken, nil } -func CallGetSecretsV3(httpClient *resty.Client, request GetEncryptedSecretsV3Request) (GetEncryptedSecretsV3Response, error) { - var secretsResponse GetEncryptedSecretsV3Response - - httpRequest := httpClient. - R(). - SetResult(&secretsResponse). - SetHeader("User-Agent", USER_AGENT). - SetQueryParam("environment", request.Environment). - SetQueryParam("workspaceId", request.WorkspaceId) - - if request.Recursive { - httpRequest.SetQueryParam("recursive", "true") - } - - if request.IncludeImport { - httpRequest.SetQueryParam("include_imports", "true") - } - - if request.SecretPath != "" { - httpRequest.SetQueryParam("secretPath", request.SecretPath) - } - - response, err := httpRequest.Get(fmt.Sprintf("%v/v3/secrets", config.INFISICAL_URL)) - - if err != nil { - return GetEncryptedSecretsV3Response{}, fmt.Errorf("CallGetSecretsV3: Unable to complete api request [err=%s]", err) - } - - if response.IsError() { - if response.StatusCode() == 401 { - return GetEncryptedSecretsV3Response{}, fmt.Errorf("CallGetSecretsV3: Request to access secrets with [environment=%v] [path=%v] [workspaceId=%v] is denied. Please check if your authentication method has access to requested scope", request.Environment, request.SecretPath, request.WorkspaceId) - } else { - return GetEncryptedSecretsV3Response{}, fmt.Errorf("CallGetSecretsV3: Unsuccessful response. Please make sure your secret path, workspace and environment name are all correct [response=%v]", response.RawResponse) - } - } - - return secretsResponse, nil -} - func CallGetFoldersV1(httpClient *resty.Client, request GetFoldersV1Request) (GetFoldersV1Response, error) { var foldersResponse GetFoldersV1Response httpRequest := httpClient. @@ -370,34 +312,15 @@ func CallDeleteFolderV1(httpClient *resty.Client, request DeleteFolderV1Request) return folderResponse, nil } -func CallCreateSecretsV3(httpClient *resty.Client, request CreateSecretV3Request) error { +func CallDeleteSecretsRawV3(httpClient *resty.Client, request DeleteSecretV3Request) error { + var secretsResponse GetEncryptedSecretsV3Response response, err := httpClient. R(). SetResult(&secretsResponse). SetHeader("User-Agent", USER_AGENT). SetBody(request). - Post(fmt.Sprintf("%v/v3/secrets/%s", config.INFISICAL_URL, request.SecretName)) - - if err != nil { - return fmt.Errorf("CallCreateSecretsV3: Unable to complete api request [err=%s]", err) - } - - if response.IsError() { - return fmt.Errorf("CallCreateSecretsV3: Unsuccessful response. 
Please make sure your secret path, workspace and environment name are all correct [response=%s]", response) - } - - return nil -} - -func CallDeleteSecretsV3(httpClient *resty.Client, request DeleteSecretV3Request) error { - var secretsResponse GetEncryptedSecretsV3Response - response, err := httpClient. - R(). - SetResult(&secretsResponse). - SetHeader("User-Agent", USER_AGENT). - SetBody(request). - Delete(fmt.Sprintf("%v/v3/secrets/%s", config.INFISICAL_URL, request.SecretName)) + Delete(fmt.Sprintf("%v/v3/secrets/raw/%s", config.INFISICAL_URL, request.SecretName)) if err != nil { return fmt.Errorf("CallDeleteSecretsV3: Unable to complete api request [err=%s]", err) @@ -410,46 +333,6 @@ func CallDeleteSecretsV3(httpClient *resty.Client, request DeleteSecretV3Request return nil } -func CallUpdateSecretsV3(httpClient *resty.Client, request UpdateSecretByNameV3Request, secretName string) error { - var secretsResponse GetEncryptedSecretsV3Response - response, err := httpClient. - R(). - SetResult(&secretsResponse). - SetHeader("User-Agent", USER_AGENT). - SetBody(request). - Patch(fmt.Sprintf("%v/v3/secrets/%s", config.INFISICAL_URL, secretName)) - - if err != nil { - return fmt.Errorf("CallUpdateSecretsV3: Unable to complete api request [err=%s]", err) - } - - if response.IsError() { - return fmt.Errorf("CallUpdateSecretsV3: Unsuccessful response. Please make sure your secret path, workspace and environment name are all correct [response=%s]", response) - } - - return nil -} - -func CallGetSingleSecretByNameV3(httpClient *resty.Client, request CreateSecretV3Request) error { - var secretsResponse GetEncryptedSecretsV3Response - response, err := httpClient. - R(). - SetResult(&secretsResponse). - SetHeader("User-Agent", USER_AGENT). - SetBody(request). - Post(fmt.Sprintf("%v/v3/secrets/%s", config.INFISICAL_URL, request.SecretName)) - - if err != nil { - return fmt.Errorf("CallGetSingleSecretByNameV3: Unable to complete api request [err=%s]", err) - } - - if response.IsError() { - return fmt.Errorf("CallGetSingleSecretByNameV3: Unsuccessful response. Please make sure your secret path, workspace and environment name are all correct [response=%s]", response) - } - - return nil -} - func CallCreateServiceToken(httpClient *resty.Client, request CreateServiceTokenRequest) (CreateServiceTokenResponse, error) { var createServiceTokenResponse CreateServiceTokenResponse response, err := httpClient. @@ -490,7 +373,7 @@ func CallUniversalAuthLogin(httpClient *resty.Client, request UniversalAuthLogin return universalAuthLoginResponse, nil } -func CallUniversalAuthRefreshAccessToken(httpClient *resty.Client, request UniversalAuthRefreshRequest) (UniversalAuthRefreshResponse, error) { +func CallMachineIdentityRefreshAccessToken(httpClient *resty.Client, request UniversalAuthRefreshRequest) (UniversalAuthRefreshResponse, error) { var universalAuthRefreshResponse UniversalAuthRefreshResponse response, err := httpClient. R(). 
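A hypothetical in-repo caller of the raw delete endpoint introduced above — a minimal sketch with placeholder token, project id, and secret name, assuming the CLI's config.INFISICAL_URL has already been initialized:

```go
package main

import (
	"log"

	"github.com/Infisical/infisical-merge/packages/api"
	"github.com/go-resty/resty/v2"
)

func main() {
	// Authenticate the client with a machine identity access token (placeholder).
	httpClient := resty.New().SetAuthToken("<access-token>")

	// Delete a shared secret through the raw v3 endpoint added in this change.
	err := api.CallDeleteSecretsRawV3(httpClient, api.DeleteSecretV3Request{
		SecretName:  "DB_PASSWORD",
		WorkspaceId: "<project-id>",
		Environment: "dev",
		SecretPath:  "/",
	})
	if err != nil {
		log.Fatal(err)
	}
}
```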
@@ -500,11 +383,11 @@ func CallUniversalAuthRefreshAccessToken(httpClient *resty.Client, request Unive Post(fmt.Sprintf("%v/v1/auth/token/renew", config.INFISICAL_URL)) if err != nil { - return UniversalAuthRefreshResponse{}, fmt.Errorf("CallUniversalAuthRefreshAccessToken: Unable to complete api request [err=%s]", err) + return UniversalAuthRefreshResponse{}, fmt.Errorf("CallMachineIdentityRefreshAccessToken: Unable to complete api request [err=%s]", err) } if response.IsError() { - return UniversalAuthRefreshResponse{}, fmt.Errorf("CallUniversalAuthRefreshAccessToken: Unsuccessful response [%v %v] [status-code=%v] [response=%v]", response.Request.Method, response.Request.URL, response.StatusCode(), response.String()) + return UniversalAuthRefreshResponse{}, fmt.Errorf("CallMachineIdentityRefreshAccessToken: Unsuccessful response [%v %v] [status-code=%v] [response=%v]", response.Request.Method, response.Request.URL, response.StatusCode(), response.String()) } return universalAuthRefreshResponse, nil @@ -521,6 +404,10 @@ func CallGetRawSecretsV3(httpClient *resty.Client, request GetRawSecretsV3Reques SetQueryParam("environment", request.Environment). SetQueryParam("secretPath", request.SecretPath) + if request.TagSlugs != "" { + req.SetQueryParam("tagSlugs", request.TagSlugs) + } + if request.IncludeImport { req.SetQueryParam("include_imports", "true") } @@ -528,14 +415,22 @@ func CallGetRawSecretsV3(httpClient *resty.Client, request GetRawSecretsV3Reques req.SetQueryParam("recursive", "true") } + if request.ExpandSecretReferences { + req.SetQueryParam("expandSecretReferences", "true") + } + response, err := req.Get(fmt.Sprintf("%v/v3/secrets/raw", config.INFISICAL_URL)) if err != nil { return GetRawSecretsV3Response{}, fmt.Errorf("CallGetRawSecretsV3: Unable to complete api request [err=%w]", err) } - if response.IsError() && strings.Contains(response.String(), "bot_not_found_error") { - return GetRawSecretsV3Response{}, fmt.Errorf("project with id %s is a legacy project type, please navigate to project settings and disable end to end encryption then try again", request.WorkspaceId) + if response.IsError() && + (strings.Contains(response.String(), "bot_not_found_error") || + strings.Contains(strings.ToLower(response.String()), "failed to find bot key") || + strings.Contains(strings.ToLower(response.String()), "bot is not active")) { + return GetRawSecretsV3Response{}, fmt.Errorf(`Project with id %s is incompatible with your current CLI version. Upgrade your project by visiting the project settings page. If you're self-hosting and project upgrade option isn't yet available, contact your administrator to upgrade your Infisical instance to the latest release. + `, request.WorkspaceId) } if response.IsError() { @@ -547,6 +442,34 @@ func CallGetRawSecretsV3(httpClient *resty.Client, request GetRawSecretsV3Reques return getRawSecretsV3Response, nil } +func CallFetchSingleSecretByName(httpClient *resty.Client, request GetRawSecretV3ByNameRequest) (GetRawSecretV3ByNameResponse, error) { + var getRawSecretV3ByNameResponse GetRawSecretV3ByNameResponse + response, err := httpClient. + R(). + SetHeader("User-Agent", USER_AGENT). + SetResult(&getRawSecretV3ByNameResponse). + SetBody(request). + SetQueryParam("expandSecretReferences", "true"). + SetQueryParam("include_imports", "true"). + SetQueryParam("environment", request.Environment). + SetQueryParam("secretPath", request.SecretPath). + SetQueryParam("workspaceId", request.WorkspaceID). + SetQueryParam("type", "shared"). 
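+		// Note: type=shared pins the lookup to the shared secret value (as opposed to a
+		// personal override); reference expansion and imports are always enabled above.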
+		Get(fmt.Sprintf("%v/v3/secrets/raw/%s", config.INFISICAL_URL, request.SecretName))
+
+	if err != nil {
+		return GetRawSecretV3ByNameResponse{}, fmt.Errorf("CallFetchSingleSecretByName: Unable to complete api request [err=%w]", err)
+	}
+
+	if response.IsError() {
+		return GetRawSecretV3ByNameResponse{}, fmt.Errorf("CallFetchSingleSecretByName: Unsuccessful response [%v %v] [status-code=%v] [response=%v]", response.Request.Method, response.Request.URL, response.StatusCode(), response.String())
+	}
+
+	getRawSecretV3ByNameResponse.ETag = response.Header().Get("etag")
+
+	return getRawSecretV3ByNameResponse, nil
+}
+
 func CallCreateDynamicSecretLeaseV1(httpClient *resty.Client, request CreateDynamicSecretLeaseV1Request) (CreateDynamicSecretLeaseV1Response, error) {
 	var createDynamicSecretLeaseResponse CreateDynamicSecretLeaseV1Response
 	response, err := httpClient.
@@ -566,3 +489,39 @@ func CallCreateDynamicSecretLeaseV1(httpClient *resty.Client, request CreateDyna
 	return createDynamicSecretLeaseResponse, nil
 }
+
+func CallCreateRawSecretsV3(httpClient *resty.Client, request CreateRawSecretV3Request) error {
+	response, err := httpClient.
+		R().
+		SetHeader("User-Agent", USER_AGENT).
+		SetBody(request).
+		Post(fmt.Sprintf("%v/v3/secrets/raw/%s", config.INFISICAL_URL, request.SecretName))
+
+	if err != nil {
+		return fmt.Errorf("CallCreateRawSecretsV3: Unable to complete api request [err=%w]", err)
+	}
+
+	if response.IsError() {
+		return fmt.Errorf("CallCreateRawSecretsV3: Unsuccessful response [%v %v] [status-code=%v] [response=%v]", response.Request.Method, response.Request.URL, response.StatusCode(), response.String())
+	}
+
+	return nil
+}
+
+func CallUpdateRawSecretsV3(httpClient *resty.Client, request UpdateRawSecretByNameV3Request) error {
+	response, err := httpClient.
+		R().
+		SetHeader("User-Agent", USER_AGENT).
+		SetBody(request).
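+		// The secret name travels in the URL path below, while workspaceId, environment,
+		// and secretPath in the request body identify which secret to update.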
+ Patch(fmt.Sprintf("%v/v3/secrets/raw/%s", config.INFISICAL_URL, request.SecretName)) + + if err != nil { + return fmt.Errorf("CallUpdateRawSecretsV3: Unable to complete api request [err=%w]", err) + } + + if response.IsError() { + return fmt.Errorf("CallUpdateRawSecretsV3: Unsuccessful response [%v %v] [status-code=%v] [response=%v]", response.Request.Method, response.Request.URL, response.StatusCode(), response.String()) + } + + return nil +} diff --git a/cli/packages/api/model.go b/cli/packages/api/model.go index 56b9807f7a..bc63218729 100644 --- a/cli/packages/api/model.go +++ b/cli/packages/api/model.go @@ -136,7 +136,9 @@ type GetOrganizationsResponse struct { } type SelectOrganizationResponse struct { - Token string `json:"token"` + Token string `json:"token"` + MfaEnabled bool `json:"isMfaEnabled"` + MfaMethod string `json:"mfaMethod"` } type SelectOrganizationRequest struct { @@ -161,6 +163,14 @@ type Secret struct { PlainTextKey string `json:"plainTextKey"` } +type RawSecret struct { + SecretKey string `json:"secretKey,omitempty"` + SecretValue string `json:"secretValue,omitempty"` + Type string `json:"type,omitempty"` + SecretComment string `json:"secretComment,omitempty"` + ID string `json:"id,omitempty"` +} + type GetEncryptedWorkspaceKeyRequest struct { WorkspaceId string `json:"workspaceId"` } @@ -233,6 +243,7 @@ type GetLoginOneV2Response struct { type GetLoginTwoV2Request struct { Email string `json:"email"` ClientProof string `json:"clientProof"` + Password string `json:"password"` } type GetLoginTwoV2Response struct { @@ -250,8 +261,9 @@ type GetLoginTwoV2Response struct { } type VerifyMfaTokenRequest struct { - Email string `json:"email"` - MFAToken string `json:"mfaToken"` + Email string `json:"email"` + MFAToken string `json:"mfaToken"` + MFAMethod string `json:"mfaMethod"` } type VerifyMfaTokenResponse struct { @@ -409,12 +421,23 @@ type CreateSecretV3Request struct { SecretPath string `json:"secretPath"` } +type CreateRawSecretV3Request struct { + SecretName string `json:"-"` + WorkspaceID string `json:"workspaceId"` + Type string `json:"type,omitempty"` + Environment string `json:"environment"` + SecretPath string `json:"secretPath,omitempty"` + SecretValue string `json:"secretValue"` + SecretComment string `json:"secretComment,omitempty"` + SkipMultilineEncoding bool `json:"skipMultilineEncoding,omitempty"` +} + type DeleteSecretV3Request struct { SecretName string `json:"secretName"` WorkspaceId string `json:"workspaceId"` Environment string `json:"environment"` - Type string `json:"type"` - SecretPath string `json:"secretPath"` + Type string `json:"type,omitempty"` + SecretPath string `json:"secretPath,omitempty"` } type UpdateSecretByNameV3Request struct { @@ -427,6 +450,15 @@ type UpdateSecretByNameV3Request struct { SecretValueTag string `json:"secretValueTag"` } +type UpdateRawSecretByNameV3Request struct { + SecretName string `json:"-"` + WorkspaceID string `json:"workspaceId"` + Environment string `json:"environment"` + SecretPath string `json:"secretPath,omitempty"` + SecretValue string `json:"secretValue"` + Type string `json:"type,omitempty"` +} + type GetSingleSecretByNameV3Request struct { SecretName string `json:"secretName"` WorkspaceId string `json:"workspaceId"` @@ -540,11 +572,13 @@ type CreateDynamicSecretLeaseV1Response struct { } type GetRawSecretsV3Request struct { - Environment string `json:"environment"` - WorkspaceId string `json:"workspaceId"` - SecretPath string `json:"secretPath"` - IncludeImport bool `json:"include_imports"` - 
Recursive bool `json:"recursive"` + Environment string `json:"environment"` + WorkspaceId string `json:"workspaceId"` + SecretPath string `json:"secretPath"` + IncludeImport bool `json:"include_imports"` + Recursive bool `json:"recursive"` + TagSlugs string `json:"tagSlugs,omitempty"` + ExpandSecretReferences bool `json:"expandSecretReferences,omitempty"` } type GetRawSecretsV3Response struct { @@ -557,7 +591,31 @@ type GetRawSecretsV3Response struct { SecretKey string `json:"secretKey"` SecretValue string `json:"secretValue"` SecretComment string `json:"secretComment"` + SecretPath string `json:"secretPath"` } `json:"secrets"` Imports []ImportedRawSecretV3 `json:"imports"` ETag string } + +type GetRawSecretV3ByNameRequest struct { + SecretName string `json:"secretName"` + WorkspaceID string `json:"workspaceId"` + Type string `json:"type,omitempty"` + Environment string `json:"environment"` + SecretPath string `json:"secretPath,omitempty"` +} + +type GetRawSecretV3ByNameResponse struct { + Secret struct { + ID string `json:"_id"` + Version int `json:"version"` + Workspace string `json:"workspace"` + Type string `json:"type"` + Environment string `json:"environment"` + SecretKey string `json:"secretKey"` + SecretValue string `json:"secretValue"` + SecretComment string `json:"secretComment"` + SecretPath string `json:"secretPath"` + } `json:"secret"` + ETag string +} diff --git a/cli/packages/cmd/agent.go b/cli/packages/cmd/agent.go index f2b05d1f0c..f4fe94a6e7 100644 --- a/cli/packages/cmd/agent.go +++ b/cli/packages/cmd/agent.go @@ -7,6 +7,7 @@ import ( "bytes" "context" "encoding/base64" + "encoding/json" "fmt" "io/ioutil" "os" @@ -20,6 +21,7 @@ import ( "text/template" "time" + infisicalSdk "github.com/infisical/go-sdk" "github.com/rs/zerolog/log" "gopkg.in/yaml.v2" @@ -59,9 +61,26 @@ type UniversalAuth struct { RemoveClientSecretOnRead bool `yaml:"remove_client_secret_on_read"` } -type OAuthConfig struct { - ClientID string `yaml:"client-id"` - ClientSecret string `yaml:"client-secret"` +type KubernetesAuth struct { + IdentityID string `yaml:"identity-id"` + ServiceAccountToken string `yaml:"service-account-token"` +} + +type AzureAuth struct { + IdentityID string `yaml:"identity-id"` +} + +type GcpIdTokenAuth struct { + IdentityID string `yaml:"identity-id"` +} + +type GcpIamAuth struct { + IdentityID string `yaml:"identity-id"` + ServiceAccountKey string `yaml:"service-account-key"` +} + +type AwsIamAuth struct { + IdentityID string `yaml:"identity-id"` } type Sink struct { @@ -77,6 +96,7 @@ type Template struct { SourcePath string `yaml:"source-path"` Base64TemplateContent string `yaml:"base64-template-content"` DestinationPath string `yaml:"destination-path"` + TemplateContent string `yaml:"template-content"` Config struct { // Configurations for the template PollingInterval string `yaml:"polling-interval"` // How often to poll for changes in the secret @@ -87,15 +107,6 @@ type Template struct { } `yaml:"config"` } -func newAgentTemplateChannels(templates []Template) map[string]chan bool { - // we keep each destination as an identifier for various channel - templateChannel := make(map[string]chan bool) - for _, template := range templates { - templateChannel[template.DestinationPath] = make(chan bool) - } - return templateChannel -} - type DynamicSecretLease struct { LeaseID string ExpireAt time.Time @@ -256,6 +267,14 @@ func WriteBytesToFile(data *bytes.Buffer, outputPath string) error { return err } +func ParseAuthConfig(authConfigFile []byte, destination interface{}) error { + if 
err := yaml.Unmarshal(authConfigFile, destination); err != nil {
+		return err
+	}
+
+	return nil
+}
+
 func ParseAgentConfig(configFile []byte) (*Config, error) {
 	var rawConfig struct {
 		Infisical InfisicalConfig `yaml:"infisical"`
@@ -283,42 +302,44 @@ func ParseAgentConfig(configFile []byte) (*Config, error) {
 	config := &Config{
 		Infisical: rawConfig.Infisical,
 		Auth: AuthConfig{
-			Type: rawConfig.Auth.Type,
+			Type:   rawConfig.Auth.Type,
+			Config: rawConfig.Auth.Config,
 		},
 		Sinks:     rawConfig.Sinks,
 		Templates: rawConfig.Templates,
 	}

-	// Marshal and then unmarshal the config based on the type
-	configBytes, err := yaml.Marshal(rawConfig.Auth.Config)
-	if err != nil {
-		return nil, err
-	}
-
-	switch rawConfig.Auth.Type {
-	case "universal-auth":
-		var tokenConfig UniversalAuth
-		if err := yaml.Unmarshal(configBytes, &tokenConfig); err != nil {
-			return nil, err
-		}
-
-		config.Auth.Config = tokenConfig
-	case "oauth": // aws, gcp, k8s service account, etc
-		var oauthConfig OAuthConfig
-		if err := yaml.Unmarshal(configBytes, &oauthConfig); err != nil {
-			return nil, err
-		}
-		config.Auth.Config = oauthConfig
-	default:
-		return nil, fmt.Errorf("unknown auth type: %s", rawConfig.Auth.Type)
-	}
-
 	return config, nil
 }

-func secretTemplateFunction(accessToken string, existingEtag string, currentEtag *string) func(string, string, string) ([]models.SingleEnvironmentVariable, error) {
-	return func(projectID, envSlug, secretPath string) ([]models.SingleEnvironmentVariable, error) {
-		res, err := util.GetPlainTextSecretsViaMachineIdentity(accessToken, projectID, envSlug, secretPath, false, false)
+type secretArguments struct {
+	IsRecursive                  bool  `json:"recursive"`
+	ShouldExpandSecretReferences *bool `json:"expandSecretReferences,omitempty"`
+}
+
+func (s *secretArguments) SetDefaults() {
+	if s.ShouldExpandSecretReferences == nil {
+		expand := true
+		s.ShouldExpandSecretReferences = &expand
+	}
+}
+
+func secretTemplateFunction(accessToken string, existingEtag string, currentEtag *string) func(string, string, string, ...string) ([]models.SingleEnvironmentVariable, error) {
+	// The variadic ...string parameter emulates an optional argument, since Go has no optional parameters.
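+	// For illustration (hypothetical ids and values, not part of this change), a template
+	// rendered via ProcessTemplate can supply the optional argument as a JSON-encoded
+	// secretArguments payload:
+	//
+	//	{{ listSecrets "<project-id>" "dev" "/" `{"recursive": true, "expandSecretReferences": false}` }}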
+	// Only the first element of the slice is read; any further elements are ignored.
+	return func(projectID, envSlug, secretPath string, args ...string) ([]models.SingleEnvironmentVariable, error) {
+		var parsedArguments secretArguments
+		// the argument is optional, so only parse it when it is supplied
+		if len(args) > 0 {
+			err := json.Unmarshal([]byte(args[0]), &parsedArguments)
+			if err != nil {
+				return nil, err
+			}
+		}
+
+		parsedArguments.SetDefaults()
+
+		res, err := util.GetPlainTextSecretsV3(accessToken, projectID, envSlug, secretPath, false, parsedArguments.IsRecursive, "", *parsedArguments.ShouldExpandSecretReferences)
 		if err != nil {
 			return nil, err
 		}
@@ -327,9 +348,22 @@ func secretTemplateFunction(accessToken string, existingEtag string, currentEtag
 			*currentEtag = res.Etag
 		}

-		expandedSecrets := util.ExpandSecrets(res.Secrets, models.ExpandSecretsAuthentication{UniversalAuthAccessToken: accessToken}, "")
+		return res.Secrets, nil
+	}
+}

-		return expandedSecrets, nil
+func getSingleSecretTemplateFunction(accessToken string, existingEtag string, currentEtag *string) func(string, string, string, string) (models.SingleEnvironmentVariable, error) {
+	return func(projectID, envSlug, secretPath, secretName string) (models.SingleEnvironmentVariable, error) {
+		secret, requestEtag, err := util.GetSinglePlainTextSecretByNameV3(accessToken, projectID, envSlug, secretPath, secretName)
+		if err != nil {
+			return models.SingleEnvironmentVariable{}, err
+		}
+
+		if existingEtag != requestEtag {
+			*currentEtag = requestEtag
+		}
+
+		return secret, nil
 	}
 }

@@ -337,7 +371,7 @@ func dynamicSecretTemplateFunction(accessToken string, dynamicSecretManager *Dyn
 	return func(args ...string) (map[string]interface{}, error) {
 		argLength := len(args)
 		if argLength != 4 && argLength != 5 {
-			return nil, fmt.Errorf("Invalid arguments found for dynamic-secret function. Check template %i", templateId)
+			return nil, fmt.Errorf("invalid arguments found for dynamic-secret function. Check template %d", templateId)
 		}

 		projectSlug, envSlug, secretPath, slug, ttl := args[0], args[1], args[2], args[3], ""
@@ -364,9 +398,12 @@ func ProcessTemplate(templateId int, templatePath string, data interface{}, acce
 	// custom template function to fetch secrets from Infisical
 	secretFunction := secretTemplateFunction(accessToken, existingEtag, currentEtag)
 	dynamicSecretFunction := dynamicSecretTemplateFunction(accessToken, dynamicSecretManager, templateId)
+	getSingleSecretFunction := getSingleSecretTemplateFunction(accessToken, existingEtag, currentEtag)
 	funcs := template.FuncMap{
-		"secret":         secretFunction,
-		"dynamic_secret": dynamicSecretFunction,
+		"secret":          secretFunction, // deprecated
+		"listSecrets":     secretFunction,
+		"dynamic_secret":  dynamicSecretFunction,
+		"getSecretByName": getSingleSecretFunction,
 		"minus": func(a, b int) int {
 			return a - b
 		},
@@ -420,33 +457,79 @@ func ProcessBase64Template(templateId int, encodedTemplate string, data interfac
 	return &buf, nil
 }

-type AgentManager struct {
-	accessToken              string
-	accessTokenTTL           time.Duration
-	accessTokenMaxTTL        time.Duration
-	accessTokenFetchedTime   time.Time
-	accessTokenRefreshedTime time.Time
-	mutex                    sync.Mutex
-	filePaths                []Sink // Store file paths if needed
-	templates                []Template
-	dynamicSecretLeases      *DynamicSecretLeaseManager
-	clientIdPath             string
-	clientSecretPath         string
-	newAccessTokenNotificationChan chan bool
-	removeClientSecretOnRead       bool
-	cachedClientSecret             string
-	exitAfterAuth                  bool
+func ProcessLiteralTemplate(templateId int, templateString string, data interface{}, accessToken string, existingEtag string, currentEtag *string, dynamicSecretLeaser *DynamicSecretLeaseManager) (*bytes.Buffer, error) {
+	secretFunction := secretTemplateFunction(accessToken, existingEtag, currentEtag) // TODO: Fix this
+	dynamicSecretFunction := dynamicSecretTemplateFunction(accessToken, dynamicSecretLeaser, templateId)
+	funcs := template.FuncMap{
+		"secret":         secretFunction,
+		"dynamic_secret": dynamicSecretFunction,
+	}
+
+	templateName := "literalTemplate"
+
+	tmpl, err := template.New(templateName).Funcs(funcs).Parse(templateString)
+	if err != nil {
+		return nil, err
+	}
+
+	var buf bytes.Buffer
+	if err := tmpl.Execute(&buf, data); err != nil {
+		return nil, err
+	}
+
+	return &buf, nil
 }

-func NewAgentManager(fileDeposits []Sink, templates []Template, clientIdPath string, clientSecretPath string, newAccessTokenNotificationChan chan bool, removeClientSecretOnRead bool, exitAfterAuth bool) *AgentManager {
+type AgentManager struct {
+	accessToken              string
+	accessTokenTTL           time.Duration
+	accessTokenMaxTTL        time.Duration
+	accessTokenFetchedTime   time.Time
+	accessTokenRefreshedTime time.Time
+	mutex                    sync.Mutex
+	filePaths                []Sink // Store file paths if needed
+	templates                []Template
+	dynamicSecretLeases      *DynamicSecretLeaseManager
+
+	authConfigBytes []byte
+	authStrategy    util.AuthStrategyType
+
+	newAccessTokenNotificationChan        chan bool
+	removeUniversalAuthClientSecretOnRead bool
+	cachedUniversalAuthClientSecret       string
+	exitAfterAuth                         bool
+
+	infisicalClient infisicalSdk.InfisicalClientInterface
+}
+
+type NewAgentMangerOptions struct {
+	FileDeposits []Sink
+	Templates    []Template
+
+	AuthConfigBytes []byte
+	AuthStrategy    util.AuthStrategyType
+
+	NewAccessTokenNotificationChan chan bool
+	ExitAfterAuth                  bool
+}
+
+func NewAgentManager(options NewAgentMangerOptions) *AgentManager {
+	return &AgentManager{
-		filePaths:                      fileDeposits,
-		templates:                      templates,
-		clientIdPath:                   clientIdPath,
-		clientSecretPath:               
clientSecretPath, - newAccessTokenNotificationChan: newAccessTokenNotificationChan, - removeClientSecretOnRead: removeClientSecretOnRead, - exitAfterAuth: exitAfterAuth, + filePaths: options.FileDeposits, + templates: options.Templates, + + authConfigBytes: options.AuthConfigBytes, + authStrategy: options.AuthStrategy, + + newAccessTokenNotificationChan: options.NewAccessTokenNotificationChan, + exitAfterAuth: options.ExitAfterAuth, + + infisicalClient: infisicalSdk.NewInfisicalClient(context.Background(), infisicalSdk.Config{ + SiteUrl: config.INFISICAL_URL, + UserAgent: api.USER_AGENT, // ? Should we perhaps use a different user agent for the Agent for better analytics? + AutoTokenRefresh: false, + }), } } @@ -469,52 +552,164 @@ func (tm *AgentManager) GetToken() string { return tm.accessToken } +func (tm *AgentManager) FetchUniversalAuthAccessToken() (credential infisicalSdk.MachineIdentityCredential, e error) { + + var universalAuthConfig UniversalAuth + if err := ParseAuthConfig(tm.authConfigBytes, &universalAuthConfig); err != nil { + return infisicalSdk.MachineIdentityCredential{}, fmt.Errorf("unable to parse auth config due to error: %v", err) + } + + clientID, err := util.GetEnvVarOrFileContent(util.INFISICAL_UNIVERSAL_AUTH_CLIENT_ID_NAME, universalAuthConfig.ClientIDPath) + if err != nil { + return infisicalSdk.MachineIdentityCredential{}, fmt.Errorf("unable to get client id: %v", err) + } + + clientSecret, err := util.GetEnvVarOrFileContent("INFISICAL_UNIVERSAL_CLIENT_SECRET", universalAuthConfig.ClientSecretPath) + if err != nil { + if len(tm.cachedUniversalAuthClientSecret) == 0 { + return infisicalSdk.MachineIdentityCredential{}, fmt.Errorf("unable to get client secret: %v", err) + } + clientSecret = tm.cachedUniversalAuthClientSecret + } + + tm.cachedUniversalAuthClientSecret = clientSecret + if tm.removeUniversalAuthClientSecretOnRead { + defer os.Remove(universalAuthConfig.ClientSecretPath) + } + + return tm.infisicalClient.Auth().UniversalAuthLogin(clientID, clientSecret) + +} + +func (tm *AgentManager) FetchKubernetesAuthAccessToken() (credential infisicalSdk.MachineIdentityCredential, err error) { + + var kubernetesAuthConfig KubernetesAuth + if err := ParseAuthConfig(tm.authConfigBytes, &kubernetesAuthConfig); err != nil { + return infisicalSdk.MachineIdentityCredential{}, fmt.Errorf("unable to parse auth config due to error: %v", err) + } + + identityId, err := util.GetEnvVarOrFileContent(util.INFISICAL_MACHINE_IDENTITY_ID_NAME, kubernetesAuthConfig.IdentityID) + if err != nil { + return infisicalSdk.MachineIdentityCredential{}, fmt.Errorf("unable to get identity id: %v", err) + } + + serviceAccountTokenPath := os.Getenv(util.INFISICAL_KUBERNETES_SERVICE_ACCOUNT_TOKEN_NAME) + if serviceAccountTokenPath == "" { + serviceAccountTokenPath = kubernetesAuthConfig.ServiceAccountToken + if serviceAccountTokenPath == "" { + serviceAccountTokenPath = "/var/run/secrets/kubernetes.io/serviceaccount/token" + } + } + + return tm.infisicalClient.Auth().KubernetesAuthLogin(identityId, serviceAccountTokenPath) + +} + +func (tm *AgentManager) FetchAzureAuthAccessToken() (credential infisicalSdk.MachineIdentityCredential, err error) { + + var azureAuthConfig AzureAuth + if err := ParseAuthConfig(tm.authConfigBytes, &azureAuthConfig); err != nil { + return infisicalSdk.MachineIdentityCredential{}, fmt.Errorf("unable to parse auth config due to error: %v", err) + } + + identityId, err := util.GetEnvVarOrFileContent(util.INFISICAL_MACHINE_IDENTITY_ID_NAME, azureAuthConfig.IdentityID) + if 
err != nil { + return infisicalSdk.MachineIdentityCredential{}, fmt.Errorf("unable to get identity id: %v", err) + } + + return tm.infisicalClient.Auth().AzureAuthLogin(identityId, "") + +} + +func (tm *AgentManager) FetchGcpIdTokenAuthAccessToken() (credential infisicalSdk.MachineIdentityCredential, err error) { + + var gcpIdTokenAuthConfig GcpIdTokenAuth + if err := ParseAuthConfig(tm.authConfigBytes, &gcpIdTokenAuthConfig); err != nil { + return infisicalSdk.MachineIdentityCredential{}, fmt.Errorf("unable to parse auth config due to error: %v", err) + } + + identityId, err := util.GetEnvVarOrFileContent(util.INFISICAL_MACHINE_IDENTITY_ID_NAME, gcpIdTokenAuthConfig.IdentityID) + if err != nil { + return infisicalSdk.MachineIdentityCredential{}, fmt.Errorf("unable to get identity id: %v", err) + } + + return tm.infisicalClient.Auth().GcpIdTokenAuthLogin(identityId) + +} + +func (tm *AgentManager) FetchGcpIamAuthAccessToken() (credential infisicalSdk.MachineIdentityCredential, err error) { + + var gcpIamAuthConfig GcpIamAuth + if err := ParseAuthConfig(tm.authConfigBytes, &gcpIamAuthConfig); err != nil { + return infisicalSdk.MachineIdentityCredential{}, fmt.Errorf("unable to parse auth config due to error: %v", err) + } + + identityId, err := util.GetEnvVarOrFileContent(util.INFISICAL_MACHINE_IDENTITY_ID_NAME, gcpIamAuthConfig.IdentityID) + if err != nil { + return infisicalSdk.MachineIdentityCredential{}, fmt.Errorf("unable to get identity id: %v", err) + } + + serviceAccountKeyPath := os.Getenv(util.INFISICAL_GCP_IAM_SERVICE_ACCOUNT_KEY_FILE_PATH_NAME) + if serviceAccountKeyPath == "" { + // we don't need to read this file, because the service account key path is directly read inside the sdk + serviceAccountKeyPath = gcpIamAuthConfig.ServiceAccountKey + if serviceAccountKeyPath == "" { + return infisicalSdk.MachineIdentityCredential{}, fmt.Errorf("gcp service account key path not found") + } + } + + return tm.infisicalClient.Auth().GcpIamAuthLogin(identityId, serviceAccountKeyPath) + +} + +func (tm *AgentManager) FetchAwsIamAuthAccessToken() (credential infisicalSdk.MachineIdentityCredential, err error) { + + var awsIamAuthConfig AwsIamAuth + if err := ParseAuthConfig(tm.authConfigBytes, &awsIamAuthConfig); err != nil { + return infisicalSdk.MachineIdentityCredential{}, fmt.Errorf("unable to parse auth config due to error: %v", err) + } + + identityId, err := util.GetEnvVarOrFileContent(util.INFISICAL_MACHINE_IDENTITY_ID_NAME, awsIamAuthConfig.IdentityID) + + if err != nil { + return infisicalSdk.MachineIdentityCredential{}, fmt.Errorf("unable to get identity id: %v", err) + } + + return tm.infisicalClient.Auth().AwsIamAuthLogin(identityId) + +} + // Fetches a new access token using client credentials func (tm *AgentManager) FetchNewAccessToken() error { - clientID := os.Getenv(util.INFISICAL_UNIVERSAL_AUTH_CLIENT_ID_NAME) - if clientID == "" { - clientIDAsByte, err := ReadFile(tm.clientIdPath) - if err != nil { - return fmt.Errorf("unable to read client id from file path '%s' due to error: %v", tm.clientIdPath, err) - } - clientID = string(clientIDAsByte) + + authStrategies := map[util.AuthStrategyType]func() (credential infisicalSdk.MachineIdentityCredential, e error){ + util.AuthStrategy.UNIVERSAL_AUTH: tm.FetchUniversalAuthAccessToken, + util.AuthStrategy.KUBERNETES_AUTH: tm.FetchKubernetesAuthAccessToken, + util.AuthStrategy.AZURE_AUTH: tm.FetchAzureAuthAccessToken, + util.AuthStrategy.GCP_ID_TOKEN_AUTH: tm.FetchGcpIdTokenAuthAccessToken, + util.AuthStrategy.GCP_IAM_AUTH: 
tm.FetchGcpIamAuthAccessToken, + util.AuthStrategy.AWS_IAM_AUTH: tm.FetchAwsIamAuthAccessToken, } - clientSecret := os.Getenv("INFISICAL_UNIVERSAL_CLIENT_SECRET") - if clientSecret == "" { - clientSecretAsByte, err := ReadFile(tm.clientSecretPath) - if err != nil { - if len(tm.cachedClientSecret) == 0 { - return fmt.Errorf("unable to read client secret from file and no cached client secret found: %v", err) - } else { - clientSecretAsByte = []byte(tm.cachedClientSecret) - } - } - clientSecret = string(clientSecretAsByte) + if _, ok := authStrategies[tm.authStrategy]; !ok { + return fmt.Errorf("auth strategy %s not found", tm.authStrategy) } - // remove client secret after first read - if tm.removeClientSecretOnRead { - os.Remove(tm.clientSecretPath) - } + credential, err := authStrategies[tm.authStrategy]() - // save as cache in memory - tm.cachedClientSecret = clientSecret - - loginResponse, err := util.UniversalAuthLogin(clientID, clientSecret) if err != nil { return err } - accessTokenTTL := time.Duration(loginResponse.AccessTokenTTL * int(time.Second)) - accessTokenMaxTTL := time.Duration(loginResponse.AccessTokenMaxTTL * int(time.Second)) + accessTokenTTL := time.Duration(credential.ExpiresIn * int64(time.Second)) + accessTokenMaxTTL := time.Duration(credential.AccessTokenMaxTTL * int64(time.Second)) if accessTokenTTL <= time.Duration(5)*time.Second { - util.PrintErrorMessageAndExit("At this this, agent does not support refresh of tokens with 5 seconds or less ttl. Please increase access token ttl and try again") + util.PrintErrorMessageAndExit("At this time, agent does not support refresh of tokens with 5 seconds or less ttl. Please increase access token ttl and try again") } tm.accessTokenFetchedTime = time.Now() - tm.SetToken(loginResponse.AccessToken, accessTokenTTL, accessTokenMaxTTL) + tm.SetToken(credential.AccessToken, accessTokenTTL, accessTokenMaxTTL) return nil } @@ -527,7 +722,7 @@ func (tm *AgentManager) RefreshAccessToken() error { SetRetryWaitTime(5 * time.Second) accessToken := tm.GetToken() - response, err := api.CallUniversalAuthRefreshAccessToken(httpClient, api.UniversalAuthRefreshRequest{AccessToken: accessToken}) + response, err := api.CallMachineIdentityRefreshAccessToken(httpClient, api.UniversalAuthRefreshRequest{AccessToken: accessToken}) if err != nil { return err } @@ -564,6 +759,7 @@ func (tm *AgentManager) ManageTokenLifecycle() { continue } } else if time.Now().After(accessTokenMaxTTLExpiresInTime) { + // case: token has reached max ttl and we should re-authenticate entirely (cannot refresh) log.Info().Msgf("token has reached max ttl, attempting to re authenticate...") err := tm.FetchNewAccessToken() if err != nil { @@ -574,6 +770,7 @@ func (tm *AgentManager) ManageTokenLifecycle() { continue } } else { + // case: token ttl has expired, but the token is still within max ttl, so we can refresh log.Info().Msgf("attempting to refresh existing token...") err := tm.RefreshAccessToken() if err != nil { @@ -672,6 +869,8 @@ func (tm *AgentManager) MonitorSecretChanges(secretTemplate Template, templateId if secretTemplate.SourcePath != "" { processedTemplate, err = ProcessTemplate(templateId, secretTemplate.SourcePath, nil, token, existingEtag, ¤tEtag, tm.dynamicSecretLeases) + } else if secretTemplate.TemplateContent != "" { + processedTemplate, err = ProcessLiteralTemplate(templateId, secretTemplate.TemplateContent, nil, token, existingEtag, ¤tEtag, tm.dynamicSecretLeases) } else { processedTemplate, err = ProcessBase64Template(templateId, 
secretTemplate.Base64TemplateContent, nil, token, existingEtag, ¤tEtag, tm.dynamicSecretLeases) } @@ -770,18 +969,33 @@ var agentCmd = &cobra.Command{ return } - if agentConfig.Auth.Type != "universal-auth" { - util.PrintErrorMessageAndExit("Only auth type of 'universal-auth' is supported at this time") - } + authMethodValid, authStrategy := util.IsAuthMethodValid(agentConfig.Auth.Type, false) - configUniversalAuthType := agentConfig.Auth.Config.(UniversalAuth) + if !authMethodValid { + util.PrintErrorMessageAndExit(fmt.Sprintf("The auth method '%s' is not supported.", agentConfig.Auth.Type)) + } tokenRefreshNotifier := make(chan bool) sigChan := make(chan os.Signal, 1) signal.Notify(sigChan, syscall.SIGINT, syscall.SIGTERM) filePaths := agentConfig.Sinks - tm := NewAgentManager(filePaths, agentConfig.Templates, configUniversalAuthType.ClientIDPath, configUniversalAuthType.ClientSecretPath, tokenRefreshNotifier, configUniversalAuthType.RemoveClientSecretOnRead, agentConfig.Infisical.ExitAfterAuth) + + configBytes, err := yaml.Marshal(agentConfig.Auth.Config) + if err != nil { + log.Error().Msgf("unable to marshal auth config because %v", err) + return + } + + tm := NewAgentManager(NewAgentMangerOptions{ + FileDeposits: filePaths, + Templates: agentConfig.Templates, + AuthConfigBytes: configBytes, + NewAccessTokenNotificationChan: tokenRefreshNotifier, + ExitAfterAuth: agentConfig.Infisical.ExitAfterAuth, + AuthStrategy: authStrategy, + }) + tm.dynamicSecretLeases = NewDynamicSecretLeaseManager(sigChan) go tm.ManageTokenLifecycle() diff --git a/cli/packages/cmd/export.go b/cli/packages/cmd/export.go index 983c192551..6f02408fdf 100644 --- a/cli/packages/cmd/export.go +++ b/cli/packages/cmd/export.go @@ -14,6 +14,7 @@ import ( "github.com/Infisical/infisical-merge/packages/util" "github.com/rs/zerolog/log" "github.com/spf13/cobra" + "gopkg.in/yaml.v2" ) const ( @@ -55,6 +56,11 @@ var exportCmd = &cobra.Command{ util.HandleError(err) } + token, err := util.GetInfisicalToken(cmd) + if err != nil { + util.HandleError(err, "Unable to parse flag") + } + format, err := cmd.Flags().GetString("format") if err != nil { util.HandleError(err) @@ -70,11 +76,6 @@ var exportCmd = &cobra.Command{ util.HandleError(err, "Unable to parse flag") } - token, err := util.GetInfisicalToken(cmd) - if err != nil { - util.HandleError(err, "Unable to parse flag") - } - tagSlugs, err := cmd.Flags().GetString("tags") if err != nil { util.HandleError(err, "Unable to parse flag") @@ -86,11 +87,12 @@ var exportCmd = &cobra.Command{ } request := models.GetAllSecretsParameters{ - Environment: environmentName, - TagSlugs: tagSlugs, - WorkspaceId: projectId, - SecretsPath: secretsPath, - IncludeImport: includeImports, + Environment: environmentName, + TagSlugs: tagSlugs, + WorkspaceId: projectId, + SecretsPath: secretsPath, + IncludeImport: includeImports, + ExpandSecretReferences: shouldExpandSecrets, } if token != nil && token.Type == util.SERVICE_TOKEN_IDENTIFIER { @@ -136,18 +138,6 @@ var exportCmd = &cobra.Command{ } var output string - if shouldExpandSecrets { - - authParams := models.ExpandSecretsAuthentication{} - - if token != nil && token.Type == util.SERVICE_TOKEN_IDENTIFIER { - authParams.InfisicalToken = token.Token - } else if token != nil && token.Type == util.UNIVERSAL_AUTH_TOKEN_IDENTIFIER { - authParams.UniversalAuthAccessToken = token.Token - } - - secrets = util.ExpandSecrets(secrets, authParams, "") - } secrets = util.FilterSecretsByTag(secrets, tagSlugs) secrets = util.SortSecretsByKeys(secrets) @@ -169,9 
+159,9 @@ func init() { exportCmd.Flags().StringP("format", "f", "dotenv", "Set the format of the output file (dotenv, json, csv)") exportCmd.Flags().Bool("secret-overriding", true, "Prioritizes personal secrets, if any, with the same name over shared secrets") exportCmd.Flags().Bool("include-imports", true, "Imported linked secrets") - exportCmd.Flags().String("token", "", "Fetch secrets using the Infisical Token") + exportCmd.Flags().String("token", "", "Fetch secrets using service token or machine identity access token") exportCmd.Flags().StringP("tags", "t", "", "filter secrets by tag slugs") - exportCmd.Flags().String("projectId", "", "manually set the projectId to fetch secrets from") + exportCmd.Flags().String("projectId", "", "manually set the projectId to export secrets from") exportCmd.Flags().String("path", "/", "get secrets within a folder path") exportCmd.Flags().String("template", "", "The path to the template file used to render secrets") } @@ -188,7 +178,7 @@ func formatEnvs(envs []models.SingleEnvironmentVariable, format string) (string, case FormatCSV: return formatAsCSV(envs), nil case FormatYaml: - return formatAsYaml(envs), nil + return formatAsYaml(envs) default: return "", fmt.Errorf("invalid format type: %s. Available format types are [%s]", format, []string{FormatDotenv, FormatJson, FormatCSV, FormatYaml, FormatDotEnvExport}) } @@ -224,12 +214,18 @@ func formatAsDotEnvExport(envs []models.SingleEnvironmentVariable) string { return dotenv } -func formatAsYaml(envs []models.SingleEnvironmentVariable) string { - var dotenv string +func formatAsYaml(envs []models.SingleEnvironmentVariable) (string, error) { + m := make(map[string]string) for _, env := range envs { - dotenv += fmt.Sprintf("%s: %s\n", env.Key, env.Value) + m[env.Key] = env.Value } - return dotenv + + yamlBytes, err := yaml.Marshal(m) + if err != nil { + return "", fmt.Errorf("failed to format environment variables as YAML: %w", err) + } + + return string(yamlBytes), nil } // Format environment variables as a JSON file diff --git a/cli/packages/cmd/export_test.go b/cli/packages/cmd/export_test.go new file mode 100644 index 0000000000..1be0a7ed27 --- /dev/null +++ b/cli/packages/cmd/export_test.go @@ -0,0 +1,79 @@ +package cmd + +import ( + "testing" + + "github.com/Infisical/infisical-merge/packages/models" + "github.com/stretchr/testify/assert" + "gopkg.in/yaml.v2" +) + +func TestFormatAsYaml(t *testing.T) { + tests := []struct { + name string + input []models.SingleEnvironmentVariable + expected string + }{ + { + name: "Empty input", + input: []models.SingleEnvironmentVariable{}, + expected: "{}\n", + }, + { + name: "Single environment variable", + input: []models.SingleEnvironmentVariable{ + {Key: "KEY1", Value: "VALUE1"}, + }, + expected: "KEY1: VALUE1\n", + }, + { + name: "Multiple environment variables", + input: []models.SingleEnvironmentVariable{ + {Key: "KEY1", Value: "VALUE1"}, + {Key: "KEY2", Value: "VALUE2"}, + {Key: "KEY3", Value: "VALUE3"}, + }, + expected: "KEY1: VALUE1\nKEY2: VALUE2\nKEY3: VALUE3\n", + }, + { + name: "Overwriting duplicate keys", + input: []models.SingleEnvironmentVariable{ + {Key: "KEY1", Value: "VALUE1"}, + {Key: "KEY1", Value: "VALUE2"}, + }, + expected: "KEY1: VALUE2\n", + }, + { + name: "Special characters in values", + input: []models.SingleEnvironmentVariable{ + {Key: "KEY1", Value: "Value with spaces"}, + {Key: "KEY2", Value: "Value:with:colons"}, + {Key: "KEY3", Value: "Value\nwith\nnewlines"}, + }, + expected: "KEY1: Value with spaces\nKEY2: 
Value:with:colons\nKEY3: |-\n Value\n with\n newlines\n",
+		},
+	}
+
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			result, err := formatAsYaml(tt.input)
+			assert.NoError(t, err)
+
+			// Compare the result with the expected output
+			assert.Equal(t, tt.expected, result)
+
+			// Additionally, parse the result back into a map to ensure it's valid YAML
+			var resultMap map[string]string
+			err = yaml.Unmarshal([]byte(result), &resultMap)
+			assert.NoError(t, err)
+
+			// Create an expected map from the input
+			expectedMap := make(map[string]string)
+			for _, env := range tt.input {
+				expectedMap[env.Key] = env.Value
+			}
+
+			assert.Equal(t, expectedMap, resultMap)
+		})
+	}
+}
diff --git a/cli/packages/cmd/folder.go b/cli/packages/cmd/folder.go
index 9cb76a312b..538f1e2dc0 100644
--- a/cli/packages/cmd/folder.go
+++ b/cli/packages/cmd/folder.go
@@ -1,6 +1,7 @@
 package cmd

 import (
+	"errors"
 	"fmt"

 	"github.com/Infisical/infisical-merge/packages/models"
@@ -71,10 +72,6 @@ var getCmd = &cobra.Command{
 var createCmd = &cobra.Command{
 	Use:   "create",
 	Short: "Create a folder",
-	PersistentPreRun: func(cmd *cobra.Command, args []string) {
-		util.RequireLogin()
-		util.RequireLocalWorkspaceFile()
-	},
 	Run: func(cmd *cobra.Command, args []string) {
 		environmentName, _ := cmd.Flags().GetString("env")
 		if !cmd.Flags().Changed("env") {
@@ -84,6 +81,16 @@ var createCmd = &cobra.Command{
 			}
 		}

+		token, err := util.GetInfisicalToken(cmd)
+		if err != nil {
+			util.HandleError(err, "Unable to parse flag")
+		}
+
+		projectId, err := cmd.Flags().GetString("projectId")
+		if err != nil {
+			util.HandleError(err, "Unable to parse flag")
+		}
+
 		folderPath, err := cmd.Flags().GetString("path")
 		if err != nil {
 			util.HandleError(err, "Unable to parse flag")
@@ -95,19 +102,31 @@ var createCmd = &cobra.Command{
 		}

 		if folderName == "" {
-			util.HandleError(fmt.Errorf("Invalid folder name"), "Folder name cannot be empty")
+			util.HandleError(errors.New("invalid folder name, folder name cannot be empty"))
 		}

-		workspaceFile, err := util.GetWorkSpaceFromFile()
 		if err != nil {
 			util.HandleError(err, "Unable to get workspace file")
 		}

+		if projectId == "" {
+			workspaceFile, err := util.GetWorkSpaceFromFile()
+			if err != nil {
+				util.HandleError(err, "Unable to get workspace file")
+			}
+
+			projectId = workspaceFile.WorkspaceId
+		}
+
 		params := models.CreateFolderParameters{
 			FolderName:  folderName,
-			WorkspaceId: workspaceFile.WorkspaceId,
 			Environment: environmentName,
 			FolderPath:  folderPath,
+			WorkspaceId: projectId,
+		}
+
+		if token != nil && (token.Type == util.SERVICE_TOKEN_IDENTIFIER || token.Type == util.UNIVERSAL_AUTH_TOKEN_IDENTIFIER) {
+			params.InfisicalToken = token.Token
 		}

 		_, err = util.CreateFolder(params)
@@ -124,10 +143,6 @@ var createCmd = &cobra.Command{
 var deleteCmd = &cobra.Command{
 	Use:   "delete",
 	Short: "Delete a folder",
-	PersistentPreRun: func(cmd *cobra.Command, args []string) {
-		util.RequireLogin()
-		util.RequireLocalWorkspaceFile()
-	},
 	Run: func(cmd *cobra.Command, args []string) {

 		environmentName, _ := cmd.Flags().GetString("env")
@@ -138,6 +153,16 @@ var deleteCmd = &cobra.Command{
 			}
 		}

+		token, err := util.GetInfisicalToken(cmd)
+		if err != nil {
+			util.HandleError(err, "Unable to parse flag")
+		}
+
+		projectId, err := cmd.Flags().GetString("projectId")
+		if err != nil {
+			util.HandleError(err, "Unable to parse flag")
+		}
+
 		folderPath, err := cmd.Flags().GetString("path")
 		if err != nil {
 			util.HandleError(err, "Unable to parse flag")
@@ -149,21 +174,29 @@ var deleteCmd = &cobra.Command{
 		}

 		if folderName == "" {
-			util.HandleError(fmt.Errorf("Invalid folder name"), "Folder name cannot be empty")
+			util.HandleError(errors.New("invalid folder name, folder name cannot be empty"))
 		}

-		workspaceFile, err := util.GetWorkSpaceFromFile()
-		if err != nil {
-			util.HandleError(err, "Unable to get workspace file")
+		if projectId == "" {
+			workspaceFile, err := util.GetWorkSpaceFromFile()
+			if err != nil {
+				util.HandleError(err, "Unable to get workspace file")
+			}
+
+			projectId = workspaceFile.WorkspaceId
 		}

 		params := models.DeleteFolderParameters{
 			FolderName:  folderName,
-			WorkspaceId: workspaceFile.WorkspaceId,
+			WorkspaceId: projectId,
 			Environment: environmentName,
 			FolderPath:  folderPath,
 		}

+		if token != nil && (token.Type == util.SERVICE_TOKEN_IDENTIFIER || token.Type == util.UNIVERSAL_AUTH_TOKEN_IDENTIFIER) {
+			params.InfisicalToken = token.Token
+		}
+
 		_, err = util.DeleteFolder(params)
 		if err != nil {
 			util.HandleError(err, "Unable to delete folder")
diff --git a/cli/packages/cmd/init.go b/cli/packages/cmd/init.go
index 99d2ef5027..05655e97cd 100644
--- a/cli/packages/cmd/init.go
+++ b/cli/packages/cmd/init.go
@@ -5,6 +5,7 @@ package cmd

 import (
 	"encoding/json"
+	"fmt"

 	"github.com/Infisical/infisical-merge/packages/api"
 	"github.com/Infisical/infisical-merge/packages/models"
@@ -75,6 +76,43 @@ var initCmd = &cobra.Command{
 		selectedOrganization := organizations[index]

 		tokenResponse, err := api.CallSelectOrganization(httpClient, api.SelectOrganizationRequest{OrganizationId: selectedOrganization.ID})

+		if tokenResponse.MfaEnabled {
+			i := 1
+			for i < 6 {
+				mfaVerifyCode := askForMFACode(tokenResponse.MfaMethod)
+
+				httpClient := resty.New()
+				httpClient.SetAuthToken(tokenResponse.Token)
+				verifyMFAresponse, mfaErrorResponse, requestError := api.CallVerifyMfaToken(httpClient, api.VerifyMfaTokenRequest{
+					Email:     userCreds.UserCredentials.Email,
+					MFAToken:  mfaVerifyCode,
+					MFAMethod: tokenResponse.MfaMethod,
+				})
+				if requestError != nil {
+					util.HandleError(requestError)
+					break
+				} else if mfaErrorResponse != nil {
+					if mfaErrorResponse.Context.Code == "mfa_invalid" {
+						msg := fmt.Sprintf("Incorrect verification code. You have %v attempts left", 5-i)
+						fmt.Println(msg)
+						if i == 5 {
+							util.PrintErrorMessageAndExit("No attempts left, please try again in a bit")
+							break
+						}
+					}
+
+					if mfaErrorResponse.Context.Code == "mfa_expired" {
+						util.PrintErrorMessageAndExit("Your 2FA verification code has expired, please try logging in again")
+						break
+					}
+					i++
+				} else {
+					httpClient.SetAuthToken(verifyMFAresponse.Token)
+					tokenResponse, err = api.CallSelectOrganization(httpClient, api.SelectOrganizationRequest{OrganizationId: selectedOrganization.ID})
+					break
+				}
+			}
+		}

 		if err != nil {
 			util.HandleError(err, "Unable to select organization")
diff --git a/cli/packages/cmd/login.go b/cli/packages/cmd/login.go
index 61e24b12f0..fff2ccf311 100644
--- a/cli/packages/cmd/login.go
+++ b/cli/packages/cmd/login.go
@@ -4,10 +4,12 @@ Copyright (c) 2023 Infisical Inc.
package cmd import ( + "context" "encoding/base64" "encoding/hex" "encoding/json" "os" + "slices" "strings" "time" @@ -27,13 +29,14 @@ import ( "github.com/fatih/color" "github.com/go-resty/resty/v2" "github.com/manifoldco/promptui" - "github.com/pkg/browser" "github.com/posthog/posthog-go" "github.com/rs/cors" "github.com/rs/zerolog/log" "github.com/spf13/cobra" "golang.org/x/crypto/argon2" "golang.org/x/term" + + infisicalSdk "github.com/infisical/go-sdk" ) type params struct { @@ -44,6 +47,101 @@ type params struct { keyLength uint32 } +func handleUniversalAuthLogin(cmd *cobra.Command, infisicalClient infisicalSdk.InfisicalClientInterface) (credential infisicalSdk.MachineIdentityCredential, e error) { + + clientId, err := util.GetCmdFlagOrEnv(cmd, "client-id", util.INFISICAL_UNIVERSAL_AUTH_CLIENT_ID_NAME) + + if err != nil { + return infisicalSdk.MachineIdentityCredential{}, err + } + + clientSecret, err := util.GetCmdFlagOrEnv(cmd, "client-secret", util.INFISICAL_UNIVERSAL_AUTH_CLIENT_SECRET_NAME) + if err != nil { + return infisicalSdk.MachineIdentityCredential{}, err + } + + return infisicalClient.Auth().UniversalAuthLogin(clientId, clientSecret) +} + +func handleKubernetesAuthLogin(cmd *cobra.Command, infisicalClient infisicalSdk.InfisicalClientInterface) (credential infisicalSdk.MachineIdentityCredential, e error) { + + identityId, err := util.GetCmdFlagOrEnv(cmd, "machine-identity-id", util.INFISICAL_MACHINE_IDENTITY_ID_NAME) + if err != nil { + return infisicalSdk.MachineIdentityCredential{}, err + } + + serviceAccountTokenPath, err := util.GetCmdFlagOrEnv(cmd, "service-account-token-path", util.INFISICAL_KUBERNETES_SERVICE_ACCOUNT_TOKEN_NAME) + if err != nil { + return infisicalSdk.MachineIdentityCredential{}, err + } + + return infisicalClient.Auth().KubernetesAuthLogin(identityId, serviceAccountTokenPath) +} + +func handleAzureAuthLogin(cmd *cobra.Command, infisicalClient infisicalSdk.InfisicalClientInterface) (credential infisicalSdk.MachineIdentityCredential, e error) { + + identityId, err := util.GetCmdFlagOrEnv(cmd, "machine-identity-id", util.INFISICAL_MACHINE_IDENTITY_ID_NAME) + if err != nil { + return infisicalSdk.MachineIdentityCredential{}, err + } + + return infisicalClient.Auth().AzureAuthLogin(identityId, "") +} + +func handleGcpIdTokenAuthLogin(cmd *cobra.Command, infisicalClient infisicalSdk.InfisicalClientInterface) (credential infisicalSdk.MachineIdentityCredential, e error) { + + identityId, err := util.GetCmdFlagOrEnv(cmd, "machine-identity-id", util.INFISICAL_MACHINE_IDENTITY_ID_NAME) + if err != nil { + return infisicalSdk.MachineIdentityCredential{}, err + } + + return infisicalClient.Auth().GcpIdTokenAuthLogin(identityId) +} + +func handleGcpIamAuthLogin(cmd *cobra.Command, infisicalClient infisicalSdk.InfisicalClientInterface) (credential infisicalSdk.MachineIdentityCredential, e error) { + + identityId, err := util.GetCmdFlagOrEnv(cmd, "machine-identity-id", util.INFISICAL_MACHINE_IDENTITY_ID_NAME) + if err != nil { + return infisicalSdk.MachineIdentityCredential{}, err + } + + serviceAccountKeyFilePath, err := util.GetCmdFlagOrEnv(cmd, "service-account-key-file-path", util.INFISICAL_GCP_IAM_SERVICE_ACCOUNT_KEY_FILE_PATH_NAME) + if err != nil { + return infisicalSdk.MachineIdentityCredential{}, err + } + + return infisicalClient.Auth().GcpIamAuthLogin(identityId, serviceAccountKeyFilePath) +} + +func handleAwsIamAuthLogin(cmd *cobra.Command, infisicalClient infisicalSdk.InfisicalClientInterface) (credential infisicalSdk.MachineIdentityCredential, e 
error) { + + identityId, err := util.GetCmdFlagOrEnv(cmd, "machine-identity-id", util.INFISICAL_MACHINE_IDENTITY_ID_NAME) + if err != nil { + return infisicalSdk.MachineIdentityCredential{}, err + } + + return infisicalClient.Auth().AwsIamAuthLogin(identityId) +} + +func handleOidcAuthLogin(cmd *cobra.Command, infisicalClient infisicalSdk.InfisicalClientInterface) (credential infisicalSdk.MachineIdentityCredential, e error) { + + identityId, err := util.GetCmdFlagOrEnv(cmd, "machine-identity-id", util.INFISICAL_MACHINE_IDENTITY_ID_NAME) + if err != nil { + return infisicalSdk.MachineIdentityCredential{}, err + } + + jwt, err := util.GetCmdFlagOrEnv(cmd, "oidc-jwt", util.INFISICAL_OIDC_AUTH_JWT_NAME) + if err != nil { + return infisicalSdk.MachineIdentityCredential{}, err + } + + return infisicalClient.Auth().OidcAuthLogin(identityId, jwt) +} + +func formatAuthMethod(authMethod string) string { + return strings.ReplaceAll(authMethod, "-", " ") +} + const ADD_USER = "Add a new account login" const REPLACE_USER = "Override current logged in user" const EXIT_USER_MENU = "Exit" @@ -56,6 +154,34 @@ var loginCmd = &cobra.Command{ DisableFlagsInUseLine: true, Run: func(cmd *cobra.Command, args []string) { + clearSelfHostedDomains, err := cmd.Flags().GetBool("clear-domains") + if err != nil { + util.HandleError(err) + } + + if clearSelfHostedDomains { + infisicalConfig, err := util.GetConfigFile() + if err != nil { + util.HandleError(err) + } + + infisicalConfig.Domains = []string{} + err = util.WriteConfigFile(&infisicalConfig) + + if err != nil { + util.HandleError(err) + } + + fmt.Println("Cleared all self-hosted domains from the config file") + return + } + + infisicalClient := infisicalSdk.NewInfisicalClient(context.Background(), infisicalSdk.Config{ + SiteUrl: config.INFISICAL_URL, + UserAgent: api.USER_AGENT, + AutoTokenRefresh: false, + }) + loginMethod, err := cmd.Flags().GetString("method") if err != nil { util.HandleError(err) @@ -65,12 +191,13 @@ var loginCmd = &cobra.Command{ util.HandleError(err) } - if loginMethod != "user" && loginMethod != "universal-auth" { - util.PrintErrorMessageAndExit("Invalid login method. 
Please use either 'user' or 'universal-auth'") + authMethodValid, strategy := util.IsAuthMethodValid(loginMethod, true) + if !authMethodValid { + util.PrintErrorMessageAndExit(fmt.Sprintf("Invalid login method: %s", loginMethod)) } + // standalone user auth if loginMethod == "user" { - currentLoggedInUserDetails, err := util.GetCurrentLoggedInUserDetails() // if the key can't be found or there is an error getting current credentials from key ring, allow them to override if err != nil && (strings.Contains(err.Error(), "we couldn't find your logged in details")) { @@ -91,7 +218,9 @@ var loginCmd = &cobra.Command{ } //override domain domainQuery := true - if config.INFISICAL_URL_MANUAL_OVERRIDE != "" && config.INFISICAL_URL_MANUAL_OVERRIDE != util.INFISICAL_DEFAULT_API_URL { + if config.INFISICAL_URL_MANUAL_OVERRIDE != "" && + config.INFISICAL_URL_MANUAL_OVERRIDE != fmt.Sprintf("%s/api", util.INFISICAL_DEFAULT_EU_URL) && + config.INFISICAL_URL_MANUAL_OVERRIDE != fmt.Sprintf("%s/api", util.INFISICAL_DEFAULT_US_URL) { overrideDomain, err := DomainOverridePrompt() if err != nil { util.HandleError(err) @@ -102,11 +231,12 @@ var loginCmd = &cobra.Command{ if !overrideDomain { domainQuery = false config.INFISICAL_URL = util.AppendAPIEndpoint(config.INFISICAL_URL_MANUAL_OVERRIDE) + config.INFISICAL_LOGIN_URL = fmt.Sprintf("%s/login", strings.TrimSuffix(config.INFISICAL_URL, "/api")) } } - //prompt user to select domain between Infisical cloud and self hosting + //prompt user to select domain between Infisical cloud and self-hosting if domainQuery { err = askForDomain() if err != nil { @@ -123,9 +253,9 @@ var loginCmd = &cobra.Command{ //call browser login function if !interactiveLogin { - fmt.Println("Logging in via browser... To login via interactive mode run [infisical login -i]") userCredentialsToBeStored, err = browserCliLogin() if err != nil { + fmt.Printf("Login via browser failed. %s", err.Error()) //default to cli login on error cliDefaultLogin(&userCredentialsToBeStored) } @@ -133,7 +263,7 @@ var loginCmd = &cobra.Command{ err = util.StoreUserCredsInKeyRing(&userCredentialsToBeStored) if err != nil { - log.Error().Msgf("Unable to store your credentials in system vault [%s]") + log.Error().Msgf("Unable to store your credentials in system vault") log.Error().Msgf("\nTo trouble shoot further, read https://infisical.com/docs/cli/faq") log.Debug().Err(err) //return here @@ -160,47 +290,34 @@ var loginCmd = &cobra.Command{ fmt.Println("- Learn to inject secrets into your application at https://infisical.com/docs/cli/usage") fmt.Println("- Stuck? 
Join our slack for quick support https://infisical.com/slack")

			Telemetry.CaptureEvent("cli-command:login", posthog.NewProperties().Set("infisical-backend", config.INFISICAL_URL).Set("version", util.CLI_VERSION))
-		} else if loginMethod == "universal-auth" {
+		} else {

-			clientId, err := cmd.Flags().GetString("client-id")
-			if err != nil {
-				util.HandleError(err)
+			authStrategies := map[util.AuthStrategyType]func(cmd *cobra.Command, infisicalClient infisicalSdk.InfisicalClientInterface) (credential infisicalSdk.MachineIdentityCredential, e error){
+				util.AuthStrategy.UNIVERSAL_AUTH:    handleUniversalAuthLogin,
+				util.AuthStrategy.KUBERNETES_AUTH:   handleKubernetesAuthLogin,
+				util.AuthStrategy.AZURE_AUTH:        handleAzureAuthLogin,
+				util.AuthStrategy.GCP_ID_TOKEN_AUTH: handleGcpIdTokenAuthLogin,
+				util.AuthStrategy.GCP_IAM_AUTH:      handleGcpIamAuthLogin,
+				util.AuthStrategy.AWS_IAM_AUTH:      handleAwsIamAuthLogin,
+				util.AuthStrategy.OIDC_AUTH:         handleOidcAuthLogin,
			}

-			clientSecret, err := cmd.Flags().GetString("client-secret")
-			if err != nil {
-				util.HandleError(err)
-			}
-
-			if clientId == "" {
-				clientId = os.Getenv(util.INFISICAL_UNIVERSAL_AUTH_CLIENT_ID_NAME)
-				if clientId == "" {
-					util.PrintErrorMessageAndExit("Please provide client-id")
-				}
-			}
-			if clientSecret == "" {
-				clientSecret = os.Getenv(util.INFISICAL_UNIVERSAL_AUTH_CLIENT_SECRET_NAME)
-				if clientSecret == "" {
-					util.PrintErrorMessageAndExit("Please provide client-secret")
-				}
-			}
-
-			res, err := util.UniversalAuthLogin(clientId, clientSecret)
+			credential, err := authStrategies[strategy](cmd, infisicalClient)
			if err != nil {
-				util.HandleError(err)
+				util.HandleError(fmt.Errorf("unable to authenticate with %s [err=%v]", formatAuthMethod(loginMethod), err))
			}

			if plainOutput {
-				fmt.Println(res.AccessToken)
+				fmt.Println(credential.AccessToken)
				return
			}

			boldGreen := color.New(color.FgGreen).Add(color.Bold)
			boldPlain := color.New(color.Bold)
			time.Sleep(time.Second * 1)
-			boldGreen.Printf(">>>> Successfully authenticated with Universal Auth!\n\n")
-			boldPlain.Printf("Universal Auth Access Token:\n%v", res.AccessToken)
+			boldGreen.Printf(">>>> Successfully authenticated with %s!\n\n", formatAuthMethod(loginMethod))
+			boldPlain.Printf("Access Token:\n%v", credential.AccessToken)

			plainBold := color.New(color.Bold)
			plainBold.Println("\n\nYou can use this access token to authenticate through other commands in the CLI.")
@@ -226,7 +343,7 @@ func cliDefaultLogin(userCredentialsToBeStored *models.UserCredentials) {
	if loginTwoResponse.MfaEnabled {
		i := 1
		for i < 6 {
-			mfaVerifyCode := askForMFACode()
+			mfaVerifyCode := askForMFACode("email")

			httpClient := resty.New()
			httpClient.SetAuthToken(loginTwoResponse.Token)
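Since the hunk above replaces the single universal-auth branch with a handler table, here is a compile-ready sketch of that table-driven dispatch in isolation. The types and handlers below are simplified stand-ins, not the CLI's real SDK types:

```go
package main

import (
	"errors"
	"fmt"
)

// credential stands in for infisicalSdk.MachineIdentityCredential.
type credential struct{ AccessToken string }

// loginHandler is a simplified version of the handler signature in the hunk above.
type loginHandler func(identityID string) (credential, error)

// A table-driven dispatcher: each auth method name maps to its handler,
// mirroring the authStrategies map in the diff.
var strategies = map[string]loginHandler{
	"universal-auth": func(id string) (credential, error) { return credential{AccessToken: "ua-token"}, nil },
	"kubernetes":     func(id string) (credential, error) { return credential{}, errors.New("not running in a pod") },
}

func login(method, identityID string) (credential, error) {
	handler, ok := strategies[method]
	if !ok {
		return credential{}, fmt.Errorf("invalid login method: %s", method)
	}
	return handler(identityID)
}

func main() {
	cred, err := login("universal-auth", "machine-identity-id")
	fmt.Println(cred.AccessToken, err)
}
```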
@@ -364,7 +481,7 @@ func cliDefaultLogin(userCredentialsToBeStored *models.UserCredentials) {
		util.PrintErrorMessageAndExit("We were unable to fetch required details to complete your login. Run with -d to see more info")
	}
	// Login is successful so ask user to choose organization
-	newJwtToken := GetJwtTokenWithOrganizationId(loginTwoResponse.Token)
+	newJwtToken := GetJwtTokenWithOrganizationId(loginTwoResponse.Token, email)

	//updating usercredentials
	userCredentialsToBeStored.Email = email
@@ -374,11 +491,16 @@ func init() {
	rootCmd.AddCommand(loginCmd)
+	loginCmd.Flags().Bool("clear-domains", false, "clear all self-hosting domains from the config file")
	loginCmd.Flags().BoolP("interactive", "i", false, "login via the command line")
	loginCmd.Flags().String("method", "user", "login method [user, universal-auth]")
-	loginCmd.Flags().String("client-id", "", "client id for universal auth")
	loginCmd.Flags().Bool("plain", false, "only output the token without any formatting")
+	loginCmd.Flags().String("client-id", "", "client id for universal auth")
	loginCmd.Flags().String("client-secret", "", "client secret for universal auth")
+	loginCmd.Flags().String("machine-identity-id", "", "machine identity id for kubernetes, azure, gcp-id-token, gcp-iam, and aws-iam auth methods")
+	loginCmd.Flags().String("service-account-token-path", "", "service account token path for kubernetes auth")
+	loginCmd.Flags().String("service-account-key-file-path", "", "service account key file path for GCP IAM auth")
+	loginCmd.Flags().String("oidc-jwt", "", "JWT for OIDC authentication")
}
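The machine-identity flags registered above are resolved through util.GetCmdFlagOrEnv in the handlers earlier in this file, i.e. flag first, environment variable second. A minimal sketch of that lookup pattern; getFlagOrEnv and the environment variable name are illustrative stand-ins, not the CLI's actual helper:

```go
package main

import (
	"errors"
	"fmt"
	"os"

	"github.com/spf13/cobra"
)

// getFlagOrEnv is a hypothetical stand-in for util.GetCmdFlagOrEnv: prefer the
// CLI flag, fall back to the environment variable, error if both are empty.
func getFlagOrEnv(cmd *cobra.Command, flag, envName string) (string, error) {
	value, err := cmd.Flags().GetString(flag)
	if err != nil {
		return "", err
	}
	if value == "" {
		value = os.Getenv(envName)
	}
	if value == "" {
		return "", errors.New("please provide " + flag)
	}
	return value, nil
}

func main() {
	cmd := &cobra.Command{Use: "login"}
	cmd.Flags().String("machine-identity-id", "", "machine identity id")
	id, err := getFlagOrEnv(cmd, "machine-identity-id", "INFISICAL_MACHINE_IDENTITY_ID")
	fmt.Println(id, err)
}
```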
"Which domain would you like to use?", + Items: items, + Size: 5, + } + + _, selectedOption, err := prompt.Run() + if err != nil { + return err + } + + if selectedOption != ADD_NEW_DOMAIN { + config.INFISICAL_URL = fmt.Sprintf("%s/api", selectedOption) + config.INFISICAL_LOGIN_URL = fmt.Sprintf("%s/login", selectedOption) + return nil + + } + } urlValidation := func(input string) error { @@ -448,12 +608,23 @@ func askForDomain() error { if err != nil { return err } - //trimmed the '/' from the end of the self hosting url + + // Trimmed the '/' from the end of the self-hosting url, and set the api & login url domain = strings.TrimRight(domain, "/") - //set api and login url config.INFISICAL_URL = fmt.Sprintf("%s/api", domain) config.INFISICAL_LOGIN_URL = fmt.Sprintf("%s/login", domain) - //return nil + + // Write the new domain to the config file, to allow the user to select it in the future if needed + // First check if infiscialConfig.Domains already includes the domain, if it does, do not add it again + if !slices.Contains(infisicalConfig.Domains, domain) { + infisicalConfig.Domains = append(infisicalConfig.Domains, domain) + err = util.WriteConfigFile(&infisicalConfig) + + if err != nil { + return fmt.Errorf("askForDomain: unable to write domains to config file because [err=%s]", err) + } + } + return nil } @@ -539,6 +710,7 @@ func getFreshUserCredentials(email string, password string) (*api.GetLoginOneV2R loginTwoResponseResult, err := api.CallLogin2V2(httpClient, api.GetLoginTwoV2Request{ Email: email, ClientProof: hex.EncodeToString(srpM1), + Password: password, }) if err != nil { @@ -548,7 +720,7 @@ func getFreshUserCredentials(email string, password string) (*api.GetLoginOneV2R return &loginOneResponseResult, &loginTwoResponseResult, nil } -func GetJwtTokenWithOrganizationId(oldJwtToken string) string { +func GetJwtTokenWithOrganizationId(oldJwtToken string, email string) string { log.Debug().Msg(fmt.Sprint("GetJwtTokenWithOrganizationId: ", "oldJwtToken", oldJwtToken)) httpClient := resty.New() @@ -577,11 +749,52 @@ func GetJwtTokenWithOrganizationId(oldJwtToken string) string { selectedOrganization := organizations[index] selectedOrgRes, err := api.CallSelectOrganization(httpClient, api.SelectOrganizationRequest{OrganizationId: selectedOrganization.ID}) - if err != nil { util.HandleError(err) } + if selectedOrgRes.MfaEnabled { + i := 1 + for i < 6 { + mfaVerifyCode := askForMFACode(selectedOrgRes.MfaMethod) + + httpClient := resty.New() + httpClient.SetAuthToken(selectedOrgRes.Token) + verifyMFAresponse, mfaErrorResponse, requestError := api.CallVerifyMfaToken(httpClient, api.VerifyMfaTokenRequest{ + Email: email, + MFAToken: mfaVerifyCode, + MFAMethod: selectedOrgRes.MfaMethod, + }) + if requestError != nil { + util.HandleError(err) + break + } else if mfaErrorResponse != nil { + if mfaErrorResponse.Context.Code == "mfa_invalid" { + msg := fmt.Sprintf("Incorrect, verification code. 
@@ -539,6 +710,7 @@ func getFreshUserCredentials(email string, password string) (*api.GetLoginOneV2R
	loginTwoResponseResult, err := api.CallLogin2V2(httpClient, api.GetLoginTwoV2Request{
		Email:       email,
		ClientProof: hex.EncodeToString(srpM1),
+		Password:    password,
	})

	if err != nil {
@@ -548,7 +720,7 @@ func getFreshUserCredentials(email string, password string) (*api.GetLoginOneV2R
	return &loginOneResponseResult, &loginTwoResponseResult, nil
 }

-func GetJwtTokenWithOrganizationId(oldJwtToken string) string {
+func GetJwtTokenWithOrganizationId(oldJwtToken string, email string) string {
	log.Debug().Msg(fmt.Sprint("GetJwtTokenWithOrganizationId: ", "oldJwtToken", oldJwtToken))

	httpClient := resty.New()
@@ -577,11 +749,52 @@ func GetJwtTokenWithOrganizationId(oldJwtToken string) string {
	selectedOrganization := organizations[index]

	selectedOrgRes, err := api.CallSelectOrganization(httpClient, api.SelectOrganizationRequest{OrganizationId: selectedOrganization.ID})
-
	if err != nil {
		util.HandleError(err)
	}

+	if selectedOrgRes.MfaEnabled {
+		i := 1
+		for i < 6 {
+			mfaVerifyCode := askForMFACode(selectedOrgRes.MfaMethod)
+
+			httpClient := resty.New()
+			httpClient.SetAuthToken(selectedOrgRes.Token)
+			verifyMFAresponse, mfaErrorResponse, requestError := api.CallVerifyMfaToken(httpClient, api.VerifyMfaTokenRequest{
+				Email:     email,
+				MFAToken:  mfaVerifyCode,
+				MFAMethod: selectedOrgRes.MfaMethod,
+			})
+			if requestError != nil {
+				util.HandleError(err)
+				break
+			} else if mfaErrorResponse != nil {
+				if mfaErrorResponse.Context.Code == "mfa_invalid" {
+					msg := fmt.Sprintf("Incorrect verification code. You have %v attempts left", 5-i)
+					fmt.Println(msg)
+					if i == 5 {
+						util.PrintErrorMessageAndExit("No tries left, please try again in a bit")
+						break
+					}
+				}
+
+				if mfaErrorResponse.Context.Code == "mfa_expired" {
+					util.PrintErrorMessageAndExit("Your 2FA verification code has expired, please try logging in again")
+					break
+				}
+				i++
+			} else {
+				httpClient.SetAuthToken(verifyMFAresponse.Token)
+				selectedOrgRes, err = api.CallSelectOrganization(httpClient, api.SelectOrganizationRequest{OrganizationId: selectedOrganization.ID})
+				break
+			}
+		}
+	}
+
+	if err != nil {
+		util.HandleError(err, "Unable to select organization")
+	}
+
	return selectedOrgRes.Token
 }

@@ -605,9 +818,15 @@ func generateFromPassword(password string, salt []byte, p *params) (hash []byte,
	return hash, nil
 }

-func askForMFACode() string {
+func askForMFACode(mfaMethod string) string {
+	var label string
+	if mfaMethod == "totp" {
+		label = "Enter the verification code from your mobile authenticator app or use a recovery code"
+	} else {
+		label = "Enter the 2FA verification code sent to your email"
+	}
	mfaCodePromptUI := promptui.Prompt{
-		Label: "Enter the 2FA verification code sent to your email",
+		Label: label,
	}

	mfaVerifyCode, err := mfaCodePromptUI.Run()
@@ -618,10 +837,62 @@ func askForMFACode() string {
	return mfaVerifyCode
 }

+func askToPasteJwtToken(success chan models.UserCredentials, failure chan error) {
+	time.Sleep(time.Second * 5)
+	fmt.Println("\n\nOnce login is completed via browser, the CLI should be authenticated automatically.")
+	fmt.Println("However, if the browser fails to communicate with the CLI, please paste the token from the browser below.")
+
+	fmt.Print("\n\nToken: ")
+	bytePassword, err := term.ReadPassword(int(os.Stdin.Fd()))
+	if err != nil {
+		failure <- err
+		fmt.Println("\nError reading input:", err)
+		os.Exit(1)
+	}
+
+	infisicalPastedToken := strings.TrimSpace(string(bytePassword))
+
+	userCredentials, err := decodePastedBase64Token(infisicalPastedToken)
+	if err != nil {
+		failure <- err
+		fmt.Println("Invalid user credentials provided", err)
+		os.Exit(1)
+	}
+
+	// verify JWT
+	httpClient := resty.New().
+		SetAuthToken(userCredentials.JTWToken).
+		SetHeader("Accept", "application/json")
+
+	isAuthenticated := api.CallIsAuthenticated(httpClient)
+	if !isAuthenticated {
+		fmt.Println("Invalid user credentials provided", err)
+		failure <- err
+		os.Exit(1)
+	}
+
+	success <- *userCredentials
+}
+
+func decodePastedBase64Token(token string) (*models.UserCredentials, error) {
+	data, err := base64.StdEncoding.DecodeString(token)
+	if err != nil {
+		return nil, err
+	}
+	var loginResponse models.UserCredentials
+
+	err = json.Unmarshal(data, &loginResponse)
+	if err != nil {
+		return nil, err
+	}
+
+	return &loginResponse, nil
+}
+
 // Manages the browser login flow. 
// Returns a UserCredentials object on success and an error on failure func browserCliLogin() (models.UserCredentials, error) { - SERVER_TIMEOUT := 60 * 10 + SERVER_TIMEOUT := 10 * 60 //create listener listener, err := net.Listen("tcp", "127.0.0.1:0") @@ -633,17 +904,12 @@ func browserCliLogin() (models.UserCredentials, error) { callbackPort := listener.Addr().(*net.TCPAddr).Port url := fmt.Sprintf("%s?callback_port=%d", config.INFISICAL_LOGIN_URL, callbackPort) - //open browser and login - err = browser.OpenURL(url) - if err != nil { - return models.UserCredentials{}, err - } + fmt.Printf("\n\nTo complete your login, open this address in your browser: %v \n", url) //flow channels success := make(chan models.UserCredentials) failure := make(chan error) timeout := time.After(time.Second * time.Duration(SERVER_TIMEOUT)) - quit := make(chan bool) //terminal state oldState, err := term.GetState(int(os.Stdin.Fd())) @@ -666,23 +932,22 @@ func browserCliLogin() (models.UserCredentials, error) { log.Debug().Msgf("Callback server listening on port %d", callbackPort) go http.Serve(listener, corsHandler) + go askToPasteJwtToken(success, failure) for { select { case loginResponse := <-success: _ = closeListener(&listener) + fmt.Println("Browser login successful") return loginResponse, nil - case <-failure: - err = closeListener(&listener) - return models.UserCredentials{}, err + case err := <-failure: + serverErr := closeListener(&listener) + return models.UserCredentials{}, errors.Join(err, serverErr) case <-timeout: _ = closeListener(&listener) return models.UserCredentials{}, errors.New("server timeout") - - case <-quit: - return models.UserCredentials{}, errors.New("quitting browser login, defaulting to cli...") } } } diff --git a/cli/packages/cmd/root.go b/cli/packages/cmd/root.go index 482c6f78a1..c533f34159 100644 --- a/cli/packages/cmd/root.go +++ b/cli/packages/cmd/root.go @@ -4,6 +4,7 @@ Copyright (c) 2023 Infisical Inc. package cmd import ( + "fmt" "os" "strings" @@ -39,18 +40,30 @@ func init() { cobra.OnInitialize(initLog) rootCmd.PersistentFlags().StringP("log-level", "l", "info", "log level (trace, debug, info, warn, error, fatal)") rootCmd.PersistentFlags().Bool("telemetry", true, "Infisical collects non-sensitive telemetry data to enhance features and improve user experience. Participation is voluntary") - rootCmd.PersistentFlags().StringVar(&config.INFISICAL_URL, "domain", util.INFISICAL_DEFAULT_API_URL, "Point the CLI to your own backend [can also set via environment variable name: INFISICAL_API_URL]") + rootCmd.PersistentFlags().StringVar(&config.INFISICAL_URL, "domain", fmt.Sprintf("%s/api", util.INFISICAL_DEFAULT_US_URL), "Point the CLI to your own backend [can also set via environment variable name: INFISICAL_API_URL]") rootCmd.PersistentFlags().Bool("silent", false, "Disable output of tip/info messages. 
Useful when running in scripts or CI/CD pipelines.") rootCmd.PersistentPreRun = func(cmd *cobra.Command, args []string) { silent, err := cmd.Flags().GetBool("silent") - config.INFISICAL_URL = util.AppendAPIEndpoint(config.INFISICAL_URL) if err != nil { util.HandleError(err) } + config.INFISICAL_URL = util.AppendAPIEndpoint(config.INFISICAL_URL) + if !util.IsRunningInDocker() && !silent { util.CheckForUpdate() } + + loggedInDetails, err := util.GetCurrentLoggedInUserDetails() + + if !silent && err == nil && loggedInDetails.IsUserLoggedIn && !loggedInDetails.LoginExpired { + token, err := util.GetInfisicalToken(cmd) + + if err == nil && token != nil { + util.PrintWarning(fmt.Sprintf("Your logged-in session is being overwritten by the token provided from the %s.", token.Source)) + } + } + } // if config.INFISICAL_URL is set to the default value, check if INFISICAL_URL is set in the environment diff --git a/cli/packages/cmd/run.go b/cli/packages/cmd/run.go index 04fe2588b6..a232896f1e 100644 --- a/cli/packages/cmd/run.go +++ b/cli/packages/cmd/run.go @@ -4,22 +4,27 @@ Copyright (c) 2023 Infisical Inc. package cmd import ( + "errors" "fmt" "os" "os/exec" "os/signal" "runtime" "strings" + "sync" "syscall" + "time" "github.com/Infisical/infisical-merge/packages/models" "github.com/Infisical/infisical-merge/packages/util" "github.com/fatih/color" - "github.com/posthog/posthog-go" "github.com/rs/zerolog/log" "github.com/spf13/cobra" ) +var ErrManualSignalInterrupt = errors.New("signal: interrupt") +var watcherWaitGroup = new(sync.WaitGroup) + // runCmd represents the run command var runCmd = &cobra.Command{ Example: ` @@ -77,11 +82,35 @@ var runCmd = &cobra.Command{ util.HandleError(err, "Unable to parse flag") } + command, err := cmd.Flags().GetString("command") + if err != nil { + util.HandleError(err, "Unable to parse flag") + } + + if err != nil { + util.HandleError(err, "Unable to parse flag") + } + secretOverriding, err := cmd.Flags().GetBool("secret-overriding") if err != nil { util.HandleError(err, "Unable to parse flag") } + watchMode, err := cmd.Flags().GetBool("watch") + if err != nil { + util.HandleError(err, "Unable to parse flag") + } + + watchModeInterval, err := cmd.Flags().GetInt("watch-interval") + if err != nil { + util.HandleError(err, "Unable to parse flag") + } + + // If the --watch flag has been set, the --watch-interval flag should also be set + if watchMode && watchModeInterval < 5 { + util.HandleError(fmt.Errorf("watch interval must be at least 5 seconds, you passed %d seconds", watchModeInterval)) + } + shouldExpandSecrets, err := cmd.Flags().GetBool("expand") if err != nil { util.HandleError(err, "Unable to parse flag") @@ -108,116 +137,59 @@ var runCmd = &cobra.Command{ } request := models.GetAllSecretsParameters{ - Environment: environmentName, - WorkspaceId: projectId, - TagSlugs: tagSlugs, - SecretsPath: secretsPath, - IncludeImport: includeImports, - Recursive: recursive, + Environment: environmentName, + WorkspaceId: projectId, + TagSlugs: tagSlugs, + SecretsPath: secretsPath, + IncludeImport: includeImports, + Recursive: recursive, + ExpandSecretReferences: shouldExpandSecrets, } - if token != nil && token.Type == util.SERVICE_TOKEN_IDENTIFIER { - request.InfisicalToken = token.Token - } else if token != nil && token.Type == util.UNIVERSAL_AUTH_TOKEN_IDENTIFIER { - request.UniversalAuthAccessToken = token.Token - } - - secrets, err := util.GetAllEnvironmentVariables(request, projectConfigDir) - + injectableEnvironment, err := fetchAndFormatSecretsForShell(request, 
projectConfigDir, secretOverriding, token) if err != nil { util.HandleError(err, "Could not fetch secrets", "If you are using a service token to fetch secrets, please ensure it is valid") } - if secretOverriding { - secrets = util.OverrideSecrets(secrets, util.SECRET_TYPE_PERSONAL) + log.Debug().Msgf("injecting the following environment variables into shell: %v", injectableEnvironment.Variables) + + if watchMode { + executeCommandWithWatchMode(command, args, watchModeInterval, request, projectConfigDir, secretOverriding, token) } else { - secrets = util.OverrideSecrets(secrets, util.SECRET_TYPE_SHARED) - } + if cmd.Flags().Changed("command") { + command := cmd.Flag("command").Value.String() + err = executeMultipleCommandWithEnvs(command, injectableEnvironment.SecretsCount, injectableEnvironment.Variables) + if err != nil { + fmt.Println(err) + os.Exit(1) + } - if shouldExpandSecrets { - - authParams := models.ExpandSecretsAuthentication{} - - if token != nil && token.Type == util.SERVICE_TOKEN_IDENTIFIER { - authParams.InfisicalToken = token.Token - } else if token != nil && token.Type == util.UNIVERSAL_AUTH_TOKEN_IDENTIFIER { - authParams.UniversalAuthAccessToken = token.Token - } - - secrets = util.ExpandSecrets(secrets, authParams, projectConfigDir) - } - - secretsByKey := getSecretsByKeys(secrets) - environmentVariables := make(map[string]string) - - // add all existing environment vars - for _, s := range os.Environ() { - kv := strings.SplitN(s, "=", 2) - key := kv[0] - value := kv[1] - environmentVariables[key] = value - } - - // check to see if there are any reserved key words in secrets to inject - filterReservedEnvVars(secretsByKey) - - // now add infisical secrets - for k, v := range secretsByKey { - environmentVariables[k] = v.Value - } - - // turn it back into a list of envs - var env []string - for key, value := range environmentVariables { - s := key + "=" + value - env = append(env, s) - } - - log.Debug().Msgf("injecting the following environment variables into shell: %v", env) - - Telemetry.CaptureEvent("cli-command:run", - posthog.NewProperties(). - Set("secretsCount", len(secrets)). - Set("environment", environmentName). - Set("isUsingServiceToken", token != nil && token.Type == util.SERVICE_TOKEN_IDENTIFIER). - Set("isUsingUniversalAuthToken", token != nil && token.Type == util.UNIVERSAL_AUTH_TOKEN_IDENTIFIER). - Set("single-command", strings.Join(args, " ")). - Set("multi-command", cmd.Flag("command").Value.String()). 
- Set("version", util.CLI_VERSION)) - - if cmd.Flags().Changed("command") { - command := cmd.Flag("command").Value.String() - - err = executeMultipleCommandWithEnvs(command, len(secretsByKey), env) - if err != nil { - fmt.Println(err) - os.Exit(1) - } - - } else { - err = executeSingleCommandWithEnvs(args, len(secretsByKey), env) - if err != nil { - fmt.Println(err) - os.Exit(1) + } else { + err = executeSingleCommandWithEnvs(args, injectableEnvironment.SecretsCount, injectableEnvironment.Variables) + if err != nil { + fmt.Println(err) + os.Exit(1) + } } } + }, } -var ( - reservedEnvVars = []string{ - "HOME", "PATH", "PS1", "PS2", - "PWD", "EDITOR", "XAUTHORITY", "USER", - "TERM", "TERMINFO", "SHELL", "MAIL", - } - - reservedEnvVarPrefixes = []string{ - "XDG_", - "LC_", - } -) - func filterReservedEnvVars(env map[string]models.SingleEnvironmentVariable) { + var ( + reservedEnvVars = []string{ + "HOME", "PATH", "PS1", "PS2", + "PWD", "EDITOR", "XAUTHORITY", "USER", + "TERM", "TERMINFO", "SHELL", "MAIL", + } + + reservedEnvVarPrefixes = []string{ + "XDG_", + "LC_", + } + ) + for _, reservedEnvName := range reservedEnvVars { if _, ok := env[reservedEnvName]; ok { delete(env, reservedEnvName) @@ -237,13 +209,15 @@ func filterReservedEnvVars(env map[string]models.SingleEnvironmentVariable) { func init() { rootCmd.AddCommand(runCmd) - runCmd.Flags().String("token", "", "Fetch secrets using the Infisical Token") - runCmd.Flags().String("projectId", "", "manually set the projectId to fetch folders from for machine identity") - runCmd.Flags().StringP("env", "e", "dev", "Set the environment (dev, prod, etc.) from which your secrets should be pulled from") - runCmd.Flags().Bool("expand", true, "Parse shell parameter expansions in your secrets") - runCmd.Flags().Bool("include-imports", true, "Import linked secrets ") - runCmd.Flags().Bool("recursive", false, "Fetch secrets from all sub-folders") - runCmd.Flags().Bool("secret-overriding", true, "Prioritizes personal secrets, if any, with the same name over shared secrets") + runCmd.Flags().String("token", "", "fetch secrets using service token or machine identity access token") + runCmd.Flags().String("projectId", "", "manually set the project ID to fetch secrets from when using machine identity based auth") + runCmd.Flags().StringP("env", "e", "dev", "set the environment (dev, prod, etc.) from which your secrets should be pulled from") + runCmd.Flags().Bool("expand", true, "parse shell parameter expansions in your secrets") + runCmd.Flags().Bool("include-imports", true, "import linked secrets ") + runCmd.Flags().Bool("recursive", false, "fetch secrets from all sub-folders") + runCmd.Flags().Bool("secret-overriding", true, "prioritizes personal secrets, if any, with the same name over shared secrets") + runCmd.Flags().Bool("watch", false, "enable reload of application when secrets change") + runCmd.Flags().Int("watch-interval", 10, "interval in seconds to check for secret changes") runCmd.Flags().StringP("command", "c", "", "chained commands to execute (e.g. 
\"npm install && npm run dev; echo ...\")") runCmd.Flags().StringP("tags", "t", "", "filter secrets by tag slugs ") runCmd.Flags().String("path", "/", "get secrets within a folder path") @@ -263,7 +237,7 @@ func executeSingleCommandWithEnvs(args []string, secretsCount int, env []string) cmd.Stderr = os.Stderr cmd.Env = env - return execCmd(cmd) + return execBasicCmd(cmd) } func executeMultipleCommandWithEnvs(fullCommand string, secretsCount int, env []string) error { @@ -286,11 +260,10 @@ func executeMultipleCommandWithEnvs(fullCommand string, secretsCount int, env [] log.Info().Msgf(color.GreenString("Injecting %v Infisical secrets into your application process", secretsCount)) log.Debug().Msgf("executing command: %s %s %s \n", shell[0], shell[1], fullCommand) - return execCmd(cmd) + return execBasicCmd(cmd) } -// Credit: inspired by AWS Valut -func execCmd(cmd *exec.Cmd) error { +func execBasicCmd(cmd *exec.Cmd) error { sigChannel := make(chan os.Signal, 1) signal.Notify(sigChannel) @@ -314,3 +287,204 @@ func execCmd(cmd *exec.Cmd) error { os.Exit(waitStatus.ExitStatus()) return nil } + +func waitForExitCommand(cmd *exec.Cmd) (int, error) { + if err := cmd.Wait(); err != nil { + // ignore errors + cmd.Process.Signal(os.Kill) // #nosec G104 + + if exitError, ok := err.(*exec.ExitError); ok { + return exitError.ExitCode(), exitError + } + + return 2, err + } + + waitStatus, ok := cmd.ProcessState.Sys().(syscall.WaitStatus) + if !ok { + return 2, fmt.Errorf("unexpected ProcessState type, expected syscall.WaitStatus, got %T", waitStatus) + } + return waitStatus.ExitStatus(), nil +} + +func executeCommandWithWatchMode(commandFlag string, args []string, watchModeInterval int, request models.GetAllSecretsParameters, projectConfigDir string, secretOverriding bool, token *models.TokenDetails) { + + var cmd *exec.Cmd + var err error + var lastSecretsFetch time.Time + var lastUpdateEvent time.Time + var watchMutex sync.Mutex + var processMutex sync.Mutex + var beingTerminated = false + var currentETag string + + if err != nil { + util.HandleError(err, "Failed to fetch secrets") + } + + runCommandWithWatcher := func(environmentVariables models.InjectableEnvironmentResult) { + currentETag = environmentVariables.ETag + secretsFetchedAt := time.Now() + if secretsFetchedAt.After(lastSecretsFetch) { + lastSecretsFetch = secretsFetchedAt + } + + shouldRestartProcess := cmd != nil + // terminate the old process before starting a new one + if shouldRestartProcess { + log.Info().Msg(color.HiMagentaString("[HOT RELOAD] Environment changes detected. 
+func executeCommandWithWatchMode(commandFlag string, args []string, watchModeInterval int, request models.GetAllSecretsParameters, projectConfigDir string, secretOverriding bool, token *models.TokenDetails) {
+
+	var cmd *exec.Cmd
+	var err error
+	var lastSecretsFetch time.Time
+	var lastUpdateEvent time.Time
+	var watchMutex sync.Mutex
+	var processMutex sync.Mutex
+	var beingTerminated = false
+	var currentETag string
+
+	runCommandWithWatcher := func(environmentVariables models.InjectableEnvironmentResult) {
+		currentETag = environmentVariables.ETag
+		secretsFetchedAt := time.Now()
+		if secretsFetchedAt.After(lastSecretsFetch) {
+			lastSecretsFetch = secretsFetchedAt
+		}
+
+		shouldRestartProcess := cmd != nil
+		// terminate the old process before starting a new one
+		if shouldRestartProcess {
+			log.Info().Msg(color.HiMagentaString("[HOT RELOAD] Environment changes detected. Reloading process..."))
+			beingTerminated = true
+
+			log.Debug().Msgf(color.HiMagentaString("[HOT RELOAD] Sending SIGTERM to PID %d", cmd.Process.Pid))
+			if e := cmd.Process.Signal(syscall.SIGTERM); e != nil {
+				log.Error().Err(e).Msg(color.HiMagentaString("[HOT RELOAD] Failed to send SIGTERM"))
+			}
+			// wait up to 10 sec for the process to exit
+			for i := 0; i < 10; i++ {
+				if !util.IsProcessRunning(cmd.Process) {
+					// process has been killed so we break out
+					break
+				}
+				if i == 5 {
+					log.Debug().Msg(color.HiMagentaString("[HOT RELOAD] Still waiting for process exit status"))
+				}
+				time.Sleep(time.Second)
+			}
+
+			// SIGTERM may not work on Windows so we try SIGKILL
+			if util.IsProcessRunning(cmd.Process) {
+				log.Debug().Msg(color.HiMagentaString("[HOT RELOAD] Process still hasn't fully exited, attempting SIGKILL"))
+				if e := cmd.Process.Kill(); e != nil {
+					log.Error().Err(e).Msg(color.HiMagentaString("[HOT RELOAD] Failed to send SIGKILL"))
+				}
+			}
+
+			cmd = nil
+		} else {
+			// If `cmd` is nil, we know this is the first time we are starting the process
+			log.Info().Msg(color.HiMagentaString("[HOT RELOAD] Watching for secret changes..."))
+		}
+
+		processMutex.Lock()
+
+		if lastUpdateEvent.After(secretsFetchedAt) {
+			processMutex.Unlock()
+			return
+		}
+
+		beingTerminated = false
+		watcherWaitGroup.Add(1)
+
+		// start the process
+		log.Info().Msgf(color.GreenString("Injecting %v Infisical secrets into your application process", environmentVariables.SecretsCount))
+
+		cmd, err = util.RunCommand(commandFlag, args, environmentVariables.Variables, false)
+		if err != nil {
+			defer watcherWaitGroup.Done()
+			util.HandleError(err)
+		}
+
+		go func() {
+			defer processMutex.Unlock()
+			defer watcherWaitGroup.Done()
+
+			exitCode, err := waitForExitCommand(cmd)
+
+			// ignore errors if we are being terminated
+			if !beingTerminated {
+				if err != nil {
+					if strings.HasPrefix(err.Error(), "exec") || strings.HasPrefix(err.Error(), "fork/exec") {
+						log.Error().Err(err).Msg("Failed to execute command")
+					}
+					if err.Error() != ErrManualSignalInterrupt.Error() {
+						log.Error().Err(err).Msg("Process exited with error")
+					}
+				}
+
+				os.Exit(exitCode)
+			}
+		}()
+	}
+
+	recheckSecretsChannel := make(chan bool, 1)
+	recheckSecretsChannel <- true
+
+	// a simple goroutine that triggers the recheckSecretsChan every watch interval (defaults to 10 seconds)
+	go func() {
+		for {
+			time.Sleep(time.Duration(watchModeInterval) * time.Second)
+			recheckSecretsChannel <- true
+		}
+	}()
+
+	for {
+		<-recheckSecretsChannel
+		watchMutex.Lock()
+
+		newEnvironmentVariables, err := fetchAndFormatSecretsForShell(request, projectConfigDir, secretOverriding, token)
+		if err != nil {
+			log.Error().Err(err).Msg("[HOT RELOAD] Failed to fetch secrets")
+			watchMutex.Unlock() // release the lock before retrying, otherwise the loop deadlocks
+			continue
+		}
+
+		if newEnvironmentVariables.ETag != currentETag {
+			runCommandWithWatcher(newEnvironmentVariables)
+		} else {
+			log.Debug().Msg("[HOT RELOAD] No changes detected in secrets, not reloading process")
+		}
+
+		watchMutex.Unlock()
+	}
+}
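The watch loop's core mechanism is ETag comparison: recompute a digest over the fetched secrets each tick and restart the child process only when it changes. A stripped-down sketch of that poll-compare-reload cycle; fetch and reload below stand in for the CLI's real secret fetch and process restart:

```go
package main

import (
	"crypto/sha256"
	"fmt"
	"time"
)

// etag stands in for util.GenerateETagFromSecrets: any stable digest of the
// fetched payload works for change detection.
func etag(payload []byte) string {
	sum := sha256.Sum256(payload)
	return fmt.Sprintf("%x", sum[:8])
}

// watch polls fetch on a fixed interval and calls reload only when the
// digest of the payload differs from the last one seen.
func watch(fetch func() []byte, reload func(), interval time.Duration) {
	var current string
	for {
		if next := etag(fetch()); next != current {
			current = next
			reload() // in the CLI this terminates and restarts the child process
		}
		time.Sleep(interval)
	}
}

func main() {
	i := 0
	fetch := func() []byte { i++; return []byte(fmt.Sprintf("SECRET=%d", i/3)) }
	go watch(fetch, func() { fmt.Println("reloading...") }, 100*time.Millisecond)
	time.Sleep(time.Second)
}
```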
+
+func fetchAndFormatSecretsForShell(request models.GetAllSecretsParameters, projectConfigDir string, secretOverriding bool, token *models.TokenDetails) (models.InjectableEnvironmentResult, error) {
+
+	if token != nil && token.Type == util.SERVICE_TOKEN_IDENTIFIER {
+		request.InfisicalToken = token.Token
+	} else if token != nil && token.Type == util.UNIVERSAL_AUTH_TOKEN_IDENTIFIER {
+		request.UniversalAuthAccessToken = token.Token
+	}
+
+	secrets, err := util.GetAllEnvironmentVariables(request, projectConfigDir)
+
+	if err != nil {
+		return models.InjectableEnvironmentResult{}, err
+	}
+
+	if secretOverriding {
+		secrets = util.OverrideSecrets(secrets, util.SECRET_TYPE_PERSONAL)
+	} else {
+		secrets = util.OverrideSecrets(secrets, util.SECRET_TYPE_SHARED)
+	}
+
+	secretsByKey := getSecretsByKeys(secrets)
+	environmentVariables := make(map[string]string)
+
+	// add all existing environment vars
+	for _, s := range os.Environ() {
+		kv := strings.SplitN(s, "=", 2)
+		key := kv[0]
+		value := kv[1]
+		environmentVariables[key] = value
+	}
+
+	// check to see if there are any reserved key words in secrets to inject
+	filterReservedEnvVars(secretsByKey)
+
+	// now add infisical secrets
+	for k, v := range secretsByKey {
+		environmentVariables[k] = v.Value
+	}
+
+	env := make([]string, 0, len(environmentVariables))
+	for key, value := range environmentVariables {
+		env = append(env, key+"="+value)
+	}
+
+	return models.InjectableEnvironmentResult{
+		Variables:    env,
+		ETag:         util.GenerateETagFromSecrets(secrets),
+		SecretsCount: len(secretsByKey),
+	}, nil
+}
diff --git a/cli/packages/cmd/secrets.go b/cli/packages/cmd/secrets.go
index 423fe1657c..eff011c5eb 100644
--- a/cli/packages/cmd/secrets.go
+++ b/cli/packages/cmd/secrets.go
@@ -4,23 +4,17 @@ Copyright (c) 2023 Infisical Inc.
 package cmd

 import (
-	"crypto/sha256"
-	"encoding/base64"
	"fmt"
-	"os"
	"regexp"
	"sort"
	"strings"
-	"unicode"

	"github.com/Infisical/infisical-merge/packages/api"
-	"github.com/Infisical/infisical-merge/packages/crypto"
	"github.com/Infisical/infisical-merge/packages/models"
	"github.com/Infisical/infisical-merge/packages/util"
	"github.com/Infisical/infisical-merge/packages/visualize"
	"github.com/go-resty/resty/v2"
	"github.com/posthog/posthog-go"
-	"github.com/rs/zerolog/log"
	"github.com/spf13/cobra"
 )
@@ -79,13 +73,19 @@ var secretsCmd = &cobra.Command{
			util.HandleError(err, "Unable to parse flag")
		}

+		plainOutput, err := cmd.Flags().GetBool("plain")
+		if err != nil {
+			util.HandleError(err, "Unable to parse flag")
+		}
+
		request := models.GetAllSecretsParameters{
-			Environment:   environmentName,
-			WorkspaceId:   projectId,
-			TagSlugs:      tagSlugs,
-			SecretsPath:   secretsPath,
-			IncludeImport: includeImports,
-			Recursive:     recursive,
+			Environment:            environmentName,
+			WorkspaceId:            projectId,
+			TagSlugs:               tagSlugs,
+			SecretsPath:            secretsPath,
+			IncludeImport:          includeImports,
+			Recursive:              recursive,
+			ExpandSecretReferences: shouldExpandSecrets,
		}

		if token != nil && token.Type == util.SERVICE_TOKEN_IDENTIFIER {
@@ -105,22 +105,17 @@ var secretsCmd = &cobra.Command{
			secrets = util.OverrideSecrets(secrets, util.SECRET_TYPE_SHARED)
		}

-		if shouldExpandSecrets {
-
-			authParams := models.ExpandSecretsAuthentication{}
-			if token != nil && token.Type == util.SERVICE_TOKEN_IDENTIFIER {
-				authParams.InfisicalToken = token.Token
-			} else if token != nil && token.Type == util.UNIVERSAL_AUTH_TOKEN_IDENTIFIER {
-				authParams.UniversalAuthAccessToken = token.Token
-			}
-
-			secrets = util.ExpandSecrets(secrets, authParams, "")
-		}
-
		// Sort the secrets by key so we can create a consistent output
		secrets = util.SortSecretsByKeys(secrets)

-		visualize.PrintAllSecretDetails(secrets)
+		if plainOutput {
+			for _, secret := range secrets {
+				fmt.Println(secret.Value)
+			}
+		} else {
+			visualize.PrintAllSecretDetails(secrets)
+		}
+
		Telemetry.CaptureEvent("cli-command:secrets", posthog.NewProperties().Set("secretCount", len(secrets)).Set("version", util.CLI_VERSION))
	},
 }
@@ -150,7 +145,14 @@ var secretsSetCmd = &cobra.Command{
	DisableFlagsInUseLine: true,
	Args:                  cobra.MinimumNArgs(1),
	Run: func(cmd 
*cobra.Command, args []string) { - util.RequireLocalWorkspaceFile() + token, err := util.GetInfisicalToken(cmd) + if err != nil { + util.HandleError(err, "Unable to parse flag") + } + + if token == nil { + util.RequireLocalWorkspaceFile() + } environmentName, _ := cmd.Flags().GetString("env") if !cmd.Flags().Changed("env") { @@ -160,14 +162,14 @@ var secretsSetCmd = &cobra.Command{ } } - secretsPath, err := cmd.Flags().GetString("path") + projectId, err := cmd.Flags().GetString("projectId") if err != nil { util.HandleError(err, "Unable to parse flag") } - workspaceFile, err := util.GetWorkSpaceFromFile() + secretsPath, err := cmd.Flags().GetString("path") if err != nil { - util.HandleError(err, "Unable to get your local config details") + util.HandleError(err, "Unable to parse flag") } secretType, err := cmd.Flags().GetString("type") @@ -175,196 +177,40 @@ var secretsSetCmd = &cobra.Command{ util.HandleError(err, "Unable to parse secret type") } - loggedInUserDetails, err := util.GetCurrentLoggedInUserDetails() - if err != nil { - util.HandleError(err, "Unable to authenticate") - } - - if loggedInUserDetails.LoginExpired { - util.PrintErrorMessageAndExit("Your login session has expired, please run [infisical login] and try again") - } - - - httpClient := resty.New(). - SetAuthToken(loggedInUserDetails.UserCredentials.JTWToken). - SetHeader("Accept", "application/json") - - request := api.GetEncryptedWorkspaceKeyRequest{ - WorkspaceId: workspaceFile.WorkspaceId, - } - - workspaceKeyResponse, err := api.CallGetEncryptedWorkspaceKey(httpClient, request) - if err != nil { - util.HandleError(err, "unable to get your encrypted workspace key") - } - - encryptedWorkspaceKey, _ := base64.StdEncoding.DecodeString(workspaceKeyResponse.EncryptedKey) - encryptedWorkspaceKeySenderPublicKey, _ := base64.StdEncoding.DecodeString(workspaceKeyResponse.Sender.PublicKey) - encryptedWorkspaceKeyNonce, _ := base64.StdEncoding.DecodeString(workspaceKeyResponse.Nonce) - currentUsersPrivateKey, _ := base64.StdEncoding.DecodeString(loggedInUserDetails.UserCredentials.PrivateKey) - - if len(currentUsersPrivateKey) == 0 || len(encryptedWorkspaceKeySenderPublicKey) == 0 { - log.Debug().Msgf("Missing credentials for generating plainTextEncryptionKey: [currentUsersPrivateKey=%s] [encryptedWorkspaceKeySenderPublicKey=%s]", currentUsersPrivateKey, encryptedWorkspaceKeySenderPublicKey) - util.PrintErrorMessageAndExit("Some required user credentials are missing to generate your [plainTextEncryptionKey]. 
Please run [infisical login] then try again") - } - - // decrypt workspace key - plainTextEncryptionKey := crypto.DecryptAsymmetric(encryptedWorkspaceKey, encryptedWorkspaceKeyNonce, encryptedWorkspaceKeySenderPublicKey, currentUsersPrivateKey) - - infisicalTokenEnv := os.Getenv(util.INFISICAL_TOKEN_NAME) - - // pull current secrets - secrets, err := util.GetAllEnvironmentVariables(models.GetAllSecretsParameters{Environment: environmentName, SecretsPath: secretsPath, InfisicalToken: infisicalTokenEnv}, "") - if err != nil { - util.HandleError(err, "unable to retrieve secrets") - } - - type SecretSetOperation struct { - SecretKey string - SecretValue string - SecretOperation string - } - - secretsToCreate := []api.Secret{} - secretsToModify := []api.Secret{} - secretOperations := []SecretSetOperation{} - - sharedSecretMapByName := make(map[string]models.SingleEnvironmentVariable, len(secrets)) - personalSecretMapByName := make(map[string]models.SingleEnvironmentVariable, len(secrets)) - - for _, secret := range secrets { - if secret.Type == util.SECRET_TYPE_PERSONAL { - personalSecretMapByName[secret.Key] = secret - } else { - sharedSecretMapByName[secret.Key] = secret - } - } - - for _, arg := range args { - splitKeyValueFromArg := strings.SplitN(arg, "=", 2) - if splitKeyValueFromArg[0] == "" || splitKeyValueFromArg[1] == "" { - util.PrintErrorMessageAndExit("ensure that each secret has a none empty key and value. Modify the input and try again") + var secretOperations []models.SecretSetOperation + if token != nil && (token.Type == util.SERVICE_TOKEN_IDENTIFIER || token.Type == util.UNIVERSAL_AUTH_TOKEN_IDENTIFIER) { + if projectId == "" { + util.PrintErrorMessageAndExit("When using service tokens or machine identities, you must set the --projectId flag") } - if unicode.IsNumber(rune(splitKeyValueFromArg[0][0])) { - util.PrintErrorMessageAndExit("keys of secrets cannot start with a number. 
Modify the key name(s) and try again") - } - - // Key and value from argument - key := splitKeyValueFromArg[0] - value := splitKeyValueFromArg[1] - - hashedKey := fmt.Sprintf("%x", sha256.Sum256([]byte(key))) - encryptedKey, err := crypto.EncryptSymmetric([]byte(key), []byte(plainTextEncryptionKey)) - if err != nil { - util.HandleError(err, "unable to encrypt your secrets") - } - - hashedValue := fmt.Sprintf("%x", sha256.Sum256([]byte(value))) - encryptedValue, err := crypto.EncryptSymmetric([]byte(value), []byte(plainTextEncryptionKey)) - if err != nil { - util.HandleError(err, "unable to encrypt your secrets") - } - - var existingSecret models.SingleEnvironmentVariable - var doesSecretExist bool - - if secretType == util.SECRET_TYPE_SHARED { - existingSecret, doesSecretExist = sharedSecretMapByName[key] - } else { - existingSecret, doesSecretExist = personalSecretMapByName[key] - } - - if doesSecretExist { - // case: secret exists in project so it needs to be modified - encryptedSecretDetails := api.Secret{ - ID: existingSecret.ID, - SecretValueCiphertext: base64.StdEncoding.EncodeToString(encryptedValue.CipherText), - SecretValueIV: base64.StdEncoding.EncodeToString(encryptedValue.Nonce), - SecretValueTag: base64.StdEncoding.EncodeToString(encryptedValue.AuthTag), - SecretValueHash: hashedValue, - PlainTextKey: key, - Type: existingSecret.Type, + secretOperations, err = util.SetRawSecrets(args, secretType, environmentName, secretsPath, projectId, token) + } else { + if projectId == "" { + workspaceFile, err := util.GetWorkSpaceFromFile() + if err != nil { + util.HandleError(err, "unable to get your local config details [err=%v]") } - // Only add to modifications if the value is different - if existingSecret.Value != value { - secretsToModify = append(secretsToModify, encryptedSecretDetails) - secretOperations = append(secretOperations, SecretSetOperation{ - SecretKey: key, - SecretValue: value, - SecretOperation: "SECRET VALUE MODIFIED", - }) - } else { - // Current value is same as exisitng so no change - secretOperations = append(secretOperations, SecretSetOperation{ - SecretKey: key, - SecretValue: value, - SecretOperation: "SECRET VALUE UNCHANGED", - }) - } - - } else { - // case: secret doesn't exist in project so it needs to be created - encryptedSecretDetails := api.Secret{ - SecretKeyCiphertext: base64.StdEncoding.EncodeToString(encryptedKey.CipherText), - SecretKeyIV: base64.StdEncoding.EncodeToString(encryptedKey.Nonce), - SecretKeyTag: base64.StdEncoding.EncodeToString(encryptedKey.AuthTag), - SecretKeyHash: hashedKey, - SecretValueCiphertext: base64.StdEncoding.EncodeToString(encryptedValue.CipherText), - SecretValueIV: base64.StdEncoding.EncodeToString(encryptedValue.Nonce), - SecretValueTag: base64.StdEncoding.EncodeToString(encryptedValue.AuthTag), - SecretValueHash: hashedValue, - Type: secretType, - PlainTextKey: key, - } - secretsToCreate = append(secretsToCreate, encryptedSecretDetails) - secretOperations = append(secretOperations, SecretSetOperation{ - SecretKey: key, - SecretValue: value, - SecretOperation: "SECRET CREATED", - }) + projectId = workspaceFile.WorkspaceId } + + loggedInUserDetails, err := util.GetCurrentLoggedInUserDetails() + if err != nil { + util.HandleError(err, "unable to authenticate [err=%v]") + } + + if loggedInUserDetails.LoginExpired { + util.PrintErrorMessageAndExit("Your login session has expired, please run [infisical login] and try again") + } + + secretOperations, err = util.SetRawSecrets(args, secretType, environmentName, secretsPath, 
projectId, &models.TokenDetails{ + Type: "", + Token: loggedInUserDetails.UserCredentials.JTWToken, + }) } - for _, secret := range secretsToCreate { - createSecretRequest := api.CreateSecretV3Request{ - WorkspaceID: workspaceFile.WorkspaceId, - Environment: environmentName, - SecretName: secret.PlainTextKey, - SecretKeyCiphertext: secret.SecretKeyCiphertext, - SecretKeyIV: secret.SecretKeyIV, - SecretKeyTag: secret.SecretKeyTag, - SecretValueCiphertext: secret.SecretValueCiphertext, - SecretValueIV: secret.SecretValueIV, - SecretValueTag: secret.SecretValueTag, - Type: secret.Type, - SecretPath: secretsPath, - } - - err = api.CallCreateSecretsV3(httpClient, createSecretRequest) - if err != nil { - util.HandleError(err, "Unable to process new secret creations") - return - } - } - - for _, secret := range secretsToModify { - updateSecretRequest := api.UpdateSecretByNameV3Request{ - WorkspaceID: workspaceFile.WorkspaceId, - Environment: environmentName, - SecretValueCiphertext: secret.SecretValueCiphertext, - SecretValueIV: secret.SecretValueIV, - SecretValueTag: secret.SecretValueTag, - Type: secret.Type, - SecretPath: secretsPath, - } - - err = api.CallUpdateSecretsV3(httpClient, updateSecretRequest, secret.PlainTextKey) - if err != nil { - util.HandleError(err, "Unable to process secret update request") - return - } + if err != nil { + util.HandleError(err, "Unable to set secrets") } // Print secret operations @@ -395,6 +241,16 @@ var secretsDeleteCmd = &cobra.Command{ } } + token, err := util.GetInfisicalToken(cmd) + if err != nil { + util.HandleError(err, "Unable to parse flag") + } + + projectId, err := cmd.Flags().GetString("projectId") + if err != nil { + util.HandleError(err, "Unable to parse flag") + } + secretsPath, err := cmd.Flags().GetString("path") if err != nil { util.HandleError(err, "Unable to parse flag") @@ -405,34 +261,45 @@ var secretsDeleteCmd = &cobra.Command{ util.HandleError(err, "Unable to parse flag") } - loggedInUserDetails, err := util.GetCurrentLoggedInUserDetails() - if err != nil { - util.HandleError(err, "Unable to authenticate") + httpClient := resty.New(). + SetHeader("Accept", "application/json") + + if projectId == "" { + workspaceFile, err := util.GetWorkSpaceFromFile() + if err != nil { + util.HandleError(err, "Unable to get local project details") + } + projectId = workspaceFile.WorkspaceId } - if loggedInUserDetails.LoginExpired { - util.PrintErrorMessageAndExit("Your login session has expired, please run [infisical login] and try again") - } + if token != nil && (token.Type == util.SERVICE_TOKEN_IDENTIFIER || token.Type == util.UNIVERSAL_AUTH_TOKEN_IDENTIFIER) { + httpClient.SetAuthToken(token.Token) + } else { + util.RequireLogin() + util.RequireLocalWorkspaceFile() - workspaceFile, err := util.GetWorkSpaceFromFile() - if err != nil { - util.HandleError(err, "Unable to get local project details") + loggedInUserDetails, err := util.GetCurrentLoggedInUserDetails() + if err != nil { + util.HandleError(err, "Unable to authenticate") + } + + if loggedInUserDetails.LoginExpired { + util.PrintErrorMessageAndExit("Your login session has expired, please run [infisical login] and try again") + } + + httpClient.SetAuthToken(loggedInUserDetails.UserCredentials.JTWToken) } for _, secretName := range args { request := api.DeleteSecretV3Request{ - WorkspaceId: workspaceFile.WorkspaceId, + WorkspaceId: projectId, Environment: environmentName, SecretName: secretName, Type: secretType, SecretPath: secretsPath, } - httpClient := resty.New(). 
- SetAuthToken(loggedInUserDetails.UserCredentials.JTWToken). - SetHeader("Accept", "application/json") - - err = api.CallDeleteSecretsV3(httpClient, request) + err = api.CallDeleteSecretsRawV3(httpClient, request) if err != nil { util.HandleError(err, "Unable to complete your delete request") } @@ -483,18 +350,35 @@ func getSecretsByNames(cmd *cobra.Command, args []string) { util.HandleError(err, "Unable to parse recursive flag") } + // deprecated, in favor of --plain showOnlyValue, err := cmd.Flags().GetBool("raw-value") if err != nil { - util.HandleError(err, "Unable to parse path flag") + util.HandleError(err, "Unable to parse flag") + } + + plainOutput, err := cmd.Flags().GetBool("plain") + if err != nil { + util.HandleError(err, "Unable to parse flag") + } + + includeImports, err := cmd.Flags().GetBool("include-imports") + if err != nil { + util.HandleError(err, "Unable to parse flag") + } + + secretOverriding, err := cmd.Flags().GetBool("secret-overriding") + if err != nil { + util.HandleError(err, "Unable to parse flag") } request := models.GetAllSecretsParameters{ - Environment: environmentName, - WorkspaceId: projectId, - TagSlugs: tagSlugs, - SecretsPath: secretsPath, - IncludeImport: true, - Recursive: recursive, + Environment: environmentName, + WorkspaceId: projectId, + TagSlugs: tagSlugs, + SecretsPath: secretsPath, + IncludeImport: includeImports, + Recursive: recursive, + ExpandSecretReferences: shouldExpand, } if token != nil && token.Type == util.SERVICE_TOKEN_IDENTIFIER { @@ -508,15 +392,10 @@ func getSecretsByNames(cmd *cobra.Command, args []string) { util.HandleError(err, "To fetch all secrets") } - if shouldExpand { - authParams := models.ExpandSecretsAuthentication{} - if token != nil && token.Type == util.SERVICE_TOKEN_IDENTIFIER { - authParams.InfisicalToken = token.Token - } else if token != nil && token.Type == util.UNIVERSAL_AUTH_TOKEN_IDENTIFIER { - authParams.UniversalAuthAccessToken = token.Token - } - - secrets = util.ExpandSecrets(secrets, authParams, "") + if secretOverriding { + secrets = util.OverrideSecrets(secrets, util.SECRET_TYPE_PERSONAL) + } else { + secrets = util.OverrideSecrets(secrets, util.SECRET_TYPE_SHARED) } requestedSecrets := []models.SingleEnvironmentVariable{} @@ -527,23 +406,25 @@ func getSecretsByNames(cmd *cobra.Command, args []string) { if value, ok := secretsMap[secretKeyFromArg]; ok { requestedSecrets = append(requestedSecrets, value) } else { - requestedSecrets = append(requestedSecrets, models.SingleEnvironmentVariable{ - Key: secretKeyFromArg, - Type: "*not found*", - Value: "*not found*", - }) + if !(plainOutput || showOnlyValue) { + requestedSecrets = append(requestedSecrets, models.SingleEnvironmentVariable{ + Key: secretKeyFromArg, + Type: "*not found*", + Value: "*not found*", + }) + } } } - if showOnlyValue && len(requestedSecrets) > 1 { - util.PrintErrorMessageAndExit("--raw-value only works with one secret.") - } - - if showOnlyValue { - fmt.Printf(requestedSecrets[0].Value) + // showOnlyValue deprecated in favor of --plain, below only for backward compatibility + if plainOutput || showOnlyValue { + for _, secret := range requestedSecrets { + fmt.Println(secret.Value) + } } else { visualize.PrintAllSecretDetails(requestedSecrets) } + Telemetry.CaptureEvent("cli-command:secrets get", posthog.NewProperties().Set("secretCount", len(secrets)).Set("version", util.CLI_VERSION)) } @@ -789,56 +670,54 @@ func getSecretsByKeys(secrets []models.SingleEnvironmentVariable) map[string]mod } func init() { - 
secretsGenerateExampleEnvCmd.Flags().String("token", "", "Fetch secrets using the Infisical Token")
-	secretsGenerateExampleEnvCmd.Flags().String("projectId", "", "manually set the projectId to fetch folders from for machine identity")
+	secretsGenerateExampleEnvCmd.Flags().String("token", "", "Fetch secrets using service token or machine identity access token")
+	secretsGenerateExampleEnvCmd.Flags().String("projectId", "", "manually set the projectId when using machine identity based auth")
	secretsGenerateExampleEnvCmd.Flags().String("path", "/", "Fetch secrets from within a folder path")
	secretsCmd.AddCommand(secretsGenerateExampleEnvCmd)

-	secretsGetCmd.Flags().String("token", "", "Fetch secrets using the Infisical Token")
-	secretsGetCmd.Flags().String("projectId", "", "manually set the projectId to fetch folders from for machine identity")
+	secretsGetCmd.Flags().String("token", "", "Fetch secrets using service token or machine identity access token")
+	secretsGetCmd.Flags().String("projectId", "", "manually set the project ID to fetch secrets from when using machine identity based auth")
	secretsGetCmd.Flags().String("path", "/", "get secrets within a folder path")
-	secretsGetCmd.Flags().Bool("expand", true, "Parse shell parameter expansions in your secrets")
-	secretsGetCmd.Flags().Bool("raw-value", false, "Returns only the value of secret, only works with one secret")
+	secretsGetCmd.Flags().Bool("plain", false, "print values without formatting, one per line")
+	secretsGetCmd.Flags().Bool("raw-value", false, "deprecated. Returns only the value of secret, only works with one secret. Use --plain instead")
+	secretsGetCmd.Flags().Bool("include-imports", true, "Import linked secrets")
+	secretsGetCmd.Flags().Bool("expand", true, "Parse shell parameter expansions in your secrets, and process your referenced secrets")
	secretsGetCmd.Flags().Bool("recursive", false, "Fetch secrets from all sub-folders")
+	secretsGetCmd.Flags().Bool("secret-overriding", true, "Prioritizes personal secrets, if any, with the same name over shared secrets")
	secretsCmd.AddCommand(secretsGetCmd)

-	secretsCmd.Flags().Bool("secret-overriding", true, "Prioritizes personal secrets, if any, with the same name over shared secrets")
	secretsCmd.AddCommand(secretsSetCmd)
+	secretsSetCmd.Flags().String("token", "", "Fetch secrets using service token or machine identity access token")
+	secretsSetCmd.Flags().String("projectId", "", "manually set the project ID for setting secrets when using machine identity based auth")
	secretsSetCmd.Flags().String("path", "/", "set secrets within a folder path")
	secretsSetCmd.Flags().String("type", util.SECRET_TYPE_SHARED, "the type of secret to create: personal or shared")

-	// Only supports logged in users (JWT auth)
-	secretsSetCmd.PersistentPreRun = func(cmd *cobra.Command, args []string) {
-		util.RequireLogin()
-		util.RequireLocalWorkspaceFile()
-	}
-
	secretsDeleteCmd.Flags().String("type", "personal", "the type of secret to delete: personal or shared (default: personal)")
+	secretsDeleteCmd.Flags().String("token", "", "Fetch secrets using service token or machine identity access token")
+	secretsDeleteCmd.Flags().String("projectId", "", "manually set the projectId to delete secrets from when using machine identity based auth")
	secretsDeleteCmd.Flags().String("path", "/", "get secrets within a folder path")
	secretsCmd.AddCommand(secretsDeleteCmd)

-	// Only supports logged in users (JWT auth)
-	secretsDeleteCmd.PersistentPreRun = func(cmd *cobra.Command, args []string) {
-
util.RequireLogin() - util.RequireLocalWorkspaceFile() - } - // *** Folders sub command *** folderCmd.PersistentFlags().String("env", "dev", "Used to select the environment name on which actions should be taken on") // Add getCmd, createCmd and deleteCmd flags here getCmd.Flags().StringP("path", "p", "/", "The path from where folders should be fetched from") - getCmd.Flags().String("token", "", "Fetch folders using the infisical token") - getCmd.Flags().String("projectId", "", "manually set the projectId to fetch folders from for machine identity") + getCmd.Flags().String("token", "", "Fetch secrets using service token or machine identity access token") + getCmd.Flags().String("projectId", "", "manually set the projectId to fetch folders from when using machine identity based auth") folderCmd.AddCommand(getCmd) // Add createCmd flags here createCmd.Flags().StringP("path", "p", "/", "Path to where the folder should be created") createCmd.Flags().StringP("name", "n", "", "Name of the folder to be created in selected `--path`") + createCmd.Flags().String("token", "", "Fetch secrets using service token or machine identity access token") + createCmd.Flags().String("projectId", "", "manually set the project ID for creating folders in when using machine identity based auth") folderCmd.AddCommand(createCmd) // Add deleteCmd flags here deleteCmd.Flags().StringP("path", "p", "/", "Path to the folder to be deleted") + deleteCmd.Flags().String("token", "", "Fetch secrets using service token or machine identity access token") + deleteCmd.Flags().String("projectId", "", "manually set the projectId to delete folders when using machine identity based auth") deleteCmd.Flags().StringP("name", "n", "", "Name of the folder to be deleted within selected `--path`") folderCmd.AddCommand(deleteCmd) @@ -846,13 +725,14 @@ func init() { // ** End of folders sub command - secretsCmd.Flags().String("token", "", "Fetch secrets using the Infisical Token") - secretsCmd.Flags().String("projectId", "", "manually set the projectId to fetch folders from for machine identity") + secretsCmd.Flags().String("token", "", "Fetch secrets using service token or machine identity access token") + secretsCmd.Flags().String("projectId", "", "manually set the projectId to fetch secrets when using machine identity based auth") secretsCmd.PersistentFlags().String("env", "dev", "Used to select the environment name on which actions should be taken on") - secretsCmd.Flags().Bool("expand", true, "Parse shell parameter expansions in your secrets") + secretsCmd.Flags().Bool("expand", true, "Parse shell parameter expansions in your secrets, and process your referenced secrets") secretsCmd.Flags().Bool("include-imports", true, "Imported linked secrets ") secretsCmd.Flags().Bool("recursive", false, "Fetch secrets from all sub-folders") secretsCmd.PersistentFlags().StringP("tags", "t", "", "filter secrets by tag slugs") secretsCmd.Flags().String("path", "/", "get secrets within a folder path") + secretsCmd.Flags().Bool("plain", false, "print values without formatting, one per line") rootCmd.AddCommand(secretsCmd) } diff --git a/cli/packages/cmd/token.go b/cli/packages/cmd/token.go index 3e5d42765e..4e568cb854 100644 --- a/cli/packages/cmd/token.go +++ b/cli/packages/cmd/token.go @@ -39,7 +39,7 @@ var tokenRenewCmd = &cobra.Command{ util.PrintErrorMessageAndExit("You are trying to renew a service token. 
You can only renew universal auth access tokens.") } - renewedAccessToken, err := util.RenewUniversalAuthAccessToken(token) + renewedAccessToken, err := util.RenewMachineIdentityAccessToken(token) if err != nil { util.HandleError(err, "Unable to renew token") diff --git a/cli/packages/cmd/user.go b/cli/packages/cmd/user.go index 844213e18a..d3e6096a9b 100644 --- a/cli/packages/cmd/user.go +++ b/cli/packages/cmd/user.go @@ -2,6 +2,7 @@ package cmd import ( "errors" + "fmt" "net/url" "github.com/Infisical/infisical-merge/packages/config" @@ -119,7 +120,7 @@ var domainCmd = &cobra.Command{ domain := "" domainQuery := true - if config.INFISICAL_URL_MANUAL_OVERRIDE != util.INFISICAL_DEFAULT_API_URL { + if config.INFISICAL_URL_MANUAL_OVERRIDE != fmt.Sprintf("%s/api", util.INFISICAL_DEFAULT_EU_URL) && config.INFISICAL_URL_MANUAL_OVERRIDE != fmt.Sprintf("%s/api", util.INFISICAL_DEFAULT_US_URL) { override, err := DomainOverridePrompt() if err != nil { diff --git a/cli/packages/cmd/vault.go b/cli/packages/cmd/vault.go index 01bee147b2..6a92ef960e 100644 --- a/cli/packages/cmd/vault.go +++ b/cli/packages/cmd/vault.go @@ -4,6 +4,7 @@ Copyright (c) 2023 Infisical Inc. package cmd import ( + "encoding/base64" "fmt" "strings" @@ -13,13 +14,26 @@ import ( "github.com/spf13/cobra" ) -var AvailableVaultsAndDescriptions = []string{"auto (automatically select native vault on system)", "file (encrypted file vault)"} -var AvailableVaults = []string{"auto", "file"} +type VaultBackendType struct { + Name string + Description string +} + +var AvailableVaults = []VaultBackendType{ + { + Name: "auto", + Description: "automatically select the system keyring", + }, + { + Name: "file", + Description: "encrypted file vault", + }, +} var vaultSetCmd = &cobra.Command{ - Example: `infisical vault set pass`, - Use: "set [vault-name]", - Short: "Used to set the vault backend to store your login details securely at rest", + Example: `infisical vault set file`, + Use: "set [file|auto]", + Short: "Used to configure the vault backends", DisableFlagsInUseLine: true, Args: cobra.MinimumNArgs(1), Run: func(cmd *cobra.Command, args []string) { @@ -35,15 +49,16 @@ var vaultSetCmd = &cobra.Command{ return } - if wantedVaultTypeName == "auto" || wantedVaultTypeName == "file" { + if wantedVaultTypeName == util.VAULT_BACKEND_AUTO_MODE || wantedVaultTypeName == util.VAULT_BACKEND_FILE_MODE { configFile, err := util.GetConfigFile() if err != nil { log.Error().Msgf("Unable to set vault to [%s] because of [err=%s]", wantedVaultTypeName, err) return } - configFile.VaultBackendType = wantedVaultTypeName // save selected vault - configFile.LoggedInUserEmail = "" // reset the logged in user to prompt them to re login + configFile.VaultBackendType = wantedVaultTypeName + configFile.LoggedInUserEmail = "" + configFile.VaultBackendPassphrase = base64.StdEncoding.EncodeToString([]byte(util.GenerateRandomString(10))) err = util.WriteConfigFile(&configFile) if err != nil { @@ -55,7 +70,11 @@ var vaultSetCmd = &cobra.Command{ Telemetry.CaptureEvent("cli-command:vault set", posthog.NewProperties().Set("currentVault", currentVaultBackend).Set("wantedVault", wantedVaultTypeName).Set("version", util.CLI_VERSION)) } else { - log.Error().Msgf("The requested vault type [%s] is not available on this system. 
Only the following vault backends are available for you system: %s", wantedVaultTypeName, strings.Join(AvailableVaults, ", ")) + var availableVaultsNames []string + for _, vault := range AvailableVaults { + availableVaultsNames = append(availableVaultsNames, vault.Name) + } + log.Error().Msgf("The requested vault type [%s] is not available on this system. Only the following vault backends are available for your system: %s", wantedVaultTypeName, strings.Join(availableVaultsNames, ", ")) } }, } @@ -73,8 +92,8 @@ var vaultCmd = &cobra.Command{ func printAvailableVaultBackends() { fmt.Printf("Vaults are used to securely store your login details locally. Available vaults:") - for _, backend := range AvailableVaultsAndDescriptions { - fmt.Printf("\n- %s", backend) + for _, vaultType := range AvailableVaults { + fmt.Printf("\n- %s (%s)", vaultType.Name, vaultType.Description) } currentVaultBackend, err := util.GetCurrentVaultBackend() @@ -89,5 +108,6 @@ func printAvailableVaultBackends() { func init() { vaultCmd.AddCommand(vaultSetCmd) + rootCmd.AddCommand(vaultCmd) } diff --git a/cli/packages/models/cli.go b/cli/packages/models/cli.go index 68527c4696..8b9fef6f6c 100644 --- a/cli/packages/models/cli.go +++ b/cli/packages/models/cli.go @@ -11,10 +11,12 @@ type UserCredentials struct { // The file struct for Infisical config file type ConfigFile struct { - LoggedInUserEmail string `json:"loggedInUserEmail"` - LoggedInUserDomain string `json:"LoggedInUserDomain,omitempty"` - LoggedInUsers []LoggedInUser `json:"loggedInUsers,omitempty"` - VaultBackendType string `json:"vaultBackendType,omitempty"` + LoggedInUserEmail string `json:"loggedInUserEmail"` + LoggedInUserDomain string `json:"LoggedInUserDomain,omitempty"` + LoggedInUsers []LoggedInUser `json:"loggedInUsers,omitempty"` + VaultBackendType string `json:"vaultBackendType,omitempty"` + VaultBackendPassphrase string `json:"vaultBackendPassphrase,omitempty"` + Domains []string `json:"domains,omitempty"` } type LoggedInUser struct { @@ -28,6 +30,7 @@ type SingleEnvironmentVariable struct { Value string `json:"value"` Type string `json:"type"` ID string `json:"_id"` + SecretPath string `json:"secretPath"` Tags []struct { ID string `json:"_id"` Name string `json:"name"` @@ -35,6 +38,7 @@ type SingleEnvironmentVariable struct { Workspace string `json:"workspace"` } `json:"tags"` Comment string `json:"comment"` + Etag string `json:"Etag"` } type PlaintextSecretResult struct { @@ -60,8 +64,9 @@ type DynamicSecretLease struct { } type TokenDetails struct { - Type string - Token string + Type string + Token string + Source string } type SingleFolder struct { @@ -99,6 +104,13 @@ type GetAllSecretsParameters struct { SecretsPath string IncludeImport bool Recursive bool + ExpandSecretReferences bool +} + +type InjectableEnvironmentResult struct { + Variables []string + ETag string + SecretsCount int } type GetAllFoldersParameters struct { @@ -134,3 +146,16 @@ type MachineIdentityCredentials struct { ClientId string ClientSecret string } + +type SecretSetOperation struct { + SecretKey string + SecretValue string + SecretOperation string +} + +type BackupSecretKeyRing struct { + ProjectID string `json:"projectId"` + Environment string `json:"environment"` + SecretPath string `json:"secretPath"` + Secrets []SingleEnvironmentVariable +} diff --git a/cli/packages/util/agent.go b/cli/packages/util/agent.go index 188ae5de25..215e43551b 100644 --- a/cli/packages/util/agent.go +++ b/cli/packages/util/agent.go @@ -24,7 +24,7 @@ func 
ConvertPollingIntervalToTime(pollingInterval string) (time.Duration, error) switch unit { case "s": if number < 60 { - return 0, fmt.Errorf("polling interval should be at least 60 seconds") + return 0, fmt.Errorf("polling interval must be at least 60 seconds") } return time.Duration(number) * time.Second, nil case "m": diff --git a/cli/packages/util/auth.go b/cli/packages/util/auth.go new file mode 100644 index 0000000000..cdcd7b50a1 --- /dev/null +++ b/cli/packages/util/auth.go @@ -0,0 +1,45 @@ +package util + +type AuthStrategyType string + +var AuthStrategy = struct { + UNIVERSAL_AUTH AuthStrategyType + KUBERNETES_AUTH AuthStrategyType + AZURE_AUTH AuthStrategyType + GCP_ID_TOKEN_AUTH AuthStrategyType + GCP_IAM_AUTH AuthStrategyType + AWS_IAM_AUTH AuthStrategyType + OIDC_AUTH AuthStrategyType +}{ + UNIVERSAL_AUTH: "universal-auth", + KUBERNETES_AUTH: "kubernetes", + AZURE_AUTH: "azure", + GCP_ID_TOKEN_AUTH: "gcp-id-token", + GCP_IAM_AUTH: "gcp-iam", + AWS_IAM_AUTH: "aws-iam", + OIDC_AUTH: "oidc-auth", +} + +var AVAILABLE_AUTH_STRATEGIES = []AuthStrategyType{ + AuthStrategy.UNIVERSAL_AUTH, + AuthStrategy.KUBERNETES_AUTH, + AuthStrategy.AZURE_AUTH, + AuthStrategy.GCP_ID_TOKEN_AUTH, + AuthStrategy.GCP_IAM_AUTH, + AuthStrategy.AWS_IAM_AUTH, + AuthStrategy.OIDC_AUTH, +} + +func IsAuthMethodValid(authMethod string, allowUserAuth bool) (isValid bool, strategy AuthStrategyType) { + + if authMethod == "user" && allowUserAuth { + return true, "" + } + + for _, strategy := range AVAILABLE_AUTH_STRATEGIES { + if string(strategy) == authMethod { + return true, strategy + } + } + return false, "" +} diff --git a/cli/packages/util/common.go b/cli/packages/util/common.go index 2b57383eff..55907da9de 100644 --- a/cli/packages/util/common.go +++ b/cli/packages/util/common.go @@ -4,6 +4,8 @@ import ( "fmt" "net/http" "os" + + "github.com/Infisical/infisical-merge/packages/config" ) func GetHomeDir() (string, error) { @@ -21,7 +23,7 @@ func WriteToFile(fileName string, dataToWrite []byte, filePerm os.FileMode) erro return nil } -func CheckIsConnectedToInternet() (ok bool) { - _, err := http.Get("http://clients3.google.com/generate_204") +func ValidateInfisicalAPIConnection() (ok bool) { + _, err := http.Get(fmt.Sprintf("%v/status", config.INFISICAL_URL)) return err == nil } diff --git a/cli/packages/util/config.go b/cli/packages/util/config.go index 2216098523..02030e1fab 100644 --- a/cli/packages/util/config.go +++ b/cli/packages/util/config.go @@ -1,6 +1,7 @@ package util import ( + "encoding/base64" "encoding/json" "errors" "fmt" @@ -50,10 +51,11 @@ func WriteInitalConfig(userCredentials *models.UserCredentials) error { } configFile := models.ConfigFile{ - LoggedInUserEmail: userCredentials.Email, - LoggedInUserDomain: config.INFISICAL_URL, - LoggedInUsers: existingConfigFile.LoggedInUsers, - VaultBackendType: existingConfigFile.VaultBackendType, + LoggedInUserEmail: userCredentials.Email, + LoggedInUserDomain: config.INFISICAL_URL, + LoggedInUsers: existingConfigFile.LoggedInUsers, + VaultBackendType: existingConfigFile.VaultBackendType, + VaultBackendPassphrase: existingConfigFile.VaultBackendPassphrase, } configFileMarshalled, err := json.Marshal(configFile) @@ -215,6 +217,14 @@ func GetConfigFile() (models.ConfigFile, error) { return models.ConfigFile{}, err } + if configFile.VaultBackendPassphrase != "" { + decodedPassphrase, err := base64.StdEncoding.DecodeString(configFile.VaultBackendPassphrase) + if err != nil { + return models.ConfigFile{}, fmt.Errorf("GetConfigFile: Unable to decode base64 
passphrase [err=%s]", err) + } + os.Setenv("INFISICAL_VAULT_FILE_PASSPHRASE", string(decodedPassphrase)) + } + return configFile, nil } @@ -244,10 +254,5 @@ func WriteConfigFile(configFile *models.ConfigFile) error { return fmt.Errorf("writeConfigFile: Unable to write to file [err=%s]", err) } - if err != nil { - return fmt.Errorf("writeConfigFile: unable to write config file because an error occurred when write the config to file [err=%s]", err) - - } - return nil } diff --git a/cli/packages/util/constants.go b/cli/packages/util/constants.go index 311a4b0d97..8b4c586e68 100644 --- a/cli/packages/util/constants.go +++ b/cli/packages/util/constants.go @@ -1,23 +1,45 @@ package util const ( - CONFIG_FILE_NAME = "infisical-config.json" - CONFIG_FOLDER_NAME = ".infisical" - INFISICAL_DEFAULT_API_URL = "https://app.infisical.com/api" - INFISICAL_DEFAULT_URL = "https://app.infisical.com" - INFISICAL_WORKSPACE_CONFIG_FILE_NAME = ".infisical.json" - INFISICAL_TOKEN_NAME = "INFISICAL_TOKEN" + CONFIG_FILE_NAME = "infisical-config.json" + CONFIG_FOLDER_NAME = ".infisical" + INFISICAL_DEFAULT_US_URL = "https://app.infisical.com" + INFISICAL_DEFAULT_EU_URL = "https://eu.infisical.com" + INFISICAL_WORKSPACE_CONFIG_FILE_NAME = ".infisical.json" + INFISICAL_TOKEN_NAME = "INFISICAL_TOKEN" + INFISICAL_UNIVERSAL_AUTH_ACCESS_TOKEN_NAME = "INFISICAL_UNIVERSAL_AUTH_ACCESS_TOKEN" + INFISICAL_VAULT_FILE_PASSPHRASE_ENV_NAME = "INFISICAL_VAULT_FILE_PASSPHRASE" // This works because we've forked the keyring package and added support for this env variable. This explains why you won't find any occurrences of it in the CLI codebase. + + VAULT_BACKEND_AUTO_MODE = "auto" + VAULT_BACKEND_FILE_MODE = "file" + + // Universal Auth INFISICAL_UNIVERSAL_AUTH_CLIENT_ID_NAME = "INFISICAL_UNIVERSAL_AUTH_CLIENT_ID" INFISICAL_UNIVERSAL_AUTH_CLIENT_SECRET_NAME = "INFISICAL_UNIVERSAL_AUTH_CLIENT_SECRET" - INFISICAL_UNIVERSAL_AUTH_ACCESS_TOKEN_NAME = "INFISICAL_UNIVERSAL_AUTH_ACCESS_TOKEN" - SECRET_TYPE_PERSONAL = "personal" - SECRET_TYPE_SHARED = "shared" - KEYRING_SERVICE_NAME = "infisical" - PERSONAL_SECRET_TYPE_NAME = "personal" - SHARED_SECRET_TYPE_NAME = "shared" + + // Kubernetes auth + INFISICAL_KUBERNETES_SERVICE_ACCOUNT_TOKEN_NAME = "INFISICAL_KUBERNETES_SERVICE_ACCOUNT_TOKEN_PATH" + + // GCP Auth + INFISICAL_GCP_IAM_SERVICE_ACCOUNT_KEY_FILE_PATH_NAME = "INFISICAL_GCP_IAM_SERVICE_ACCOUNT_KEY_FILE_PATH" + + // OIDC Auth + INFISICAL_OIDC_AUTH_JWT_NAME = "INFISICAL_OIDC_AUTH_JWT" + + // Generic env variable used for auth methods that require a machine identity ID + INFISICAL_MACHINE_IDENTITY_ID_NAME = "INFISICAL_MACHINE_IDENTITY_ID" + + SECRET_TYPE_PERSONAL = "personal" + SECRET_TYPE_SHARED = "shared" + KEYRING_SERVICE_NAME = "infisical" + PERSONAL_SECRET_TYPE_NAME = "personal" + SHARED_SECRET_TYPE_NAME = "shared" SERVICE_TOKEN_IDENTIFIER = "service-token" UNIVERSAL_AUTH_TOKEN_IDENTIFIER = "universal-auth-token" + + INFISICAL_BACKUP_SECRET = "infisical-backup-secrets" // akhilmhdh: @deprecated remove in version v0.30 + INFISICAL_BACKUP_SECRET_ENCRYPTION_KEY = "infisical-backup-secret-encryption-key" ) var ( diff --git a/cli/packages/util/credentials.go b/cli/packages/util/credentials.go index 4856de35a3..cb5b94080c 100644 --- a/cli/packages/util/credentials.go +++ b/cli/packages/util/credentials.go @@ -52,10 +52,6 @@ func GetUserCredsFromKeyRing(userEmail string) (credentials models.UserCredentia return models.UserCredentials{}, fmt.Errorf("getUserCredsFromKeyRing: Something went wrong when unmarshalling user creds [err=%s]", 
err) } - if err != nil { - return models.UserCredentials{}, fmt.Errorf("GetUserCredsFromKeyRing: Unable to store user credentials [err=%s]", err) - } - return userCredentials, err } @@ -75,7 +71,7 @@ func GetCurrentLoggedInUserDetails() (LoggedInUserDetails, error) { if strings.Contains(err.Error(), "credentials not found in system keyring") { return LoggedInUserDetails{}, errors.New("we couldn't find your logged in details, try running [infisical login] then try again") } else { - return LoggedInUserDetails{}, fmt.Errorf("failed to fetch creditnals from keyring because [err=%s]", err) + return LoggedInUserDetails{}, fmt.Errorf("failed to fetch credentials from keyring because [err=%s]", err) } } diff --git a/cli/packages/util/exec.go b/cli/packages/util/exec.go new file mode 100644 index 0000000000..2cdb50f424 --- /dev/null +++ b/cli/packages/util/exec.go @@ -0,0 +1,92 @@ +package util + +import ( + "fmt" + "os" + "os/exec" + "os/signal" + "runtime" + "syscall" +) + +func RunCommand(singleCommand string, args []string, env []string, waitForExit bool) (*exec.Cmd, error) { + var c *exec.Cmd + var err error + + if singleCommand != "" { + c, err = RunCommandFromString(singleCommand, env, waitForExit) + } else { + c, err = RunCommandFromArgs(args, env, waitForExit) + } + + return c, err +} + +func IsProcessRunning(p *os.Process) bool { + err := p.Signal(syscall.Signal(0)) + return err == nil +} + +// For "infisical run -- COMMAND" +func RunCommandFromArgs(args []string, env []string, waitForExit bool) (*exec.Cmd, error) { + cmd := exec.Command(args[0], args[1:]...) + cmd.Stdin = os.Stdin + cmd.Stdout = os.Stdout + cmd.Stderr = os.Stderr + cmd.Env = env + + err := execCommand(cmd, waitForExit) + + return cmd, err +} + +func execCommand(cmd *exec.Cmd, waitForExit bool) error { + sigChannel := make(chan os.Signal, 1) + signal.Notify(sigChannel) + + if err := cmd.Start(); err != nil { + return err + } + + go func() { + for { + sig := <-sigChannel + _ = cmd.Process.Signal(sig) // process all sigs + } + }() + + if !waitForExit { + return nil + } + + if err := cmd.Wait(); err != nil { + _ = cmd.Process.Signal(os.Kill) + return fmt.Errorf("failed to wait for command termination: %v", err) + } + + waitStatus := cmd.ProcessState.Sys().(syscall.WaitStatus) + os.Exit(waitStatus.ExitStatus()) + return nil +} + +// For "infisical run --command=COMMAND" +func RunCommandFromString(command string, env []string, waitForExit bool) (*exec.Cmd, error) { + shell := [2]string{"sh", "-c"} + if runtime.GOOS == "windows" { + shell = [2]string{"cmd", "/C"} + } else { + currentShell := os.Getenv("SHELL") + if currentShell != "" { + shell[0] = currentShell + } + } + + cmd := exec.Command(shell[0], shell[1], command) // #nosec G204 nosemgrep: semgrep_configs.prohibit-exec-command + cmd.Env = env + cmd.Stdin = os.Stdin + cmd.Stdout = os.Stdout + cmd.Stderr = os.Stderr + + err := execCommand(cmd, waitForExit) + return cmd, err +} diff --git a/cli/packages/util/folders.go b/cli/packages/util/folders.go index 18fe5c888d..c7f6de6309 100644 --- a/cli/packages/util/folders.go +++ b/cli/packages/util/folders.go @@ -172,19 +172,28 @@ func GetFoldersViaMachineIdentity(accessToken string, workspaceId string, envSlu // CreateFolder creates a folder in Infisical func CreateFolder(params models.CreateFolderParameters) (models.SingleFolder, error) { - loggedInUserDetails, err := GetCurrentLoggedInUserDetails() - if err != nil { - return models.SingleFolder{}, err - } - if loggedInUserDetails.LoginExpired { - 
PrintErrorMessageAndExit("Your login session has expired, please run [infisical login] and try again") + // If no token is provided, we will try to get the token from the current logged in user + if params.InfisicalToken == "" { + RequireLogin() + RequireLocalWorkspaceFile() + loggedInUserDetails, err := GetCurrentLoggedInUserDetails() + + if err != nil { + return models.SingleFolder{}, err + } + + if loggedInUserDetails.LoginExpired { + PrintErrorMessageAndExit("Your login session has expired, please run [infisical login] and try again") + } + + params.InfisicalToken = loggedInUserDetails.UserCredentials.JTWToken } // set up resty client httpClient := resty.New() httpClient. - SetAuthToken(loggedInUserDetails.UserCredentials.JTWToken). + SetAuthToken(params.InfisicalToken). SetHeader("Accept", "application/json"). SetHeader("Content-Type", "application/json") @@ -209,19 +218,29 @@ func CreateFolder(params models.CreateFolderParameters) (models.SingleFolder, er } func DeleteFolder(params models.DeleteFolderParameters) ([]models.SingleFolder, error) { - loggedInUserDetails, err := GetCurrentLoggedInUserDetails() - if err != nil { - return nil, err - } - if loggedInUserDetails.LoginExpired { - PrintErrorMessageAndExit("Your login session has expired, please run [infisical login] and try again") + // If no token is provided, we will try to get the token from the current logged in user + if params.InfisicalToken == "" { + RequireLogin() + RequireLocalWorkspaceFile() + + loggedInUserDetails, err := GetCurrentLoggedInUserDetails() + + if err != nil { + return nil, err + } + + if loggedInUserDetails.LoginExpired { + PrintErrorMessageAndExit("Your login session has expired, please run [infisical login] and try again") + } + + params.InfisicalToken = loggedInUserDetails.UserCredentials.JTWToken } // set up resty client httpClient := resty.New() httpClient. - SetAuthToken(loggedInUserDetails.UserCredentials.JTWToken). + SetAuthToken(params.InfisicalToken). SetHeader("Accept", "application/json"). SetHeader("Content-Type", "application/json") diff --git a/cli/packages/util/helper.go b/cli/packages/util/helper.go index 9e50525302..11a1e3e0a1 100644 --- a/cli/packages/util/helper.go +++ b/cli/packages/util/helper.go @@ -4,7 +4,9 @@ import ( "bytes" "crypto/sha256" "encoding/base64" + "encoding/hex" "fmt" + "math/rand" "os" "os/exec" "path" @@ -25,6 +27,8 @@ type DecodedSymmetricEncryptionDetails = struct { Key []byte } +const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789" + func GetBase64DecodedSymmetricEncryptionDetails(key string, cipher string, IV string, tag string) (DecodedSymmetricEncryptionDetails, error) { cipherx, err := base64.StdEncoding.DecodeString(cipher) if err != nil { @@ -83,11 +87,15 @@ func GetInfisicalToken(cmd *cobra.Command) (token *models.TokenDetails, err erro return nil, err } + var source = "--token flag" + if infisicalToken == "" { // If no flag is passed, we first check for the universal auth access token env variable. infisicalToken = os.Getenv(INFISICAL_UNIVERSAL_AUTH_ACCESS_TOKEN_NAME) + source = fmt.Sprintf("%s environment variable", INFISICAL_UNIVERSAL_AUTH_ACCESS_TOKEN_NAME) if infisicalToken == "" { // If it's still empty after the first env check, we check for the service token env variable. 
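For reference, GetInfisicalToken resolves a token in a fixed order: the explicit --token flag, then the INFISICAL_UNIVERSAL_AUTH_ACCESS_TOKEN environment variable, then the INFISICAL_TOKEN environment variable, recording the winner in the new Source field. A minimal standalone sketch of that precedence; resolveToken and tokenDetails are illustrative names, not part of the CLI:

```go
package main

import (
	"fmt"
	"os"
)

type tokenDetails struct {
	Token  string
	Source string
}

// resolveToken mirrors the precedence used by GetInfisicalToken: an explicit
// --token flag wins, then the universal auth access token env var, then the
// service token env var.
func resolveToken(flagValue string) *tokenDetails {
	if flagValue != "" {
		return &tokenDetails{Token: flagValue, Source: "--token flag"}
	}
	if v := os.Getenv("INFISICAL_UNIVERSAL_AUTH_ACCESS_TOKEN"); v != "" {
		return &tokenDetails{Token: v, Source: "INFISICAL_UNIVERSAL_AUTH_ACCESS_TOKEN environment variable"}
	}
	if v := os.Getenv("INFISICAL_TOKEN"); v != "" {
		return &tokenDetails{Token: v, Source: "INFISICAL_TOKEN environment variable"}
	}
	return nil // no token configured; the CLI then falls back to logged-in user credentials
}

func main() {
	if t := resolveToken(""); t != nil {
		fmt.Printf("using token from %s\n", t.Source)
	} else {
		fmt.Println("no token provided")
	}
}
```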
infisicalToken = os.Getenv(INFISICAL_TOKEN_NAME) + source = fmt.Sprintf("%s environment variable", INFISICAL_TOKEN_NAME) } } @@ -97,14 +105,16 @@ func GetInfisicalToken(cmd *cobra.Command) (token *models.TokenDetails, err erro if strings.HasPrefix(infisicalToken, "st.") { return &models.TokenDetails{ - Type: SERVICE_TOKEN_IDENTIFIER, - Token: infisicalToken, + Type: SERVICE_TOKEN_IDENTIFIER, + Token: infisicalToken, + Source: source, }, nil } return &models.TokenDetails{ - Type: UNIVERSAL_AUTH_TOKEN_IDENTIFIER, - Token: infisicalToken, + Type: UNIVERSAL_AUTH_TOKEN_IDENTIFIER, + Token: infisicalToken, + Source: source, }, nil } @@ -123,7 +133,7 @@ func UniversalAuthLogin(clientId string, clientSecret string) (api.UniversalAuth return tokenResponse, nil } -func RenewUniversalAuthAccessToken(accessToken string) (string, error) { +func RenewMachineIdentityAccessToken(accessToken string) (string, error) { httpClient := resty.New() httpClient.SetRetryCount(10000). @@ -134,7 +144,7 @@ func RenewUniversalAuthAccessToken(accessToken string) (string, error) { AccessToken: accessToken, } - tokenResponse, err := api.CallUniversalAuthRefreshAccessToken(httpClient, request) + tokenResponse, err := api.CallMachineIdentityRefreshAccessToken(httpClient, request) if err != nil { return "", err } @@ -246,3 +256,65 @@ func AppendAPIEndpoint(address string) string { } return address + "/api" } + +func ReadFileAsString(filePath string) (string, error) { + fileBytes, err := os.ReadFile(filePath) + + if err != nil { + return "", err + } + + return string(fileBytes), nil + +} + +func GetEnvVarOrFileContent(envName string, filePath string) (string, error) { + // First check if the environment variable is set + if envVarValue := os.Getenv(envName); envVarValue != "" { + return envVarValue, nil + } + + // If it's not set, try to read the file + fileContent, err := ReadFileAsString(filePath) + + if err != nil { + return "", fmt.Errorf("unable to read file content from file path '%s' [err=%v]", filePath, err) + } + + return fileContent, nil +} + +func GetCmdFlagOrEnv(cmd *cobra.Command, flag, envName string) (string, error) { + value, flagsErr := cmd.Flags().GetString(flag) + if flagsErr != nil { + return "", flagsErr + } + if value == "" { + value = os.Getenv(envName) + } + if value == "" { + return "", fmt.Errorf("please provide %s flag", flag) + } + return value, nil +} + +func GenerateRandomString(length int) string { + b := make([]byte, length) + for i := range b { + b[i] = charset[rand.Intn(len(charset))] + } + return string(b) +} + +func GenerateETagFromSecrets(secrets []models.SingleEnvironmentVariable) string { + sortedSecrets := SortSecretsByKeys(secrets) + content := []byte{} + + for _, secret := range sortedSecrets { + content = append(content, []byte(secret.Key)...) + content = append(content, []byte(secret.Value)...) 
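GenerateETagFromSecrets, which closes just after this point, derives a deterministic ETag by hashing the concatenated key/value stream of the sorted secrets, so consumers such as the agent can compare a single hash instead of diffing whole secret sets. A self-contained sketch of the same scheme; it assumes SortSecretsByKeys simply orders secrets by key ascending, which is not shown in this hunk:

```go
package main

import (
	"crypto/sha256"
	"encoding/hex"
	"fmt"
	"sort"
)

type secret struct{ Key, Value string }

func etag(secrets []secret) string {
	// Sort first so the hash is independent of the order secrets arrive in.
	sorted := append([]secret(nil), secrets...)
	sort.Slice(sorted, func(i, j int) bool { return sorted[i].Key < sorted[j].Key })

	var content []byte
	for _, s := range sorted {
		content = append(content, s.Key...)
		content = append(content, s.Value...)
	}
	sum := sha256.Sum256(content)
	// Quoted, in the style of HTTP strong ETags.
	return fmt.Sprintf("%q", hex.EncodeToString(sum[:]))
}

func main() {
	a := []secret{{"DB_URL", "postgres://db:5432/app"}, {"API_KEY", "abc"}}
	b := []secret{{"API_KEY", "abc"}, {"DB_URL", "postgres://db:5432/app"}}
	fmt.Println(etag(a) == etag(b)) // true: same secrets, different input order
}
```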
+ } + + hash := sha256.Sum256(content) + return fmt.Sprintf(`"%s"`, hex.EncodeToString(hash[:])) +} diff --git a/cli/packages/util/init.go b/cli/packages/util/init.go index 33350f3b77..4aecb2ab39 100644 --- a/cli/packages/util/init.go +++ b/cli/packages/util/init.go @@ -4,6 +4,7 @@ import ( "fmt" "github.com/Infisical/infisical-merge/packages/api" + "github.com/Infisical/infisical-merge/packages/config" "github.com/Infisical/infisical-merge/packages/models" ) @@ -11,7 +12,7 @@ func GetOrganizationsNameList(organizationResponse api.GetOrganizationsResponse) organizations := organizationResponse.Organizations if len(organizations) == 0 { - message := fmt.Sprintf("You don't have any organization created in Infisical. You must first create a organization at %s", INFISICAL_DEFAULT_URL) + message := fmt.Sprintf("You don't have any organization created in Infisical. You must first create a organization at %s", config.INFISICAL_URL) PrintErrorMessageAndExit(message) } @@ -37,7 +38,7 @@ func GetWorkspacesInOrganization(workspaceResponse api.GetWorkSpacesResponse, or } if len(filteredWorkspaces) == 0 { - message := fmt.Sprintf("You don't have any projects created in Infisical organization. You must first create a project at %s", INFISICAL_DEFAULT_URL) + message := fmt.Sprintf("You don't have any projects created in Infisical organization. You must first create a project at %s", config.INFISICAL_URL) PrintErrorMessageAndExit(message) } diff --git a/cli/packages/util/keyringwrapper.go b/cli/packages/util/keyringwrapper.go index 3bf2dd6c44..9c8211a3c1 100644 --- a/cli/packages/util/keyringwrapper.go +++ b/cli/packages/util/keyringwrapper.go @@ -1,6 +1,10 @@ package util import ( + "encoding/base64" + "fmt" + + "github.com/rs/zerolog/log" "github.com/zalando/go-keyring" ) @@ -20,16 +24,39 @@ func SetValueInKeyring(key, value string) error { PrintErrorAndExit(1, err, "Unable to get current vault. Tip: run [infisical rest] then try again") } - return keyring.Set(currentVaultBackend, MAIN_KEYRING_SERVICE, key, value) + err = keyring.Set(currentVaultBackend, MAIN_KEYRING_SERVICE, key, value) + + if err != nil { + log.Debug().Msg(fmt.Sprintf("Error while setting default keyring: %v", err)) + configFile, _ := GetConfigFile() + + if configFile.VaultBackendPassphrase == "" { + encodedPassphrase := base64.StdEncoding.EncodeToString([]byte(GenerateRandomString(10))) // generate random passphrase + configFile.VaultBackendPassphrase = encodedPassphrase + configFile.VaultBackendType = VAULT_BACKEND_FILE_MODE + err = WriteConfigFile(&configFile) + if err != nil { + return err + } + + // We call this function at last to trigger the environment variable to be set + GetConfigFile() + } + + err = keyring.Set(VAULT_BACKEND_FILE_MODE, MAIN_KEYRING_SERVICE, key, value) + log.Debug().Msg(fmt.Sprintf("Error while setting file keyring: %v", err)) + } + + return err } func GetValueInKeyring(key string) (string, error) { currentVaultBackend, err := GetCurrentVaultBackend() if err != nil { - PrintErrorAndExit(1, err, "Unable to get current vault. Tip: run [infisical rest] then try again") + PrintErrorAndExit(1, err, "Unable to get current vault. 
Tip: run [infisical reset] then try again") } - return keyring.Get(currentVaultBackend, MAIN_KEYRING_SERVICE, key) + value, err := keyring.Get(currentVaultBackend, MAIN_KEYRING_SERVICE, key) + + if err != nil { + value, err = keyring.Get(VAULT_BACKEND_FILE_MODE, MAIN_KEYRING_SERVICE, key) + } + + return value, err } func DeleteValueInKeyring(key string) error { diff --git a/cli/packages/util/secrets.go b/cli/packages/util/secrets.go index 27f0636a90..5e19ea6648 100644 --- a/cli/packages/util/secrets.go +++ b/cli/packages/util/secrets.go @@ -1,26 +1,28 @@ package util import ( + "crypto/rand" "encoding/base64" + "encoding/hex" "encoding/json" "errors" "fmt" "os" - "path" - "regexp" "strings" + "unicode" "github.com/Infisical/infisical-merge/packages/api" "github.com/Infisical/infisical-merge/packages/crypto" "github.com/Infisical/infisical-merge/packages/models" "github.com/go-resty/resty/v2" "github.com/rs/zerolog/log" + "github.com/zalando/go-keyring" ) -func GetPlainTextSecretsViaServiceToken(fullServiceToken string, environment string, secretPath string, includeImports bool, recursive bool) ([]models.SingleEnvironmentVariable, api.GetServiceTokenDetailsResponse, error) { +func GetPlainTextSecretsViaServiceToken(fullServiceToken string, environment string, secretPath string, includeImports bool, recursive bool, tagSlugs string, expandSecretReferences bool) ([]models.SingleEnvironmentVariable, error) { serviceTokenParts := strings.SplitN(fullServiceToken, ".", 4) if len(serviceTokenParts) < 4 { - return nil, api.GetServiceTokenDetailsResponse{}, fmt.Errorf("invalid service token entered. Please double check your service token and try again") + return nil, fmt.Errorf("invalid service token entered. Please double check your service token and try again") } serviceToken := fmt.Sprintf("%v.%v.%v", serviceTokenParts[0], serviceTokenParts[1], serviceTokenParts[2]) @@ -32,139 +34,61 @@ func GetPlainTextSecretsViaServiceToken(fullServiceToken string, environment str serviceTokenDetails, err := api.CallGetServiceTokenDetailsV2(httpClient) if err != nil { - return nil, api.GetServiceTokenDetailsResponse{}, fmt.Errorf("unable to get service token details. [err=%v]", err) + return nil, fmt.Errorf("unable to get service token details. 
[err=%v]", err) } // if multiple scopes are there then user needs to specify which environment and secret path if environment == "" { if len(serviceTokenDetails.Scopes) != 1 { - return nil, api.GetServiceTokenDetailsResponse{}, fmt.Errorf("you need to provide the --env for multiple environment scoped token") + return nil, fmt.Errorf("you need to provide the --env for multiple environment scoped token") } else { environment = serviceTokenDetails.Scopes[0].Environment } } - encryptedSecrets, err := api.CallGetSecretsV3(httpClient, api.GetEncryptedSecretsV3Request{ - WorkspaceId: serviceTokenDetails.Workspace, - Environment: environment, - SecretPath: secretPath, - IncludeImport: includeImports, - Recursive: recursive, + rawSecrets, err := api.CallGetRawSecretsV3(httpClient, api.GetRawSecretsV3Request{ + WorkspaceId: serviceTokenDetails.Workspace, + Environment: environment, + SecretPath: secretPath, + IncludeImport: includeImports, + Recursive: recursive, + TagSlugs: tagSlugs, + ExpandSecretReferences: expandSecretReferences, }) - if err != nil { - return nil, api.GetServiceTokenDetailsResponse{}, err - } - - decodedSymmetricEncryptionDetails, err := GetBase64DecodedSymmetricEncryptionDetails(serviceTokenParts[3], serviceTokenDetails.EncryptedKey, serviceTokenDetails.Iv, serviceTokenDetails.Tag) - if err != nil { - return nil, api.GetServiceTokenDetailsResponse{}, fmt.Errorf("unable to decode symmetric encryption details [err=%v]", err) - } - - plainTextWorkspaceKey, err := crypto.DecryptSymmetric([]byte(serviceTokenParts[3]), decodedSymmetricEncryptionDetails.Cipher, decodedSymmetricEncryptionDetails.Tag, decodedSymmetricEncryptionDetails.IV) - if err != nil { - return nil, api.GetServiceTokenDetailsResponse{}, fmt.Errorf("unable to decrypt the required workspace key") - } - - plainTextSecrets, err := GetPlainTextSecrets(plainTextWorkspaceKey, encryptedSecrets.Secrets) - if err != nil { - return nil, api.GetServiceTokenDetailsResponse{}, fmt.Errorf("unable to decrypt your secrets [err=%v]", err) - } - - if includeImports { - plainTextSecrets, err = InjectImportedSecret(plainTextWorkspaceKey, plainTextSecrets, encryptedSecrets.ImportedSecrets) - if err != nil { - return nil, api.GetServiceTokenDetailsResponse{}, err - } - } - - return plainTextSecrets, serviceTokenDetails, nil -} - -func GetPlainTextSecretsViaJTW(JTWToken string, receiversPrivateKey string, workspaceId string, environmentName string, tagSlugs string, secretsPath string, includeImports bool, recursive bool) ([]models.SingleEnvironmentVariable, error) { - httpClient := resty.New() - httpClient.SetAuthToken(JTWToken). - SetHeader("Accept", "application/json") - - request := api.GetEncryptedWorkspaceKeyRequest{ - WorkspaceId: workspaceId, - } - - workspaceKeyResponse, err := api.CallGetEncryptedWorkspaceKey(httpClient, request) - if err != nil { - return nil, fmt.Errorf("unable to get your encrypted workspace key. 
[err=%v]", err) - } - - encryptedWorkspaceKey, err := base64.StdEncoding.DecodeString(workspaceKeyResponse.EncryptedKey) - if err != nil { - HandleError(err, "Unable to get bytes represented by the base64 for encryptedWorkspaceKey") - } - - encryptedWorkspaceKeySenderPublicKey, err := base64.StdEncoding.DecodeString(workspaceKeyResponse.Sender.PublicKey) - if err != nil { - HandleError(err, "Unable to get bytes represented by the base64 for encryptedWorkspaceKeySenderPublicKey") - } - - encryptedWorkspaceKeyNonce, err := base64.StdEncoding.DecodeString(workspaceKeyResponse.Nonce) - if err != nil { - HandleError(err, "Unable to get bytes represented by the base64 for encryptedWorkspaceKeyNonce") - } - - currentUsersPrivateKey, err := base64.StdEncoding.DecodeString(receiversPrivateKey) - if err != nil { - HandleError(err, "Unable to get bytes represented by the base64 for currentUsersPrivateKey") - } - - if len(currentUsersPrivateKey) == 0 || len(encryptedWorkspaceKeySenderPublicKey) == 0 { - log.Debug().Msgf("Missing credentials for generating plainTextEncryptionKey: [currentUsersPrivateKey=%s] [encryptedWorkspaceKeySenderPublicKey=%s]", currentUsersPrivateKey, encryptedWorkspaceKeySenderPublicKey) - PrintErrorMessageAndExit("Some required user credentials are missing to generate your [plainTextEncryptionKey]. Please run [infisical login] then try again") - } - - plainTextWorkspaceKey := crypto.DecryptAsymmetric(encryptedWorkspaceKey, encryptedWorkspaceKeyNonce, encryptedWorkspaceKeySenderPublicKey, currentUsersPrivateKey) - - getSecretsRequest := api.GetEncryptedSecretsV3Request{ - WorkspaceId: workspaceId, - Environment: environmentName, - IncludeImport: includeImports, - Recursive: recursive, - // TagSlugs: tagSlugs, - } - - if secretsPath != "" { - getSecretsRequest.SecretPath = secretsPath - } - - encryptedSecrets, err := api.CallGetSecretsV3(httpClient, getSecretsRequest) if err != nil { return nil, err } - plainTextSecrets, err := GetPlainTextSecrets(plainTextWorkspaceKey, encryptedSecrets.Secrets) - if err != nil { - return nil, fmt.Errorf("unable to decrypt your secrets [err=%v]", err) + plainTextSecrets := []models.SingleEnvironmentVariable{} + + for _, secret := range rawSecrets.Secrets { + plainTextSecrets = append(plainTextSecrets, models.SingleEnvironmentVariable{Key: secret.SecretKey, Value: secret.SecretValue, Type: secret.Type, WorkspaceId: secret.Workspace}) } if includeImports { - plainTextSecrets, err = InjectImportedSecret(plainTextWorkspaceKey, plainTextSecrets, encryptedSecrets.ImportedSecrets) + plainTextSecrets, err = InjectRawImportedSecret(plainTextSecrets, rawSecrets.Imports) if err != nil { return nil, err } } return plainTextSecrets, nil + } -func GetPlainTextSecretsViaMachineIdentity(accessToken string, workspaceId string, environmentName string, secretsPath string, includeImports bool, recursive bool) (models.PlaintextSecretResult, error) { +func GetPlainTextSecretsV3(accessToken string, workspaceId string, environmentName string, secretsPath string, includeImports bool, recursive bool, tagSlugs string, expandSecretReferences bool) (models.PlaintextSecretResult, error) { httpClient := resty.New() httpClient.SetAuthToken(accessToken). 
SetHeader("Accept", "application/json") getSecretsRequest := api.GetRawSecretsV3Request{ - WorkspaceId: workspaceId, - Environment: environmentName, - IncludeImport: includeImports, - Recursive: recursive, - // TagSlugs: tagSlugs, + WorkspaceId: workspaceId, + Environment: environmentName, + IncludeImport: includeImports, + Recursive: recursive, + TagSlugs: tagSlugs, + ExpandSecretReferences: expandSecretReferences, } if secretsPath != "" { @@ -178,12 +102,9 @@ func GetPlainTextSecretsViaMachineIdentity(accessToken string, workspaceId strin } plainTextSecrets := []models.SingleEnvironmentVariable{} - if err != nil { - return models.PlaintextSecretResult{}, fmt.Errorf("unable to decrypt your secrets [err=%v]", err) - } for _, secret := range rawSecrets.Secrets { - plainTextSecrets = append(plainTextSecrets, models.SingleEnvironmentVariable{Key: secret.SecretKey, Value: secret.SecretValue, Type: secret.Type, WorkspaceId: secret.Workspace}) + plainTextSecrets = append(plainTextSecrets, models.SingleEnvironmentVariable{Key: secret.SecretKey, Value: secret.SecretValue, Type: secret.Type, WorkspaceId: secret.Workspace, SecretPath: secret.SecretPath}) } if includeImports { @@ -199,6 +120,37 @@ func GetPlainTextSecretsViaMachineIdentity(accessToken string, workspaceId strin }, nil } +func GetSinglePlainTextSecretByNameV3(accessToken string, workspaceId string, environmentName string, secretsPath string, secretName string) (models.SingleEnvironmentVariable, string, error) { + httpClient := resty.New() + httpClient.SetAuthToken(accessToken). + SetHeader("Accept", "application/json") + + getSecretsRequest := api.GetRawSecretV3ByNameRequest{ + WorkspaceID: workspaceId, + Environment: environmentName, + SecretName: secretName, + SecretPath: secretsPath, + } + + rawSecret, err := api.CallFetchSingleSecretByName(httpClient, getSecretsRequest) + + if err != nil { + return models.SingleEnvironmentVariable{}, "", err + } + + formattedSecrets := models.SingleEnvironmentVariable{ + Key: rawSecret.Secret.SecretKey, + WorkspaceId: rawSecret.Secret.Workspace, + Value: rawSecret.Secret.SecretValue, + Type: rawSecret.Secret.Type, + ID: rawSecret.Secret.ID, + Comment: rawSecret.Secret.SecretComment, + SecretPath: rawSecret.Secret.SecretPath, + } + + return formattedSecrets, rawSecret.ETag, nil +} + func CreateDynamicSecretLease(accessToken string, projectSlug string, environmentName string, secretsPath string, slug string, ttl string) (models.DynamicSecretLease, error) { httpClient := resty.New() httpClient.SetAuthToken(accessToken). 
@@ -224,34 +176,6 @@ func CreateDynamicSecretLease(accessToken string, projectSlug string, environmen }, nil } -func InjectImportedSecret(plainTextWorkspaceKey []byte, secrets []models.SingleEnvironmentVariable, importedSecrets []api.ImportedSecretV3) ([]models.SingleEnvironmentVariable, error) { - if importedSecrets == nil { - return secrets, nil - } - - hasOverriden := make(map[string]bool) - for _, sec := range secrets { - hasOverriden[sec.Key] = true - } - - for i := len(importedSecrets) - 1; i >= 0; i-- { - importSec := importedSecrets[i] - plainTextImportedSecrets, err := GetPlainTextSecrets(plainTextWorkspaceKey, importSec.Secrets) - - if err != nil { - return nil, fmt.Errorf("unable to decrypt your imported secrets [err=%v]", err) - } - - for _, sec := range plainTextImportedSecrets { - if _, ok := hasOverriden[sec.Key]; !ok { - secrets = append(secrets, sec) - hasOverriden[sec.Key] = true - } - } - } - return secrets, nil -} - func InjectRawImportedSecret(secrets []models.SingleEnvironmentVariable, importedSecrets []api.ImportedRawSecretV3) ([]models.SingleEnvironmentVariable, error) { if importedSecrets == nil { return secrets, nil @@ -307,32 +231,33 @@ func FilterSecretsByTag(plainTextSecrets []models.SingleEnvironmentVariable, tag } func GetAllEnvironmentVariables(params models.GetAllSecretsParameters, projectConfigFilePath string) ([]models.SingleEnvironmentVariable, error) { - isConnected := CheckIsConnectedToInternet() var secretsToReturn []models.SingleEnvironmentVariable // var serviceTokenDetails api.GetServiceTokenDetailsResponse var errorToReturn error if params.InfisicalToken == "" && params.UniversalAuthAccessToken == "" { - if isConnected { - log.Debug().Msg("GetAllEnvironmentVariables: Connected to internet, checking logged in creds") - - if projectConfigFilePath == "" { - RequireLocalWorkspaceFile() - } else { - ValidateWorkspaceFile(projectConfigFilePath) - } - - RequireLogin() + if projectConfigFilePath == "" { + RequireLocalWorkspaceFile() + } else { + ValidateWorkspaceFile(projectConfigFilePath) } + RequireLogin() + log.Debug().Msg("GetAllEnvironmentVariables: Trying to fetch secrets using logged in details") loggedInUserDetails, err := GetCurrentLoggedInUserDetails() + isConnected := ValidateInfisicalAPIConnection() + + if isConnected { + log.Debug().Msg("GetAllEnvironmentVariables: Connected to Infisical instance, checking logged in creds") + } + if err != nil { return nil, err } - if loggedInUserDetails.LoginExpired { + if isConnected && loggedInUserDetails.LoginExpired { PrintErrorMessageAndExit("Your login session has expired, please run [infisical login] and try again") } @@ -358,29 +283,37 @@ func GetAllEnvironmentVariables(params models.GetAllSecretsParameters, projectCo infisicalDotJson.WorkspaceId = params.WorkspaceId } - secretsToReturn, errorToReturn = GetPlainTextSecretsViaJTW(loggedInUserDetails.UserCredentials.JTWToken, loggedInUserDetails.UserCredentials.PrivateKey, infisicalDotJson.WorkspaceId, - params.Environment, params.TagSlugs, params.SecretsPath, params.IncludeImport, params.Recursive) - log.Debug().Msgf("GetAllEnvironmentVariables: Trying to fetch secrets JTW token [err=%s]", errorToReturn) + res, err := GetPlainTextSecretsV3(loggedInUserDetails.UserCredentials.JTWToken, infisicalDotJson.WorkspaceId, + params.Environment, params.SecretsPath, params.IncludeImport, params.Recursive, params.TagSlugs, true) + log.Debug().Msgf("GetAllEnvironmentVariables: Trying to fetch secrets JTW token [err=%s]", err) - backupSecretsEncryptionKey := 
[]byte(loggedInUserDetails.UserCredentials.PrivateKey)[0:32] - if errorToReturn == nil { - WriteBackupSecrets(infisicalDotJson.WorkspaceId, params.Environment, backupSecretsEncryptionKey, secretsToReturn) + if err == nil { + backupEncryptionKey, err := GetBackupEncryptionKey() + if err != nil { + return nil, err + } + WriteBackupSecrets(infisicalDotJson.WorkspaceId, params.Environment, params.SecretsPath, backupEncryptionKey, res.Secrets) } + secretsToReturn = res.Secrets + errorToReturn = err // only attempt to serve cached secrets if no internet connection and if at least one secret cached if !isConnected { - backedSecrets, err := ReadBackupSecrets(infisicalDotJson.WorkspaceId, params.Environment, backupSecretsEncryptionKey) - if len(backedSecrets) > 0 { - PrintWarning("Unable to fetch latest secret(s) due to connection error, serving secrets from last successful fetch. For more info, run with --debug") - secretsToReturn = backedSecrets - errorToReturn = err + backupEncryptionKey, _ := GetBackupEncryptionKey() + if backupEncryptionKey != nil { + backedUpSecrets, err := ReadBackupSecrets(infisicalDotJson.WorkspaceId, params.Environment, params.SecretsPath, backupEncryptionKey) + if len(backedUpSecrets) > 0 { + PrintWarning("Unable to fetch the latest secret(s) due to connection error, serving secrets from last successful fetch. For more info, run with --debug") + secretsToReturn = backedUpSecrets + errorToReturn = err + } } } } else { if params.InfisicalToken != "" { log.Debug().Msg("Trying to fetch secrets using service token") - secretsToReturn, _, errorToReturn = GetPlainTextSecretsViaServiceToken(params.InfisicalToken, params.Environment, params.SecretsPath, params.IncludeImport, params.Recursive) + secretsToReturn, errorToReturn = GetPlainTextSecretsViaServiceToken(params.InfisicalToken, params.Environment, params.SecretsPath, params.IncludeImport, params.Recursive, params.TagSlugs, params.ExpandSecretReferences) } else if params.UniversalAuthAccessToken != "" { if params.WorkspaceId == "" { @@ -388,7 +321,7 @@ func GetAllEnvironmentVariables(params models.GetAllSecretsParameters, projectCo } log.Debug().Msg("Trying to fetch secrets using universal auth") - res, err := GetPlainTextSecretsViaMachineIdentity(params.UniversalAuthAccessToken, params.WorkspaceId, params.Environment, params.SecretsPath, params.IncludeImport, params.Recursive) + res, err := GetPlainTextSecretsV3(params.UniversalAuthAccessToken, params.WorkspaceId, params.Environment, params.SecretsPath, params.IncludeImport, params.Recursive, params.TagSlugs, params.ExpandSecretReferences) errorToReturn = err secretsToReturn = res.Secrets @@ -398,44 +331,6 @@ func GetAllEnvironmentVariables(params models.GetAllSecretsParameters, projectCo return secretsToReturn, errorToReturn } -var secRefRegex = regexp.MustCompile(`\${([^\}]*)}`) - -func recursivelyExpandSecret(expandedSecs map[string]string, interpolatedSecs map[string]string, crossSecRefFetch func(env string, path []string, key string) string, key string) string { - if v, ok := expandedSecs[key]; ok { - return v - } - - interpolatedVal, ok := interpolatedSecs[key] - if !ok { - HandleError(fmt.Errorf("could not find refered secret - %s", key), "Kindly check whether its provided") - } - - refs := secRefRegex.FindAllStringSubmatch(interpolatedVal, -1) - for _, val := range refs { - // key: "${something}" val: [${something},something] - interpolatedExp, interpolationKey := val[0], val[1] - ref := strings.Split(interpolationKey, ".") - - // ${KEY1} => [key1] - if len(ref) == 
1 { - val := recursivelyExpandSecret(expandedSecs, interpolatedSecs, crossSecRefFetch, interpolationKey) - interpolatedVal = strings.ReplaceAll(interpolatedVal, interpolatedExp, val) - continue - } - - // cross board reference ${env.folder.key1} => [env folder key1] - if len(ref) > 1 { - secEnv, tmpSecPath, secKey := ref[0], ref[1:len(ref)-1], ref[len(ref)-1] - interpolatedSecs[interpolationKey] = crossSecRefFetch(secEnv, tmpSecPath, secKey) // get the reference value - val := recursivelyExpandSecret(expandedSecs, interpolatedSecs, crossSecRefFetch, interpolationKey) - interpolatedVal = strings.ReplaceAll(interpolatedVal, interpolatedExp, val) - } - - } - expandedSecs[key] = interpolatedVal - return interpolatedVal -} - func getSecretsByKeys(secrets []models.SingleEnvironmentVariable) map[string]models.SingleEnvironmentVariable { secretMapByName := make(map[string]models.SingleEnvironmentVariable, len(secrets)) @@ -446,70 +341,6 @@ func getSecretsByKeys(secrets []models.SingleEnvironmentVariable) map[string]mod return secretMapByName } -func ExpandSecrets(secrets []models.SingleEnvironmentVariable, auth models.ExpandSecretsAuthentication, projectConfigPathDir string) []models.SingleEnvironmentVariable { - expandedSecs := make(map[string]string) - interpolatedSecs := make(map[string]string) - // map[env.secret-path][keyname]Secret - crossEnvRefSecs := make(map[string]map[string]models.SingleEnvironmentVariable) // a cache to hold all cross board reference secrets - - for _, sec := range secrets { - // get all references in a secret - refs := secRefRegex.FindAllStringSubmatch(sec.Value, -1) - // nil means its a secret without reference - if refs == nil { - expandedSecs[sec.Key] = sec.Value // atomic secrets without any interpolation - } else { - interpolatedSecs[sec.Key] = sec.Value - } - } - - for i, sec := range secrets { - // already present pick that up - if expandedVal, ok := expandedSecs[sec.Key]; ok { - secrets[i].Value = expandedVal - continue - } - - expandedVal := recursivelyExpandSecret(expandedSecs, interpolatedSecs, func(env string, secPaths []string, secKey string) string { - secPaths = append([]string{"/"}, secPaths...) - secPath := path.Join(secPaths...) 
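The block being deleted here is the CLI's client-side reference expansion: recursivelyExpandSecret resolved `${KEY}` and cross-environment `${env.folder.KEY}` references with secRefRegex, fetching other scopes on demand, and is superseded by the expandSecretReferences parameter now passed with the raw-secrets requests. For readers tracking what was removed, a compact sketch of the same-scope case only; note two simplifications of this sketch, not of the deleted code: unknown references expand to the empty string (the CLI errored instead), and the memo map doubles as a cycle guard (the deleted code assumed acyclic references and cached results in expandedSecs):

```go
package main

import (
	"fmt"
	"regexp"
	"strings"
)

// Matches ${...} occurrences, like the deleted secRefRegex.
var secRefRegex = regexp.MustCompile(`\$\{([^}]*)\}`)

// expand resolves same-environment ${KEY} references recursively.
// memo caches fully expanded values and breaks reference cycles.
func expand(secrets map[string]string, key string, memo map[string]string) string {
	if v, ok := memo[key]; ok {
		return v
	}
	memo[key] = "" // provisional entry: a cyclic reference resolves to ""
	val := secrets[key] // unknown keys yield "" here; the CLI errored instead
	for _, m := range secRefRegex.FindAllStringSubmatch(val, -1) {
		// m[0] is "${NAME}", m[1] is "NAME"
		val = strings.ReplaceAll(val, m[0], expand(secrets, m[1], memo))
	}
	memo[key] = val
	return val
}

func main() {
	secrets := map[string]string{
		"HOST":   "db.internal",
		"PORT":   "5432",
		"DB_URL": "postgres://${HOST}:${PORT}/app",
	}
	fmt.Println(expand(secrets, "DB_URL", map[string]string{}))
	// Output: postgres://db.internal:5432/app
}
```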
- - secPathDot := strings.Join(secPaths, ".") - uniqKey := fmt.Sprintf("%s.%s", env, secPathDot) - - if crossRefSec, ok := crossEnvRefSecs[uniqKey]; !ok { - - var refSecs []models.SingleEnvironmentVariable - var err error - - // if not in cross reference cache, fetch it from server - if auth.InfisicalToken != "" { - refSecs, err = GetAllEnvironmentVariables(models.GetAllSecretsParameters{Environment: env, InfisicalToken: auth.InfisicalToken, SecretsPath: secPath}, projectConfigPathDir) - } else if auth.UniversalAuthAccessToken != "" { - refSecs, err = GetAllEnvironmentVariables((models.GetAllSecretsParameters{Environment: env, UniversalAuthAccessToken: auth.UniversalAuthAccessToken, SecretsPath: secPath, WorkspaceId: sec.WorkspaceId}), projectConfigPathDir) - } else if IsLoggedIn() { - refSecs, err = GetAllEnvironmentVariables(models.GetAllSecretsParameters{Environment: env, SecretsPath: secPath}, projectConfigPathDir) - } else { - HandleError(errors.New("no authentication provided"), "Please provide authentication to fetch secrets") - } - if err != nil { - HandleError(err, fmt.Sprintf("Could not fetch secrets in environment: %s secret-path: %s", env, secPath), "If you are using a service token to fetch secrets, please ensure it is valid") - } - refSecsByKey := getSecretsByKeys(refSecs) - // save it to avoid calling api again for same environment and folder path - crossEnvRefSecs[uniqKey] = refSecsByKey - return refSecsByKey[secKey].Value - - } else { - return crossRefSec[secKey].Value - } - }, sec.Key) - - secrets[i].Value = expandedVal - } - return secrets -} - func OverrideSecrets(secrets []models.SingleEnvironmentVariable, secretType string) []models.SingleEnvironmentVariable { personalSecrets := make(map[string]models.SingleEnvironmentVariable) sharedSecrets := make(map[string]models.SingleEnvironmentVariable) @@ -553,89 +384,30 @@ func OverrideSecrets(secrets []models.SingleEnvironmentVariable, secretType stri return secretsToReturn } -func GetPlainTextSecrets(key []byte, encryptedSecrets []api.EncryptedSecretV3) ([]models.SingleEnvironmentVariable, error) { - plainTextSecrets := []models.SingleEnvironmentVariable{} - for _, secret := range encryptedSecrets { - // Decrypt key - key_iv, err := base64.StdEncoding.DecodeString(secret.SecretKeyIV) - if err != nil { - return nil, fmt.Errorf("unable to decode secret IV for secret key") +func GetBackupEncryptionKey() ([]byte, error) { + encryptionKey, err := GetValueInKeyring(INFISICAL_BACKUP_SECRET_ENCRYPTION_KEY) + if err != nil { + if err == keyring.ErrUnsupportedPlatform { + return nil, errors.New("your OS does not support keyring. 
Consider using a service token https://infisical.com/docs/documentation/platform/token") + } else if err == keyring.ErrNotFound { + // generate a new key + randomizedKey := make([]byte, 16) + rand.Read(randomizedKey) + encryptionKey = hex.EncodeToString(randomizedKey) + if err := SetValueInKeyring(INFISICAL_BACKUP_SECRET_ENCRYPTION_KEY, encryptionKey); err != nil { + return nil, err + } + return []byte(encryptionKey), nil + } else { + return nil, fmt.Errorf("something went wrong, failed to retrieve value from system keyring [error=%v]", err) } - - key_tag, err := base64.StdEncoding.DecodeString(secret.SecretKeyTag) - if err != nil { - return nil, fmt.Errorf("unable to decode secret authentication tag for secret key") - } - - key_ciphertext, err := base64.StdEncoding.DecodeString(secret.SecretKeyCiphertext) - if err != nil { - return nil, fmt.Errorf("unable to decode secret cipher text for secret key") - } - - plainTextKey, err := crypto.DecryptSymmetric(key, key_ciphertext, key_tag, key_iv) - if err != nil { - return nil, fmt.Errorf("unable to symmetrically decrypt secret key") - } - - // Decrypt value - value_iv, err := base64.StdEncoding.DecodeString(secret.SecretValueIV) - if err != nil { - return nil, fmt.Errorf("unable to decode secret IV for secret value") - } - - value_tag, err := base64.StdEncoding.DecodeString(secret.SecretValueTag) - if err != nil { - return nil, fmt.Errorf("unable to decode secret authentication tag for secret value") - } - - value_ciphertext, _ := base64.StdEncoding.DecodeString(secret.SecretValueCiphertext) - if err != nil { - return nil, fmt.Errorf("unable to decode secret cipher text for secret key") - } - - plainTextValue, err := crypto.DecryptSymmetric(key, value_ciphertext, value_tag, value_iv) - if err != nil { - return nil, fmt.Errorf("unable to symmetrically decrypt secret value") - } - - // Decrypt comment - comment_iv, err := base64.StdEncoding.DecodeString(secret.SecretCommentIV) - if err != nil { - return nil, fmt.Errorf("unable to decode secret IV for secret value") - } - - comment_tag, err := base64.StdEncoding.DecodeString(secret.SecretCommentTag) - if err != nil { - return nil, fmt.Errorf("unable to decode secret authentication tag for secret value") - } - - comment_ciphertext, _ := base64.StdEncoding.DecodeString(secret.SecretCommentCiphertext) - if err != nil { - return nil, fmt.Errorf("unable to decode secret cipher text for secret key") - } - - plainTextComment, err := crypto.DecryptSymmetric(key, comment_ciphertext, comment_tag, comment_iv) - if err != nil { - return nil, fmt.Errorf("unable to symmetrically decrypt secret comment") - } - - plainTextSecret := models.SingleEnvironmentVariable{ - Key: string(plainTextKey), - Value: string(plainTextValue), - Type: string(secret.Type), - ID: secret.ID, - Tags: secret.Tags, - Comment: string(plainTextComment), - } - - plainTextSecrets = append(plainTextSecrets, plainTextSecret) } - - return plainTextSecrets, nil + return []byte(encryptionKey), nil } -func WriteBackupSecrets(workspace string, environment string, encryptionKey []byte, secrets []models.SingleEnvironmentVariable) error { - fileName := fmt.Sprintf("secrets_%s_%s", workspace, environment) +func WriteBackupSecrets(workspace string, environment string, secretsPath string, encryptionKey []byte, secrets []models.SingleEnvironmentVariable) error { + formattedPath := strings.ReplaceAll(secretsPath, "/", "-") + fileName := fmt.Sprintf("project_secrets_%s_%s_%s.json", workspace, environment, formattedPath) secrets_backup_folder_name := 
"secrets-backup" _, fullConfigFileDirPath, err := GetFullConfigFilePath() @@ -651,19 +423,12 @@ func WriteBackupSecrets(workspace string, environment string, encryptionKey []by return err } } - - var encryptedSecrets []models.SymmetricEncryptionResult - for _, secret := range secrets { - marshaledSecrets, _ := json.Marshal(secret) - result, err := crypto.EncryptSymmetric(marshaledSecrets, encryptionKey) - if err != nil { - return err - } - - encryptedSecrets = append(encryptedSecrets, result) + marshaledSecrets, _ := json.Marshal(secrets) + result, err := crypto.EncryptSymmetric(marshaledSecrets, encryptionKey) + if err != nil { + return fmt.Errorf("WriteBackupSecrets: Unable to encrypt local secret backup to file [err=%s]", err) } - - listOfSecretsMarshalled, _ := json.Marshal(encryptedSecrets) + listOfSecretsMarshalled, _ := json.Marshal(result) err = os.WriteFile(fmt.Sprintf("%s/%s", fullPathToSecretsBackupFolder, fileName), listOfSecretsMarshalled, 0600) if err != nil { return fmt.Errorf("WriteBackupSecrets: Unable to write backup secrets to file [err=%s]", err) @@ -672,8 +437,9 @@ func WriteBackupSecrets(workspace string, environment string, encryptionKey []by return nil } -func ReadBackupSecrets(workspace string, environment string, encryptionKey []byte) ([]models.SingleEnvironmentVariable, error) { - fileName := fmt.Sprintf("secrets_%s_%s", workspace, environment) +func ReadBackupSecrets(workspace string, environment string, secretsPath string, encryptionKey []byte) ([]models.SingleEnvironmentVariable, error) { + formattedPath := strings.ReplaceAll(secretsPath, "/", "-") + fileName := fmt.Sprintf("project_secrets_%s_%s_%s.json", workspace, environment, formattedPath) secrets_backup_folder_name := "secrets-backup" _, fullConfigFileDirPath, err := GetFullConfigFilePath() @@ -693,27 +459,19 @@ func ReadBackupSecrets(workspace string, environment string, encryptionKey []byt return nil, err } - var listOfEncryptedBackupSecrets []models.SymmetricEncryptionResult - - _ = json.Unmarshal(encryptedBackupSecretsAsBytes, &listOfEncryptedBackupSecrets) - - var plainTextSecrets []models.SingleEnvironmentVariable - for _, encryptedSecret := range listOfEncryptedBackupSecrets { - result, err := crypto.DecryptSymmetric(encryptionKey, encryptedSecret.CipherText, encryptedSecret.AuthTag, encryptedSecret.Nonce) - if err != nil { - return nil, err - } - - var plainTextSecret models.SingleEnvironmentVariable - - err = json.Unmarshal(result, &plainTextSecret) - if err != nil { - return nil, err - } - - plainTextSecrets = append(plainTextSecrets, plainTextSecret) + var encryptedBackUpSecrets models.SymmetricEncryptionResult + err = json.Unmarshal(encryptedBackupSecretsAsBytes, &encryptedBackUpSecrets) + if err != nil { + return nil, fmt.Errorf("ReadBackupSecrets: unable to parse encrypted backup secrets. 
The secrets backup may be malformed [err=%s]", err) } + result, err := crypto.DecryptSymmetric(encryptionKey, encryptedBackUpSecrets.CipherText, encryptedBackUpSecrets.AuthTag, encryptedBackUpSecrets.Nonce) + if err != nil { + return nil, fmt.Errorf("ReadBackupSecrets: unable to decrypt encrypted backup secrets [err=%s]", err) + } + var plainTextSecrets []models.SingleEnvironmentVariable + _ = json.Unmarshal(result, &plainTextSecrets) + return plainTextSecrets, nil } @@ -727,6 +485,8 @@ func DeleteBackupSecrets() error { } fullPathToSecretsBackupFolder := fmt.Sprintf("%s/%s", fullConfigFileDirPath, secrets_backup_folder_name) + DeleteValueInKeyring(INFISICAL_BACKUP_SECRET) + DeleteValueInKeyring(INFISICAL_BACKUP_SECRET_ENCRYPTION_KEY) return os.RemoveAll(fullPathToSecretsBackupFolder) } @@ -803,3 +563,144 @@ func GetPlainTextWorkspaceKey(authenticationToken string, receiverPrivateKey str return crypto.DecryptAsymmetric(encryptedWorkspaceKey, encryptedWorkspaceKeyNonce, encryptedWorkspaceKeySenderPublicKey, currentUsersPrivateKey), nil } + +func SetRawSecrets(secretArgs []string, secretType string, environmentName string, secretsPath string, projectId string, tokenDetails *models.TokenDetails) ([]models.SecretSetOperation, error) { + + if tokenDetails == nil { + return nil, fmt.Errorf("unable to process set secret operations, token details are missing") + } + + getAllEnvironmentVariablesRequest := models.GetAllSecretsParameters{Environment: environmentName, SecretsPath: secretsPath, WorkspaceId: projectId} + if tokenDetails.Type == UNIVERSAL_AUTH_TOKEN_IDENTIFIER { + getAllEnvironmentVariablesRequest.UniversalAuthAccessToken = tokenDetails.Token + } + + if tokenDetails.Type == SERVICE_TOKEN_IDENTIFIER { + getAllEnvironmentVariablesRequest.InfisicalToken = tokenDetails.Token + } + + httpClient := resty.New(). + SetAuthToken(tokenDetails.Token). + SetHeader("Accept", "application/json") + + // pull current secrets + secrets, err := GetAllEnvironmentVariables(getAllEnvironmentVariablesRequest, "") + if err != nil { + return nil, fmt.Errorf("unable to retrieve secrets [err=%v]", err) + } + + secretsToCreate := []api.RawSecret{} + secretsToModify := []api.RawSecret{} + secretOperations := []models.SecretSetOperation{} + + sharedSecretMapByName := make(map[string]models.SingleEnvironmentVariable, len(secrets)) + personalSecretMapByName := make(map[string]models.SingleEnvironmentVariable, len(secrets)) + + for _, secret := range secrets { + if secret.Type == SECRET_TYPE_PERSONAL { + personalSecretMapByName[secret.Key] = secret + } else { + sharedSecretMapByName[secret.Key] = secret + } + } + + for _, arg := range secretArgs { + splitKeyValueFromArg := strings.SplitN(arg, "=", 2) + if splitKeyValueFromArg[0] == "" || splitKeyValueFromArg[1] == "" { + PrintErrorMessageAndExit("ensure that each secret has a non-empty key and value. Modify the input and try again") + } + + if unicode.IsNumber(rune(splitKeyValueFromArg[0][0])) { + PrintErrorMessageAndExit("keys of secrets cannot start with a number. 
Modify the key name(s) and try again") + } + + // Key and value from argument + key := splitKeyValueFromArg[0] + value := splitKeyValueFromArg[1] + + var existingSecret models.SingleEnvironmentVariable + var doesSecretExist bool + + if secretType == SECRET_TYPE_SHARED { + existingSecret, doesSecretExist = sharedSecretMapByName[key] + } else { + existingSecret, doesSecretExist = personalSecretMapByName[key] + } + + if doesSecretExist { + // case: secret exists in project so it needs to be modified + encryptedSecretDetails := api.RawSecret{ + ID: existingSecret.ID, + SecretValue: value, + SecretKey: key, + Type: existingSecret.Type, + } + + // Only add to modifications if the value is different + if existingSecret.Value != value { + secretsToModify = append(secretsToModify, encryptedSecretDetails) + secretOperations = append(secretOperations, models.SecretSetOperation{ + SecretKey: key, + SecretValue: value, + SecretOperation: "SECRET VALUE MODIFIED", + }) + } else { + // Current value is same as existing so no change + secretOperations = append(secretOperations, models.SecretSetOperation{ + SecretKey: key, + SecretValue: value, + SecretOperation: "SECRET VALUE UNCHANGED", + }) + } + + } else { + // case: secret doesn't exist in project so it needs to be created + encryptedSecretDetails := api.RawSecret{ + SecretKey: key, + SecretValue: value, + Type: secretType, + } + secretsToCreate = append(secretsToCreate, encryptedSecretDetails) + secretOperations = append(secretOperations, models.SecretSetOperation{ + SecretKey: key, + SecretValue: value, + SecretOperation: "SECRET CREATED", + }) + } + } + + for _, secret := range secretsToCreate { + createSecretRequest := api.CreateRawSecretV3Request{ + SecretName: secret.SecretKey, + SecretValue: secret.SecretValue, + Type: secret.Type, + SecretPath: secretsPath, + WorkspaceID: projectId, + Environment: environmentName, + } + + err = api.CallCreateRawSecretsV3(httpClient, createSecretRequest) + if err != nil { + return nil, fmt.Errorf("unable to process new secret creations [err=%v]", err) + } + } + + for _, secret := range secretsToModify { + updateSecretRequest := api.UpdateRawSecretByNameV3Request{ + SecretName: secret.SecretKey, + SecretValue: secret.SecretValue, + SecretPath: secretsPath, + WorkspaceID: projectId, + Environment: environmentName, + Type: secret.Type, + } + + err = api.CallUpdateRawSecretsV3(httpClient, updateSecretRequest) + if err != nil { + return nil, fmt.Errorf("unable to process secret update request [err=%v]", err) + } + } + + return secretOperations, nil + +} diff --git a/cli/packages/util/vault.go b/cli/packages/util/vault.go index 14d6d10d9f..5907d93fc8 100644 --- a/cli/packages/util/vault.go +++ b/cli/packages/util/vault.go @@ -11,11 +11,11 @@ func GetCurrentVaultBackend() (string, error) { } if configFile.VaultBackendType == "" { - return "auto", nil + return VAULT_BACKEND_AUTO_MODE, nil } - if configFile.VaultBackendType != "auto" && configFile.VaultBackendType != "file" { - return "auto", nil + if configFile.VaultBackendType != VAULT_BACKEND_AUTO_MODE && configFile.VaultBackendType != VAULT_BACKEND_FILE_MODE { + return VAULT_BACKEND_AUTO_MODE, nil } return configFile.VaultBackendType, nil diff --git a/cli/scripts/export_test_env.sh b/cli/scripts/export_test_env.sh new file mode 100644 index 0000000000..0b242281d9 --- /dev/null +++ b/cli/scripts/export_test_env.sh @@ -0,0 +1,23 @@ +#!/bin/bash + +TEST_ENV_FILE=".test.env" + +# Check if the .env file exists +if [ ! 
-f "$TEST_ENV_FILE" ]; then + echo "$TEST_ENV_FILE does not exist." + exit 1 +fi + +# Export the variables +while IFS= read -r line +do + # Skip empty lines and lines starting with # + if [[ -z "$line" || "$line" =~ ^\# ]]; then + continue + fi + # Read the key-value pair + IFS='=' read -r key value <<< "$line" + eval export $key=\$value +done < "$TEST_ENV_FILE" + +echo "Test environment variables set." diff --git a/cli/secret-render-template b/cli/secret-render-template index 32ab2331a5..41489a0741 100644 --- a/cli/secret-render-template +++ b/cli/secret-render-template @@ -1,5 +1,5 @@ -{{- with secret "6553ccb2b7da580d7f6e7260" "dev" "/" }} +{{- with secret "8fac9f01-4a81-44d7-8ff0-3d7be684f56f" "staging" "/" `{"recursive":true, "expandSecretReferences": false}` }} {{- range . }} {{ .Key }}={{ .Value }} {{- end }} -{{- end }} \ No newline at end of file +{{- end }} diff --git a/cli/test/.snapshots/test-TestUniversalAuth_SecretsGetWrongEnvironment b/cli/test/.snapshots/test-TestUniversalAuth_SecretsGetWrongEnvironment index c3811bd22a..ff7925d757 100644 --- a/cli/test/.snapshots/test-TestUniversalAuth_SecretsGetWrongEnvironment +++ b/cli/test/.snapshots/test-TestUniversalAuth_SecretsGetWrongEnvironment @@ -1,4 +1,4 @@ -error: CallGetRawSecretsV3: Unsuccessful response [GET https://app.infisical.com/api/v3/secrets/raw?environment=invalid-env&include_imports=true&recursive=true&secretPath=%2F&workspaceId=bef697d4-849b-4a75-b284-0922f87f8ba2] [status-code=500] [response={"statusCode":500,"error":"Internal Server Error","message":"'invalid-env' environment not found in project with ID bef697d4-849b-4a75-b284-0922f87f8ba2"}] +error: CallGetRawSecretsV3: Unsuccessful response [GET https://app.infisical.com/api/v3/secrets/raw?environment=invalid-env&expandSecretReferences=true&include_imports=true&recursive=true&secretPath=%2F&workspaceId=bef697d4-849b-4a75-b284-0922f87f8ba2] [status-code=404] [response={"statusCode":404,"message":"Environment with slug 'invalid-env' in project with ID bef697d4-849b-4a75-b284-0922f87f8ba2 not found","error":"NotFound"}] If this issue continues, get support at https://infisical.com/slack diff --git a/cli/test/.snapshots/test-TestUserAuth_SecretsGetAll b/cli/test/.snapshots/test-TestUserAuth_SecretsGetAll new file mode 100644 index 0000000000..260607e976 --- /dev/null +++ b/cli/test/.snapshots/test-TestUserAuth_SecretsGetAll @@ -0,0 +1,7 @@ +┌───────────────┬──────────────┬─────────────┐ +│ SECRET NAME │ SECRET VALUE │ SECRET TYPE │ +├───────────────┼──────────────┼─────────────┤ +│ TEST-SECRET-1 │ test-value-1 │ shared │ +│ TEST-SECRET-2 │ test-value-2 │ shared │ +│ TEST-SECRET-3 │ test-value-3 │ shared │ +└───────────────┴──────────────┴─────────────┘ diff --git a/cli/test/.snapshots/test-testUserAuth_SecretsGetAllWithoutConnection b/cli/test/.snapshots/test-testUserAuth_SecretsGetAllWithoutConnection new file mode 100644 index 0000000000..71a189a659 --- /dev/null +++ b/cli/test/.snapshots/test-testUserAuth_SecretsGetAllWithoutConnection @@ -0,0 +1,8 @@ +Warning: Unable to fetch the latest secret(s) due to connection error, serving secrets from last successful fetch. 
For more info, run with --debug +┌───────────────┬──────────────┬─────────────┐ +│ SECRET NAME │ SECRET VALUE │ SECRET TYPE │ +├───────────────┼──────────────┼─────────────┤ +│ TEST-SECRET-1 │ test-value-1 │ shared │ +│ TEST-SECRET-2 │ test-value-2 │ shared │ +│ TEST-SECRET-3 │ test-value-3 │ shared │ +└───────────────┴──────────────┴─────────────┘ diff --git a/cli/test/export_test.go b/cli/test/export_test.go index 9a936871dc..c44bf20af0 100644 --- a/cli/test/export_test.go +++ b/cli/test/export_test.go @@ -8,7 +8,6 @@ import ( func TestUniversalAuth_ExportSecretsWithImports(t *testing.T) { MachineIdentityLoginCmd(t) - SetupCli(t) output, err := ExecuteCliCommand(FORMATTED_CLI_NAME, "export", "--token", creds.UAAccessToken, "--projectId", creds.ProjectID, "--env", creds.EnvSlug, "--silent") @@ -24,8 +23,6 @@ func TestUniversalAuth_ExportSecretsWithImports(t *testing.T) { } func TestServiceToken_ExportSecretsWithImports(t *testing.T) { - SetupCli(t) - output, err := ExecuteCliCommand(FORMATTED_CLI_NAME, "export", "--token", creds.ServiceToken, "--projectId", creds.ProjectID, "--env", creds.EnvSlug, "--silent") if err != nil { @@ -41,8 +38,6 @@ func TestServiceToken_ExportSecretsWithImports(t *testing.T) { func TestUniversalAuth_ExportSecretsWithoutImports(t *testing.T) { MachineIdentityLoginCmd(t) - SetupCli(t) - output, err := ExecuteCliCommand(FORMATTED_CLI_NAME, "export", "--token", creds.UAAccessToken, "--projectId", creds.ProjectID, "--env", creds.EnvSlug, "--silent", "--include-imports=false") if err != nil { @@ -57,8 +52,6 @@ func TestUniversalAuth_ExportSecretsWithoutImports(t *testing.T) { } func TestServiceToken_ExportSecretsWithoutImports(t *testing.T) { - SetupCli(t) - output, err := ExecuteCliCommand(FORMATTED_CLI_NAME, "export", "--token", creds.ServiceToken, "--projectId", creds.ProjectID, "--env", creds.EnvSlug, "--silent", "--include-imports=false") if err != nil { diff --git a/cli/test/helper.go b/cli/test/helper.go index 995367c4b1..819f4c4c9d 100644 --- a/cli/test/helper.go +++ b/cli/test/helper.go @@ -2,10 +2,10 @@ package tests import ( "fmt" + "log" "os" "os/exec" "strings" - "testing" ) const ( @@ -23,6 +23,8 @@ type Credentials struct { ServiceToken string ProjectID string EnvSlug string + UserEmail string + UserPassword string } var creds = Credentials{ @@ -32,18 +34,21 @@ var creds = Credentials{ ServiceToken: os.Getenv("CLI_TESTS_SERVICE_TOKEN"), ProjectID: os.Getenv("CLI_TESTS_PROJECT_ID"), EnvSlug: os.Getenv("CLI_TESTS_ENV_SLUG"), + UserEmail: os.Getenv("CLI_TESTS_USER_EMAIL"), + UserPassword: os.Getenv("CLI_TESTS_USER_PASSWORD"), } func ExecuteCliCommand(command string, args ...string) (string, error) { cmd := exec.Command(command, args...) 
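	// CombinedOutput runs the CLI and captures stdout and stderr together, so the snapshot tests can assert on exactly what a user would see in the terminal.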
output, err := cmd.CombinedOutput() if err != nil { + fmt.Println(fmt.Sprint(err) + ": " + string(output)) return strings.TrimSpace(string(output)), err } return strings.TrimSpace(string(output)), nil } -func SetupCli(t *testing.T) { +func SetupCli() { if creds.ClientID == "" || creds.ClientSecret == "" || creds.ServiceToken == "" || creds.ProjectID == "" || creds.EnvSlug == "" { panic("Missing required environment variables") @@ -57,7 +62,7 @@ func SetupCli(t *testing.T) { if !alreadyBuilt { if err := exec.Command("go", "build", "../.").Run(); err != nil { - t.Fatal(err) + log.Fatal(err) } } diff --git a/cli/test/login_test.go b/cli/test/login_test.go index 0f45914132..71273a3ec7 100644 --- a/cli/test/login_test.go +++ b/cli/test/login_test.go @@ -1,14 +1,124 @@ package tests import ( + "log" + "os/exec" + "strings" "testing" + "github.com/creack/pty" "github.com/stretchr/testify/assert" ) -func MachineIdentityLoginCmd(t *testing.T) { - SetupCli(t) +func UserInitCmd() { + c := exec.Command(FORMATTED_CLI_NAME, "init") + ptmx, err := pty.Start(c) + if err != nil { + log.Fatalf("error running CLI command: %v", err) + } + defer func() { _ = ptmx.Close() }() + stepChan := make(chan int, 10) + + go func() { + buf := make([]byte, 1024) + step := -1 + for { + n, err := ptmx.Read(buf) + if n > 0 { + terminalOut := string(buf) + if strings.Contains(terminalOut, "Which Infisical organization would you like to select a project from?") && step < 0 { + step += 1 + stepChan <- step + } else if strings.Contains(terminalOut, "Which of your Infisical projects would you like to connect this project to?") && step < 1 { + step += 1; + stepChan <- step + } + } + if err != nil { + close(stepChan) + return + } + } + }() + + for i := range stepChan { + switch i { + case 0: + ptmx.Write([]byte("\n")) + case 1: + ptmx.Write([]byte("\n")) + } + } +} + +func UserLoginCmd() { + // set vault to file because CI has no keyring + vaultCmd := exec.Command(FORMATTED_CLI_NAME, "vault", "set", "file") + _, err := vaultCmd.Output() + if err != nil { + log.Fatalf("error setting vault: %v", err) + } + + // Start programmatic interaction with CLI + c := exec.Command(FORMATTED_CLI_NAME, "login", "--interactive") + ptmx, err := pty.Start(c) + if err != nil { + log.Fatalf("error running CLI command: %v", err) + } + defer func() { _ = ptmx.Close() }() + + stepChan := make(chan int, 10) + + go func() { + buf := make([]byte, 1024) + step := -1 + for { + n, err := ptmx.Read(buf) + if n > 0 { + terminalOut := string(buf) + if strings.Contains(terminalOut, "Infisical Cloud") && step < 0 { + step += 1; + stepChan <- step + } else if strings.Contains(terminalOut, "Email") && step < 1 { + step += 1; + stepChan <- step + } else if strings.Contains(terminalOut, "Password") && step < 2 { + step += 1; + stepChan <- step + } else if strings.Contains(terminalOut, "Infisical organization") && step < 3 { + step += 1; + stepChan <- step + } else if strings.Contains(terminalOut, "Enter passphrase") && step < 4 { + step += 1; + stepChan <- step + } + } + if err != nil { + close(stepChan) + return + } + } + }() + + for i := range stepChan { + switch i { + case 0: + ptmx.Write([]byte("\n")) + case 1: + ptmx.Write([]byte(creds.UserEmail)) + ptmx.Write([]byte("\n")) + case 2: + ptmx.Write([]byte(creds.UserPassword)) + ptmx.Write([]byte("\n")) + case 3: + ptmx.Write([]byte("\n")) + } + } + +} + +func MachineIdentityLoginCmd(t *testing.T) { if creds.UAAccessToken != "" { return } diff --git a/cli/test/main_test.go b/cli/test/main_test.go new file mode 100644 
index 0000000000..e14893aec0 --- /dev/null +++ b/cli/test/main_test.go @@ -0,0 +1,23 @@ +package tests + +import ( + "fmt" + "os" + "testing" +) + +func TestMain(m *testing.M) { + // Setup + fmt.Println("Setting up CLI...") + SetupCli() + fmt.Println("Performing user login...") + UserLoginCmd() + fmt.Println("Performing infisical init...") + UserInitCmd() + + // Run the tests + code := m.Run() + + // Exit + os.Exit(code) +} diff --git a/cli/test/run_test.go b/cli/test/run_test.go index 808f4f14ff..d2c6021cc2 100644 --- a/cli/test/run_test.go +++ b/cli/test/run_test.go @@ -8,8 +8,6 @@ import ( ) func TestServiceToken_RunCmdRecursiveAndImports(t *testing.T) { - SetupCli(t) - output, err := ExecuteCliCommand(FORMATTED_CLI_NAME, "run", "--token", creds.ServiceToken, "--projectId", creds.ProjectID, "--env", creds.EnvSlug, "--recursive", "--silent", "--", "echo", "hello world") if err != nil { @@ -25,8 +23,6 @@ func TestServiceToken_RunCmdRecursiveAndImports(t *testing.T) { } } func TestServiceToken_RunCmdWithImports(t *testing.T) { - SetupCli(t) - output, err := ExecuteCliCommand(FORMATTED_CLI_NAME, "run", "--token", creds.ServiceToken, "--projectId", creds.ProjectID, "--env", creds.EnvSlug, "--silent", "--", "echo", "hello world") if err != nil { @@ -44,8 +40,6 @@ func TestServiceToken_RunCmdWithImports(t *testing.T) { func TestUniversalAuth_RunCmdRecursiveAndImports(t *testing.T) { MachineIdentityLoginCmd(t) - SetupCli(t) - output, err := ExecuteCliCommand(FORMATTED_CLI_NAME, "run", "--token", creds.UAAccessToken, "--projectId", creds.ProjectID, "--env", creds.EnvSlug, "--recursive", "--silent", "--", "echo", "hello world") if err != nil { @@ -63,8 +57,6 @@ func TestUniversalAuth_RunCmdRecursiveAndImports(t *testing.T) { func TestUniversalAuth_RunCmdWithImports(t *testing.T) { MachineIdentityLoginCmd(t) - SetupCli(t) - output, err := ExecuteCliCommand(FORMATTED_CLI_NAME, "run", "--token", creds.UAAccessToken, "--projectId", creds.ProjectID, "--env", creds.EnvSlug, "--silent", "--", "echo", "hello world") if err != nil { @@ -83,8 +75,6 @@ func TestUniversalAuth_RunCmdWithImports(t *testing.T) { func TestUniversalAuth_RunCmdWithoutImports(t *testing.T) { MachineIdentityLoginCmd(t) - SetupCli(t) - output, err := ExecuteCliCommand(FORMATTED_CLI_NAME, "run", "--token", creds.UAAccessToken, "--projectId", creds.ProjectID, "--env", creds.EnvSlug, "--silent", "--include-imports=false", "--", "echo", "hello world") if err != nil { @@ -101,8 +91,6 @@ func TestUniversalAuth_RunCmdWithoutImports(t *testing.T) { } func TestServiceToken_RunCmdWithoutImports(t *testing.T) { - SetupCli(t) - output, err := ExecuteCliCommand(FORMATTED_CLI_NAME, "run", "--token", creds.ServiceToken, "--projectId", creds.ProjectID, "--env", creds.EnvSlug, "--silent", "--include-imports=false", "--", "echo", "hello world") if err != nil { diff --git a/cli/test/secrets_by_name_test.go b/cli/test/secrets_by_name_test.go index 440324e1ac..26a8314bb4 100644 --- a/cli/test/secrets_by_name_test.go +++ b/cli/test/secrets_by_name_test.go @@ -7,8 +7,6 @@ import ( ) func TestServiceToken_GetSecretsByNameRecursive(t *testing.T) { - SetupCli(t) - output, err := ExecuteCliCommand(FORMATTED_CLI_NAME, "secrets", "get", "TEST-SECRET-1", "TEST-SECRET-2", "FOLDER-SECRET-1", "--token", creds.ServiceToken, "--projectId", creds.ProjectID, "--env", creds.EnvSlug, "--recursive", "--silent") if err != nil { @@ -23,8 +21,6 @@ func TestServiceToken_GetSecretsByNameRecursive(t *testing.T) { } func TestServiceToken_GetSecretsByNameWithNotFoundSecret(t 
*testing.T) { - SetupCli(t) - output, err := ExecuteCliCommand(FORMATTED_CLI_NAME, "secrets", "get", "TEST-SECRET-1", "TEST-SECRET-2", "FOLDER-SECRET-1", "DOES-NOT-EXIST", "--token", creds.ServiceToken, "--projectId", creds.ProjectID, "--env", creds.EnvSlug, "--recursive", "--silent") if err != nil { @@ -39,8 +35,6 @@ func TestServiceToken_GetSecretsByNameWithNotFoundSecret(t *testing.T) { } func TestServiceToken_GetSecretsByNameWithImports(t *testing.T) { - SetupCli(t) - output, err := ExecuteCliCommand(FORMATTED_CLI_NAME, "secrets", "get", "TEST-SECRET-1", "STAGING-SECRET-2", "FOLDER-SECRET-1", "--token", creds.ServiceToken, "--projectId", creds.ProjectID, "--env", creds.EnvSlug, "--recursive", "--silent") if err != nil { @@ -56,8 +50,6 @@ func TestServiceToken_GetSecretsByNameWithImports(t *testing.T) { func TestUniversalAuth_GetSecretsByNameRecursive(t *testing.T) { MachineIdentityLoginCmd(t) - SetupCli(t) - output, err := ExecuteCliCommand(FORMATTED_CLI_NAME, "secrets", "get", "TEST-SECRET-1", "TEST-SECRET-2", "FOLDER-SECRET-1", "--token", creds.UAAccessToken, "--projectId", creds.ProjectID, "--env", creds.EnvSlug, "--recursive", "--silent") if err != nil { @@ -73,8 +65,6 @@ func TestUniversalAuth_GetSecretsByNameRecursive(t *testing.T) { func TestUniversalAuth_GetSecretsByNameWithNotFoundSecret(t *testing.T) { MachineIdentityLoginCmd(t) - SetupCli(t) - output, err := ExecuteCliCommand(FORMATTED_CLI_NAME, "secrets", "get", "TEST-SECRET-1", "TEST-SECRET-2", "FOLDER-SECRET-1", "DOES-NOT-EXIST", "--token", creds.UAAccessToken, "--projectId", creds.ProjectID, "--env", creds.EnvSlug, "--recursive", "--silent") if err != nil { @@ -90,8 +80,6 @@ func TestUniversalAuth_GetSecretsByNameWithNotFoundSecret(t *testing.T) { func TestUniversalAuth_GetSecretsByNameWithImports(t *testing.T) { MachineIdentityLoginCmd(t) - SetupCli(t) - output, err := ExecuteCliCommand(FORMATTED_CLI_NAME, "secrets", "get", "TEST-SECRET-1", "STAGING-SECRET-2", "FOLDER-SECRET-1", "--token", creds.UAAccessToken, "--projectId", creds.ProjectID, "--env", creds.EnvSlug, "--recursive", "--silent") if err != nil { diff --git a/cli/test/secrets_test.go b/cli/test/secrets_test.go index 453666406d..f11392f52e 100644 --- a/cli/test/secrets_test.go +++ b/cli/test/secrets_test.go @@ -3,12 +3,11 @@ package tests import ( "testing" + "github.com/Infisical/infisical-merge/packages/util" "github.com/bradleyjkemp/cupaloy/v2" ) func TestServiceToken_SecretsGetWithImportsAndRecursiveCmd(t *testing.T) { - SetupCli(t) - output, err := ExecuteCliCommand(FORMATTED_CLI_NAME, "secrets", "--token", creds.ServiceToken, "--projectId", creds.ProjectID, "--env", creds.EnvSlug, "--recursive", "--silent") if err != nil { @@ -23,8 +22,6 @@ func TestServiceToken_SecretsGetWithImportsAndRecursiveCmd(t *testing.T) { } func TestServiceToken_SecretsGetWithoutImportsAndWithoutRecursiveCmd(t *testing.T) { - SetupCli(t) - output, err := ExecuteCliCommand(FORMATTED_CLI_NAME, "secrets", "--token", creds.ServiceToken, "--projectId", creds.ProjectID, "--env", creds.EnvSlug, "--include-imports=false", "--silent") if err != nil { @@ -39,7 +36,6 @@ func TestServiceToken_SecretsGetWithoutImportsAndWithoutRecursiveCmd(t *testing. 
} func TestUniversalAuth_SecretsGetWithImportsAndRecursiveCmd(t *testing.T) { - SetupCli(t) MachineIdentityLoginCmd(t) output, err := ExecuteCliCommand(FORMATTED_CLI_NAME, "secrets", "--token", creds.UAAccessToken, "--projectId", creds.ProjectID, "--env", creds.EnvSlug, "--recursive", "--silent") @@ -56,7 +52,6 @@ func TestUniversalAuth_SecretsGetWithImportsAndRecursiveCmd(t *testing.T) { } func TestUniversalAuth_SecretsGetWithoutImportsAndWithoutRecursiveCmd(t *testing.T) { - SetupCli(t) MachineIdentityLoginCmd(t) output, err := ExecuteCliCommand(FORMATTED_CLI_NAME, "secrets", "--token", creds.UAAccessToken, "--projectId", creds.ProjectID, "--env", creds.EnvSlug, "--include-imports=false", "--silent") @@ -73,7 +68,6 @@ func TestUniversalAuth_SecretsGetWithoutImportsAndWithoutRecursiveCmd(t *testing } func TestUniversalAuth_SecretsGetWrongEnvironment(t *testing.T) { - SetupCli(t) MachineIdentityLoginCmd(t) output, _ := ExecuteCliCommand(FORMATTED_CLI_NAME, "secrets", "--token", creds.UAAccessToken, "--projectId", creds.ProjectID, "--env", "invalid-env", "--recursive", "--silent") @@ -85,3 +79,45 @@ func TestUniversalAuth_SecretsGetWrongEnvironment(t *testing.T) { } + +func TestUserAuth_SecretsGetAll(t *testing.T) { + output, err := ExecuteCliCommand(FORMATTED_CLI_NAME, "secrets", "--projectId", creds.ProjectID, "--env", creds.EnvSlug, "--include-imports=false", "--silent") + if err != nil { + t.Fatalf("error running CLI command: %v", err) + } + + // Use cupaloy to snapshot test the output + err = cupaloy.Snapshot(output) + if err != nil { + t.Fatalf("snapshot failed: %v", err) + } + + // intended to run directly after a successful TestUserAuth_SecretsGetAll; currently disabled + // testUserAuth_SecretsGetAllWithoutConnection(t) +} + +func testUserAuth_SecretsGetAllWithoutConnection(t *testing.T) { + originalConfigFile, err := util.GetConfigFile() + if err != nil { + t.Fatalf("error getting config file: %v", err) + } + newConfigFile := originalConfigFile + + // set it to a URL that will always be unreachable + newConfigFile.LoggedInUserDomain = "http://localhost:4999" + util.WriteConfigFile(&newConfigFile) + + // restore config file + defer util.WriteConfigFile(&originalConfigFile) + + output, err := ExecuteCliCommand(FORMATTED_CLI_NAME, "secrets", "--projectId", creds.ProjectID, "--env", creds.EnvSlug, "--include-imports=false", "--silent") + if err != nil { + t.Fatalf("error running CLI command: %v", err) + } + + // Use cupaloy to snapshot test the output + err = cupaloy.Snapshot(output) + if err != nil { + t.Fatalf("snapshot failed: %v", err) + } +} diff --git a/company/documentation/getting-started/introduction.mdx b/company/documentation/getting-started/introduction.mdx index 0f414c62a9..55b4831949 100644 --- a/company/documentation/getting-started/introduction.mdx +++ b/company/documentation/getting-started/introduction.mdx @@ -4,59 +4,63 @@ sidebarTitle: "What is Infisical?" description: "An Introduction to the Infisical secret management platform." --- -Infisical is an [open-source](https://github.com/infisical/infisical) secret management platform for developers. -It provides capabilities for storing, managing, and syncing application configuration and secrets like API keys, database -credentials, and certificates across infrastructure. In addition, Infisical prevents secrets leaks to git and enables secure -sharing of secrets among engineers. 
+**[Infisical](https://infisical.com)** is the open source secret management platform that developers use to centralize their application configuration and secrets like API keys and database credentials as well as manage their internal PKI. In addition, developers use Infisical to prevent secrets leaks to git and securely share secrets amongst engineers. Start managing secrets securely with [Infisical Cloud](https://app.infisical.com) or learn how to [host Infisical](/self-hosting/overview) yourself. - - Get started with Infisical Cloud in just a few minutes. - - - Self-host Infisical on your own infrastructure. - + + Get started with Infisical Cloud in just a few minutes. + + + Self-host Infisical on your own infrastructure. + -## Why Infisical? +## Why Infisical? + +Infisical helps developers achieve secure centralized secret management and provides all the tools to easily manage secrets in various environments and infrastructure components. In particular, here are some of the most common points that developers mention after adopting Infisical: -Infisical helps developers achieve secure centralized secret management and provides all the tools to easily manage secrets in various environments and infrastructure components. In particular, here are some of the most common points that developers mention after adopting Infisical: - Streamlined **local development** processes (switching .env files to [Infisical CLI](/cli/commands/run) and removing secrets from developer machines). -- **Best-in-class developer experience** with an easy-to-use [Web Dashboard](/documentation/platform/project). -- Simple secret management inside **[CI/CD pipelines](/integrations/cicd/githubactions)** and staging environments. -- Secure and compliant secret management practices in **[production environments](/sdks/overview)**. +- **Best-in-class developer experience** with an easy-to-use [Web Dashboard](/documentation/platform/project). +- Simple secret management inside **[CI/CD pipelines](/integrations/cicd/githubactions)** and staging environments. +- Secure and compliant secret management practices in **[production environments](/sdks/overview)**. - **Facilitated workflows** around [secret change management](/documentation/platform/pr-workflows), [access requests](/documentation/platform/access-controls/access-requests), [temporary access provisioning](/documentation/platform/access-controls/temporary-access), and more. - **Improved security posture** thanks to [secret scanning](/cli/scanning-overview), [granular access control policies](/documentation/platform/access-controls/overview), [automated secret rotation](https://infisical.com/docs/documentation/platform/secret-rotation/overview), and [dynamic secrets](/documentation/platform/dynamic-secrets/overview) capabilities. -## How does Infisical work? +## How does Infisical work? -To make secret management effortless and secure, Infisical follows a certain structure for enabling secret management workflows as defined below. +To make secret management effortless and secure, Infisical follows a certain structure for enabling secret management workflows as defined below. -**Identities** in Infisical are users or machine which have a certain set of roles and permissions assigned to them. Such identities are able to manage secrets in various **Clients** throughout the entire infrastructure. To do that, identities have to verify themselves through one of the available **Authentication Methods**. 
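To make this model concrete, here is a minimal sketch of one common combination: a machine identity (an Identity) authenticating with Universal Auth (an Authentication Method) and then reading secrets over the REST API (a Client). The `/api/v3/secrets/raw` route matches the one exercised by the CLI snapshots in this diff; the placeholder credentials and the exact response shape are assumptions:

```go
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"
)

func main() {
	// Exchange the identity's client credentials for a short-lived access token.
	loginBody, _ := json.Marshal(map[string]string{
		"clientId":     "<machine-identity-client-id>",
		"clientSecret": "<machine-identity-client-secret>",
	})
	resp, err := http.Post("https://app.infisical.com/api/v1/auth/universal-auth/login",
		"application/json", bytes.NewReader(loginBody))
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	var login struct {
		AccessToken string `json:"accessToken"`
	}
	if err := json.NewDecoder(resp.Body).Decode(&login); err != nil {
		panic(err)
	}

	// Use the token to fetch raw secrets for one environment of a project.
	req, _ := http.NewRequest("GET",
		"https://app.infisical.com/api/v3/secrets/raw?workspaceId=<project-id>&environment=dev&secretPath=/", nil)
	req.Header.Set("Authorization", "Bearer "+login.AccessToken)
	res, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer res.Body.Close()
	fmt.Println("status:", res.Status)
}
```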
+**Identities** in Infisical are users or machines which have a certain set of roles and permissions assigned to them. Such identities are able to manage secrets in various **Clients** throughout the entire infrastructure. To do that, identities have to verify themselves through one of the available **Authentication Methods**. -As a result, the 3 main concepts that are important to understand are: -- **[Identities](/documentation/platform/identities/overview)**: users or machines with a set permissions assigned to them. +As a result, the 3 main concepts that are important to understand are: + +- **[Identities](/documentation/platform/identities/overview)**: users or machines with a set of permissions assigned to them. - **[Clients](/integrations/platforms/kubernetes)**: Infisical-developed tools for managing secrets in various infrastructure components (e.g., [Kubernetes Operator](/integrations/platforms/kubernetes), [Infisical Agent](/integrations/platforms/infisical-agent), [CLI](/cli/usage), [SDKs](/sdks/overview), [API](/api-reference/overview/introduction), [Web Dashboard](/documentation/platform/organization)). - **[Authentication Methods](/documentation/platform/identities/universal-auth)**: ways for Identities to authenticate inside different clients (e.g., SAML SSO for Web Dashboard, Universal Auth for Infisical Agent, etc.). -## How to get started with Infisical? +## How to get started with Infisical? Depending on your use case, it might be helpful to look into some of the resources and guides provided below. - + Inject secrets into any application process/environment. Fetch secrets with any programming language on demand. - + Inject secrets into Docker containers. +Please spend money in a way that you think is in the best interest of the company. + -## Trivial expenses +# Trivial expenses We don't want you to be slowed down because you're waiting for an approval to purchase some SaaS. For trivial expenses – **Just do it**. @@ -22,6 +24,35 @@ Make sure you keep copies for all receipts. If you expense something on a compan You should default to using your company card in all cases - it has no transaction fees. If using your personal card is unavoidable, please reach out to Maidul to get it reimbursed manually. + +# Equipment + +Infisical is a remote-first company, so we understand the importance of having a comfortable work setup. To support this, we provide allowances for essential office equipment. + +### Desk & Chair + +Most people already have a comfortable desk and chair, but if you need an upgrade, we offer the following allowances. +While we're not yet able to provide the latest and greatest, we strive to be reasonable given the stage of our company. + +**Desk**: $150 USD + +**Chair**: $150 USD + +### Laptop +Each team member will receive a company-issued MacBook Pro before they start their first day. + +### Notes + +1. All equipment purchased using company allowances remains the property of Infisical. +2. Keep all receipts for equipment purchases and submit them for reimbursement. +3. If you leave Infisical, you may be required to return company-owned equipment. + +Please note that we're unable to offer a split payment option where Infisical pays half and you pay half for equipment exceeding the allowance. +This is because we don't yet have a formal HR department to handle such logistics. + +For any equipment-related questions, please reach out to Maidul. + + ## Brex We use Brex as our primary credit card provider. Don't have a company card yet? Reach out to Maidul. 
\ No newline at end of file diff --git a/company/handbook/talking-to-customers.mdx b/company/handbook/talking-to-customers.mdx new file mode 100644 index 0000000000..3f10091626 --- /dev/null +++ b/company/handbook/talking-to-customers.mdx @@ -0,0 +1,20 @@ +--- +title: "Talking to Customers" +sidebarTitle: "Talking to Customers" +description: "The guide to talking to customers at Infisical." +--- + +Everyone at Infisical talks to customers directly. We do this for a few reasons: +1. This helps us understand the needs of our customers and build the product they want. +2. This speeds up our iteration cycles (time from customer feedback to product improvements). +3. Our customers (developers) are able to talk directly to the best experts in Infisical (us) – which improves their satisfaction and success. + +## Customer Communication Etiquette + +1. When talking to customers (whether on Slack, email, or any other channel), it is very important to use proper grammar (e.g., no typos, no missed question marks) and minimal colloquial language (no "yeap", "yah", etc.). +2. During a crisis (e.g., a customer-reported bug), it is very important to communicate often. Even if there is no update yet, it is good to reach out to the customer and let them know that we are still working on resolving the issue. + +## Community Slack + +Unfortunately, we are not able to help everyone in the community Slack. It is OK to politely decline questions about infrastructure management that are not directly related to the product itself. + diff --git a/company/handbook/time-off.mdx b/company/handbook/time-off.mdx index a807214401..aae1118787 100644 --- a/company/handbook/time-off.mdx +++ b/company/handbook/time-off.mdx @@ -10,4 +10,8 @@ To request time off, just submit a request in Rippling and let Maidul know at le ## National holidays -Since Infisical's team is globally distributed, it is hard for us to keep track of all the various national holidays across many different countries. Whether you'd like to celebrate Christmas or National Brisket Day (which, by the way, is on May 28th), you are welcome to take PTO on those days – just let Maidul know at least a week ahead so that we can adjust our planning. \ No newline at end of file +Since Infisical's team is globally distributed, it is hard for us to keep track of all the various national holidays across many different countries. Whether you'd like to celebrate Christmas or National Brisket Day (which, by the way, is on May 28th), you are welcome to take PTO on those days – just let Maidul know at least a week ahead so that we can adjust our planning. + +## Winter Break + +Every year, the Infisical team goes on a company-wide vacation during the winter holidays. This year, the winter break period starts on December 21st, 2024 and ends on January 5th, 2025. You should expect to do no scheduled work during this period, but we will have a rotation process for [high and urgent service disruptions](https://infisical.com/sla). 
\ No newline at end of file diff --git a/company/mint.json b/company/mint.json index 0ae63107b4..002a414f99 100644 --- a/company/mint.json +++ b/company/mint.json @@ -58,11 +58,20 @@ "pages": [ "handbook/onboarding", "handbook/spending-money", - "handbook/time-off" + "handbook/compensation", + "handbook/time-off", + "handbook/hiring", + "handbook/meetings", + "handbook/talking-to-customers" ] } ], "integrations": { "intercom": "hsg644ru" + }, + "analytics": { + "koala": { + "publicApiKey": "pk_b50d7184e0e39ddd5cdb43cf6abeadd9b97d" + } } } diff --git a/company/style.css b/company/style.css index ea8c60dc99..f5e1bfc496 100644 --- a/company/style.css +++ b/company/style.css @@ -10,7 +10,6 @@ #sidebar { left: 0; - padding-left: 48px; padding-right: 30px; border-right: 1px; border-color: #cdd64b; @@ -18,6 +17,10 @@ border-right: 1px solid #ebebeb; } +#sidebar-content { + padding-left: 2rem; +} + #sidebar .relative .sticky { opacity: 0; } diff --git a/docker-compose.dev-read-replica.yml b/docker-compose.dev-read-replica.yml new file mode 100644 index 0000000000..7d1e6e7feb --- /dev/null +++ b/docker-compose.dev-read-replica.yml @@ -0,0 +1,191 @@ +version: "3.9" + +services: + nginx: + container_name: infisical-dev-nginx + image: nginx + restart: always + ports: + - 8080:80 + volumes: + - ./nginx/default.dev.conf:/etc/nginx/conf.d/default.conf:ro + depends_on: + - backend + - frontend + + db: + image: bitnami/postgresql:14 + ports: + - "5432:5432" + volumes: + - postgres-data:/var/lib/postgresql/data + environment: + POSTGRESQL_PASSWORD: infisical + POSTGRESQL_USERNAME: infisical + POSTGRESQL_DATABASE: infisical + POSTGRESQL_REPLICATION_MODE: master + POSTGRESQL_REPLICATION_USER: repl_user + POSTGRESQL_REPLICATION_PASSWORD: repl_password + POSTGRESQL_SYNCHRONOUS_COMMIT_MODE: "on" + POSTGRESQL_NUM_SYNCHRONOUS_REPLICAS: 1 + + db-slave: + image: bitnami/postgresql:14 + ports: + - "5433:5432" + volumes: + - postgres-slave-data:/var/lib/postgresql/data + environment: + POSTGRESQL_PASSWORD: infisical + POSTGRESQL_USERNAME: infisical + POSTGRESQL_DATABASE: infisical + POSTGRESQL_REPLICATION_MODE: slave + POSTGRESQL_REPLICATION_USER: repl_user + POSTGRESQL_REPLICATION_PASSWORD: repl_password + POSTGRESQL_MASTER_HOST: db + POSTGRESQL_MASTER_PORT_NUMBER: 5432 + + + redis: + image: redis + container_name: infisical-dev-redis + environment: + - ALLOW_EMPTY_PASSWORD=yes + ports: + - 6379:6379 + volumes: + - redis_data:/data + + redis-commander: + container_name: infisical-dev-redis-commander + image: rediscommander/redis-commander + restart: always + depends_on: + - redis + environment: + - REDIS_HOSTS=local:redis:6379 + ports: + - "8085:8081" + + db-test: + profiles: ["test"] + image: postgres:14-alpine + ports: + - "5430:5432" + environment: + POSTGRES_PASSWORD: infisical + POSTGRES_USER: infisical + POSTGRES_DB: infisical-test + + db-migration: + container_name: infisical-db-migration + depends_on: + - db + build: + context: ./backend + dockerfile: Dockerfile.dev + env_file: .env + environment: + - DB_CONNECTION_URI=postgres://infisical:infisical@db/infisical?sslmode=disable + command: npm run migration:latest + volumes: + - ./backend/src:/app/src + + backend: + container_name: infisical-dev-api + build: + context: ./backend + dockerfile: Dockerfile.dev + depends_on: + db: + condition: service_started + redis: + condition: service_started + db-migration: + condition: service_completed_successfully + env_file: + - .env + ports: + - 4000:4000 + environment: + - NODE_ENV=development + - 
DB_CONNECTION_URI=postgres://infisical:infisical@db/infisical?sslmode=disable + - TELEMETRY_ENABLED=false + volumes: + - ./backend/src:/app/src + extra_hosts: + - "host.docker.internal:host-gateway" + + frontend: + container_name: infisical-dev-frontend + restart: unless-stopped + depends_on: + - backend + build: + context: ./frontend + dockerfile: Dockerfile.dev + volumes: + - ./frontend/src:/app/src/ # mounted whole src to avoid missing reload on new files + - ./frontend/public:/app/public + env_file: .env + environment: + - NEXT_PUBLIC_ENV=development + - INFISICAL_TELEMETRY_ENABLED=false + + pgadmin: + image: dpage/pgadmin4 + restart: always + environment: + PGADMIN_DEFAULT_EMAIL: admin@example.com + PGADMIN_DEFAULT_PASSWORD: pass + ports: + - 5050:80 + depends_on: + - db + + smtp-server: + container_name: infisical-dev-smtp-server + image: lytrax/mailhog:latest # https://github.com/mailhog/MailHog/issues/353#issuecomment-821137362 + restart: always + logging: + driver: "none" # disable saving logs + ports: + - 1025:1025 # SMTP server + - 8025:8025 # Web UI + + openldap: # note: more advanced configuration is available + image: osixia/openldap:1.5.0 + restart: always + environment: + LDAP_ORGANISATION: Acme + LDAP_DOMAIN: acme.com + LDAP_ADMIN_PASSWORD: admin + ports: + - 389:389 + - 636:636 + volumes: + - ldap_data:/var/lib/ldap + - ldap_config:/etc/ldap/slapd.d + profiles: [ldap] + + phpldapadmin: # username: cn=admin,dc=acme,dc=com, pass is admin + image: osixia/phpldapadmin:latest + restart: always + environment: + - PHPLDAPADMIN_LDAP_HOSTS=openldap + - PHPLDAPADMIN_HTTPS=false + ports: + - 6433:80 + depends_on: + - openldap + profiles: [ldap] + +volumes: + postgres-data: + driver: local + postgres-slave-data: + driver: local + redis_data: + driver: local + ldap_data: + ldap_config: diff --git a/docker-compose.dev.yml b/docker-compose.dev.yml index 422fe43f3c..9e56ae5891 100644 --- a/docker-compose.dev.yml +++ b/docker-compose.dev.yml @@ -7,6 +7,7 @@ services: restart: always ports: - 8080:80 + - 8443:443 volumes: - ./nginx/default.dev.conf:/etc/nginx/conf.d/default.conf:ro depends_on: diff --git a/docker-compose.prod.yml b/docker-compose.prod.yml index 86a8e4cca3..77a1e04ab7 100644 --- a/docker-compose.prod.yml +++ b/docker-compose.prod.yml @@ -37,6 +37,7 @@ services: image: redis container_name: infisical-dev-redis env_file: .env + restart: always environment: - ALLOW_EMPTY_PASSWORD=yes ports: @@ -68,4 +69,4 @@ volumes: driver: local networks: - infisical: + infisical: \ No newline at end of file diff --git a/docs/api-reference/endpoints/audit-logs/export-audit-log.mdx b/docs/api-reference/endpoints/audit-logs/export-audit-log.mdx index aa5adb0044..d39cbe3d16 100644 --- a/docs/api-reference/endpoints/audit-logs/export-audit-log.mdx +++ b/docs/api-reference/endpoints/audit-logs/export-audit-log.mdx @@ -1,4 +1,4 @@ --- title: "Export" -openapi: "GET /api/v1/workspace/{workspaceId}/audit-logs" +openapi: "GET /api/v1/organization/audit-logs" --- diff --git a/docs/api-reference/endpoints/aws-auth/attach.mdx b/docs/api-reference/endpoints/aws-auth/attach.mdx new file mode 100644 index 0000000000..85b2d6819f --- /dev/null +++ b/docs/api-reference/endpoints/aws-auth/attach.mdx @@ -0,0 +1,4 @@ +--- +title: "Attach" +openapi: "POST /api/v1/auth/aws-auth/identities/{identityId}" +--- diff --git a/docs/api-reference/endpoints/aws-auth/login.mdx b/docs/api-reference/endpoints/aws-auth/login.mdx new file mode 100644 index 0000000000..751903be6d --- /dev/null +++ 
b/docs/api-reference/endpoints/aws-auth/login.mdx @@ -0,0 +1,4 @@ +--- +title: "Login" +openapi: "POST /api/v1/auth/aws-auth/login" +--- diff --git a/docs/api-reference/endpoints/aws-auth/retrieve.mdx b/docs/api-reference/endpoints/aws-auth/retrieve.mdx new file mode 100644 index 0000000000..4b1980dd2b --- /dev/null +++ b/docs/api-reference/endpoints/aws-auth/retrieve.mdx @@ -0,0 +1,4 @@ +--- +title: "Retrieve" +openapi: "GET /api/v1/auth/aws-auth/identities/{identityId}" +--- diff --git a/docs/api-reference/endpoints/aws-auth/revoke.mdx b/docs/api-reference/endpoints/aws-auth/revoke.mdx new file mode 100644 index 0000000000..4d19fa95f7 --- /dev/null +++ b/docs/api-reference/endpoints/aws-auth/revoke.mdx @@ -0,0 +1,4 @@ +--- +title: "Revoke" +openapi: "DELETE /api/v1/auth/aws-auth/identities/{identityId}" +--- diff --git a/docs/api-reference/endpoints/aws-auth/update.mdx b/docs/api-reference/endpoints/aws-auth/update.mdx new file mode 100644 index 0000000000..949d2a901f --- /dev/null +++ b/docs/api-reference/endpoints/aws-auth/update.mdx @@ -0,0 +1,4 @@ +--- +title: "Update" +openapi: "PATCH /api/v1/auth/aws-auth/identities/{identityId}" +--- diff --git a/docs/api-reference/endpoints/azure-auth/attach.mdx b/docs/api-reference/endpoints/azure-auth/attach.mdx new file mode 100644 index 0000000000..cafaf6df18 --- /dev/null +++ b/docs/api-reference/endpoints/azure-auth/attach.mdx @@ -0,0 +1,4 @@ +--- +title: "Attach" +openapi: "POST /api/v1/auth/azure-auth/identities/{identityId}" +--- diff --git a/docs/api-reference/endpoints/azure-auth/login.mdx b/docs/api-reference/endpoints/azure-auth/login.mdx new file mode 100644 index 0000000000..c9e51d51b6 --- /dev/null +++ b/docs/api-reference/endpoints/azure-auth/login.mdx @@ -0,0 +1,4 @@ +--- +title: "Login" +openapi: "POST /api/v1/auth/azure-auth/login" +--- diff --git a/docs/api-reference/endpoints/azure-auth/retrieve.mdx b/docs/api-reference/endpoints/azure-auth/retrieve.mdx new file mode 100644 index 0000000000..b07ee2ad4b --- /dev/null +++ b/docs/api-reference/endpoints/azure-auth/retrieve.mdx @@ -0,0 +1,4 @@ +--- +title: "Retrieve" +openapi: "GET /api/v1/auth/azure-auth/identities/{identityId}" +--- diff --git a/docs/api-reference/endpoints/azure-auth/revoke.mdx b/docs/api-reference/endpoints/azure-auth/revoke.mdx new file mode 100644 index 0000000000..7254bdd8b9 --- /dev/null +++ b/docs/api-reference/endpoints/azure-auth/revoke.mdx @@ -0,0 +1,4 @@ +--- +title: "Revoke" +openapi: "DELETE /api/v1/auth/azure-auth/identities/{identityId}" +--- diff --git a/docs/api-reference/endpoints/azure-auth/update.mdx b/docs/api-reference/endpoints/azure-auth/update.mdx new file mode 100644 index 0000000000..ee830a1f3a --- /dev/null +++ b/docs/api-reference/endpoints/azure-auth/update.mdx @@ -0,0 +1,4 @@ +--- +title: "Update" +openapi: "PATCH /api/v1/auth/azure-auth/identities/{identityId}" +--- diff --git a/docs/api-reference/endpoints/certificate-authorities/cert.mdx b/docs/api-reference/endpoints/certificate-authorities/cert.mdx new file mode 100644 index 0000000000..3706e0b117 --- /dev/null +++ b/docs/api-reference/endpoints/certificate-authorities/cert.mdx @@ -0,0 +1,4 @@ +--- +title: "Retrieve certificate / chain" +openapi: "GET /api/v1/pki/ca/{caId}/certificate" +--- diff --git a/docs/api-reference/endpoints/certificate-authorities/create.mdx b/docs/api-reference/endpoints/certificate-authorities/create.mdx new file mode 100644 index 0000000000..35e758e4bd --- /dev/null +++ b/docs/api-reference/endpoints/certificate-authorities/create.mdx @@ -0,0 +1,4 
@@ +--- +title: "Create" +openapi: "POST /api/v1/pki/ca" +--- diff --git a/docs/api-reference/endpoints/certificate-authorities/crl.mdx b/docs/api-reference/endpoints/certificate-authorities/crl.mdx new file mode 100644 index 0000000000..428c3377e4 --- /dev/null +++ b/docs/api-reference/endpoints/certificate-authorities/crl.mdx @@ -0,0 +1,4 @@ +--- +title: "List CRLs" +openapi: "GET /api/v1/pki/ca/{caId}/crls" +--- diff --git a/docs/api-reference/endpoints/certificate-authorities/csr.mdx b/docs/api-reference/endpoints/certificate-authorities/csr.mdx new file mode 100644 index 0000000000..2477a629e6 --- /dev/null +++ b/docs/api-reference/endpoints/certificate-authorities/csr.mdx @@ -0,0 +1,4 @@ +--- +title: "Get CSR" +openapi: "GET /api/v1/pki/ca/{caId}/csr" +--- diff --git a/docs/api-reference/endpoints/certificate-authorities/delete.mdx b/docs/api-reference/endpoints/certificate-authorities/delete.mdx new file mode 100644 index 0000000000..f79b8f458a --- /dev/null +++ b/docs/api-reference/endpoints/certificate-authorities/delete.mdx @@ -0,0 +1,4 @@ +--- +title: "Delete" +openapi: "DELETE /api/v1/pki/ca/{caId}" +--- diff --git a/docs/api-reference/endpoints/certificate-authorities/import-cert.mdx b/docs/api-reference/endpoints/certificate-authorities/import-cert.mdx new file mode 100644 index 0000000000..7f0e40f959 --- /dev/null +++ b/docs/api-reference/endpoints/certificate-authorities/import-cert.mdx @@ -0,0 +1,4 @@ +--- +title: "Import certificate" +openapi: "POST /api/v1/pki/ca/{caId}/import-certificate" +--- diff --git a/docs/api-reference/endpoints/certificate-authorities/issue-cert.mdx b/docs/api-reference/endpoints/certificate-authorities/issue-cert.mdx new file mode 100644 index 0000000000..045cada589 --- /dev/null +++ b/docs/api-reference/endpoints/certificate-authorities/issue-cert.mdx @@ -0,0 +1,4 @@ +--- +title: "Issue certificate" +openapi: "POST /api/v1/pki/ca/{caId}/issue-certificate" +--- diff --git a/docs/api-reference/endpoints/certificate-authorities/list-ca-certs.mdx b/docs/api-reference/endpoints/certificate-authorities/list-ca-certs.mdx new file mode 100644 index 0000000000..ce253807cd --- /dev/null +++ b/docs/api-reference/endpoints/certificate-authorities/list-ca-certs.mdx @@ -0,0 +1,4 @@ +--- +title: "List CA certificates" +openapi: "GET /api/v1/pki/ca/{caId}/ca-certificates" +--- diff --git a/docs/api-reference/endpoints/certificate-authorities/list.mdx b/docs/api-reference/endpoints/certificate-authorities/list.mdx new file mode 100644 index 0000000000..ba4a433489 --- /dev/null +++ b/docs/api-reference/endpoints/certificate-authorities/list.mdx @@ -0,0 +1,4 @@ +--- +title: "List" +openapi: "GET /api/v2/workspace/{slug}/cas" +--- diff --git a/docs/api-reference/endpoints/certificate-authorities/read.mdx b/docs/api-reference/endpoints/certificate-authorities/read.mdx new file mode 100644 index 0000000000..54dc26392d --- /dev/null +++ b/docs/api-reference/endpoints/certificate-authorities/read.mdx @@ -0,0 +1,4 @@ +--- +title: "Retrieve" +openapi: "GET /api/v1/pki/ca/{caId}" +--- diff --git a/docs/api-reference/endpoints/certificate-authorities/renew.mdx b/docs/api-reference/endpoints/certificate-authorities/renew.mdx new file mode 100644 index 0000000000..901811f2d8 --- /dev/null +++ b/docs/api-reference/endpoints/certificate-authorities/renew.mdx @@ -0,0 +1,4 @@ +--- +title: "Renew" +openapi: "POST /api/v1/pki/ca/{caId}/renew" +--- diff --git a/docs/api-reference/endpoints/certificate-authorities/sign-cert.mdx 
b/docs/api-reference/endpoints/certificate-authorities/sign-cert.mdx new file mode 100644 index 0000000000..95c8d8c652 --- /dev/null +++ b/docs/api-reference/endpoints/certificate-authorities/sign-cert.mdx @@ -0,0 +1,4 @@ +--- +title: "Sign certificate" +openapi: "POST /api/v1/pki/ca/{caId}/sign-certificate" +--- diff --git a/docs/api-reference/endpoints/certificate-authorities/sign-intermediate.mdx b/docs/api-reference/endpoints/certificate-authorities/sign-intermediate.mdx new file mode 100644 index 0000000000..310bbea269 --- /dev/null +++ b/docs/api-reference/endpoints/certificate-authorities/sign-intermediate.mdx @@ -0,0 +1,4 @@ +--- +title: "Sign intermediate certificate" +openapi: "POST /api/v1/pki/ca/{caId}/sign-intermediate" +--- diff --git a/docs/api-reference/endpoints/certificate-authorities/update.mdx b/docs/api-reference/endpoints/certificate-authorities/update.mdx new file mode 100644 index 0000000000..d18a728bfe --- /dev/null +++ b/docs/api-reference/endpoints/certificate-authorities/update.mdx @@ -0,0 +1,4 @@ +--- +title: "Update" +openapi: "PATCH /api/v1/pki/ca/{caId}" +--- diff --git a/docs/api-reference/endpoints/certificate-templates/create.mdx b/docs/api-reference/endpoints/certificate-templates/create.mdx new file mode 100644 index 0000000000..56fcf3791f --- /dev/null +++ b/docs/api-reference/endpoints/certificate-templates/create.mdx @@ -0,0 +1,4 @@ +--- +title: "Create" +openapi: "POST /api/v1/pki/certificate-templates" +--- diff --git a/docs/api-reference/endpoints/certificate-templates/delete.mdx b/docs/api-reference/endpoints/certificate-templates/delete.mdx new file mode 100644 index 0000000000..c4f13d470b --- /dev/null +++ b/docs/api-reference/endpoints/certificate-templates/delete.mdx @@ -0,0 +1,4 @@ +--- +title: "Delete" +openapi: "DELETE /api/v1/pki/certificate-templates/{certificateTemplateId}" +--- diff --git a/docs/api-reference/endpoints/certificate-templates/get-by-id.mdx b/docs/api-reference/endpoints/certificate-templates/get-by-id.mdx new file mode 100644 index 0000000000..802dc5326b --- /dev/null +++ b/docs/api-reference/endpoints/certificate-templates/get-by-id.mdx @@ -0,0 +1,4 @@ +--- +title: "Get by ID" +openapi: "GET /api/v1/pki/certificate-templates/{certificateTemplateId}" +--- diff --git a/docs/api-reference/endpoints/certificate-templates/update.mdx b/docs/api-reference/endpoints/certificate-templates/update.mdx new file mode 100644 index 0000000000..53c5f6fdf4 --- /dev/null +++ b/docs/api-reference/endpoints/certificate-templates/update.mdx @@ -0,0 +1,4 @@ +--- +title: "Update" +openapi: "PATCH /api/v1/pki/certificate-templates/{certificateTemplateId}" +--- diff --git a/docs/api-reference/endpoints/certificates/cert-body.mdx b/docs/api-reference/endpoints/certificates/cert-body.mdx new file mode 100644 index 0000000000..e4c3b01239 --- /dev/null +++ b/docs/api-reference/endpoints/certificates/cert-body.mdx @@ -0,0 +1,4 @@ +--- +title: "Get Certificate Body / Chain" +openapi: "GET /api/v1/pki/certificates/{serialNumber}/certificate" +--- diff --git a/docs/api-reference/endpoints/certificates/delete.mdx b/docs/api-reference/endpoints/certificates/delete.mdx new file mode 100644 index 0000000000..27042af42f --- /dev/null +++ b/docs/api-reference/endpoints/certificates/delete.mdx @@ -0,0 +1,4 @@ +--- +title: "Delete" +openapi: "DELETE /api/v1/pki/certificates/{serialNumber}" +--- diff --git a/docs/api-reference/endpoints/certificates/issue-certificate.mdx b/docs/api-reference/endpoints/certificates/issue-certificate.mdx new file mode 100644 
index 0000000000..90a79a4af9 --- /dev/null +++ b/docs/api-reference/endpoints/certificates/issue-certificate.mdx @@ -0,0 +1,4 @@ +--- +title: "Issue Certificate" +openapi: "POST /api/v1/pki/certificates/issue-certificate" +--- diff --git a/docs/api-reference/endpoints/certificates/list.mdx b/docs/api-reference/endpoints/certificates/list.mdx new file mode 100644 index 0000000000..67a4623a4c --- /dev/null +++ b/docs/api-reference/endpoints/certificates/list.mdx @@ -0,0 +1,4 @@ +--- +title: "List" +openapi: "GET /api/v2/workspace/{slug}/certificates" +--- diff --git a/docs/api-reference/endpoints/certificates/read.mdx b/docs/api-reference/endpoints/certificates/read.mdx new file mode 100644 index 0000000000..ce6463ddef --- /dev/null +++ b/docs/api-reference/endpoints/certificates/read.mdx @@ -0,0 +1,4 @@ +--- +title: "Retrieve" +openapi: "GET /api/v1/pki/certificates/{serialNumber}" +--- diff --git a/docs/api-reference/endpoints/certificates/revoke.mdx b/docs/api-reference/endpoints/certificates/revoke.mdx new file mode 100644 index 0000000000..e4da73a19a --- /dev/null +++ b/docs/api-reference/endpoints/certificates/revoke.mdx @@ -0,0 +1,4 @@ +--- +title: "Revoke" +openapi: "POST /api/v1/pki/certificates/{serialNumber}/revoke" +--- diff --git a/docs/api-reference/endpoints/certificates/sign-certificate.mdx b/docs/api-reference/endpoints/certificates/sign-certificate.mdx new file mode 100644 index 0000000000..3132d5846f --- /dev/null +++ b/docs/api-reference/endpoints/certificates/sign-certificate.mdx @@ -0,0 +1,4 @@ +--- +title: "Sign Certificate" +openapi: "POST /api/v1/pki/certificates/sign-certificate" +--- diff --git a/docs/api-reference/endpoints/dynamic-secrets/create-lease.mdx b/docs/api-reference/endpoints/dynamic-secrets/create-lease.mdx new file mode 100644 index 0000000000..57e235f50d --- /dev/null +++ b/docs/api-reference/endpoints/dynamic-secrets/create-lease.mdx @@ -0,0 +1,4 @@ +--- +title: "Create Lease" +openapi: "POST /api/v1/dynamic-secrets/leases" +--- diff --git a/docs/api-reference/endpoints/dynamic-secrets/create.mdx b/docs/api-reference/endpoints/dynamic-secrets/create.mdx new file mode 100644 index 0000000000..1c591bd3d1 --- /dev/null +++ b/docs/api-reference/endpoints/dynamic-secrets/create.mdx @@ -0,0 +1,4 @@ +--- +title: "Create" +openapi: "POST /api/v1/dynamic-secrets" +--- diff --git a/docs/api-reference/endpoints/dynamic-secrets/delete-lease.mdx b/docs/api-reference/endpoints/dynamic-secrets/delete-lease.mdx new file mode 100644 index 0000000000..93e209995e --- /dev/null +++ b/docs/api-reference/endpoints/dynamic-secrets/delete-lease.mdx @@ -0,0 +1,4 @@ +--- +title: "Delete Lease" +openapi: "DELETE /api/v1/dynamic-secrets/leases/{leaseId}" +--- diff --git a/docs/api-reference/endpoints/dynamic-secrets/delete.mdx b/docs/api-reference/endpoints/dynamic-secrets/delete.mdx new file mode 100644 index 0000000000..e59ac43795 --- /dev/null +++ b/docs/api-reference/endpoints/dynamic-secrets/delete.mdx @@ -0,0 +1,4 @@ +--- +title: "Delete" +openapi: "DELETE /api/v1/dynamic-secrets/{name}" +--- diff --git a/docs/api-reference/endpoints/dynamic-secrets/get-lease.mdx b/docs/api-reference/endpoints/dynamic-secrets/get-lease.mdx new file mode 100644 index 0000000000..aff12207e3 --- /dev/null +++ b/docs/api-reference/endpoints/dynamic-secrets/get-lease.mdx @@ -0,0 +1,4 @@ +--- +title: "Get Lease" +openapi: "GET /api/v1/dynamic-secrets/leases/{leaseId}" +--- diff --git a/docs/api-reference/endpoints/dynamic-secrets/get.mdx b/docs/api-reference/endpoints/dynamic-secrets/get.mdx 
new file mode 100644 index 0000000000..bc3267e97d --- /dev/null +++ b/docs/api-reference/endpoints/dynamic-secrets/get.mdx @@ -0,0 +1,4 @@ +--- +title: "Get" +openapi: "GET /api/v1/dynamic-secrets/{name}" +--- diff --git a/docs/api-reference/endpoints/dynamic-secrets/list-leases.mdx b/docs/api-reference/endpoints/dynamic-secrets/list-leases.mdx new file mode 100644 index 0000000000..26cad150d9 --- /dev/null +++ b/docs/api-reference/endpoints/dynamic-secrets/list-leases.mdx @@ -0,0 +1,4 @@ +--- +title: "List Leases" +openapi: "GET /api/v1/dynamic-secrets/{name}/leases" +--- diff --git a/docs/api-reference/endpoints/dynamic-secrets/list.mdx b/docs/api-reference/endpoints/dynamic-secrets/list.mdx new file mode 100644 index 0000000000..d76c1167e1 --- /dev/null +++ b/docs/api-reference/endpoints/dynamic-secrets/list.mdx @@ -0,0 +1,4 @@ +--- +title: "List" +openapi: "GET /api/v1/dynamic-secrets" +--- diff --git a/docs/api-reference/endpoints/dynamic-secrets/renew-lease.mdx b/docs/api-reference/endpoints/dynamic-secrets/renew-lease.mdx new file mode 100644 index 0000000000..ea1777e6b0 --- /dev/null +++ b/docs/api-reference/endpoints/dynamic-secrets/renew-lease.mdx @@ -0,0 +1,4 @@ +--- +title: "Renew Lease" +openapi: "POST /api/v1/dynamic-secrets/leases/{leaseId}/renew" +--- diff --git a/docs/api-reference/endpoints/dynamic-secrets/update.mdx b/docs/api-reference/endpoints/dynamic-secrets/update.mdx new file mode 100644 index 0000000000..0ab4aaab45 --- /dev/null +++ b/docs/api-reference/endpoints/dynamic-secrets/update.mdx @@ -0,0 +1,4 @@ +--- +title: "Update" +openapi: "PATCH /api/v1/dynamic-secrets/{name}" +--- diff --git a/docs/api-reference/endpoints/folders/get-by-id.mdx b/docs/api-reference/endpoints/folders/get-by-id.mdx new file mode 100644 index 0000000000..db3c4d0cc9 --- /dev/null +++ b/docs/api-reference/endpoints/folders/get-by-id.mdx @@ -0,0 +1,4 @@ +--- +title: "Get by ID" +openapi: "GET /api/v1/folders/{id}" +--- diff --git a/docs/api-reference/endpoints/gcp-auth/attach.mdx b/docs/api-reference/endpoints/gcp-auth/attach.mdx new file mode 100644 index 0000000000..04bc754f5d --- /dev/null +++ b/docs/api-reference/endpoints/gcp-auth/attach.mdx @@ -0,0 +1,4 @@ +--- +title: "Attach" +openapi: "POST /api/v1/auth/gcp-auth/identities/{identityId}" +--- diff --git a/docs/api-reference/endpoints/gcp-auth/login.mdx b/docs/api-reference/endpoints/gcp-auth/login.mdx new file mode 100644 index 0000000000..ff5caf2ac2 --- /dev/null +++ b/docs/api-reference/endpoints/gcp-auth/login.mdx @@ -0,0 +1,4 @@ +--- +title: "Login" +openapi: "POST /api/v1/auth/gcp-auth/login" +--- diff --git a/docs/api-reference/endpoints/gcp-auth/retrieve.mdx b/docs/api-reference/endpoints/gcp-auth/retrieve.mdx new file mode 100644 index 0000000000..48124f2b92 --- /dev/null +++ b/docs/api-reference/endpoints/gcp-auth/retrieve.mdx @@ -0,0 +1,4 @@ +--- +title: "Retrieve" +openapi: "GET /api/v1/auth/gcp-auth/identities/{identityId}" +--- diff --git a/docs/api-reference/endpoints/gcp-auth/revoke.mdx b/docs/api-reference/endpoints/gcp-auth/revoke.mdx new file mode 100644 index 0000000000..1e1b3f1a4c --- /dev/null +++ b/docs/api-reference/endpoints/gcp-auth/revoke.mdx @@ -0,0 +1,4 @@ +--- +title: "Revoke" +openapi: "DELETE /api/v1/auth/gcp-auth/identities/{identityId}" +--- diff --git a/docs/api-reference/endpoints/gcp-auth/update.mdx b/docs/api-reference/endpoints/gcp-auth/update.mdx new file mode 100644 index 0000000000..ee1e269d34 --- /dev/null +++ b/docs/api-reference/endpoints/gcp-auth/update.mdx @@ -0,0 +1,4 @@ +--- +title: 
"Update" +openapi: "PATCH /api/v1/auth/gcp-auth/identities/{identityId}" +--- diff --git a/docs/api-reference/endpoints/groups/add-group-user.mdx b/docs/api-reference/endpoints/groups/add-group-user.mdx new file mode 100644 index 0000000000..bd490c9a19 --- /dev/null +++ b/docs/api-reference/endpoints/groups/add-group-user.mdx @@ -0,0 +1,4 @@ +--- +title: "Add Group User" +openapi: "POST /api/v1/groups/{id}/users/{username}" +--- diff --git a/docs/api-reference/endpoints/groups/create.mdx b/docs/api-reference/endpoints/groups/create.mdx new file mode 100644 index 0000000000..3f69c525a0 --- /dev/null +++ b/docs/api-reference/endpoints/groups/create.mdx @@ -0,0 +1,4 @@ +--- +title: "Create" +openapi: "POST /api/v1/groups" +--- diff --git a/docs/api-reference/endpoints/groups/delete.mdx b/docs/api-reference/endpoints/groups/delete.mdx new file mode 100644 index 0000000000..54f0a10c0b --- /dev/null +++ b/docs/api-reference/endpoints/groups/delete.mdx @@ -0,0 +1,4 @@ +--- +title: "Delete" +openapi: "DELETE /api/v1/groups/{id}" +--- diff --git a/docs/api-reference/endpoints/groups/get-by-id.mdx b/docs/api-reference/endpoints/groups/get-by-id.mdx new file mode 100644 index 0000000000..558d820512 --- /dev/null +++ b/docs/api-reference/endpoints/groups/get-by-id.mdx @@ -0,0 +1,4 @@ +--- +title: "Get By ID" +openapi: "GET /api/v1/groups/{id}" +--- diff --git a/docs/api-reference/endpoints/groups/get.mdx b/docs/api-reference/endpoints/groups/get.mdx new file mode 100644 index 0000000000..4acba43528 --- /dev/null +++ b/docs/api-reference/endpoints/groups/get.mdx @@ -0,0 +1,4 @@ +--- +title: "Get Groups in Organization" +openapi: "GET /api/v1/groups" +--- diff --git a/docs/api-reference/endpoints/groups/list-group-users.mdx b/docs/api-reference/endpoints/groups/list-group-users.mdx new file mode 100644 index 0000000000..166f45527f --- /dev/null +++ b/docs/api-reference/endpoints/groups/list-group-users.mdx @@ -0,0 +1,4 @@ +--- +title: "List Group Users" +openapi: "GET /api/v1/groups/{id}/users" +--- diff --git a/docs/api-reference/endpoints/groups/remove-group-user.mdx b/docs/api-reference/endpoints/groups/remove-group-user.mdx new file mode 100644 index 0000000000..a8ef6ed09f --- /dev/null +++ b/docs/api-reference/endpoints/groups/remove-group-user.mdx @@ -0,0 +1,4 @@ +--- +title: "Remove Group User" +openapi: "DELETE /api/v1/groups/{id}/users/{username}" +--- diff --git a/docs/api-reference/endpoints/groups/update.mdx b/docs/api-reference/endpoints/groups/update.mdx new file mode 100644 index 0000000000..d008edc5a0 --- /dev/null +++ b/docs/api-reference/endpoints/groups/update.mdx @@ -0,0 +1,4 @@ +--- +title: "Update" +openapi: "PATCH /api/v1/groups/{id}" +--- diff --git a/docs/api-reference/endpoints/identities/get-by-id.mdx b/docs/api-reference/endpoints/identities/get-by-id.mdx new file mode 100644 index 0000000000..f721d3556a --- /dev/null +++ b/docs/api-reference/endpoints/identities/get-by-id.mdx @@ -0,0 +1,5 @@ +--- +title: "Get By ID" +openapi: "GET /api/v1/identities/{identityId}" +--- + diff --git a/docs/api-reference/endpoints/identities/list.mdx b/docs/api-reference/endpoints/identities/list.mdx new file mode 100644 index 0000000000..d8972e3a91 --- /dev/null +++ b/docs/api-reference/endpoints/identities/list.mdx @@ -0,0 +1,4 @@ +--- +title: "List" +openapi: "GET /api/v1/identities" +--- diff --git a/docs/api-reference/endpoints/kms/keys/create.mdx b/docs/api-reference/endpoints/kms/keys/create.mdx new file mode 100644 index 0000000000..194d466bf3 --- /dev/null +++ 
b/docs/api-reference/endpoints/kms/keys/create.mdx @@ -0,0 +1,4 @@ +--- +title: "Create Key" +openapi: "POST /api/v1/kms/keys" +--- diff --git a/docs/api-reference/endpoints/kms/keys/decrypt.mdx b/docs/api-reference/endpoints/kms/keys/decrypt.mdx new file mode 100644 index 0000000000..2ab8ce4ab6 --- /dev/null +++ b/docs/api-reference/endpoints/kms/keys/decrypt.mdx @@ -0,0 +1,4 @@ +--- +title: "Decrypt Data" +openapi: "POST /api/v1/kms/keys/{keyId}/decrypt" +--- diff --git a/docs/api-reference/endpoints/kms/keys/delete.mdx b/docs/api-reference/endpoints/kms/keys/delete.mdx new file mode 100644 index 0000000000..91739d362b --- /dev/null +++ b/docs/api-reference/endpoints/kms/keys/delete.mdx @@ -0,0 +1,4 @@ +--- +title: "Delete Key" +openapi: "DELETE /api/v1/kms/keys/{keyId}" +--- diff --git a/docs/api-reference/endpoints/kms/keys/encrypt.mdx b/docs/api-reference/endpoints/kms/keys/encrypt.mdx new file mode 100644 index 0000000000..6d9db8006f --- /dev/null +++ b/docs/api-reference/endpoints/kms/keys/encrypt.mdx @@ -0,0 +1,4 @@ +--- +title: "Encrypt Data" +openapi: "POST /api/v1/kms/keys/{keyId}/encrypt" +--- diff --git a/docs/api-reference/endpoints/kms/keys/list.mdx b/docs/api-reference/endpoints/kms/keys/list.mdx new file mode 100644 index 0000000000..983c98ae3d --- /dev/null +++ b/docs/api-reference/endpoints/kms/keys/list.mdx @@ -0,0 +1,4 @@ +--- +title: "List Keys" +openapi: "GET /api/v1/kms/keys" +--- diff --git a/docs/api-reference/endpoints/kms/keys/update.mdx b/docs/api-reference/endpoints/kms/keys/update.mdx new file mode 100644 index 0000000000..7e44420b1e --- /dev/null +++ b/docs/api-reference/endpoints/kms/keys/update.mdx @@ -0,0 +1,4 @@ +--- +title: "Update Key" +openapi: "PATCH /api/v1/kms/keys/{keyId}" +--- diff --git a/docs/api-reference/endpoints/kubernetes-auth/attach.mdx b/docs/api-reference/endpoints/kubernetes-auth/attach.mdx new file mode 100644 index 0000000000..969bbd8a1b --- /dev/null +++ b/docs/api-reference/endpoints/kubernetes-auth/attach.mdx @@ -0,0 +1,4 @@ +--- +title: "Attach" +openapi: "POST /api/v1/auth/kubernetes-auth/identities/{identityId}" +--- diff --git a/docs/api-reference/endpoints/kubernetes-auth/login.mdx b/docs/api-reference/endpoints/kubernetes-auth/login.mdx new file mode 100644 index 0000000000..0b6539be5a --- /dev/null +++ b/docs/api-reference/endpoints/kubernetes-auth/login.mdx @@ -0,0 +1,4 @@ +--- +title: "Login" +openapi: "POST /api/v1/auth/kubernetes-auth/login" +--- diff --git a/docs/api-reference/endpoints/kubernetes-auth/retrieve.mdx b/docs/api-reference/endpoints/kubernetes-auth/retrieve.mdx new file mode 100644 index 0000000000..ca7b30d10e --- /dev/null +++ b/docs/api-reference/endpoints/kubernetes-auth/retrieve.mdx @@ -0,0 +1,4 @@ +--- +title: "Retrieve" +openapi: "GET /api/v1/auth/kubernetes-auth/identities/{identityId}" +--- diff --git a/docs/api-reference/endpoints/kubernetes-auth/revoke.mdx b/docs/api-reference/endpoints/kubernetes-auth/revoke.mdx new file mode 100644 index 0000000000..94c11cf73f --- /dev/null +++ b/docs/api-reference/endpoints/kubernetes-auth/revoke.mdx @@ -0,0 +1,4 @@ +--- +title: "Revoke" +openapi: "DELETE /api/v1/auth/kubernetes-auth/identities/{identityId}" +--- diff --git a/docs/api-reference/endpoints/kubernetes-auth/update.mdx b/docs/api-reference/endpoints/kubernetes-auth/update.mdx new file mode 100644 index 0000000000..8eb173ac8a --- /dev/null +++ b/docs/api-reference/endpoints/kubernetes-auth/update.mdx @@ -0,0 +1,4 @@ +--- +title: "Update" +openapi: "PATCH 
/api/v1/auth/kubernetes-auth/identities/{identityId}" +--- diff --git a/docs/api-reference/endpoints/oidc-auth/attach.mdx b/docs/api-reference/endpoints/oidc-auth/attach.mdx new file mode 100644 index 0000000000..c75bdc69ff --- /dev/null +++ b/docs/api-reference/endpoints/oidc-auth/attach.mdx @@ -0,0 +1,4 @@ +--- +title: "Attach" +openapi: "POST /api/v1/auth/oidc-auth/identities/{identityId}" +--- diff --git a/docs/api-reference/endpoints/oidc-auth/login.mdx b/docs/api-reference/endpoints/oidc-auth/login.mdx new file mode 100644 index 0000000000..baac1bef98 --- /dev/null +++ b/docs/api-reference/endpoints/oidc-auth/login.mdx @@ -0,0 +1,4 @@ +--- +title: "Login" +openapi: "POST /api/v1/auth/oidc-auth/login" +--- diff --git a/docs/api-reference/endpoints/oidc-auth/retrieve.mdx b/docs/api-reference/endpoints/oidc-auth/retrieve.mdx new file mode 100644 index 0000000000..a870ffc03b --- /dev/null +++ b/docs/api-reference/endpoints/oidc-auth/retrieve.mdx @@ -0,0 +1,4 @@ +--- +title: "Retrieve" +openapi: "GET /api/v1/auth/oidc-auth/identities/{identityId}" +--- diff --git a/docs/api-reference/endpoints/oidc-auth/revoke.mdx b/docs/api-reference/endpoints/oidc-auth/revoke.mdx new file mode 100644 index 0000000000..df46d6c45a --- /dev/null +++ b/docs/api-reference/endpoints/oidc-auth/revoke.mdx @@ -0,0 +1,4 @@ +--- +title: "Revoke" +openapi: "DELETE /api/v1/auth/oidc-auth/identities/{identityId}" +--- diff --git a/docs/api-reference/endpoints/oidc-auth/update.mdx b/docs/api-reference/endpoints/oidc-auth/update.mdx new file mode 100644 index 0000000000..0d29c3db79 --- /dev/null +++ b/docs/api-reference/endpoints/oidc-auth/update.mdx @@ -0,0 +1,4 @@ +--- +title: "Update" +openapi: "PATCH /api/v1/auth/oidc-auth/identities/{identityId}" +--- diff --git a/docs/api-reference/endpoints/pki-alerts/create.mdx b/docs/api-reference/endpoints/pki-alerts/create.mdx new file mode 100644 index 0000000000..458f0cd483 --- /dev/null +++ b/docs/api-reference/endpoints/pki-alerts/create.mdx @@ -0,0 +1,4 @@ +--- +title: "Create" +openapi: "POST /api/v1/pki/alerts" +--- diff --git a/docs/api-reference/endpoints/pki-alerts/delete.mdx b/docs/api-reference/endpoints/pki-alerts/delete.mdx new file mode 100644 index 0000000000..c0918d1fea --- /dev/null +++ b/docs/api-reference/endpoints/pki-alerts/delete.mdx @@ -0,0 +1,4 @@ +--- +title: "Delete" +openapi: "DELETE /api/v1/pki/alerts/{alertId}" +--- diff --git a/docs/api-reference/endpoints/pki-alerts/read.mdx b/docs/api-reference/endpoints/pki-alerts/read.mdx new file mode 100644 index 0000000000..928afdbc5d --- /dev/null +++ b/docs/api-reference/endpoints/pki-alerts/read.mdx @@ -0,0 +1,4 @@ +--- +title: "Retrieve" +openapi: "GET /api/v1/pki/alerts/{alertId}" +--- diff --git a/docs/api-reference/endpoints/pki-alerts/update.mdx b/docs/api-reference/endpoints/pki-alerts/update.mdx new file mode 100644 index 0000000000..829f8c57b9 --- /dev/null +++ b/docs/api-reference/endpoints/pki-alerts/update.mdx @@ -0,0 +1,4 @@ +--- +title: "Update" +openapi: "PATCH /api/v1/pki/alerts/{alertId}" +--- diff --git a/docs/api-reference/endpoints/pki-collections/add-item.mdx b/docs/api-reference/endpoints/pki-collections/add-item.mdx new file mode 100644 index 0000000000..7a7da9c2e8 --- /dev/null +++ b/docs/api-reference/endpoints/pki-collections/add-item.mdx @@ -0,0 +1,4 @@ +--- +title: "Add Collection Item" +openapi: "POST /api/v1/pki/collections/{collectionId}/items" +--- diff --git a/docs/api-reference/endpoints/pki-collections/create.mdx 
b/docs/api-reference/endpoints/pki-collections/create.mdx new file mode 100644 index 0000000000..7211b622ce --- /dev/null +++ b/docs/api-reference/endpoints/pki-collections/create.mdx @@ -0,0 +1,4 @@ +--- +title: "Create" +openapi: "POST /api/v1/pki/collections" +--- diff --git a/docs/api-reference/endpoints/pki-collections/delete-item.mdx b/docs/api-reference/endpoints/pki-collections/delete-item.mdx new file mode 100644 index 0000000000..e5805618b0 --- /dev/null +++ b/docs/api-reference/endpoints/pki-collections/delete-item.mdx @@ -0,0 +1,4 @@ +--- +title: "Delete Collection Item" +openapi: "DELETE /api/v1/pki/collections/{collectionId}/items/{collectionItemId}" +--- diff --git a/docs/api-reference/endpoints/pki-collections/delete.mdx b/docs/api-reference/endpoints/pki-collections/delete.mdx new file mode 100644 index 0000000000..46e67bc4f4 --- /dev/null +++ b/docs/api-reference/endpoints/pki-collections/delete.mdx @@ -0,0 +1,4 @@ +--- +title: "Delete" +openapi: "DELETE /api/v1/pki/collections/{collectionId}" +--- diff --git a/docs/api-reference/endpoints/pki-collections/list-items.mdx b/docs/api-reference/endpoints/pki-collections/list-items.mdx new file mode 100644 index 0000000000..dc2e1f04b5 --- /dev/null +++ b/docs/api-reference/endpoints/pki-collections/list-items.mdx @@ -0,0 +1,4 @@ +--- +title: "Retrieve" +openapi: "GET /api/v1/pki/collections/{collectionId}/items" +--- diff --git a/docs/api-reference/endpoints/pki-collections/read.mdx b/docs/api-reference/endpoints/pki-collections/read.mdx new file mode 100644 index 0000000000..de83f79964 --- /dev/null +++ b/docs/api-reference/endpoints/pki-collections/read.mdx @@ -0,0 +1,4 @@ +--- +title: "Retrieve" +openapi: "GET /api/v1/pki/collections/{collectionId}" +--- diff --git a/docs/api-reference/endpoints/pki-collections/update.mdx b/docs/api-reference/endpoints/pki-collections/update.mdx new file mode 100644 index 0000000000..7d24214e09 --- /dev/null +++ b/docs/api-reference/endpoints/pki-collections/update.mdx @@ -0,0 +1,4 @@ +--- +title: "Update" +openapi: "PATCH /api/v1/pki/collections/{collectionId}" +--- diff --git a/docs/api-reference/endpoints/project-groups/create.mdx b/docs/api-reference/endpoints/project-groups/create.mdx new file mode 100644 index 0000000000..6b468085e4 --- /dev/null +++ b/docs/api-reference/endpoints/project-groups/create.mdx @@ -0,0 +1,4 @@ +--- +title: "Create Project Membership" +openapi: "POST /api/v2/workspace/{projectId}/groups/{groupId}" +--- diff --git a/docs/api-reference/endpoints/project-groups/delete.mdx b/docs/api-reference/endpoints/project-groups/delete.mdx new file mode 100644 index 0000000000..07db40d7fc --- /dev/null +++ b/docs/api-reference/endpoints/project-groups/delete.mdx @@ -0,0 +1,4 @@ +--- +title: "Delete Project Membership" +openapi: "DELETE /api/v2/workspace/{projectId}/groups/{groupId}" +--- diff --git a/docs/api-reference/endpoints/project-groups/get-by-id.mdx b/docs/api-reference/endpoints/project-groups/get-by-id.mdx new file mode 100644 index 0000000000..611f39059a --- /dev/null +++ b/docs/api-reference/endpoints/project-groups/get-by-id.mdx @@ -0,0 +1,4 @@ +--- +title: "Get Project Membership" +openapi: "GET /api/v2/workspace/{projectId}/groups/{groupId}" +--- diff --git a/docs/api-reference/endpoints/project-groups/list.mdx b/docs/api-reference/endpoints/project-groups/list.mdx new file mode 100644 index 0000000000..1488fb6aeb --- /dev/null +++ b/docs/api-reference/endpoints/project-groups/list.mdx @@ -0,0 +1,4 @@ +--- +title: "List Project Memberships" +openapi: "GET 
/api/v2/workspace/{projectId}/groups" +--- diff --git a/docs/api-reference/endpoints/project-groups/update.mdx b/docs/api-reference/endpoints/project-groups/update.mdx new file mode 100644 index 0000000000..8b963a1d87 --- /dev/null +++ b/docs/api-reference/endpoints/project-groups/update.mdx @@ -0,0 +1,4 @@ +--- +title: "Update Project Membership" +openapi: "PATCH /api/v2/workspace/{projectId}/groups/{groupId}" +--- diff --git a/docs/api-reference/endpoints/project-roles/create.mdx b/docs/api-reference/endpoints/project-roles/create.mdx index 2220b93090..7ebfff262d 100644 --- a/docs/api-reference/endpoints/project-roles/create.mdx +++ b/docs/api-reference/endpoints/project-roles/create.mdx @@ -2,3 +2,7 @@ title: "Create" openapi: "POST /api/v1/workspace/{projectSlug}/roles" --- + + + You can read more about the permissions field in the [permissions documentation](/internals/permissions). + \ No newline at end of file diff --git a/docs/api-reference/endpoints/project-templates/create.mdx b/docs/api-reference/endpoints/project-templates/create.mdx new file mode 100644 index 0000000000..d4eeb37128 --- /dev/null +++ b/docs/api-reference/endpoints/project-templates/create.mdx @@ -0,0 +1,8 @@ +--- +title: "Create" +openapi: "POST /api/v1/project-templates" +--- + + + You can read more about the role's permissions field in the [permissions documentation](/internals/permissions). + \ No newline at end of file diff --git a/docs/api-reference/endpoints/project-templates/delete.mdx b/docs/api-reference/endpoints/project-templates/delete.mdx new file mode 100644 index 0000000000..36078b30a5 --- /dev/null +++ b/docs/api-reference/endpoints/project-templates/delete.mdx @@ -0,0 +1,4 @@ +--- +title: "Delete" +openapi: "DELETE /api/v1/project-templates/{templateId}" +--- diff --git a/docs/api-reference/endpoints/project-templates/get-by-id.mdx b/docs/api-reference/endpoints/project-templates/get-by-id.mdx new file mode 100644 index 0000000000..4e5317aeef --- /dev/null +++ b/docs/api-reference/endpoints/project-templates/get-by-id.mdx @@ -0,0 +1,4 @@ +--- +title: "Get By ID" +openapi: "GET /api/v1/project-templates/{templateId}" +--- diff --git a/docs/api-reference/endpoints/project-templates/list.mdx b/docs/api-reference/endpoints/project-templates/list.mdx new file mode 100644 index 0000000000..d0bee0bb97 --- /dev/null +++ b/docs/api-reference/endpoints/project-templates/list.mdx @@ -0,0 +1,4 @@ +--- +title: "List" +openapi: "GET /api/v1/project-templates" +--- diff --git a/docs/api-reference/endpoints/project-templates/update.mdx b/docs/api-reference/endpoints/project-templates/update.mdx new file mode 100644 index 0000000000..1b3e25a206 --- /dev/null +++ b/docs/api-reference/endpoints/project-templates/update.mdx @@ -0,0 +1,8 @@ +--- +title: "Update" +openapi: "PATCH /api/v1/project-templates/{templateId}" +--- + + + You can read more about the role's permissions field in the [permissions documentation](/internals/permissions). 
+ \ No newline at end of file diff --git a/docs/api-reference/endpoints/secret-tags/get-by-id.mdx b/docs/api-reference/endpoints/secret-tags/get-by-id.mdx new file mode 100644 index 0000000000..de02fe1332 --- /dev/null +++ b/docs/api-reference/endpoints/secret-tags/get-by-id.mdx @@ -0,0 +1,4 @@ +--- +title: "Get By ID" +openapi: "GET /api/v1/workspace/{projectId}/tags/{tagId}" +--- diff --git a/docs/api-reference/endpoints/secret-tags/get-by-slug.mdx b/docs/api-reference/endpoints/secret-tags/get-by-slug.mdx new file mode 100644 index 0000000000..91eab730f5 --- /dev/null +++ b/docs/api-reference/endpoints/secret-tags/get-by-slug.mdx @@ -0,0 +1,4 @@ +--- +title: "Get By Slug" +openapi: "GET /api/v1/workspace/{projectId}/tags/slug/{tagSlug}" +--- diff --git a/docs/api-reference/endpoints/secret-tags/update.mdx b/docs/api-reference/endpoints/secret-tags/update.mdx new file mode 100644 index 0000000000..b9c290db8a --- /dev/null +++ b/docs/api-reference/endpoints/secret-tags/update.mdx @@ -0,0 +1,4 @@ +--- +title: "Update" +openapi: "PATCH /api/v1/workspace/{projectId}/tags/{tagId}" +--- diff --git a/docs/api-reference/endpoints/token-auth/attach.mdx b/docs/api-reference/endpoints/token-auth/attach.mdx new file mode 100644 index 0000000000..a53855db3c --- /dev/null +++ b/docs/api-reference/endpoints/token-auth/attach.mdx @@ -0,0 +1,4 @@ +--- +title: "Attach" +openapi: "POST /api/v1/auth/token-auth/identities/{identityId}" +--- diff --git a/docs/api-reference/endpoints/token-auth/create-token.mdx b/docs/api-reference/endpoints/token-auth/create-token.mdx new file mode 100644 index 0000000000..e147a4e126 --- /dev/null +++ b/docs/api-reference/endpoints/token-auth/create-token.mdx @@ -0,0 +1,4 @@ +--- +title: "Create Token" +openapi: "POST /api/v1/auth/token-auth/identities/{identityId}/tokens" +--- diff --git a/docs/api-reference/endpoints/token-auth/get-tokens.mdx b/docs/api-reference/endpoints/token-auth/get-tokens.mdx new file mode 100644 index 0000000000..5f690a298d --- /dev/null +++ b/docs/api-reference/endpoints/token-auth/get-tokens.mdx @@ -0,0 +1,4 @@ +--- +title: "Get Tokens" +openapi: "GET /api/v1/auth/token-auth/identities/{identityId}/tokens" +--- diff --git a/docs/api-reference/endpoints/token-auth/retrieve.mdx b/docs/api-reference/endpoints/token-auth/retrieve.mdx new file mode 100644 index 0000000000..ed6bb762c0 --- /dev/null +++ b/docs/api-reference/endpoints/token-auth/retrieve.mdx @@ -0,0 +1,4 @@ +--- +title: "Retrieve" +openapi: "GET /api/v1/auth/token-auth/identities/{identityId}" +--- diff --git a/docs/api-reference/endpoints/token-auth/revoke-token.mdx b/docs/api-reference/endpoints/token-auth/revoke-token.mdx new file mode 100644 index 0000000000..ca6a0f056c --- /dev/null +++ b/docs/api-reference/endpoints/token-auth/revoke-token.mdx @@ -0,0 +1,4 @@ +--- +title: "Revoke Token" +openapi: "POST /api/v1/auth/token-auth/tokens/{tokenId}/revoke" +--- diff --git a/docs/api-reference/endpoints/token-auth/revoke.mdx b/docs/api-reference/endpoints/token-auth/revoke.mdx new file mode 100644 index 0000000000..52e295fd0c --- /dev/null +++ b/docs/api-reference/endpoints/token-auth/revoke.mdx @@ -0,0 +1,4 @@ +--- +title: "Revoke" +openapi: "DELETE /api/v1/auth/token-auth/identities/{identityId}" +--- diff --git a/docs/api-reference/endpoints/token-auth/update-token.mdx b/docs/api-reference/endpoints/token-auth/update-token.mdx new file mode 100644 index 0000000000..cf5e0f5687 --- /dev/null +++ b/docs/api-reference/endpoints/token-auth/update-token.mdx @@ -0,0 +1,4 @@ +--- +title: "Update 
Token" +openapi: "PATCH /api/v1/auth/token-auth/tokens/{tokenId}" +--- diff --git a/docs/api-reference/endpoints/token-auth/update.mdx b/docs/api-reference/endpoints/token-auth/update.mdx new file mode 100644 index 0000000000..c9e332e8fd --- /dev/null +++ b/docs/api-reference/endpoints/token-auth/update.mdx @@ -0,0 +1,4 @@ +--- +title: "Update" +openapi: "PATCH /api/v1/auth/token-auth/identities/{identityId}" +--- diff --git a/docs/api-reference/endpoints/universal-auth/get-client-secret-by-id.mdx b/docs/api-reference/endpoints/universal-auth/get-client-secret-by-id.mdx new file mode 100644 index 0000000000..477ee875cd --- /dev/null +++ b/docs/api-reference/endpoints/universal-auth/get-client-secret-by-id.mdx @@ -0,0 +1,4 @@ +--- +title: "Get Client Secret By ID" +openapi: "GET /api/v1/auth/universal-auth/identities/{identityId}/client-secrets/{clientSecretId}" +--- diff --git a/docs/api-reference/endpoints/universal-auth/revoke.mdx b/docs/api-reference/endpoints/universal-auth/revoke.mdx new file mode 100644 index 0000000000..e2a19e93c3 --- /dev/null +++ b/docs/api-reference/endpoints/universal-auth/revoke.mdx @@ -0,0 +1,4 @@ +--- +title: "Revoke" +openapi: "DELETE /api/v1/auth/universal-auth/identities/{identityId}" +--- diff --git a/docs/changelog/overview.mdx b/docs/changelog/overview.mdx index d73c0bb144..9739fd9e9b 100644 --- a/docs/changelog/overview.mdx +++ b/docs/changelog/overview.mdx @@ -4,6 +4,81 @@ title: "Changelog" The changelog below reflects new product developments and updates on a monthly basis. + +## October 2024 +- Significantly improved performance of audit log operations in UI. +- Released [Databricks integration](https://infisical.com/docs/integrations/cloud/databricks). +- Added ability to enforce 2FA organization-wide. +- Added multiple resource to the [Infisical Terraform Provider](https://registry.terraform.io/providers/Infisical/infisical/latest/docs), including AWS and GCP integrations. +- Released [Infisical KMS](https://infisical.com/docs/documentation/platform/kms/overview). +- Added support for [LDAP dynamic secrets](https://infisical.com/docs/documentation/platform/ldap/overview). +- Enabled changing auth methods for machine identities in the UI. +- Launched [Infisical EU Cloud](https://eu.infisical.com). + +## September 2024 +- Improved paginations for identities and secrets. +- Significant improvements to the [Infisical Terraform Provider](https://registry.terraform.io/providers/Infisical/infisical/latest/docs). +- Created [Slack Integration](https://infisical.com/docs/documentation/platform/workflow-integrations/slack-integration#slack-integration) for Access Requests and Approval Workflows. +- Added Dynamic Secrets for [Elaticsearch](https://infisical.com/docs/documentation/platform/dynamic-secrets/elastic-search) and [MongoDB](https://infisical.com/docs/documentation/platform/dynamic-secrets/mongo-db). +- More authentication methods are now supported by Infisical SDKs and Agent. +- Integrations now have dedicated audit logs and an overview screen. +- Added support for secret referencing in the Terraform Provider. +- Released support for [older versions of .NET](https://www.nuget.org/packages/Infisical.Sdk#supportedframeworks-body-tab) via SDK. +- Released Infisical PKI Issuer which works alongside `cert-manager` to manage certificates in Kubernetes. + +## August 2024 +- Added [Azure DevOps integration](https://infisical.com/docs/integrations/cloud/azure-devops). 
+- Released ability to hot-reload variables in CLI ([--watch flag](https://infisical.com/docs/cli/commands/run#infisical-run:watch)). +- Added Dynamic Secrets for [Redis](https://infisical.com/docs/documentation/platform/dynamic-secrets/redis). +- Added [Alerting](https://infisical.com/docs/documentation/platform/pki/alerting) for Certificate Management. +- You can now specify roles and project memberships when adding new users. +- Approval workflows now have email notifications. +- Access requests are now integrated with User Groups. +- Released ability to use IAM Roles for AWS Integrations. + +## July 2024 +- Released the official [Ruby SDK](https://infisical.com/docs/sdks/languages/ruby). +- Increased the speed and efficiency of secret operations. +- Released AWS KMS wrapping (bring your own key). +- Users can now log in to CLI via SSO in non-browser environments. +- Released [Slack Webhooks](https://infisical.com/docs/documentation/platform/webhooks). +- Added [Dynamic Secrets with MS SQL](https://infisical.com/docs/documentation/platform/dynamic-secrets/mssql). +- Redesigned and simplified the Machine Identities page. +- Added the ability to move secrets/folders to another location. +- Added [OIDC](https://infisical.com/docs/documentation/platform/identities/oidc-auth/general) support to CLI, Go SDK, and more. +- Released [Linux installer for Infisical](https://infisical.com/docs/self-hosting/deployment-options/native/standalone-binary). + +## June 2024 +- Released [Infisical PKI](https://infisical.com/docs/documentation/platform/pki/overview). +- Released the official [Go SDK](https://infisical.com/docs/sdks/languages/go). +- Released [OIDC Authentication method](https://infisical.com/docs/documentation/platform/identities/oidc-auth/general). +- Allowed users to configure log retention periods on self-hosted instances. +- Added [tags](https://registry.terraform.io/providers/Infisical/infisical/latest/docs/resources/secret_tag) to terraform provider. +- Released [public secret sharing](https://share.infisical.com). +- Built a [native integration with Rundeck](https://infisical.com/docs/integrations/cicd/rundeck). +- Added list view for projects in the dashboard. +- Fixed offline coding mode in CLI. +- Users are now able to leave a particular project themselves. + +## May 2024 +- Released [AWS](https://infisical.com/docs/documentation/platform/identities/aws-auth), [GCP](https://infisical.com/docs/documentation/platform/identities/gcp-auth), [Azure](https://infisical.com/docs/documentation/platform/identities/azure-auth), and [Kubernetes](https://infisical.com/docs/documentation/platform/identities/kubernetes-auth) Native Auth Methods. +- Added [Secret Sharing](https://infisical.com/docs/documentation/platform/secret-sharing) functionality for sharing sensitive data through encrypted links – within and outside of an organization. +- Updated [Secret Referencing](https://infisical.com/docs/documentation/platform/secret-reference) to be supported in all Infisical clients. Infisical UI is now able to provide automatic reference suggestions when typing. +- Released new [Infisical Jenkins Plugin](https://infisical.com/docs/integrations/cicd/jenkins). +- Added statuses and manual sync option to integrations in the Dashboard UI. +- Released universal [Audit Log Streaming](https://infisical.com/docs/documentation/platform/audit-log-streams). +- Added [Dynamic Secret template for AWS IAM](https://infisical.com/docs/documentation/platform/dynamic-secrets/aws-iam). 
+- Added support for syncing tags and custom KMS keys to [AWS Secrets Manager](https://infisical.com/docs/integrations/cloud/aws-secret-manager) and [Parameter Store](https://infisical.com/docs/integrations/cloud/aws-parameter-store) Integrations. +- Officially released Infisical on [AWS Marketplace](https://infisical.com/blog/infisical-launches-on-aws-marketplace). + +## April 2024 +- Added [Access Requests](https://infisical.com/docs/documentation/platform/access-controls/access-requests) as part of self-serve secrets management workflows. +- Added [Temporary Access Provisioning](https://infisical.com/docs/documentation/platform/access-controls/temporary-access) for roles and additional privileges. + +## March 2024 +- Released support for [Dynamic Secrets](https://infisical.com/docs/documentation/platform/dynamic-secrets/overview). +- Released the concept of [Additional Privileges](https://infisical.com/docs/documentation/platform/access-controls/additional-privileges) on top of user/machine roles. + ## Feb 2024 - Added org-scoped authentication enforcement for SAML - Added support for [SCIM](https://infisical.com/docs/documentation/platform/scim/overview) along with instructions for setting it up with [Okta](https://infisical.com/docs/documentation/platform/scim/okta), [Azure](https://infisical.com/docs/documentation/platform/scim/azure), and [JumpCloud](https://infisical.com/docs/documentation/platform/scim/jumpcloud). diff --git a/docs/cli/commands/export.mdx b/docs/cli/commands/export.mdx index 16c2260843..6711903ece 100644 --- a/docs/cli/commands/export.mdx +++ b/docs/cli/commands/export.mdx @@ -51,7 +51,6 @@ infisical export --template= Alternatively, you may use service tokens. - Please note, however, that service tokens are being deprecated in favor of [machine identities](/documentation/platform/identities/machine-identities). They will be removed in the future in accordance with the deprecation notice and timeline stated [here](https://infisical.com/blog/deprecating-api-keys). ```bash # Example export INFISICAL_TOKEN= diff --git a/docs/cli/commands/login.mdx b/docs/cli/commands/login.mdx index 2758ced008..f493ff5d25 100644 --- a/docs/cli/commands/login.mdx +++ b/docs/cli/commands/login.mdx @@ -7,32 +7,238 @@ description: "Login into Infisical from the CLI" infisical login ``` -## Description -The CLI uses authentication to verify your identity. When you enter the correct email and password for your account, a token is generated and saved in your system Keyring to allow you to make future interactions with the CLI. +### Description + +The CLI uses authentication to verify your identity. When you enter the correct email and password for your account, a token is generated and saved in your system Keyring to allow you to make future interactions with the CLI. To change where the login credentials are stored, visit the [vaults command](./vault). If you have added multiple users, you can switch between the users by using the [user command](./user). + + When you authenticate with **any other method than `user`**, an access token will be printed to the console upon successful login. This token can be used to authenticate with the Infisical API and the CLI by passing it in the `--token` flag when applicable. + + Use flag `--plain` along with `--silent` to print only the token in plain text when using a machine identity auth method. + + + +### Authentication Methods + +The Infisical CLI supports multiple authentication methods. 
Below are the available authentication methods, with their respective flags. + + + + The Universal Auth method is a simple and secure way to authenticate with Infisical. It requires a client ID and a client secret to authenticate with Infisical. + + + + + Your machine identity client ID. + + + Your machine identity client secret. + + + + + + + + To create a universal auth machine identity, follow the step by step guide outlined [here](/documentation/platform/identities/universal-auth). + + + Run the `login` command with the following flags to obtain an access token: + + ```bash + infisical login --method=universal-auth --client-id= --client-secret= + ``` + + + + + + The Native Kubernetes method is used to authenticate with Infisical when running in a Kubernetes environment. It requires a service account token to authenticate with Infisical. + + + + + Your machine identity ID. + + + Path to the Kubernetes service account token to use. Default: `/var/run/secrets/kubernetes.io/serviceaccount/token`. + + + + + + + + To create a Kubernetes machine identity, follow the step by step guide outlined [here](/documentation/platform/identities/kubernetes-auth). + + + Run the `login` command with the following flags to obtain an access token: + + ```bash + # --service-account-token-path is optional, and will default to '/var/run/secrets/kubernetes.io/serviceaccount/token' if not provided. + infisical login --method=kubernetes --machine-identity-id= --service-account-token-path= + ``` + + + + + + + The Native Azure method is used to authenticate with Infisical when running in an Azure environment. + + + + + Your machine identity ID. + + + + + + + To create an Azure machine identity, follow the step by step guide outlined [here](/documentation/platform/identities/azure-auth). + + + Run the `login` command with the following flags to obtain an access token: + + ```bash + infisical login --method=azure --machine-identity-id= + ``` + + + + + + The Native GCP ID Token method is used to authenticate with Infisical when running in a GCP environment. + + + + + Your machine identity ID. + + + + + + + To create a GCP machine identity, follow the step by step guide outlined [here](/documentation/platform/identities/gcp-auth). + + + Run the `login` command with the following flags to obtain an access token: + + ```bash + infisical login --method=gcp-id-token --machine-identity-id= + ``` + + + + + + The GCP IAM method is used to authenticate with Infisical with a GCP service account key. + + + + + Your machine identity ID. + + + Path to your GCP service account key file _(Must be in JSON format!)_ + + + + + + + To create a GCP machine identity, follow the step by step guide outlined [here](/documentation/platform/identities/gcp-auth). + + + Run the `login` command with the following flags to obtain an access token: + + ```bash + infisical login --method=gcp-iam --machine-identity-id= --service-account-key-file-path= + ``` + + + + + + The AWS IAM method is used to authenticate with Infisical with an AWS IAM role while running in an AWS environment like EC2, Lambda, etc. + + + + + Your machine identity ID. + + + + + + + To create an AWS machine identity, follow the step by step guide outlined [here](/documentation/platform/identities/aws-auth). + + + Run the `login` command with the following flags to obtain an access token: + + ```bash + infisical login --method=aws-iam --machine-identity-id= + ``` + + + + + + The OIDC Auth method is used to authenticate with Infisical via identity tokens with OIDC. 
+ + + + + Your machine identity ID. + + + The OIDC JWT from the identity provider. + + + + + + + To create an OIDC machine identity, follow the step by step guide outlined [here](/documentation/platform/identities/oidc-auth/general). + + + Run the `login` command with the following flags to obtain an access token: + + ```bash + infisical login --method=oidc-auth --machine-identity-id= --oidc-jwt= + ``` + + + + + ### Flags + +The login command supports a number of flags that you can use for different authentication methods. Below is a list of all the flags that can be used with the login command. + + ```bash infisical login --method= # Optional, will default to 'user'. ``` #### Valid values for the `method` flag are: - - `user`: Login using email and password. + - `user`: Login using email and password. (default) - `universal-auth`: Login using a universal auth client ID and client secret. - - - When `method` is set to `universal-auth`, the `client-id` and `client-secret` flags are required. Optionally you can set the `INFISICAL_UNIVERSAL_AUTH_CLIENT_ID` and `INFISICAL_UNIVERSAL_AUTH_CLIENT_SECRET` environment variables instead of using the flags. - - When you authenticate with universal auth, an access token will be printed to the console upon successful login. This token can be used to authenticate with the Infisical API and the CLI by passing it in the `--token` flag when applicable. - - Use flag `--plain` along with `--silent` to print only the token in plain text when using the `universal-auth` method. - - + - `kubernetes`: Login using Kubernetes native auth. + - `azure`: Login using Azure native auth. + - `gcp-id-token`: Login using GCP ID token native auth. + - `gcp-iam`: Login using GCP IAM. + - `aws-iam`: Login using AWS IAM native auth. + - `oidc-auth`: Login using OIDC auth. @@ -41,24 +247,112 @@ If you have added multiple users, you can switch between the users by using the ``` #### Description - The client ID of the universal auth client. This is required if the `--method` flag is set to `universal-auth`. + The client ID of the universal auth machine identity. This is required if the `--method` flag is set to `universal-auth`. The `client-id` flag can be substituted with the `INFISICAL_UNIVERSAL_AUTH_CLIENT_ID` environment variable. + ```bash infisical login --client-secret= # Optional, required if --method=universal-auth. ``` #### Description - The client secret of the universal auth client. This is required if the `--method` flag is set to `universal-auth`. + The client secret of the universal auth machine identity. This is required if the `--method` flag is set to `universal-auth`. The `client-secret` flag can be substituted with the `INFISICAL_UNIVERSAL_AUTH_CLIENT_SECRET` environment variable. - + + + ```bash + infisical login --machine-identity-id= # Optional, required if --method=kubernetes, azure, gcp-id-token, gcp-iam, or aws-iam. + ``` + + #### Description + The ID of the machine identity. This is required if the `--method` flag is set to `kubernetes`, `azure`, `gcp-id-token`, `gcp-iam`, or `aws-iam`. + + + The `machine-identity-id` flag can be substituted with the `INFISICAL_MACHINE_IDENTITY_ID` environment variable. + + + + + ```bash + infisical login --service-account-token-path= # Optional. Will default to '/var/run/secrets/kubernetes.io/serviceaccount/token'. + ``` + + #### Description + The path to the Kubernetes service account token to use for authentication. 
+ This is optional and will default to `/var/run/secrets/kubernetes.io/serviceaccount/token`. + + The `service-account-token-path` flag can be substituted with the `INFISICAL_KUBERNETES_SERVICE_ACCOUNT_TOKEN_PATH` environment variable. + + + + ```bash + infisical login --service-account-key-file-path= # Optional, but required if --method=gcp-iam. + ``` + + #### Description + The path to your GCP service account key file. This is required if the `--method` flag is set to `gcp-iam`. + + + The `service-account-key-file-path` flag can be substituted with the `INFISICAL_GCP_IAM_SERVICE_ACCOUNT_KEY_FILE_PATH` environment variable. + + + + + + + ```bash + infisical login --oidc-jwt= + ``` + + #### Description + The JWT provided by an identity provider for OIDC authentication. + + + The `oidc-jwt` flag can be substituted with the `INFISICAL_OIDC_AUTH_JWT` environment variable. + + - \ No newline at end of file +### Machine Identity Authentication Quick Start + +In this example, we'll use the `universal-auth` method to log in and obtain an Infisical access token, which we will then use to fetch secrets. + + + + ```bash + export INFISICAL_TOKEN=$(infisical login --method=universal-auth --client-id= --client-secret= --silent --plain) # silent and plain are important to ensure only the token itself is printed, so we can easily set it as an environment variable. + ``` + + Now that we've set the `INFISICAL_TOKEN` environment variable, we can use the CLI to interact with Infisical. The CLI will automatically check for the presence of the `INFISICAL_TOKEN` environment variable and use it for authentication. + + + Alternatively, if you would rather use the `--token` flag to pass the token directly, you can do so by running the following command: + + ```bash + infisical [command] --token= # The token output from the login command. + ``` + + + + ```bash + infisical secrets --projectId= + ``` + + The `--recursive` and `--env` flags are optional; `--recursive` will fetch all secrets in subfolders, and the default environment is `dev` if no `--env` flag is provided. + + + + diff --git a/docs/cli/commands/run.mdx b/docs/cli/commands/run.mdx index 74aa84947c..c7872efb0d 100644 --- a/docs/cli/commands/run.mdx +++ b/docs/cli/commands/run.mdx @@ -47,20 +47,18 @@ $ infisical run -- npm run dev Used to fetch secrets via a [machine identity](/documentation/platform/identities/machine-identities) as opposed to logged-in credentials. Simply export this variable in the terminal before running this command. ```bash - # Example - export INFISICAL_TOKEN=$(infisical login --method=universal-auth --client-id= --client-secret= --silent --plain) # --plain flag will output only the token, so it can be fed to an environment variable. --silent will disable any update messages. + # Example + export INFISICAL_TOKEN=$(infisical login --method=universal-auth --client-id= --client-secret= --silent --plain) # --plain flag will output only the token, so it can be fed to an environment variable. --silent will disable any update messages. ``` Alternatively, you may use service tokens. - Please note, however, that service tokens are being deprecated in favor of [machine identities](/documentation/platform/identities/machine-identities). They will be removed in the future in accordance with the deprecation notice and timeline stated [here](https://infisical.com/blog/deprecating-api-keys). 
```bash - # Example - export INFISICAL_TOKEN= + # Example + export INFISICAL_TOKEN= ``` - - + @@ -69,22 +67,30 @@ $ infisical run -- npm run dev To use, simply export this variable in the terminal before running this command. ```bash - # Example - export INFISICAL_DISABLE_UPDATE_CHECK=true + # Example + export INFISICAL_DISABLE_UPDATE_CHECK=true ``` - ### Flags - + + By passing the `watch` flag, you are telling the CLI to watch for changes that happen in your Infisical project. + If secret changes happen, the command you provided will automatically be restarted with the new environment variables attached. + + ```bash + # Example + infisical run --watch -- printenv + ``` + + + Explicitly set the directory where the .infisical.json resides. This is useful for some monorepo setups. ```bash - # Example - infisical run --project-config-dir=/some-dir -- printenv + # Example + infisical run --project-config-dir=/some-dir -- printenv ``` - @@ -172,3 +178,19 @@ $ infisical run -- npm run dev + + +## Automatically reload command when secrets change + +To automatically reload your command when secrets change, use the `--watch` flag. + +```bash +infisical run --watch -- npm run dev +``` + +This will watch for changes in your secrets and automatically restart your command with the new secrets. +When your command restarts, it will have the new environment variables injected into it. + + + Please note that this feature is intended for development purposes. Automatically reloading your application when remote secret changes are made is generally not recommended in production environments. + \ No newline at end of file diff --git a/docs/cli/commands/secrets.mdx b/docs/cli/commands/secrets.mdx index c279b1a155..2dff5cf7b0 100644 --- a/docs/cli/commands/secrets.mdx +++ b/docs/cli/commands/secrets.mdx @@ -33,7 +33,6 @@ $ infisical secrets Alternatively, you may use service tokens. - Please note, however, that service tokens are being deprecated in favor of [machine identities](/documentation/platform/identities/machine-identities). They will be removed in the future in accordance with the deprecation notice and timeline stated [here](https://infisical.com/blog/deprecating-api-keys). ```bash # Example export INFISICAL_TOKEN= @@ -88,6 +87,27 @@ $ infisical secrets ``` + + The `--plain` flag will output all your secret values without formatting, one per line. + + ```bash + # Example + infisical secrets --plain --silent + ``` + + + + + The `--silent` flag disables output of tip/info messages. Useful when running in scripts or CI/CD pipelines. + + ```bash + # Example + infisical secrets --silent + ``` + + Can be used inline to replace `INFISICAL_DISABLE_UPDATE_CHECK` + + @@ -99,6 +119,7 @@ $ infisical secrets get ... # Example $ infisical secrets get DOMAIN +$ infisical secrets get DOMAIN PORT ``` @@ -111,7 +132,41 @@ $ infisical secrets get DOMAIN - + + The `--plain` flag will output all your requested secret values without formatting, one per line. + + Default value: `false` + + ```bash + # Example + infisical secrets get FOO --plain + infisical secrets get FOO BAR --plain + + # Fetch a single value and assign it to a variable + API_KEY=$(infisical secrets get FOO --plain --silent) + ``` + + + When running in CI/CD environments or in a script, set `INFISICAL_DISABLE_UPDATE_CHECK=true` or add the `--silent` flag. This will help hide any CLI info/debug output and only show the secret value. + + + + + + The `--silent` flag disables output of tip/info messages. 
Useful when running in scripts or CI/CD pipelines. + + ```bash + # Example + infisical secrets get FOO --plain --silent + ``` + + Can be used inline to replace `INFISICAL_DISABLE_UPDATE_CHECK` + + + + + Use `--plain` instead, as it supports single and multiple secrets. + Used to print the plain value of a single requested secret without any table style. Default value: `false` @@ -119,10 +174,11 @@ $ infisical secrets get DOMAIN Example: `infisical secrets get DOMAIN --raw-value` - When running in CI/CD environments or in a script, set `INFISICAL_DISABLE_UPDATE_CHECK` env to `true`. This will help hide any CLI update messages and only show the secret value. + When running in CI/CD environments or in a script, set `INFISICAL_DISABLE_UPDATE_CHECK=true` or add the `--silent` flag. This will help hide any CLI info/debug output and only show the secret value. + diff --git a/docs/cli/commands/token.mdx b/docs/cli/commands/token.mdx index 5b0d4ad5c1..9f631f07c3 100644 --- a/docs/cli/commands/token.mdx +++ b/docs/cli/commands/token.mdx @@ -4,7 +4,7 @@ description: "Manage your Infisical identity access tokens" --- ```bash -infisical service-token renew +infisical token renew ``` ## Description diff --git a/docs/cli/commands/vault.mdx b/docs/cli/commands/vault.mdx index 9030c580cf..b09513c345 100644 --- a/docs/cli/commands/vault.mdx +++ b/docs/cli/commands/vault.mdx @@ -30,8 +30,5 @@ description: "Change the vault type in Infisical" ## Description -To safeguard your login details when using the CLI, Infisical places them in a system vault or an encrypted text file, protected by a passphrase that only the user knows. - -To avoid constantly entering your passphrase when using the `file` vault type, set the `INFISICAL_VAULT_FILE_PASSPHRASE` environment variable with your password in your shell - +To safeguard your login details when using the CLI, Infisical attempts to store them in a system keyring. If a system keyring cannot be found on your machine, the data is stored in a config file. diff --git a/docs/cli/overview.mdx b/docs/cli/overview.mdx index ab913ec1af..397d1f474f 100644 --- a/docs/cli/overview.mdx +++ b/docs/cli/overview.mdx @@ -9,7 +9,7 @@ You can use it across various environments, whether it's local development, CI/C ## Installation - + Use [brew](https://brew.sh/) package manager ```bash @@ -21,9 +21,8 @@ You can use it across various environments, whether it's local development, CI/C ```bash brew update && brew upgrade infisical ``` - - - + + Use [Scoop](https://scoop.sh/) package manager ```bash @@ -40,7 +39,20 @@ You can use it across various environments, whether it's local development, CI/C scoop update infisical ``` - + + + Use [NPM](https://www.npmjs.com/) package manager + + ```bash + npm install -g @infisical/cli + ``` + + ### Updates + + ```bash + npm update -g @infisical/cli + ``` + Install prerequisite ```bash diff --git a/docs/cli/usage.mdx b/docs/cli/usage.mdx index e372bd8cfd..d5b7acb4a7 100644 --- a/docs/cli/usage.mdx +++ b/docs/cli/usage.mdx @@ -167,7 +167,7 @@ For security and privacy concerns, we recommend you to configure your terminal t #### Method 2: Export environment variable - You can point the CLI to the self hosted Infisical instance by exporting the environment variable `INFISICAL_API_URL` in your terminal. + You can point the CLI to the self-hosted Infisical instance by exporting the environment variable `INFISICAL_API_URL` in your terminal. 
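As a rough sketch of what that export might look like (the URL below is a placeholder; substitute your own instance, keeping the `/api` suffix shown in the `--domain` example under Method 3):

```bash
# Placeholder URL: point this at your own self-hosted Infisical instance.
# Note the /api suffix, matching the --domain flag example in Method 3.
export INFISICAL_API_URL="https://your-self-hosted-infisical.com/api"
```
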
@@ -197,7 +197,7 @@ For security and privacy concerns, we recommend you to configure your terminal t #### Method 3: Set manually on every command -Another option to point the CLI to your self hosted Infisical instance is to set it via a flag on every command you run. +Another option to point the CLI to your self-hosted Infisical instance is to set it via a flag on every command you run. ```bash # Example @@ -206,8 +206,6 @@ infisical --domain="https://your-self-hosted-infisical.com/api" - Yes. Please note, however, that service tokens are being deprecated in favor of [machine identities](/documentation/platform/identities/machine-identities). They will be removed in the future in accordance with the deprecation notice and timeline stated [here](https://infisical.com/blog/deprecating-api-keys). - To use Infisical for non local development scenarios, please create a service token. The service token will allow you to authenticate and interact with Infisical. Once you have created a service token with the required permissions, you’ll need to feed the token to the CLI. ```bash diff --git a/docs/contributing/platform/backend/how-to-create-a-feature.mdx b/docs/contributing/platform/backend/how-to-create-a-feature.mdx index e77313dab1..8449eb5013 100644 --- a/docs/contributing/platform/backend/how-to-create-a-feature.mdx +++ b/docs/contributing/platform/backend/how-to-create-a-feature.mdx @@ -49,8 +49,8 @@ Server-related logic is handled in `/src/server`. To connect the service layer t ## Writing API Routes -1. To create a route component, run `npm generate:component`. +1. To create a route component, run `npm run generate:component`. 2. Select option 3, type the router name in dash-case, and provide the version number. This will generate a router file in `src/server/routes/v/` 1. Implement your logic to connect with the service layer as needed. 2. Import the router component in the version folder's index.ts. For instance, if it's in v1, import it in `v1/index.ts`. - 3. Finally, register it under the appropriate prefix for access. \ No newline at end of file + 3. Finally, register it under the appropriate prefix for access. diff --git a/docs/contributing/platform/developing.mdx b/docs/contributing/platform/developing.mdx index a6675b6f67..68add46031 100644 --- a/docs/contributing/platform/developing.mdx +++ b/docs/contributing/platform/developing.mdx @@ -1,6 +1,6 @@ --- -title: 'Local development' -description: 'This guide will help you set up and run the Infisical platform in local development.' +title: "Local development" +description: "This guide will help you set up and run the Infisical platform in local development." --- ## Fork and clone the repo @@ -15,28 +15,28 @@ git checkout -b MY_BRANCH_NAME ## Set up environment variables - Start by creating a .env file at the root of the Infisical directory then copy the contents of the file linked [here](https://github.com/Infisical/infisical/blob/main/.env.example). View all available [environment variables](https://infisical.com/docs/self-hosting/configuration/envars) and guidance for each. ## Starting Infisical for development We use Docker to spin up all required services for Infisical in local development. If you are unfamiliar with Docker, don’t worry, all you have to do is install Docker for your -machine and run the command below to start up the development server. +machine and run the command below to start up the development server. 
-#### Start local server +#### Start local server ```bash -docker-compose -f docker-compose.dev.yml up --build --force-recreate +docker compose -f docker-compose.dev.yml up --build --force-recreate ``` -#### Access local server + +#### Access local server Once all the services have spun up, browse to http://localhost:8080. -#### Shutdown local server +#### Shutdown local server ```bash # To stop environment use Control+C (on Mac) CTRL+C (on Win) or -docker-compose -f docker-compose.dev.yml down +docker compose -f docker-compose.dev.yml down ``` ## Starting Infisical docs locally @@ -56,9 +56,10 @@ yarn global add mintlify ``` #### Running the docs + Go to `docs` directory and run `mintlify dev`. This will start up the docs on `localhost:3000` ```bash # From the root directory cd docs; mintlify dev; -``` \ No newline at end of file +``` diff --git a/docs/documentation/getting-started/introduction.mdx b/docs/documentation/getting-started/introduction.mdx index 06455092dc..d73c28ab56 100644 --- a/docs/documentation/getting-started/introduction.mdx +++ b/docs/documentation/getting-started/introduction.mdx @@ -4,10 +4,7 @@ sidebarTitle: "What is Infisical?" description: "An Introduction to the Infisical secret management platform." --- -Infisical is an [open-source](https://github.com/infisical/infisical) secret management platform for developers. -It provides capabilities for storing, managing, and syncing application configuration and secrets like API keys, database -credentials, and certificates across infrastructure. In addition, Infisical prevents secrets leaks to git and enables secure -sharing of secrets among engineers. +**[Infisical](https://infisical.com)** is the open source secret management platform that developers use to centralize their application configuration and secrets like API keys and database credentials as well as manage their internal PKI. Additionally, developers use Infisical to prevent secrets leaks to git and securely share secrets amongst engineers. Start managing secrets securely with [Infisical Cloud](https://app.infisical.com) or learn how to [host Infisical](/self-hosting/overview) yourself. diff --git a/docs/documentation/getting-started/platform.mdx b/docs/documentation/getting-started/platform.mdx index 1a1164a40a..7ce96a0ed8 100644 --- a/docs/documentation/getting-started/platform.mdx +++ b/docs/documentation/getting-started/platform.mdx @@ -12,14 +12,14 @@ From there, you can invite external members to the organization and start creati ### Projects The **Projects** page shows you all the projects that you have access to within your organization. -Here, you can also create a new project. +Here, you can also create a new project. ![organization overview](../../images/organization-overview.png) ### Members -The **Members** page lets you add or remove external members to your organization. -Note that you can configure your organization in Infisical to have members authenticate with the platform via protocols like SAML 2.0. +The **Members** page lets you add or remove external members to your organization. +Note that you can configure your organization in Infisical to have members authenticate with the platform via protocols like SAML 2.0 and OpenID Connect. 
![organization members](../../images/organization/platform/organization-members.png) @@ -35,13 +35,14 @@ The **Secrets Overview** screen provides a bird's-eye view of all the secrets in ![dashboard secrets overview](../../images/dashboard-secrets-overview.png) In the above image, you can already see that: + - `STRIPE_API_KEY` is missing from the **Staging** environment. - `JWT_SECRET` is missing from the **Production** environment. - `BAR` is `EMPTY` in the **Production** environment. ### Dashboard -The secrets dashboard lets you manage secrets for a specific environment in a project. +The secrets dashboard lets you manage secrets for a specific environment in a project. Here, developers can override secrets, version secrets, roll back projects to any point in time, and much more. ![dashboard](../../images/dashboard.png) @@ -61,4 +62,4 @@ which you can assign to members. That's it for the platform quickstart! — We encourage you to continue exploring the documentation to gain a deeper understanding of the extensive features and functionalities that Infisical has to offer. -Next, head back to [Getting Started > Introduction](/documentation/getting-started/overview) to explore ways to fetch secrets from Infisical to your apps and infrastructure. \ No newline at end of file +Next, head back to [Getting Started > Introduction](/documentation/getting-started/overview) to explore ways to fetch secrets from Infisical to your apps and infrastructure. diff --git a/docs/documentation/guides/local-development.mdx b/docs/documentation/guides/local-development.mdx index c2651cb589..6d606bafe7 100644 --- a/docs/documentation/guides/local-development.mdx +++ b/docs/documentation/guides/local-development.mdx @@ -9,15 +9,15 @@ description: "Learn how to manage secrets in local development environments." There are a number of issues that arise with secret management in local development environments: 1. **Getting secrets onto local machines**. When new developers join or a new project is created, the process of getting the development set of secrets onto local machines is often unclear. As a result, developers end up spending a lot of time onboarding and risk potentially following insecure practices when sharing secrets from one developer to another. 2. **Syncing secrets with teammates**. One of the problems with .env files is that they become unsynced when one of the developers updates a secret or configuration. Even if the rest of the team is notified, developers don't make all the right changes immediately, and later on end up spending a lot of time debugging an issue due to missing environment variables. This leads to a lot of inefficiencies and lost time. -3. **Accidentally leaking secrets**. When developing locally, it's common for developers to accidentally leak a hardcoded as part of a commit. As soon as the secret is part of the git history, it becomes hard to get it removed and create a security vulnerability. +3. **Accidentally leaking secrets**. When developing locally, it's common for developers to accidentally leak a hardcoded secret as part of a commit. As soon as the secret is part of the git history, it becomes hard to remove and creates a security vulnerability. ## Solution -One of the main benefits of Infisical is the facilitation of secret management workflows in local development use cases. In particular, Infisical heavily follows the "Security Shift Left" principle to enable developers to effotlessly follow secure practices when coding. 
+One of the main benefits of Infisical is the facilitation of secret management workflows in local development use cases. In particular, Infisical heavily follows the "Security Shift Left" principle to enable developers to effortlessly follow secure practices when coding. ### CLI -[Infisical CLI](/cli/overview) is the most frequently used Infisical tool for secret management in local development environments. It makes it easy to inject secrets right into the local application environments based on the permissions given to corresponsing developers. +[Infisical CLI](/cli/overview) is the most frequently used Infisical tool for secret management in local development environments. It makes it easy to inject secrets right into the local application environments based on the permissions given to corresponding developers. ### Dashboard @@ -31,4 +31,4 @@ By default, all the secrets in the Infisical environments are shared among proje ### Secret Scanning -In addition, Infisical also provides a set of tools to automatically prevent secret leaks to git history. This functionlality can be set up on the level of [Infisical CLI using pre-commit hooks](/cli/scanning-overview#automatically-scan-changes-before-you-commit) or through a direct integration with platforms like GitHub. \ No newline at end of file +In addition, Infisical also provides a set of tools to automatically prevent secret leaks to git history. This functionality can be set up on the level of [Infisical CLI using pre-commit hooks](/cli/scanning-overview#automatically-scan-changes-before-you-commit) or through a direct integration with platforms like GitHub. diff --git a/docs/documentation/guides/migrating-from-envkey.mdx b/docs/documentation/guides/migrating-from-envkey.mdx new file mode 100644 index 0000000000..e1d75bab07 --- /dev/null +++ b/docs/documentation/guides/migrating-from-envkey.mdx @@ -0,0 +1,41 @@ +--- +title: "Migrating from EnvKey to Infisical" +sidebarTitle: "Migration" +description: "Learn how to migrate from EnvKey to Infisical in the easiest way possible." +--- + +## What is Infisical? + +[Infisical](https://infisical.com) is an open-source all-in-one secret management platform that helps developers manage secrets (e.g., API-keys, DB access tokens, [certificates](https://infisical.com/docs/documentation/platform/pki/overview)) across their infrastructure. In addition, Infisical provides [secret sharing](https://infisical.com/docs/documentation/platform/secret-sharing) functionality, ability to [prevent secret leaks](https://infisical.com/docs/cli/scanning-overview), and more. + +Infisical is used by 10,000+ organizations across all industries including First American Financial Corporation, Delivery Hero, and [Hugging Face](https://infisical.com/customers/hugging-face). + +## Migrating from EnvKey + + + +Open the EnvKey dashboard and go to My Org. +![EnvKey Dashboard](../../images/guides/import-envkey/envkey-dashboard.png) + + +Go to Import/Export on the top right corner, Click on Export Org and save the exported file. +![Export organization](../../images/guides/import-envkey/envkey-export.png) + + +Click on copy to copy the encryption key and save it. +![Copy encryption key](../../images/guides/import-envkey/copy-encryption-key.png) + + +Open the Infisical dashboard and go to Organization Settings > Import. +![Infisical Organization settings](../../images/guides/import-envkey/infisical-import-dashboard.png) + + +Upload the exported file from EnvKey, paste the encryption key and click Import. 
+![Infisical Import EnvKey](../../images/guides/import-envkey/infisical-import-envkey.png) + + + + +## Talk to our team + +To make the migration process even more seamless, you can [schedule a meeting with our team](https://infisical.cal.com/vlad/migration-from-envkey-to-infisical) to learn more about how Infisical compares to EnvKey and discuss unique needs of your organization. You are also welcome to email us at [support@infisical.com](mailto:support@infisical.com) to ask any questions or get any technical help. diff --git a/docs/documentation/guides/organization-structure.mdx b/docs/documentation/guides/organization-structure.mdx new file mode 100644 index 0000000000..6fd672164e --- /dev/null +++ b/docs/documentation/guides/organization-structure.mdx @@ -0,0 +1,71 @@ +--- +title: "Infisical Organizational Structure Blueprint" +sidebarTitle: "Organization Structure" +description: "Learn how to structure your projects, secrets, and other resources within Infisical." +--- + +Infisical is designed to provide comprehensive, centralized, and efficient management of secrets, certificates, and encryption keys within organizations. Below is an overview of Infisical's structured components, which developers and administrators can leverage for optimal project management and security posture. + +### 1. Projects + +- **Definition and Role**: [Projects](/documentation/platform/project) are the highest-level construct within an [organization](/documentation/platform/organization) in Infisical. They serve as the primary container for all functionalities. +- **Correspondence to Code Repositories**: Projects typically align with specific code repositories. +- **Functional Capabilities**: Each project encompasses features for managing secrets, certificates, and encryption keys, serving as the central hub for these resources. + +### 2. Environments + +- **Purpose**: Environments are designed for organizing and compartmentalizing secrets within projects. +- **Customization Options**: Environments can be tailored to align with existing infrastructure setups of any project. Default options include **Development**, **Staging**, and **Production**. +- **Structure**: Each environment inherently has a root level for storing secrets, but additional sub-organizations can be created through [folders](/documentation/platform/folder) for better secret management. + +### 3. Folders + +- **Use Case**: Folders are available for more advanced organizational needs, allowing logical separation of secrets. +- **Typical Structure**: Folders can correspond to specific logical units, such as microservices or different layers of an application, providing refined control over secrets. + +### 4. Imports + +- **Purpose and Benefits**: To promote reusability and avoid redundancy, Infisical supports the use of imports. This allows secrets, folders, or entire environments to be referenced across multiple projects as needed. +- **Best Practice**: Utilizing [secret imports](/documentation/platform/secret-reference#secret-imports) or [references](/documentation/platform/secret-reference#secret-referencing) ensures consistency and minimizes manual overhead. + +### 5. Approval Workflows + +- **Importance**: Implementing approval workflows is recommended for organizations aiming to enhance efficiency and strengthen their security posture. +- **Types of Workflows**: + - **[Access Requests](/documentation/platform/pr-workflows)**: This workflow allows developers to request access to sensitive resources. 
Such access can be configured for temporary use, a practice known as "just-in-time" access.
+  - **[Change Requests](/documentation/platform/access-controls/access-requests)**: Facilitates reviews and approvals when changes are proposed for sensitive environments or specific folders, ensuring proper oversight.
+
+### 6. Access Controls
+
+Infisical’s access control framework is unified for both human users and machine identities, ensuring consistent management across the board.
+
+### 6.1 Roles
+
+- **Two Role Types**:
+  - **Organization-Level Roles**: Provide broad access across the organization (e.g., ability to manage billing, configure settings, etc.).
+  - **Project-Level Roles**: Essential for configuring access to specific secrets and other sensitive assets within a project.
+- **Granular Permissions**: While default roles are available, [custom roles](/documentation/platform/access-controls/role-based-access-controls#creating-custom-roles) can be created for more tailored access controls.
+- **Admin Considerations**: Note that admin users are able to access all projects. This role should be assigned judiciously to prevent unintended overreach.
+
+Project access is defined not via an organization-level role, but rather through specific project memberships of both human and machine identities. Admin roles bypass this by default.
+
+### 6.2 Additional Privileges
+
+[Additional privileges](/documentation/platform/access-controls/additional-privileges) can be assigned to users and machines on an ad-hoc basis for specific scenarios where roles alone are insufficient. If you find yourself relying on additional privileges too often, it is recommended to create custom roles instead. Additional privileges can be temporary or permanent.
+
+
+
+### 6.3 Attribute-Based Access Control (ABAC)
+
+[Attribute-based Access Controls](/documentation/platform/access-controls/attribute-based-access-controls) allow restrictions based on tags or attributes linked to secrets. These can be integrated with SAML assertions and other security frameworks for dynamic access management.
+
+### 6.4 User Groups
+
+- **Application**: Organizations should use user groups in situations where many developers share the same level of access (e.g., separated by team, department, seniority, etc.).
+- **Synchronization**: [User groups](/documentation/platform/groups) can be synced with an identity provider to maintain consistency and reduce manual management.
+
+### **Implementation Note**
+
+For larger-scale organizations, automating configurations through **Terraform** or other infrastructure-as-code (IaC) tools is advisable. Manual configurations may lead to errors, so leveraging IaC enhances reliability and consistency in managing Infisical's robust capabilities.
+
+This structured approach ensures that Infisical's functionalities are fully leveraged, providing both flexibility and rigorous control over an organization's sensitive information and access needs.
\ No newline at end of file
diff --git a/docs/documentation/platform/access-controls/access-requests.mdx b/docs/documentation/platform/access-controls/access-requests.mdx
index 45c155ab4e..76cc4b74eb 100644
--- a/docs/documentation/platform/access-controls/access-requests.mdx
+++ b/docs/documentation/platform/access-controls/access-requests.mdx
@@ -6,7 +6,7 @@ description: "Learn how to request access to sensitive resources in Infisical."

In certain situations, developers need to expand their access to a certain new project or a sensitive environment.
For those use cases, it is helpful to utilize Infisical's **Access Requests** functionality.

This functionality works in the following way:

-1. A project administrator sets up a policy that assigns access managers (also known as eligible approvers) to a certain sensitive folder or environment.
+1. A project administrator sets up an access policy that assigns access managers (also known as eligible approvers) to a certain sensitive folder or environment.
![Create Access Request Policy Modal](/images/platform/access-controls/create-access-request-policy.png)
![Access Request Policies](/images/platform/access-controls/access-request-policies.png)

@@ -14,9 +14,14 @@ This functionality works in the following way:
![Access Request Create](/images/platform/access-controls/request-access.png)
![Access Request Dashboard](/images/platform/access-controls/access-requests-pending.png)

-3. An eligible approver can approve or reject the access request.
-![Access Request Review](/images/platform/access-controls/review-access-request.png)
+4. An eligible approver can approve or reject the access request.
+{/* ![Access Request Review](/images/platform/access-controls/review-access-request.png) */}
+![Access Request Bypass](/images/platform/access-controls/access-request-bypass.png)

+
+  If the access request matches a policy that has a **Soft** enforcement level, the requester may bypass the policy and get access to the resource without full approval.
+
+
+5. As soon as the request is approved, the developer is able to access the requested resources.
![Access Request Dashboard](/images/platform/access-controls/access-requests-completed.png)

diff --git a/docs/documentation/platform/access-controls/attribute-based-access-controls.mdx b/docs/documentation/platform/access-controls/attribute-based-access-controls.mdx
new file mode 100644
index 0000000000..99c49c63ad
--- /dev/null
+++ b/docs/documentation/platform/access-controls/attribute-based-access-controls.mdx
@@ -0,0 +1,65 @@
+---
+title: "Attribute-based Access Controls"
+description: "Learn how to use ABAC to manage permissions based on identity attributes."
+---
+
+Infisical's Attribute-based Access Controls (ABAC) allow for dynamic, attribute-driven permissions for both user and machine identities.
+ABAC policies use metadata attributes—stored as key-value pairs on identities—to enforce fine-grained permissions that are context-aware.
+
+In ABAC, access controls are defined using metadata attributes, such as location or department, which can be set directly on user or machine identities.
+During policy execution, these attributes are evaluated to determine whether the actor can access the requested resource or perform the requested operation.
+
+## Project-level Permissions
+
+Attribute-based access controls are currently available for policies defined on projects. You can set ABAC permissions to control access to environments, folders, secrets, and secret tags.
+
+### Setting Metadata on Identities
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+  For organizations using SAML for login, Infisical automatically maps metadata attributes from SAML assertions to user identities.
+  This makes it easy to create policies that dynamically adapt based on the SAML user’s attributes.
+
+
+
+
+## Defining ABAC Policies
+
+
+
+ABAC policies make use of identity metadata to define dynamic permissions. Each attribute must start and end with double curly-brackets `{{ }}`.
+The following attributes are available within project permissions:
+
+- **User ID**: `{{ identity.id }}`
+- **Username**: `{{ identity.username }}`
+- **Metadata Attributes**: `{{ identity.metadata. }}`
+
+During policy execution, these placeholders are replaced by their actual values prior to evaluation.
+
+### Example Use Case
+
+#### Location-based Access Control
+
+Suppose you want to restrict access to secrets within a specific folder based on a user's geographic region.
+You could assign a `location` attribute to each user (e.g., `identity.metadata.location`).
+You could then structure your folders to align with this attribute and define permissions accordingly.
+
+For example, a policy might restrict access to folders matching the user's location attribute in the following pattern:
+```
+/appA/{{ identity.metadata.location }}
+```
+Using this structure, users can only access folders that correspond to their configured `location` attribute.
+Consequently, if a user's attribute changes due to relocation, no policies need to be changed for them to gain access to the folders associated with their new location.
diff --git a/docs/documentation/platform/access-controls/overview.mdx b/docs/documentation/platform/access-controls/overview.mdx
index 54fc8ff25b..552117c7e4 100644
--- a/docs/documentation/platform/access-controls/overview.mdx
+++ b/docs/documentation/platform/access-controls/overview.mdx
@@ -15,6 +15,15 @@ To make sure that users and machine identities are only accessing the resources
>
    Manage user and machine identitity permissions through predefined roles.
  
+
+
+    Manage user and machine identity permissions based on their attributes.
+
  The Organization Admin Console can only be accessed by organization members with admin status.
+
+
+
+## Accessing the Organization Admin Console
+
+On the sidebar, tap on your initials to access the settings dropdown and press the **Organization Admin Console** option.
+
+![Access Organization Admin Console](/images/platform/admin-panels/access-org-admin-console.png)
+
+## Projects Tab
+
+The Projects tab lists all the projects within your organization, including those which you are not a member of. You can easily filter projects by name or slug using the search bar.
+
+![Projects Section](/images/platform/admin-panels/org-admin-console-projects.png)
+
+
+### Accessing a Project in Your Organization
+
+You can access a project that you are not a member of by tapping on the options menu of the project row and pressing the **Access** button.
+Doing so will grant you admin permissions for the selected project and add you as a member.
+
+![Access project](/images/platform/admin-panels/org-admin-console-access.png)
+
+
diff --git a/docs/documentation/platform/admin-panel/overview.mdx b/docs/documentation/platform/admin-panel/overview.mdx
new file mode 100644
index 0000000000..968728bfc2
--- /dev/null
+++ b/docs/documentation/platform/admin-panel/overview.mdx
@@ -0,0 +1,25 @@
+---
+description: "Learn about Infisical's Admin Consoles"
+---
+
+Infisical offers a server- and organization-level console for admins to customize their settings and manage various resources across the platform.
+
+
+
+    Configure and manage server-related features.
+
+
+
+    View and access resources across your organization.
+
+
diff --git a/docs/documentation/platform/admin-panel/server-admin.mdx b/docs/documentation/platform/admin-panel/server-admin.mdx
new file mode 100644
index 0000000000..ddcf448a60
--- /dev/null
+++ b/docs/documentation/platform/admin-panel/server-admin.mdx
@@ -0,0 +1,69 @@
+---
+title: "Server Admin Console"
+description: "Configure and manage server-related features"
+---
+
+The Server Admin Console provides **server administrators** with the ability to
+customize settings and manage users for their entire Infisical instance.
+
+
+  The first user to set up an account on your Infisical instance is designated as the server administrator by default.
+
+
+## Accessing the Server Admin Console
+
+
+On the sidebar, tap on your initials to access the settings dropdown and press the **Server Admin Console** option.
+
+![Access Server Admin Console](/images/platform/admin-panels/access-server-admin-panel.png)
+
+## General Tab
+Configure general settings for your instance.
+
+![General Settings](/images/platform/admin-panels/admin-panel-general.png)
+
+
+### Allow User Signups
+
+User signups are enabled by default, allowing **Anyone** with access to your instance to sign up. This can alternatively be **Disabled** to prevent any users from signing up.
+
+### Restrict Signup Domain
+
+Signup can be restricted to users matching one or more email domains, such as your organization's domain, to control who has access to your instance.
+
+### Default Organization
+
+If you're using SAML/LDAP/OIDC for only one organization on your instance, you can specify a default organization to use at login to skip requiring users to manually enter the organization slug.
+
+### Trust Emails
+
+By default, users signing up through SAML/LDAP/OIDC will still need to verify their email address to prevent email spoofing. This requirement can be skipped by enabling the switch to trust logins through the respective method.
+
+
+## Authentication Tab
+
+From this tab, you can configure which login methods are enabled for your instance.
+
+![Authentication Settings](/images/platform/admin-panels/admin-panel-auths.png)
+
+
+## Rate Limit Tab
+
+This tab allows you to set various rate limits for your Infisical instance. You do not need to redeploy when making changes to rate limits as these will be propagated automatically.
+
+![Rate Limit Settings](/images/platform/admin-panels/admin-panel-rate-limits.png)
+
+
+
+  Note that rate limit configuration is a paid feature. Please contact sales@infisical.com to purchase a license for its use.
+
+
+## User Management Tab
+
+From this tab, you can view all the users who have signed up for your instance. You can search for users using the search bar and remove them from your instance by pressing the **X** button on their respective row.
+
+![User Management](/images/platform/admin-panels/admin-panel-users.png)
+
diff --git a/docs/documentation/platform/audit-log-streams/audit-log-streams-with-fluentbit.mdx b/docs/documentation/platform/audit-log-streams/audit-log-streams-with-fluentbit.mdx
new file mode 100644
index 0000000000..e5411506bc
--- /dev/null
+++ b/docs/documentation/platform/audit-log-streams/audit-log-streams-with-fluentbit.mdx
@@ -0,0 +1,61 @@
+---
+title: "Stream to Non-HTTP providers"
+description: "How to stream Infisical Audit Logs to Non-HTTP log providers"
+---
+
+
+  Audit log streams is a paid feature.
+ + If you're using Infisical Cloud, then it is available under the **Enterprise Tier**. If you're self-hosting Infisical, + then you should contact team@infisical.com to purchase an enterprise license to use it. + + +This guide will demonstrate how you can send Infisical Audit log streams to storage solutions that do not support direct HTTP-based ingestion, such as AWS S3. +To achieve this, you will learn how you can use a log collector like Fluent Bit to capture and forward logs from Infisical to non-HTTP storage options. +In this pattern, Fluent Bit acts as an intermediary, accepting HTTP log streams from Infisical and transforming them into a format that can be sent to your desired storage provider. + +## Overview + +Log collectors are tools used to collect, analyze, transform, and send logs to storage. +For the purposes of this guide, we will use [Fluent Bit](https://fluentbit.io) as our log collector and send logs from Infisical to AWS S3. +However, this is just a example and you can use any log collector of your choice. + +## Deploy Fluent Bit + +You can deploy Fluent Bit in one of two ways: +1. As a sidecar to your self-hosted Infisical instance +2. As a standalone service in any deployment/compute service (e.g., AWS EC2, ECS, or GCP Compute Engine) + +To view all deployment methods, visit the [Fluent Bit Getting Started guide](https://docs.fluentbit.io/manual/installation/getting-started-with-fluent-bit). + +## Configure Fluent Bit + +To set up Fluent Bit, you'll need to provide a configuration file that establishes an HTTP listener and configures an output to send JSON data to your chosen storage solution. + +The following Fluent Bit configuration sets up an HTTP listener on port `8888` and sends logs to AWS S3: + +```ini +[SERVICE] + Flush 1 + Log_Level info + Daemon off + +[INPUT] + Name http + Listen 0.0.0.0 + Port 8888 + +[OUTPUT] + Name s3 + Match * + bucket my-bucket + region us-west-2 + total_file_size 50M + use_put_object Off + compression gzip + s3_key_format /$TAG/%Y/%m/%d/%H_%M_%S.gz +``` +### Connecting Infisical Audit Log Stream + +Once Fluent Bit is set up and configured, you can point the Infisical [audit log stream](/documentation/platform/audit-log-streams/audit-log-streams) to Fluent Bit's HTTP listener, which will then forward the logs to your chosen provider. +Using this pattern, you are able to send Infisical Audit logs to various providers that do not support HTTP based log ingestion by default. 
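+
+### Verifying the Pipeline
+
+Before relying on the stream, it can help to confirm that the HTTP listener accepts and routes JSON events. The sketch below posts a sample event to the listener from Python; the request path, host, and payload fields are illustrative assumptions rather than Infisical's actual audit log format. With the `http` input, Fluent Bit derives the event tag from the request path, which in turn populates the `$TAG` portion of the S3 key format shown above.
+
+```python
+# Minimal smoke test for the Fluent Bit HTTP input configured above.
+# The path ("/infisical.audit") and payload fields are placeholders.
+import requests
+
+event = {
+    "event": "secret.read",                # hypothetical field
+    "actor": "machine-identity-example",   # hypothetical field
+    "timestamp": "2024-01-01T00:00:00Z",
+}
+
+resp = requests.post("http://localhost:8888/infisical.audit", json=event, timeout=5)
+resp.raise_for_status()
+print("Fluent Bit accepted the event:", resp.status_code)
+```
+
+If a gzipped object later appears in the bucket under a key containing `infisical.audit`, the listener and the S3 output are wired correctly.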
diff --git a/docs/documentation/platform/audit-log-streams.mdx b/docs/documentation/platform/audit-log-streams/audit-log-streams.mdx similarity index 73% rename from docs/documentation/platform/audit-log-streams.mdx rename to docs/documentation/platform/audit-log-streams/audit-log-streams.mdx index 2a69780bc6..fdd8cc7c78 100644 --- a/docs/documentation/platform/audit-log-streams.mdx +++ b/docs/documentation/platform/audit-log-streams/audit-log-streams.mdx @@ -20,10 +20,10 @@ The logs are formatted in JSON, requiring your logging provider to support JSON- - ![stream create](../../images/platform/audit-log-streams/stream-create.png) + ![stream create](/images/platform/audit-log-streams/stream-create.png) - ![stream create](../../images/platform/audit-log-streams/stream-inputs.png) + ![stream create](/images/platform/audit-log-streams/stream-inputs.png) Provide the following values @@ -35,7 +35,7 @@ The logs are formatted in JSON, requiring your logging provider to support JSON- -![stream listt](../../images/platform/audit-log-streams/stream-list.png) +![stream listt](/images/platform/audit-log-streams/stream-list.png) Your Audit Logs are now ready to be streamed. ## Example Providers @@ -44,11 +44,11 @@ Your Audit Logs are now ready to be streamed. - ![better stack connect source](../../images/platform/audit-log-streams/betterstack-create-source.png) + ![better stack connect source](/images/platform/audit-log-streams/betterstack-create-source.png) - ![better stack connect](../../images/platform/audit-log-streams/betterstack-source-details.png) + ![better stack connect](/images/platform/audit-log-streams/betterstack-source-details.png) 1. Copy the **endpoint** from Better Stack to the **Endpoint URL** field. 3. Create a new header with key **Authorization** and set the value as **Bearer \**. @@ -59,21 +59,21 @@ Your Audit Logs are now ready to be streamed. - ![api key create](../../images/platform/audit-log-streams/datadog-api-sidebar.png) + ![api key create](/images/platform/audit-log-streams/datadog-api-sidebar.png) - ![api key form](../../images/platform/audit-log-streams/data-create-api-key.png) - ![api key form](../../images/platform/audit-log-streams/data-dog-api-key.png) + ![api key form](/images/platform/audit-log-streams/data-create-api-key.png) + ![api key form](/images/platform/audit-log-streams/data-dog-api-key.png) - ![datadog url](../../images/platform/audit-log-streams/datadog-logging-endpoint.png) + ![datadog url](/images/platform/audit-log-streams/datadog-logging-endpoint.png) 1. Navigate to the [Datadog Send Logs API documentation](https://docs.datadoghq.com/api/latest/logs/?code-lang=curl&site=us5#send-logs). 2. Pick your Datadog account region. 3. Obtain your Datadog logging endpoint URL. - ![datadog api key details](../../images/platform/audit-log-streams/datadog-source-details.png) + ![datadog api key details](/images/platform/audit-log-streams/datadog-source-details.png) 1. Copy the **logging endpoint** from Datadog to the **Endpoint URL** field. 2. Copy the **API Key** from previous step diff --git a/docs/documentation/platform/audit-logs.mdx b/docs/documentation/platform/audit-logs.mdx index be2381da2d..594c1f707b 100644 --- a/docs/documentation/platform/audit-logs.mdx +++ b/docs/documentation/platform/audit-logs.mdx @@ -1,5 +1,5 @@ --- -title: "Audit Logs" +title: "Overview" description: "Track evert event action performed within Infisical projects." 
--- diff --git a/docs/documentation/platform/dynamic-secrets/aws-elasticache.mdx b/docs/documentation/platform/dynamic-secrets/aws-elasticache.mdx new file mode 100644 index 0000000000..225b884cb3 --- /dev/null +++ b/docs/documentation/platform/dynamic-secrets/aws-elasticache.mdx @@ -0,0 +1,144 @@ +--- +title: "AWS ElastiCache" +description: "Learn how to dynamically generate AWS ElastiCache user credentials." +--- + +The Infisical AWS ElastiCache dynamic secret allows you to generate AWS ElastiCache credentials on demand based on configured role. + +## Prerequisites + + + +2. Create an AWS IAM user with the following permissions: +```json +{ + "Version": "2012-10-17", + "Statement": [ + { + "Sid": "", + "Effect": "Allow", + "Action": [ + "elasticache:DescribeUsers", + "elasticache:ModifyUser", + "elasticache:CreateUser", + "elasticache:CreateUserGroup", + "elasticache:DeleteUser", + "elasticache:DescribeReplicationGroups", + "elasticache:DescribeUserGroups", + "elasticache:ModifyReplicationGroup", + "elasticache:ModifyUserGroup" + ], + "Resource": "arn:aws:elasticache:::user:*" + } + ] +} +``` + +3. Create an access key ID and secret access key for the user you created in the previous step. You will need these to configure the Infisical dynamic secret. + + + New leases may take up-to a couple of minutes before ElastiCache has the chance to complete their configuration. + It is recommended to use a retry strategy when establishing new ElastiCache connections. + This may prevent errors when trying to use a password that isn't yet live on the targeted ElastiCache cluster. + + While a leasing is being created, you will be unable to create new leases for the same dynamic secret. + + + + Please ensure that your ElastiCache cluster has transit encryption enabled and set to required. This is required for the dynamic secret to work. + + + + + +## Set up Dynamic Secrets with AWS ElastiCache + + + + Open the Secret Overview dashboard and select the environment in which you would like to add a dynamic secret. + + + ![Add Dynamic Secret Button](../../../images/platform/dynamic-secrets/add-dynamic-secret-button.png) + + + ![Dynamic Secret Modal](../../../images/platform/dynamic-secrets/dynamic-secret-modal-aws-elasti-cache.png) + + + + Name by which you want the secret to be referenced + + + + Default time-to-live for a generated secret (it is possible to modify this value when a secret is generate) + + + + Maximum time-to-live for a generated secret. + + + + The region that the ElastiCache cluster is located in. _(e.g. us-east-1)_ + + + + This is the access key ID of the AWS IAM user you created in the prerequisites. This will be used to provision and manage the dynamic secret leases. + + + + This is the secret access key of the AWS IAM user you created in the prerequisites. This will be used to provision and manage the dynamic secret leases. + + + + A CA may be required if your DB requires it for incoming connections. This is often the case when connecting to a managed service. + + + + + If you want to provide specific privileges for the generated dynamic credentials, you can modify the ElastiCache statement to your needs. This is useful if you want to only give access to a specific table(s). + + ![Modify ElastiCache Statements Modal](/images/platform/dynamic-secrets/modify-elasticache-statement.png) + + + After submitting the form, you will see a dynamic secret created in the dashboard. + + + If this step fails, you may have to add the CA certificate. 
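+
+  Because a freshly provisioned lease can take a minute or two to become live on the cluster (see the warning above), application code should retry its first connection. Below is a minimal sketch of such a retry strategy for the Redis engine using the `redis-py` client; the hostname and credentials are placeholders, and `redis-py` itself is an assumption, as any client with retry support would work.
+
+  ```python
+  # Sketch: connect to ElastiCache (Redis engine) with retries so that a
+  # just-issued lease has time to propagate. Host and credentials are placeholders.
+  from redis import Redis
+  from redis.backoff import ExponentialBackoff
+  from redis.exceptions import AuthenticationError, ConnectionError
+  from redis.retry import Retry
+
+  client = Redis(
+      host="my-cluster.xxxxxx.use1.cache.amazonaws.com",
+      port=6379,
+      username="lease-username",  # username from the generated lease
+      password="lease-password",  # password from the generated lease
+      ssl=True,                   # transit encryption must be enabled (see above)
+      retry=Retry(ExponentialBackoff(cap=10, base=1), retries=5),
+      retry_on_error=[AuthenticationError, ConnectionError],
+  )
+
+  client.ping()  # retried with backoff until the new user is live
+  ```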
+ + + + + Once you've successfully configured the dynamic secret, you're ready to generate on-demand credentials. + To do this, simply click on the 'Generate' button which appears when hovering over the dynamic secret item. + Alternatively, you can initiate the creation of a new lease by selecting 'New Lease' from the dynamic secret lease list section. + + ![Dynamic Secret](/images/platform/dynamic-secrets/dynamic-secret-generate-redis.png) + ![Dynamic Secret](/images/platform/dynamic-secrets/dynamic-secret-lease-empty-redis.png) + + When generating these secrets, it's important to specify a Time-to-Live (TTL) duration. This will dictate how long the credentials are valid for. + + ![Provision Lease](/images/platform/dynamic-secrets/provision-lease.png) + + + Ensure that the TTL for the lease fall within the maximum TTL defined when configuring the dynamic secret. + + + + Once you click the `Submit` button, a new secret lease will be generated and the credentials from it will be shown to you. + + ![Provision Lease](/images/platform/dynamic-secrets/lease-values.png) + + + +## Audit or Revoke Leases +Once you have created one or more leases, you will be able to access them by clicking on the respective dynamic secret item on the dashboard. +This will allow you see the expiration time of the lease or delete a lease before it's set time to live. + +![Provision Lease](/images/platform/dynamic-secrets/lease-data.png) + +## Renew Leases +To extend the life of the generated dynamic secret leases past its initial time to live, simply click on the **Renew** as illustrated below. +![Provision Lease](/images/platform/dynamic-secrets/dynamic-secret-lease-renew.png) + + + Lease renewals cannot exceed the maximum TTL set when configuring the dynamic secret + \ No newline at end of file diff --git a/docs/documentation/platform/dynamic-secrets/aws-iam.mdx b/docs/documentation/platform/dynamic-secrets/aws-iam.mdx index 6ec5b48b94..7a3976e0f1 100644 --- a/docs/documentation/platform/dynamic-secrets/aws-iam.mdx +++ b/docs/documentation/platform/dynamic-secrets/aws-iam.mdx @@ -1,6 +1,6 @@ --- title: "AWS IAM" -description: "How to dynamically generate AWS IAM Users." +description: "Learn how to dynamically generate AWS IAM Users." --- The Infisical AWS IAM dynamic secret allows you to generate AWS IAM Users on demand based on configured AWS policy. diff --git a/docs/documentation/platform/dynamic-secrets/azure-entra-id.mdx b/docs/documentation/platform/dynamic-secrets/azure-entra-id.mdx new file mode 100644 index 0000000000..8a71772b18 --- /dev/null +++ b/docs/documentation/platform/dynamic-secrets/azure-entra-id.mdx @@ -0,0 +1,164 @@ +--- +title: "Azure Entra Id" +description: "Learn how to dynamically generate Azure Entra Id user credentials." +--- + +The Infisical Azure Entra Id dynamic secret allows you to generate Azure Entra Id credentials on demand based on configured role. + +## Prerequisites + + + +Login to [Microsoft Entra ID](https://entra.microsoft.com/) + + + +Go to Overview, Copy and store `Tenant Id` +![Copy Tenant Id](../../../images/platform/dynamic-secrets/dynamic-secret-ad-tenant-id.png) + + + +Go to Applications > App registrations. Click on New Registration. +![Copy Tenant Id](../../../images/platform/dynamic-secrets/dynamic-secret-ad-new-registration.png) + + + +Enter an application name. Click Register. + + + +Copy and store `Application Id`. +![Copy Application Id](../../../images/platform/dynamic-secrets/dynamic-secret-ad-copy-app-id.png) + + + +Go to Clients and Secrets. 
Click on New Client Secret. + + + +Enter a description, select expiry and click Add. + + + +Copy and store `Client Secret` value. +![Copy client Secret](../../../images/platform/dynamic-secrets/dynamic-secret-ad-add-client-secret.png) + + + +Go to API Permissions. Click on Add a permission. +![Click add a permission](../../../images/platform/dynamic-secrets/dynamic-secret-ad-add-permission.png) + + + +Click on Microsoft Graph. +![Click Microsoft Graph](../../../images/platform/dynamic-secrets/dynamic-secret-ad-select-graph.png) + + + +Click on Application Permissions. Search and select `User.ReadWrite.All` and click Add permissions. +![Add User.Read.All](../../../images/platform/dynamic-secrets/dynamic-secret-ad-select-perms.png) + + + +Click on Grant admin consent for app. Click yes to confirm. +![Grant admin consent](../../../images/platform/dynamic-secrets/dynamic-secret-ad-admin-consent.png) + + + +Go to Dashboard. Click on show more. +![Show more](../../../images/platform/dynamic-secrets/dynamic-secret-ad-show-more.png) + + + +Click on Roles & admins. Search for User Administrator and click on it. +![User Administrator](../../../images/platform/dynamic-secrets/dynamic-secret-ad-user-admin.png) + + + +Click on Add assignments. Search for the application name you created and select it. Click on Add. +![Add assignments](../../../images/platform/dynamic-secrets/dynamic-secret-ad-add-assignments.png) + + + +## Set up Dynamic Secrets with Azure Entra ID + + + + Open the Secret Overview dashboard and select the environment in which you would like to add a dynamic secret. + + + ![Add Dynamic Secret Button](../../../images/platform/dynamic-secrets/add-dynamic-secret-button.png) + + + ![Dynamic Secret Modal](../../../images/platform/dynamic-secrets/dynamic-secret-ad-modal.png) + + + + Prefix for the secrets to be created + + + + Default time-to-live for a generated secret (it is possible to modify this value when a secret is generate) + + + + Maximum time-to-live for a generated secret. + + + + The Tenant ID of your Azure Entra ID account. + + + + The Application ID of the application you created in Azure Entra ID. + + + + The Client Secret of the application you created in Azure Entra ID. + + + + Multi select list of users to generate secrets for. + + + + + After submitting the form, you will see a dynamic secrets for each user created in the dashboard. + + + + Once you've successfully configured the dynamic secret, you're ready to generate on-demand credentials. + To do this, simply click on the 'Generate' button which appears when hovering over the dynamic secret item. + Alternatively, you can initiate the creation of a new lease by selecting 'New Lease' from the dynamic secret lease list section. + + ![Dynamic Secret](/images/platform/dynamic-secrets/dynamic-secret-generate-redis.png) + ![Dynamic Secret](/images/platform/dynamic-secrets/dynamic-secret-lease-empty-redis.png) + + When generating these secrets, it's important to specify a Time-to-Live (TTL) duration. This will dictate how long the credentials are valid for. + + ![Provision Lease](/images/platform/dynamic-secrets/provision-lease.png) + + + Ensure that the TTL for the lease fall within the maximum TTL defined when configuring the dynamic secret. + + + + Once you click the `Submit` button, a new secret lease will be generated and the credentials from it will be shown to you. 
+ + ![Provision Lease](/images/platform/dynamic-secrets/dynamic-secret-ad-lease.png) + + + +## Audit or Revoke Leases +Once you have created one or more leases, you will be able to access them by clicking on the respective dynamic secret item on the dashboard. +This will allow you see the expiration time of the lease or delete a lease before it's set time to live. + +![Provision Lease](/images/platform/dynamic-secrets/lease-data.png) + +## Renew Leases +To extend the life of the generated dynamic secret leases past its initial time to live, simply click on the **Renew** as illustrated below. +![Provision Lease](/images/platform/dynamic-secrets/dynamic-secret-lease-renew.png) + + + Lease renewals cannot exceed the maximum TTL set when configuring the dynamic secret + diff --git a/docs/documentation/platform/dynamic-secrets/cassandra.mdx b/docs/documentation/platform/dynamic-secrets/cassandra.mdx index 78e03e0116..fd46c8288e 100644 --- a/docs/documentation/platform/dynamic-secrets/cassandra.mdx +++ b/docs/documentation/platform/dynamic-secrets/cassandra.mdx @@ -1,6 +1,6 @@ --- title: "Cassandra" -description: "How to dynamically generate Cassandra database users." +description: "Learn how to dynamically generate Cassandra database user credentials" --- The Infisical Cassandra dynamic secret allows you to generate Cassandra database credentials on demand based on configured role. diff --git a/docs/documentation/platform/dynamic-secrets/elastic-search.mdx b/docs/documentation/platform/dynamic-secrets/elastic-search.mdx new file mode 100644 index 0000000000..0b2897790c --- /dev/null +++ b/docs/documentation/platform/dynamic-secrets/elastic-search.mdx @@ -0,0 +1,127 @@ +--- +title: "Elasticsearch" +description: "Learn how to dynamically generate Elasticsearch user credentials." +--- + +The Infisical Elasticsearch dynamic secret allows you to generate Elasticsearch credentials on demand based on configured role. + +## Prerequisites + + + +1. Create a role with at least `manage_security` and `monitor` permissions. +2. Assign the newly created role to your API key or user that you'll use later in the dynamic secret configuration. + + + For testing purposes, you can also use a highly privileged role like `superuser`, that will have full control over the cluster. This is not recommended in production environments following the principle of least privilege. + + +## Set up Dynamic Secrets with Elasticsearch + + + + Open the Secret Overview dashboard and select the environment in which you would like to add a dynamic secret. + + + ![Add Dynamic Secret Button](../../../images/platform/dynamic-secrets/add-dynamic-secret-button.png) + + + ![Dynamic Secret Modal](../../../images/platform/dynamic-secrets/dynamic-secret-modal-elastic-search.png) + + + + Name by which you want the secret to be referenced + + + + Default time-to-live for a generated secret (it is possible to modify this value when a secret is generate) + + + + Maximum time-to-live for a generated secret. + + + + Your Elasticsearch host. This is the endpoint that your instance runs on. _(Example: https://your-cluster-ip)_ + + + + The port that your Elasticsearch instance is running on. _(Example: 9200)_ + + + + The roles that the new user that is created when a lease is provisioned will be assigned to. This is a required field. This defaults to `superuser`, which is highly privileged. It is recommended to create a new role with the least privileges required for the lease. 
+ + + + Select the authentication method you want to use to connect to your Elasticsearch instance. + + + + The username of the user that will be used to provision new dynamic secret leases. Only required if you selected the `Username/Password` authentication method. + + + + The password of the user that will be used to provision new dynamic secret leases. Only required if you selected the `Username/Password` authentication method. + + + + The ID of the API key that will be used to provision new dynamic secret leases. Only required if you selected the `API Key` authentication method. + + + + The API key that will be used to provision new dynamic secret leases. Only required if you selected the `API Key` authentication method. + + + + A CA may be required if your DB requires it for incoming connections. This is often the case when connecting to a managed service. + + + ![Dynamic Secret Setup Modal](../../../images/platform/dynamic-secrets/dynamic-secret-input-modal-elastic-search.png) + + + + + After submitting the form, you will see a dynamic secret created in the dashboard. + + + If this step fails, you may have to add the CA certificate. + + + + + Once you've successfully configured the dynamic secret, you're ready to generate on-demand credentials. + To do this, simply click on the 'Generate' button which appears when hovering over the dynamic secret item. + Alternatively, you can initiate the creation of a new lease by selecting 'New Lease' from the dynamic secret lease list section. + + ![Dynamic Secret](/images/platform/dynamic-secrets/dynamic-secret-generate-redis.png) + ![Dynamic Secret](/images/platform/dynamic-secrets/dynamic-secret-lease-empty-redis.png) + + When generating these secrets, it's important to specify a Time-to-Live (TTL) duration. This will dictate how long the credentials are valid for. + + ![Provision Lease](/images/platform/dynamic-secrets/provision-lease.png) + + + Ensure that the TTL for the lease fall within the maximum TTL defined when configuring the dynamic secret. + + + + Once you click the `Submit` button, a new secret lease will be generated and the credentials from it will be shown to you. + + ![Provision Lease](/images/platform/dynamic-secrets/lease-values.png) + + + +## Audit or Revoke Leases +Once you have created one or more leases, you will be able to access them by clicking on the respective dynamic secret item on the dashboard. +This will allow you see the expiration time of the lease or delete a lease before it's set time to live. + +![Provision Lease](/images/platform/dynamic-secrets/lease-data.png) + +## Renew Leases +To extend the life of the generated dynamic secret leases past its initial time to live, simply click on the **Renew** as illustrated below. +![Provision Lease](/images/platform/dynamic-secrets/dynamic-secret-lease-renew.png) + + + Lease renewals cannot exceed the maximum TTL set when configuring the dynamic secret + diff --git a/docs/documentation/platform/dynamic-secrets/ldap.mdx b/docs/documentation/platform/dynamic-secrets/ldap.mdx new file mode 100644 index 0000000000..ac06a75768 --- /dev/null +++ b/docs/documentation/platform/dynamic-secrets/ldap.mdx @@ -0,0 +1,278 @@ +--- +title: "LDAP" +description: "Learn how to dynamically generate user credentials via LDAP." +--- + +The Infisical LDAP dynamic secret allows you to generate user credentials on demand via LDAP. The integration is general to any LDAP implementation but has been tested with OpenLDAP and Active directory as of now. + +## Prerequisites + +1. 
Create a user with the necessary permissions to create users in your LDAP server. +2. Ensure your LDAP server is reachable via Infisical instance. + +## Create LDAP Credentials + + + + + + Open the Secret Overview dashboard and select the environment in which you would like to add a dynamic secret. + + + ![Add Dynamic Secret Button](../../../images/platform/dynamic-secrets/add-dynamic-secret-button.png) + + + ![Dynamic Secret Modal](../../../images/platform/dynamic-secrets/dynamic-secret-ldap-select.png) + + + + + Name by which you want the secret to be referenced + + + + Default time-to-live for a generated secret (it is possible to modify this value when a secret is generate) + + + + Maximum time-to-live for a generated secret. + + + + LDAP url to connect to. _(Example: ldap://your-ldap-ip:389 or ldaps://domain:636)_ + + + + DN to bind to. This should have permissions to create a new users. + + + + Password for the given DN. + + + + CA certificate to use for TLS in case of a secure connection. + + + + The type of LDAP credential - select Dynamic. + + + + LDIF to run while creating a user in LDAP. This can include extra steps to assign the user to groups or set permissions. + Here `{{Username}}`, `{{Password}}` and `{{EncodedPassword}}` are templatized variables for the username and password generated by the dynamic secret. + + `{{EncodedPassword}}` is the encoded password required for the `unicodePwd` field in Active Directory as described [here](https://learn.microsoft.com/en-us/troubleshoot/windows-server/active-directory/change-windows-active-directory-user-password). + + **OpenLDAP** Example: + ``` + dn: uid={{Username}},dc=infisical,dc=com + changetype: add + objectClass: top + objectClass: person + objectClass: organizationalPerson + objectClass: inetOrgPerson + cn: John Doe + sn: Doe + uid: jdoe + mail: jdoe@infisical.com + userPassword: {{Password}} + ``` + + **Active Directory** Example: + ``` + dn: CN={{Username}},OU=Test Create,DC=infisical,DC=com + changetype: add + objectClass: top + objectClass: person + objectClass: organizationalPerson + objectClass: user + userPrincipalName: {{Username}}@infisical.com + sAMAccountName: {{Username}} + unicodePwd::{{EncodedPassword}} + userAccountControl: 66048 + + dn: CN=test-group,OU=Test Create,DC=infisical,DC=com + changetype: modify + add: member + member: CN={{Username}},OU=Test Create,DC=infisical,DC=com + - + ``` + + + + LDIF to run while revoking a user in LDAP. This can include extra steps to remove the user from groups or set permissions. + Here `{{Username}}` is a templatized variable for the username generated by the dynamic secret. + + **OpenLDAP / Active Directory** Example: + ``` + dn: CN={{Username}},OU=Test Create,DC=infisical,DC=com + changetype: delete + ``` + + + + LDIF to run incase Creation LDIF fails midway. + + For the creation example shown above, if the user is created successfully but not added to a group, this LDIF can be used to remove the user. + Here `{{Username}}`, `{{Password}}` and `{{EncodedPassword}}` are templatized variables for the username generated by the dynamic secret. + + **OpenLDAP / Active Directory** Example: + ``` + dn: CN={{Username}},OU=Test Create,DC=infisical,DC=com + changetype: delete + ``` + + + + + After submitting the form, you will see a dynamic secret created in the dashboard. + + + Once you've successfully configured the dynamic secret, you're ready to generate on-demand credentials. 
+ To do this, simply click on the 'Generate' button which appears when hovering over the dynamic secret item. + Alternatively, you can initiate the creation of a new lease by selecting 'New Lease' from the dynamic secret lease list section. + + ![Dynamic Secret](/images/platform/dynamic-secrets/dynamic-secret-generate-redis.png) + ![Dynamic Secret](/images/platform/dynamic-secrets/dynamic-secret-lease-empty-redis.png) + + When generating these secrets, it's important to specify a Time-to-Live (TTL) duration. This will dictate how long the credentials are valid for. + + ![Provision Lease](/images/platform/dynamic-secrets/provision-lease.png) + + + Ensure that the TTL for the lease fall within the maximum TTL defined when configuring the dynamic secret. + + + + Once you click the `Submit` button, a new secret lease will be generated and the credentials from it will be shown to you with an array of DN's altered depending on the Creation LDIF. + + ![Provision Lease](/images/platform/dynamic-secrets/dynamic-secret-ldap-lease.png) + + + + + + + + + Open the Secret Overview dashboard and select the environment in which you would like to add a dynamic secret. + + + ![Add Dynamic Secret Button](../../../images/platform/dynamic-secrets/add-dynamic-secret-button.png) + + + ![Dynamic Secret Modal](../../../images/platform/dynamic-secrets/dynamic-secret-ldap-select.png) + + + + + Name by which you want the secret to be referenced + + + + Default time-to-live for a generated secret (it is possible to modify this value when a secret is generate) + + + + Maximum time-to-live for a generated secret. + + + + LDAP url to connect to. _(Example: ldap://your-ldap-ip:389 or ldaps://domain:636)_ + + + + DN to bind to. This should have permissions to create a new users. + + + + Password for the given DN. + + + + CA certificate to use for TLS in case of a secure connection. + + + + The type of LDAP credential - select Static. + + + + LDIF to run for rotating the credentals of an LDAP user. This can include extra LDAP steps based on your needs. + Here `{{Password}}` and `{{EncodedPassword}}` are templatized variables for the password generated by the dynamic secret. + + Note that the `-` characters and the empty lines found at the end of the examples are necessary based on the LDIF format. + + **OpenLDAP** Example: + ``` + dn: cn=sheencaps capadngan,ou=people,dc=acme,dc=com + changetype: modify + replace: userPassword + password: {{Password}} + - + + ``` + + **Active Directory** Example: + ``` + dn: cn=sheencaps capadngan,ou=people,dc=acme,dc=com + changetype: modify + replace: unicodePwd + unicodePwd::{{EncodedPassword}} + - + + ``` + `{{EncodedPassword}}` is the encoded password required for the `unicodePwd` field in Active Directory as described [here](https://learn.microsoft.com/en-us/troubleshoot/windows-server/active-directory/change-windows-active-directory-user-password). + + + + + + After submitting the form, you will see a dynamic secret created in the dashboard. + + + Once you've successfully configured the dynamic secret, you're ready to generate on-demand credentials. + To do this, simply click on the 'Generate' button which appears when hovering over the dynamic secret item. + Alternatively, you can initiate the creation of a new lease by selecting 'New Lease' from the dynamic secret lease list section. 
+
+  ![Dynamic Secret](/images/platform/dynamic-secrets/dynamic-secret-generate-redis.png)
+  ![Dynamic Secret](/images/platform/dynamic-secrets/dynamic-secret-lease-empty-redis.png)
+
+  When generating these secrets, it's important to specify a Time-to-Live (TTL) duration. This will dictate how long the credentials are valid for.
+
+  ![Provision Lease](/images/platform/dynamic-secrets/provision-lease.png)
+
+
+  Ensure that the TTL for the lease falls within the maximum TTL defined when configuring the dynamic secret.
+
+
+
+  Once you click the `Submit` button, a new secret lease will be generated and the credentials from it will be shown to you with an array of DNs altered depending on the creation LDIF.
+
+  ![Provision Lease](/images/platform/dynamic-secrets/dynamic-secret-ldap-lease.png)
+
+
+
+
+
+
+## Active Directory Integration
+
+- Passwords in Active Directory are set using the `unicodePwd` field. This must be preceded by two colons (`::`) as shown in the example. [Source](https://learn.microsoft.com/en-us/troubleshoot/windows-server/active-directory/change-windows-active-directory-user-password)
+- Active Directory uses the `userAccountControl` field to enable accounts. [Read More](https://learn.microsoft.com/en-us/troubleshoot/windows-server/active-directory/useraccountcontrol-manipulate-account-properties)
+  - `userAccountControl` set to `512` enables a user.
+  - To disable AD's password expiration for the dynamic user account, set the `userAccountControl` value to `65536`.
+  - Since the `userAccountControl` flag is cumulative, set it to `512 + 65536 = 66048` to do both.
+- Active Directory does not permit direct modification of a user's `memberOf` attribute. The `member` attribute of a group and the `memberOf` attribute of a user are [linked attributes](https://learn.microsoft.com/en-us/windows/win32/ad/linked-attributes), where the `member` attribute represents the forward link, which can be modified. In the context of AD group membership, the group's `member` attribute serves as the forward link. Therefore, to add a newly created dynamic user to a group, a modification request must be issued to the desired group, updating its membership to include the new user.
+
+## LDIF Entries
+
+User account management is handled through **LDIF entries**.
+
+#### Things to Remember
+
+- **No trailing spaces:** Ensure there are no trailing spaces on any line, including blank lines.
+- **Empty lines before modify blocks:** Every modify block must be preceded by an empty line.
+- **Multiple modifications:** You can define multiple modifications for a DN within a single modify block. Each modification should end with a single dash (`-`).
diff --git a/docs/documentation/platform/dynamic-secrets/mongo-atlas.mdx b/docs/documentation/platform/dynamic-secrets/mongo-atlas.mdx
new file mode 100644
index 0000000000..f9352f2e5c
--- /dev/null
+++ b/docs/documentation/platform/dynamic-secrets/mongo-atlas.mdx
@@ -0,0 +1,114 @@
+---
+title: "Mongo Atlas"
+description: "Learn how to dynamically generate Mongo Atlas Database user credentials."
+---
+
+The Infisical Mongo Atlas dynamic secret allows you to generate Mongo Atlas Database credentials on demand based on a configured role.
+
+## Prerequisite
+Create a project-scoped API key with the required permissions in Mongo Atlas by following the [official doc](https://www.mongodb.com/docs/atlas/configure-api-access/#grant-programmatic-access-to-a-project).
+
+
+  The API Key must have permission to manage users in the project.
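+
+Before configuring the dynamic secret, you can optionally sanity-check the key against the Atlas Administration API, which authenticates API keys over HTTP digest auth (the public key acts as the username and the private key as the password). The sketch below assumes Python with the `requests` library; the project ID and key values are placeholders.
+
+```python
+# Sketch: verify that a project-scoped Atlas API key can list database users.
+# PROJECT_ID, PUBLIC_KEY, and PRIVATE_KEY are placeholders.
+import requests
+from requests.auth import HTTPDigestAuth
+
+PROJECT_ID = "0123456789abcdef01234567"  # 24-hexadecimal project ID
+PUBLIC_KEY = "your-public-key"           # acts as a username
+PRIVATE_KEY = "your-private-key"         # acts as a password
+
+resp = requests.get(
+    f"https://cloud.mongodb.com/api/atlas/v1.0/groups/{PROJECT_ID}/databaseUsers",
+    auth=HTTPDigestAuth(PUBLIC_KEY, PRIVATE_KEY),
+    timeout=10,
+)
+resp.raise_for_status()  # a 401/403 here usually means missing project permissions
+print("API key OK; visible users:", resp.json().get("totalCount", 0))
+```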
+ + +## Set up Dynamic Secrets with Mongo Atlas + + + + Open the Secret Overview dashboard and select the environment in which you would like to add a dynamic secret. + + + ![Add Dynamic Secret Button](../../../images/platform/dynamic-secrets/add-dynamic-secret-button.png) + + + ![Dynamic Secret Modal](../../../images/platform/dynamic-secrets/dynamic-secret-atlas-modal.png) + + + + Name by which you want the secret to be referenced + + + + Default time-to-live for a generated secret (it is possible to modify this value when a secret is generate) + + + + Maximum time-to-live for a generated secret + + + + The public key of your generated Atlas API Key. This acts as a username. + + + + The private key of your generated Atlas API Key. This acts as a password. + + + + Unique 24-hexadecimal digit string that identifies your project. This is same as project id + + + + List that provides the pairings of one role with one applicable database. + - **Database Name**: Database to which the user is granted access privileges. + - **Collection**: Collection on which this role applies. + - **Role Name**: Human-readable label that identifies a group of privileges assigned to a database user. This value can either be a built-in role or a custom role. + - Enum: `atlasAdmin` `backup` `clusterMonitor` `dbAdmin` `dbAdminAnyDatabase` `enableSharding` `read` `readAnyDatabase` `readWrite` `readWriteAnyDatabase` ``. + + + ![Dynamic Secret Setup Modal](../../../images/platform/dynamic-secrets/dynamic-secret-modal-atlas.png) + + + + List that contains clusters, MongoDB Atlas Data Lakes, and MongoDB Atlas Streams Instances that this database user can access. If omitted, MongoDB Cloud grants the database user access to all the clusters, MongoDB Atlas Data Lakes, and MongoDB Atlas Streams Instances in the project. + + ![Modify Scope Modal](../../../images/platform/dynamic-secrets/advanced-option-atlas.png) + - **Label**: Human-readable label that identifies the cluster or MongoDB Atlas Data Lake that this database user can access. + - **Type**: Category of resource that this database user can access. + + + After submitting the form, you will see a dynamic secret created in the dashboard. + + + If this step fails, you may have to add the CA certficate. + + + ![Dynamic Secret](../../../images/platform/dynamic-secrets/dynamic-secret.png) + + + Once you've successfully configured the dynamic secret, you're ready to generate on-demand credentials. + To do this, simply click on the 'Generate' button which appears when hovering over the dynamic secret item. + Alternatively, you can initiate the creation of a new lease by selecting 'New Lease' from the dynamic secret lease list section. + + ![Dynamic Secret](/images/platform/dynamic-secrets/dynamic-secret-generate.png) + ![Dynamic Secret](/images/platform/dynamic-secrets/dynamic-secret-lease-empty.png) + + When generating these secrets, it's important to specify a Time-to-Live (TTL) duration. This will dictate how long the credentials are valid for. + + ![Provision Lease](/images/platform/dynamic-secrets/provision-lease.png) + + + Ensure that the TTL for the lease fall within the maximum TTL defined when configuring the dynamic secret. + + + + Once you click the `Submit` button, a new secret lease will be generated and the credentials for it will be shown to you. 
+
+  ![Provision Lease](/images/platform/dynamic-secrets/lease-values.png)
+
+
+## Audit or Revoke Leases
+Once you have created one or more leases, you will be able to access them by clicking on the respective dynamic secret item on the dashboard.
+This will allow you to see the expiration time of the lease or delete a lease before its set time to live.
+
+![Provision Lease](/images/platform/dynamic-secrets/lease-data.png)
+
+## Renew Leases
+To extend the life of a generated dynamic secret lease past its initial time to live, simply click on **Renew** as illustrated below.
+![Provision Lease](/images/platform/dynamic-secrets/dynamic-secret-lease-renew.png)
+
+
+  Lease renewals cannot exceed the maximum TTL set when configuring the dynamic secret
+
diff --git a/docs/documentation/platform/dynamic-secrets/mongo-db.mdx b/docs/documentation/platform/dynamic-secrets/mongo-db.mdx
new file mode 100644
index 0000000000..f34d578dc5
--- /dev/null
+++ b/docs/documentation/platform/dynamic-secrets/mongo-db.mdx
@@ -0,0 +1,116 @@
+---
+title: "Mongo DB"
+description: "Learn how to dynamically generate Mongo DB Database user credentials."
+---
+
+The Infisical Mongo DB dynamic secret allows you to generate Mongo DB Database credentials on demand based on a configured role.
+
+
+
+  If you're using Mongo Atlas, please use the [Atlas Dynamic Secret](./mongo-atlas), as MongoDB commands are not supported by Atlas.
+
+
+## Prerequisite
+Create a user with the required permissions in your MongoDB instance. This user will be used to create new accounts on-demand.
+
+## Set up Dynamic Secrets with Mongo DB
+
+
+
+    Open the Secret Overview dashboard and select the environment in which you would like to add a dynamic secret.
+
+
+    ![Add Dynamic Secret Button](../../../images/platform/dynamic-secrets/add-dynamic-secret-button.png)
+
+
+    ![Dynamic Secret Modal](../../../images/platform/dynamic-secrets/dynamic-secret-modal-mongodb.png)
+
+
+
+        Name by which you want the secret to be referenced
+
+
+
+        Default time-to-live for a generated secret (it is possible to modify this value when a secret is generated)
+
+
+
+        Maximum time-to-live for a generated secret
+
+
+
+        Database host URL.
+
+
+
+        Database port number. If your MongoDB deployment is a cluster, you can omit this.
+
+
+
+        Username of the admin user that will be used to create dynamic secrets
+
+
+
+        Password of the admin user that will be used to create dynamic secrets
+
+
+
+        Name of the database for which you want to create dynamic secrets
+
+
+
+        Human-readable label that identifies a group of privileges assigned to a database user. This value can either be a built-in role or a custom role.
+        - Enum: `atlasAdmin` `backup` `clusterMonitor` `dbAdmin` `dbAdminAnyDatabase` `enableSharding` `read` `readAnyDatabase` `readWrite` `readWriteAnyDatabase` ``.
+
+
+
+      A CA may be required if your DB requires it for incoming connections.
+
+
+   ![Dynamic Secret Setup Modal](../../../images/platform/dynamic-secrets/dynamic-secret-mongodb.png)
+
+
+
+  After submitting the form, you will see a dynamic secret created in the dashboard.
+
+
+  If this step fails, you may have to add the CA certificate.
+
+
+
+  Once you've successfully configured the dynamic secret, you're ready to generate on-demand credentials.
+  To do this, simply click on the 'Generate' button which appears when hovering over the dynamic secret item.
+  Alternatively, you can initiate the creation of a new lease by selecting 'New Lease' from the dynamic secret lease list section.
+
+ ![Dynamic Secret](/images/platform/dynamic-secrets/dynamic-secret-generate.png)
+ ![Dynamic Secret](/images/platform/dynamic-secrets/dynamic-secret-lease-empty.png)
+
+ When generating these secrets, it's important to specify a Time-to-Live (TTL) duration. This will dictate how long the credentials are valid for.
+
+ ![Provision Lease](/images/platform/dynamic-secrets/provision-lease.png)
+
+
+ Ensure that the TTL for the lease falls within the maximum TTL defined when configuring the dynamic secret.
+
+
+
+ Once you click the `Submit` button, a new secret lease will be generated and the credentials for it will be shown to you.
+
+ ![Provision Lease](/images/platform/dynamic-secrets/lease-values.png)
+
+
+
+## Audit or Revoke Leases
+Once you have created one or more leases, you will be able to access them by clicking on the respective dynamic secret item on the dashboard.
+This will allow you to see the expiration time of the lease or delete a lease before its set time to live.
+
+![Provision Lease](/images/platform/dynamic-secrets/lease-data.png)
+
+## Renew Leases
+To extend the life of a generated dynamic secret lease past its initial time to live, simply click on the **Renew** button as illustrated below.
+![Provision Lease](/images/platform/dynamic-secrets/dynamic-secret-lease-renew.png)
+
+
+ Lease renewals cannot exceed the maximum TTL set when configuring the dynamic secret.
+
diff --git a/docs/documentation/platform/dynamic-secrets/mssql.mdx b/docs/documentation/platform/dynamic-secrets/mssql.mdx
new file mode 100644
index 0000000000..fb666adca6
--- /dev/null
+++ b/docs/documentation/platform/dynamic-secrets/mssql.mdx
@@ -0,0 +1,118 @@
+---
+title: "MS SQL"
+description: "Learn how to dynamically generate MS SQL database user credentials."
+---
+
+The Infisical MS SQL dynamic secret allows you to generate Microsoft SQL Server database credentials on demand based on a configured role.
+
+## Prerequisite
+
+Create a user with the required permission in your SQL instance. This user will be used to create new accounts on-demand.
+
+
+## Set up Dynamic Secrets with MS SQL
+
+
+
+ Open the Secret Overview dashboard and select the environment in which you would like to add a dynamic secret.
+
+
+ ![Add Dynamic Secret Button](../../../images/platform/dynamic-secrets/add-dynamic-secret-button.png)
+
+
+ ![Dynamic Secret Modal](../../../images/platform/dynamic-secrets/dynamic-secret-modal.png)
+
+
+
+ Name by which you want the secret to be referenced
+
+
+
+ Default time-to-live for a generated secret (it is possible to modify this value when a secret is generated)
+
+
+
+ Maximum time-to-live for a generated secret
+
+
+
+ Choose the service you want to generate dynamic secrets for. This must be selected as **MS SQL**.
+
+
+
+ Database host
+
+
+
+ Database port
+
+
+
+ Username that will be used to create dynamic secrets
+
+
+
+ Password that will be used to create dynamic secrets
+
+
+
+ Name of the database for which you want to create dynamic secrets
+
+
+
+ A CA may be required if your DB requires it for incoming connections. AWS RDS instances with default settings will require a CA, which can be downloaded [here](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/UsingWithRDS.SSL.html#UsingWithRDS.SSL.CertificatesAllRegions).
+
+
+ ![Dynamic Secret Setup Modal](../../../images/platform/dynamic-secrets/dynamic-secret-setup-modal-mssql.png)
+
+
+
+ If you want to provide specific privileges for the generated dynamic credentials, you can modify the SQL statement to your needs. This is useful if you want to give access only to specific tables; see the sketch below.
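A hedged sketch of what customized provisioning statements might look like, shown here through `sqlcmd` with a hypothetical host and table; the `{{username}}` and `{{password}}` placeholders stand in for the values Infisical injects per lease:

```bash
# Illustrative only — adjust the GRANT statements to the privileges you need.
sqlcmd -S your-mssql-host -U admin-user -P 'admin-password' -Q "
  CREATE LOGIN [{{username}}] WITH PASSWORD = '{{password}}';
  CREATE USER [{{username}}] FOR LOGIN [{{username}}];
  GRANT SELECT ON dbo.orders TO [{{username}}];
"
```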
+
+ ![Modify SQL Statements Modal](../../../images/platform/dynamic-secrets/modify-sql-statements-mssql.png)
+
+
+ After submitting the form, you will see a dynamic secret created in the dashboard.
+
+
+ If this step fails, you may have to add the CA certificate.
+
+
+ ![Dynamic Secret](../../../images/platform/dynamic-secrets/dynamic-secret.png)
+
+
+ Once you've successfully configured the dynamic secret, you're ready to generate on-demand credentials.
+ To do this, simply click on the 'Generate' button which appears when hovering over the dynamic secret item.
+ Alternatively, you can initiate the creation of a new lease by selecting 'New Lease' from the dynamic secret lease list section.
+
+ ![Dynamic Secret](/images/platform/dynamic-secrets/dynamic-secret-generate.png)
+ ![Dynamic Secret](/images/platform/dynamic-secrets/dynamic-secret-lease-empty.png)
+
+ When generating these secrets, it's important to specify a Time-to-Live (TTL) duration. This will dictate how long the credentials are valid for.
+
+ ![Provision Lease](/images/platform/dynamic-secrets/provision-lease.png)
+
+
+ Ensure that the TTL for the lease falls within the maximum TTL defined when configuring the dynamic secret.
+
+
+
+ Once you click the `Submit` button, a new secret lease will be generated and the credentials for it will be shown to you.
+
+ ![Provision Lease](/images/platform/dynamic-secrets/lease-values.png)
+
+
+
+## Audit or Revoke Leases
+Once you have created one or more leases, you will be able to access them by clicking on the respective dynamic secret item on the dashboard.
+This will allow you to see the expiration time of the lease or delete the lease before its set time to live.
+
+![Provision Lease](/images/platform/dynamic-secrets/lease-data.png)
+
+## Renew Leases
+To extend the life of a generated dynamic secret lease past its initial time to live, simply click on the **Renew** button as illustrated below.
+![Provision Lease](/images/platform/dynamic-secrets/dynamic-secret-lease-renew.png)
+
+
+ Lease renewals cannot exceed the maximum TTL set when configuring the dynamic secret.
+
diff --git a/docs/documentation/platform/dynamic-secrets/mysql.mdx b/docs/documentation/platform/dynamic-secrets/mysql.mdx
index c64edab636..d85f4b7bb9 100644
--- a/docs/documentation/platform/dynamic-secrets/mysql.mdx
+++ b/docs/documentation/platform/dynamic-secrets/mysql.mdx
@@ -1,6 +1,6 @@
 ---
 title: "MySQL"
-description: "Learn how to dynamically generate MySQL Database user passwords."
+description: "Learn how to dynamically generate MySQL Database user credentials."
 ---
 
 The Infisical MySQL dynamic secret allows you to generate MySQL Database credentials on demand based on configured role.
diff --git a/docs/documentation/platform/dynamic-secrets/oracle.mdx b/docs/documentation/platform/dynamic-secrets/oracle.mdx
index 05b832c4fe..a6fb68913f 100644
--- a/docs/documentation/platform/dynamic-secrets/oracle.mdx
+++ b/docs/documentation/platform/dynamic-secrets/oracle.mdx
@@ -1,6 +1,6 @@
 ---
 title: "Oracle"
-description: "Learn how to dynamically generate Oracle Database user passwords."
+description: "Learn how to dynamically generate Oracle Database user credentials."
 ---
 
 The Infisical Oracle dynamic secret allows you to generate Oracle Database credentials on demand based on configured role.
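For Oracle, as with the other SQL-backed dynamic secrets, the per-lease provisioning comes down to a handful of statements you can later customize. A hedged sketch, with a hypothetical connect string and grant set; `{{username}}` and `{{password}}` are illustrative stand-ins for injected values:

```bash
# Illustrative only — a rough shape of per-lease Oracle user provisioning.
sqlplus admin_user/admin_password@//your-oracle-host:1521/ORCLPDB1 <<'SQL'
CREATE USER "{{username}}" IDENTIFIED BY "{{password}}";
GRANT CONNECT TO "{{username}}";
GRANT SELECT ON app_schema.orders TO "{{username}}";
SQL
```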
diff --git a/docs/documentation/platform/dynamic-secrets/overview.mdx b/docs/documentation/platform/dynamic-secrets/overview.mdx
index 81ab42656f..24c7fae4e1 100644
--- a/docs/documentation/platform/dynamic-secrets/overview.mdx
+++ b/docs/documentation/platform/dynamic-secrets/overview.mdx
@@ -32,4 +32,5 @@ Dynamic secrets are particularly useful in environments with stringent security
 2. [MySQL](./mysql)
 3. [Cassandra](./cassandra)
 4. [Oracle](./oracle)
+6. [Redis](./redis)
 5. [AWS IAM](./aws-iam)
diff --git a/docs/documentation/platform/dynamic-secrets/postgresql.mdx b/docs/documentation/platform/dynamic-secrets/postgresql.mdx
index 13adfc7502..ebc19b0119 100644
--- a/docs/documentation/platform/dynamic-secrets/postgresql.mdx
+++ b/docs/documentation/platform/dynamic-secrets/postgresql.mdx
@@ -1,6 +1,6 @@
 ---
 title: "PostgreSQL"
-description: "How to dynamically generate PostgreSQL database users."
+description: "Learn how to dynamically generate PostgreSQL database users."
 ---
 
 The Infisical PostgreSQL dynamic secret allows you to generate PostgreSQL database credentials on demand based on configured role.
diff --git a/docs/documentation/platform/dynamic-secrets/rabbit-mq.mdx b/docs/documentation/platform/dynamic-secrets/rabbit-mq.mdx
new file mode 100644
index 0000000000..f8649b7272
--- /dev/null
+++ b/docs/documentation/platform/dynamic-secrets/rabbit-mq.mdx
@@ -0,0 +1,116 @@
+---
+title: "RabbitMQ"
+description: "Learn how to dynamically generate RabbitMQ user credentials."
+---
+
+The Infisical RabbitMQ dynamic secret allows you to generate RabbitMQ credentials on demand based on a configured role.
+
+## Prerequisites
+
+1. Ensure that the `management` plugin is enabled on your RabbitMQ instance. This is required for the dynamic secret to work.
+
+
+## Set up Dynamic Secrets with RabbitMQ
+
+
+
+ Open the Secret Overview dashboard and select the environment in which you would like to add a dynamic secret.
+
+
+ ![Add Dynamic Secret Button](../../../images/platform/dynamic-secrets/add-dynamic-secret-button.png)
+
+
+ ![Dynamic Secret Modal](../../../images/platform/dynamic-secrets/dynamic-secret-modal-rabbit-mq.png)
+
+
+
+ Name by which you want the secret to be referenced
+
+
+
+ Default time-to-live for a generated secret (it is possible to modify this value when a secret is generated)
+
+
+
+ Maximum time-to-live for a generated secret.
+
+
+
+ Your RabbitMQ host. This must be in HTTP format. _(Example: http://your-cluster-ip)_
+
+
+
+ The port that the RabbitMQ management plugin is listening on. This is `15672` by default.
+
+
+
+ The name of the virtual host that the user will be assigned to. This defaults to `/`.
+
+
+
+ The permissions that the user will have on the virtual host. This defaults to `.*`.
+
+ The three permission fields all take a regular expression _(regex)_ that should match resource names for which the user is granted read / write / configuration permissions.
+
+
+
+ The username of the user that will be used to provision new dynamic secret leases.
+
+
+
+ The password of the user that will be used to provision new dynamic secret leases.
+
+
+
+ A CA may be required if your DB requires it for incoming connections. This is often the case when connecting to a managed service.
+
+
+ ![Dynamic Secret Setup Modal](../../../images/platform/dynamic-secrets/dynamic-secret-input-modal-rabbit-mq.png)
+
+
+
+
+ After submitting the form, you will see a dynamic secret created in the dashboard.
+
+
+ If this step fails, you may have to add the CA certificate.
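Since user provisioning goes through the management plugin's HTTP API, a quick way to troubleshoot a failure here is to check that the API is reachable with the admin user you configured. A sketch with a hypothetical host; `15672` is the default management port:

```bash
# Confirm the management API accepts the configured admin credentials.
curl -u admin-user:admin-password http://your-cluster-ip:15672/api/whoami

# For illustration, the kind of user creation each lease performs:
curl -u admin-user:admin-password -X PUT \
  -H "Content-Type: application/json" \
  -d '{"password": "generated-password", "tags": ""}' \
  http://your-cluster-ip:15672/api/users/dynamic-user-abc123
```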
+
+
+
+
+ Once you've successfully configured the dynamic secret, you're ready to generate on-demand credentials.
+ To do this, simply click on the 'Generate' button which appears when hovering over the dynamic secret item.
+ Alternatively, you can initiate the creation of a new lease by selecting 'New Lease' from the dynamic secret lease list section.
+
+ ![Dynamic Secret](/images/platform/dynamic-secrets/dynamic-secret-generate-redis.png)
+ ![Dynamic Secret](/images/platform/dynamic-secrets/dynamic-secret-lease-empty-redis.png)
+
+ When generating these secrets, it's important to specify a Time-to-Live (TTL) duration. This will dictate how long the credentials are valid for.
+
+ ![Provision Lease](/images/platform/dynamic-secrets/provision-lease.png)
+
+
+ Ensure that the TTL for the lease falls within the maximum TTL defined when configuring the dynamic secret.
+
+
+
+ Once you click the `Submit` button, a new secret lease will be generated and the credentials for it will be shown to you.
+
+ ![Provision Lease](/images/platform/dynamic-secrets/lease-values.png)
+
+
+
+## Audit or Revoke Leases
+Once you have created one or more leases, you will be able to access them by clicking on the respective dynamic secret item on the dashboard.
+This will allow you to see the expiration time of the lease or delete a lease before its set time to live.
+
+![Provision Lease](/images/platform/dynamic-secrets/lease-data.png)
+
+## Renew Leases
+To extend the life of a generated dynamic secret lease past its initial time to live, simply click on the **Renew** button as illustrated below.
+![Provision Lease](/images/platform/dynamic-secrets/dynamic-secret-lease-renew.png)
+
+
+ Lease renewals cannot exceed the maximum TTL set when configuring the dynamic secret.
+
diff --git a/docs/documentation/platform/dynamic-secrets/redis.mdx b/docs/documentation/platform/dynamic-secrets/redis.mdx
new file mode 100644
index 0000000000..cb2e6a17eb
--- /dev/null
+++ b/docs/documentation/platform/dynamic-secrets/redis.mdx
@@ -0,0 +1,106 @@
+---
+title: "Redis"
+description: "Learn how to dynamically generate Redis Database user credentials."
+---
+
+The Infisical Redis dynamic secret allows you to generate Redis Database credentials on demand based on a configured role.
+
+## Prerequisite
+Create a user with the required permission in your Redis instance. This user will be used to create new accounts on-demand.
+
+
+## Set up Dynamic Secrets with Redis
+
+
+
+ Open the Secret Overview dashboard and select the environment in which you would like to add a dynamic secret.
+
+
+ ![Add Dynamic Secret Button](../../../images/platform/dynamic-secrets/add-dynamic-secret-button.png)
+
+
+ ![Dynamic Secret Modal](../../../images/platform/dynamic-secrets/dynamic-secret-modal-redis.png)
+
+
+
+ Name by which you want the secret to be referenced
+
+
+
+ Default time-to-live for a generated secret (it is possible to modify this value when a secret is generated)
+
+
+
+ Maximum time-to-live for a generated secret.
+
+
+
+ The database host. This can be an IP address or a domain name, as long as Infisical can reach it.
+
+
+
+ The database port. This is the port that the Redis instance is listening on.
+
+
+
+ Redis username that will be used to create new users on-demand. This is often 'default' or 'admin'.
+
+
+
+ Password that will be used to create dynamic secrets. This is required if your Redis instance is password protected.
+
+
+
+ A CA may be required if your DB requires it for incoming connections. This is often the case when connecting to a managed service.
+
+
+
+
+ If you want to provide specific privileges for the generated dynamic credentials, you can modify the Redis statement to your needs. This is useful if you want to restrict access to specific commands or key patterns.
+
+ ![Modify Redis Statements Modal](/images/platform/dynamic-secrets/modify-redis-statement.png)
+
+
+ After submitting the form, you will see a dynamic secret created in the dashboard.
+
+
+ If this step fails, you may have to add the CA certificate.
+
+
+
+
+ Once you've successfully configured the dynamic secret, you're ready to generate on-demand credentials.
+ To do this, simply click on the 'Generate' button which appears when hovering over the dynamic secret item.
+ Alternatively, you can initiate the creation of a new lease by selecting 'New Lease' from the dynamic secret lease list section.
+
+ ![Dynamic Secret](/images/platform/dynamic-secrets/dynamic-secret-generate-redis.png)
+ ![Dynamic Secret](/images/platform/dynamic-secrets/dynamic-secret-lease-empty-redis.png)
+
+ When generating these secrets, it's important to specify a Time-to-Live (TTL) duration. This will dictate how long the credentials are valid for.
+
+ ![Provision Lease](/images/platform/dynamic-secrets/provision-lease.png)
+
+
+ Ensure that the TTL for the lease falls within the maximum TTL defined when configuring the dynamic secret.
+
+
+
+ Once you click the `Submit` button, a new secret lease will be generated and the credentials for it will be shown to you.
+
+ ![Provision Lease](/images/platform/dynamic-secrets/lease-values.png)
+
+
+
+## Audit or Revoke Leases
+Once you have created one or more leases, you will be able to access them by clicking on the respective dynamic secret item on the dashboard.
+This will allow you to see the expiration time of the lease or delete a lease before its set time to live.
+
+![Provision Lease](/images/platform/dynamic-secrets/lease-data.png)
+
+## Renew Leases
+To extend the life of a generated dynamic secret lease past its initial time to live, simply click on the **Renew** button as illustrated below.
+![Provision Lease](/images/platform/dynamic-secrets/dynamic-secret-lease-renew.png)
+
+
+ Lease renewals cannot exceed the maximum TTL set when configuring the dynamic secret.
+
\ No newline at end of file
diff --git a/docs/documentation/platform/dynamic-secrets/sap-hana.mdx b/docs/documentation/platform/dynamic-secrets/sap-hana.mdx
new file mode 100644
index 0000000000..3c2a837d30
--- /dev/null
+++ b/docs/documentation/platform/dynamic-secrets/sap-hana.mdx
@@ -0,0 +1,121 @@
+---
+title: "SAP HANA"
+description: "Learn how to dynamically generate SAP HANA database account credentials."
+---
+
+The Infisical SAP HANA dynamic secret allows you to generate SAP HANA database credentials on demand.
+
+## Prerequisite
+
+- Infisical requires a SAP HANA database user in your instance with the necessary permissions. This user will facilitate the creation of new accounts as needed.
+  Ensure the user possesses privileges for creating, dropping, and granting permissions to roles so that it can create dynamic secrets.
+
+- The SAP HANA instance should be reachable by Infisical.
+
+## Set up Dynamic Secrets with SAP HANA
+
+
+
+ Open the Secret Overview dashboard and select the environment in which you would like to add a dynamic secret.
+
+
+ ![Add Dynamic Secret Button](../../../images/platform/dynamic-secrets/add-dynamic-secret-button.png)
+
+
+ ![Dynamic Secret Modal](../../../images/platform/dynamic-secrets/dynamic-secret-modal-sap-hana.png)
+
+
+
+ Name by which you want the secret to be referenced
+
+
+
+ Default time-to-live for a generated secret (it is possible to modify this value when a secret is generated)
+
+
+
+ Maximum time-to-live for a generated secret
+
+
+
+ SAP HANA Host
+
+
+
+
+ SAP HANA Port
+
+
+
+ Username that will be used to create dynamic secrets
+
+
+
+ Password that will be used to create dynamic secrets
+
+
+
+ A CA may be required for SSL if you are self-hosting SAP HANA
+
+
+
+ ![Dynamic Secret Setup Modal](../../../images/platform/dynamic-secrets/dynamic-secret-setup-modal-sap-hana.png)
+
+
+
+ If you want to provide specific privileges for the generated dynamic credentials, you can modify the SQL statement to your needs.
+ ![Modify SQL Statements Modal](../../../images/platform/dynamic-secrets/modify-sap-hana-sql-statements.png)
+
+
+ Due to SAP HANA limitations, the attached SQL statements are not executed as a transaction.
+
+
+
+
+ After submitting the form, you will see a dynamic secret created in the dashboard.
+
+
+ If this step fails, you may have to add the CA certificate.
+
+
+
+
+ Once you've successfully configured the dynamic secret, you're ready to generate on-demand credentials.
+ To do this, simply click on the 'Generate' button which appears when hovering over the dynamic secret item.
+ Alternatively, you can initiate the creation of a new lease by selecting 'New Lease' from the dynamic secret lease list section.
+
+ ![Dynamic Secret](/images/platform/dynamic-secrets/dynamic-secret-generate.png)
+ ![Dynamic Secret](/images/platform/dynamic-secrets/dynamic-secret-lease-empty.png)
+
+ When generating these secrets, it's important to specify a Time-to-Live (TTL) duration. This will dictate how long the credentials are valid for.
+
+ ![Provision Lease](/images/platform/dynamic-secrets/provision-lease.png)
+
+
+ Ensure that the TTL for the lease falls within the maximum TTL defined when configuring the dynamic secret in step 4.
+
+
+
+ Once you click the `Submit` button, a new secret lease will be generated and the credentials for it will be shown to you.
+
+ ![Provision Lease](/images/platform/dynamic-secrets/lease-values.png)
+
+
+
+
+## Audit or Revoke Leases
+
+Once you have created one or more leases, you will be able to access them by clicking on the respective dynamic secret item on the dashboard.
+This will allow you to see the lease details and delete the lease ahead of its expiration time.
+
+![Provision Lease](/images/platform/dynamic-secrets/lease-data.png)
+
+## Renew Leases
+
+To extend the life of the generated dynamic secret lease past its initial time to live, simply click on the **Renew** button as illustrated below.
+![Provision Lease](/images/platform/dynamic-secrets/dynamic-secret-lease-renew.png)
+
+
+ Lease renewals cannot exceed the maximum TTL set when configuring the dynamic
+ secret.
+
diff --git a/docs/documentation/platform/dynamic-secrets/snowflake.mdx b/docs/documentation/platform/dynamic-secrets/snowflake.mdx
new file mode 100644
index 0000000000..f5e06ba768
--- /dev/null
+++ b/docs/documentation/platform/dynamic-secrets/snowflake.mdx
@@ -0,0 +1,124 @@
+---
+title: "Snowflake"
+description: "Learn how to dynamically generate Snowflake user credentials."
+---
+
+Infisical's Snowflake dynamic secrets allow you to generate Snowflake user credentials on demand.
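The SQL that provisions each user is customizable later in the setup (see the statements modal step below). For orientation, a hedged sketch of typical Snowflake user-provisioning statements, with a hypothetical account identifier and role; `{{username}}` and `{{password}}` stand in for values injected per lease:

```bash
# Illustrative only; snowsql prompts for the service user's password
# (or reads SNOWSQL_PWD from the environment).
snowsql -a <account_identifier> -u INFISICAL_SERVICE_USER -q "
  CREATE USER {{username}} PASSWORD = '{{password}}'
    DEFAULT_ROLE = PUBLIC MUST_CHANGE_PASSWORD = FALSE;
  GRANT ROLE PUBLIC TO USER {{username}};
"
```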
+
+## Snowflake Prerequisites
+
+
+ Infisical requires a Snowflake user in your account with the USERADMIN role. This user will act as a service account for Infisical and facilitate the creation of new users as needed.
+
+
+
+
+ ![Snowflake User Dashboard](/images/platform/dynamic-secrets/snowflake/dynamic-secret-snowflake-users-page.png)
+
+
+
+ Be sure to uncheck "Force user to change password on first time login"
+
+ ![Snowflake Create Service User](/images/platform/dynamic-secrets/snowflake/dynamic-secret-snowflake-create-service-user.png)
+
+
+ ![Snowflake Account And Organization Identifiers](/images/platform/dynamic-secrets/snowflake/dynamic-secret-snowflake-identifiers.png)
+
+
+
+## Set up Dynamic Secrets with Snowflake
+
+
+
+ Open the Secret Overview dashboard and select the environment in which you would like to add a dynamic secret.
+
+
+ ![Add Dynamic Secret Button](/images/platform/dynamic-secrets/add-dynamic-secret-button.png)
+
+
+ ![Dynamic Secret Modal](/images/platform/dynamic-secrets/snowflake/dynamic-secret-snowflake-modal.png)
+
+
+
+ The name you want to reference this secret by
+
+
+
+ Default time-to-live for a generated secret (it is possible to modify this value when generating a secret)
+
+
+
+ Maximum time-to-live for a generated secret
+
+
+
+ Snowflake account identifier
+
+
+
+ Snowflake organization identifier
+
+
+
+ Username of the Infisical Service User
+
+
+
+ Password of the Infisical Service User
+
+
+ ![Dynamic Secret Setup Modal](/images/platform/dynamic-secrets/snowflake/dynamic-secret-snowflake-setup-modal.png)
+
+
+
+ If you want to provide specific privileges for the generated dynamic credentials, you can modify the SQL
+ statement to your needs.
+ ![Modify SQL Statements Modal](/images/platform/dynamic-secrets/snowflake/dynamic-secret-snowflake-sql-statements.png)
+
+
+ After submitting the form, you will see a dynamic secret created in the dashboard.
+
+
+ Once you've successfully configured the dynamic secret, you're ready to generate on-demand credentials.
+ To do this, simply click on the 'Generate' button which appears when hovering over the dynamic secret item.
+ Alternatively, you can initiate the creation of a new lease by selecting 'New Lease' from the dynamic secret
+ lease list section.
+
+ ![Dynamic Secret](/images/platform/dynamic-secrets/dynamic-secret-generate.png)
+ ![Dynamic Secret](/images/platform/dynamic-secrets/dynamic-secret-lease-empty.png)
+
+ When generating these secrets, it's important to specify a Time-to-Live (TTL) duration. This will dictate how
+ long the credentials are valid for.
+
+ ![Provision Lease](/images/platform/dynamic-secrets/provision-lease.png)
+
+
+ Ensure that the TTL for the lease falls within the maximum TTL defined when configuring the dynamic secret in
+ step 4.
+
+
+
+ Once you click the `Submit` button, a new secret lease will be generated and the credentials for it will be
+ shown to you.
+
+ ![Provision Lease](/images/platform/dynamic-secrets/lease-values.png)
+
+
+
+
+## Audit or Revoke Leases
+
+Once you have created one or more leases, you will be able to access them by clicking on the respective dynamic secret item on the dashboard.
+This will allow you to see the lease details and delete the lease ahead of its expiration time.
+
+![Provision Lease](/images/platform/dynamic-secrets/lease-data.png)
+
+## Renew Leases
+
+To extend the life of the generated dynamic secret lease past its initial time to live, simply click on the **Renew** button as illustrated below.
+![Provision Lease](/images/platform/dynamic-secrets/dynamic-secret-lease-renew.png) + + + Lease renewals cannot exceed the maximum TTL set when configuring the dynamic + secret. + diff --git a/docs/documentation/platform/identities/aws-auth.mdx b/docs/documentation/platform/identities/aws-auth.mdx index 3eb094cc46..494606ccd7 100644 --- a/docs/documentation/platform/identities/aws-auth.mdx +++ b/docs/documentation/platform/identities/aws-auth.mdx @@ -7,7 +7,7 @@ description: "Learn how to authenticate with Infisical for EC2 instances, Lambda ## Diagram -The following sequence digram illustrates the AWS Auth workflow for authenticating AWS IAM principals with Infisical. +The following sequence diagram illustrates the AWS Auth workflow for authenticating AWS IAM principals with Infisical. ```mermaid sequenceDiagram @@ -75,7 +75,14 @@ access the Infisical API using the AWS Auth authentication method. - Name (required): A friendly name for the identity. - Role (required): A role from the **Organization Roles** tab for the identity to assume. The organization role assigned will determine what organization level resources this identity can have access to. - Once you've created an identity, you'll be prompted to configure the authentication method for it. Here, select **AWS Auth**. + Once you've created an identity, you'll be redirected to a page where you can manage the identity. + + ![identities page](/images/platform/identities/identities-page.png) + + Since the identity has been configured with Universal Auth by default, you should re-configure it to use AWS Auth instead. To do this, press to edit the **Authentication** section, + remove the existing Universal Auth configuration, and add a new AWS Auth configuration onto the identity. + + ![identities page remove default auth](/images/platform/identities/identities-page-remove-default-auth.png) ![identities create aws auth method](/images/platform/identities/identities-org-create-aws-auth-method.png) diff --git a/docs/documentation/platform/identities/azure-auth.mdx b/docs/documentation/platform/identities/azure-auth.mdx index 3ac9577525..03d997ffb8 100644 --- a/docs/documentation/platform/identities/azure-auth.mdx +++ b/docs/documentation/platform/identities/azure-auth.mdx @@ -7,7 +7,7 @@ description: "Learn how to authenticate with Infisical for services on Azure" ## Diagram -The following sequence digram illustrates the Azure Auth workflow for authenticating Azure [service principals](https://learn.microsoft.com/en-us/entra/identity-platform/app-objects-and-service-principals?tabs=browser) with Infisical. +The following sequence diagram illustrates the Azure Auth workflow for authenticating Azure [service principals](https://learn.microsoft.com/en-us/entra/identity-platform/app-objects-and-service-principals?tabs=browser) with Infisical. ```mermaid sequenceDiagram @@ -75,7 +75,14 @@ access the Infisical API using the Azure Auth authentication method. - Name (required): A friendly name for the identity. - Role (required): A role from the **Organization Roles** tab for the identity to assume. The organization role assigned will determine what organization level resources this identity can have access to. - Once you've created an identity, you'll be prompted to configure the authentication method for it. Here, select **Azure Auth**. + Once you've created an identity, you'll be redirected to a page where you can manage the identity. 
+ + ![identities page](/images/platform/identities/identities-page.png) + + Since the identity has been configured with Universal Auth by default, you should re-configure it to use Azure Auth instead. To do this, press to edit the **Authentication** section, + remove the existing Universal Auth configuration, and add a new Azure Auth configuration onto the identity. + + ![identities page remove default auth](/images/platform/identities/identities-page-remove-default-auth.png) ![identities create azure auth method](/images/platform/identities/identities-org-create-azure-auth-method.png) diff --git a/docs/documentation/platform/identities/gcp-auth.mdx b/docs/documentation/platform/identities/gcp-auth.mdx index c836a946d2..6573544ded 100644 --- a/docs/documentation/platform/identities/gcp-auth.mdx +++ b/docs/documentation/platform/identities/gcp-auth.mdx @@ -13,7 +13,7 @@ description: "Learn how to authenticate with Infisical for services on Google Cl ## Diagram - The following sequence digram illustrates the GCP ID Token Auth workflow for authenticating GCP resources with Infisical. + The following sequence diagram illustrates the GCP ID Token Auth workflow for authenticating GCP resources with Infisical. ```mermaid sequenceDiagram @@ -81,7 +81,14 @@ access the Infisical API using the GCP ID Token authentication method. - Name (required): A friendly name for the identity. - Role (required): A role from the **Organization Roles** tab for the identity to assume. The organization role assigned will determine what organization level resources this identity can have access to. - Once you've created an identity, you'll be prompted to configure the authentication method for it. Here, select **GCP Auth** and set the **Type** to **GCP ID Token Auth**. + Once you've created an identity, you'll be redirected to a page where you can manage the identity. + + ![identities page](/images/platform/identities/identities-page.png) + + Since the identity has been configured with Universal Auth by default, you should re-configure it to use GCP Auth instead. To do this, press to edit the **Authentication** section, + remove the existing Universal Auth configuration, and add a new GCP Auth configuration onto the identity; set the **Type** field to **GCP ID Token Auth**. + + ![identities page remove default auth](/images/platform/identities/identities-page-remove-default-auth.png) ![identities create gcp auth method](/images/platform/identities/identities-org-create-gcp-gce-auth-method.png) @@ -175,7 +182,7 @@ access the Infisical API using the GCP ID Token authentication method. ## Diagram - The following sequence digram illustrates the GCP IAM Auth workflow for authenticating GCP IAM service accounts with Infisical. + The following sequence diagram illustrates the GCP IAM Auth workflow for authenticating GCP IAM service accounts with Infisical. ```mermaid sequenceDiagram @@ -243,9 +250,16 @@ access the Infisical API using the GCP IAM authentication method. - Name (required): A friendly name for the identity. - Role (required): A role from the **Organization Roles** tab for the identity to assume. The organization role assigned will determine what organization level resources this identity can have access to. - Once you've created an identity, you'll be prompted to configure the authentication method for it. Here, select **GCP IAM Auth** and set the **Type** to **GCP IAM Auth**. + Once you've created an identity, you'll be redirected to a page where you can manage the identity. 
- ![identities create gcp auth method](/images/platform/identities/identities-org-create-gcp-iam-auth-method.png) + ![identities page](/images/platform/identities/identities-page.png) + + Since the identity has been configured with Universal Auth by default, you should re-configure it to use GCP Auth instead. To do this, press to edit the **Authentication** section, + remove the existing Universal Auth configuration, and add a new GCP Auth configuration onto the identity; set the **Type** field to **GCP IAM Auth**. + + ![identities page remove default auth](/images/platform/identities/identities-page-remove-default-auth.png) + + ![identities organization create token auth method](/images/platform/identities/identities-org-create-gcp-iam-auth-method.png) Here's some more guidance on each field: diff --git a/docs/documentation/platform/identities/kubernetes-auth.mdx b/docs/documentation/platform/identities/kubernetes-auth.mdx index b154f36f6b..b4d7cc1ac5 100644 --- a/docs/documentation/platform/identities/kubernetes-auth.mdx +++ b/docs/documentation/platform/identities/kubernetes-auth.mdx @@ -7,7 +7,7 @@ description: "Learn how to authenticate with Infisical in Kubernetes" ## Diagram - The following sequence digram illustrates the Kubernetes Auth workflow for authenticating applications running in pods with Infisical. + The following sequence diagram illustrates the Kubernetes Auth workflow for authenticating applications running in pods with Infisical. ```mermaid sequenceDiagram @@ -42,9 +42,9 @@ To be more specific: 4. If all is well, Infisical returns a short-lived access token that the application can use to make authenticated requests to the Infisical API. -We recommend using one of Infisical's clients like SDKs or the Infisical Agent -to authenticate with Infisical using Kubernetes Auth as they handle the -authentication process including service account credential retrieval for you. + We recommend using one of Infisical's clients like SDKs or the Infisical Agent + to authenticate with Infisical using Kubernetes Auth as they handle the + authentication process including service account credential retrieval for you. ## Guide @@ -137,9 +137,16 @@ In the following steps, we explore how to create and use identities for your app - Name (required): A friendly name for the identity. - Role (required): A role from the **Organization Roles** tab for the identity to assume. The organization role assigned will determine what organization level resources this identity can have access to. - Once you've created an identity, you'll be prompted to configure the authentication method for it. Here, select **Kubernetes Auth**. + Once you've created an identity, you'll be redirected to a page where you can manage the identity. - ![identities organization create auth method](/images/platform/identities/identities-org-create-kubernetes-auth-method.png) + ![identities page](/images/platform/identities/identities-page.png) + + Since the identity has been configured with Universal Auth by default, you should re-configure it to use Kubernetes Auth instead. To do this, press to edit the **Authentication** section, + remove the existing Universal Auth configuration, and add a new Kubernetes Auth configuration onto the identity. 
+ + ![identities page remove default auth](/images/platform/identities/identities-page-remove-default-auth.png) + + ![identities organization create kubernetes auth method](/images/platform/identities/identities-org-create-kubernetes-auth-method.png) Here's some more guidance on each field: @@ -240,8 +247,8 @@ In the following steps, we explore how to create and use identities for your app In certain cases, you may want to extend the lifespan of an access token; to do so, you must set a max TTL parameter. -A token can be renewed any number of time and each call to renew it will extend the toke life by increments of access token TTL. -Regardless of how frequently an access token is renewed, its lifespan remains bound to the maximum TTL determined at its creation +A token can be renewed any number of times where each call to renew it can extend the token's lifetime by increments of the access token's TTL. +Regardless of how frequently an access token is renewed, its lifespan remains bound to the maximum TTL determined at its creation. diff --git a/docs/documentation/platform/identities/machine-identities.mdx b/docs/documentation/platform/identities/machine-identities.mdx index 9cc6c4c3d5..b4a18708c9 100644 --- a/docs/documentation/platform/identities/machine-identities.mdx +++ b/docs/documentation/platform/identities/machine-identities.mdx @@ -7,7 +7,7 @@ description: "Learn how to use Machine Identities to programmatically interact w An Infisical machine identity is an entity that represents a workload or application that require access to various resources in Infisical. This is conceptually similar to an IAM user in AWS or service account in Google Cloud Platform (GCP). -Each identity must authenticate with the Infisical API using a supported authentication method like [Universal Auth](/documentation/platform/identities/universal-auth), [Kubernetes Auth](/documentation/platform/identities/kubernetes-auth), [AWS Auth](/documentation/platform/identities/aws-auth), [Azure Auth](/documentation/platform/identities/azure-auth), or [GCP Auth](/documentation/platform/identities/gcp-auth) to get back a short-lived access token to be used in subsequent requests. +Each identity must authenticate with the Infisical API using a supported authentication method like [Token Auth](/documentation/platform/identities/token-auth), [Universal Auth](/documentation/platform/identities/universal-auth), [Kubernetes Auth](/documentation/platform/identities/kubernetes-auth), [AWS Auth](/documentation/platform/identities/aws-auth), [Azure Auth](/documentation/platform/identities/azure-auth), or [GCP Auth](/documentation/platform/identities/gcp-auth) to get back a short-lived access token to be used in subsequent requests. ![Organization Identities](/images/platform/organization/organization-machine-identities.png) @@ -20,28 +20,23 @@ Key Features: A typical workflow for using identities consists of four steps: -1. Creating the identity with a name and [role](/documentation/platform/role-based-access-controls) in Organization Access Control > Machine Identities. +1. Creating the identity with a name and [role](/documentation/platform/access-controls/role-based-access-controls) in Organization Access Control > Machine Identities. This step also involves configuring an authentication method for it. 2. Adding the identity to the project(s) you want it to have access to. 3. Authenticating the identity with the Infisical API based on the configured authentication method on it and receiving a short-lived access token back. 
4. Authenticating subsequent requests with the Infisical API using the short-lived access token. - - Currently, identities can only be used to make authenticated requests to the Infisical API, SDKs, Terraform, Kubernetes Operator, and Infisical Agent. They do not work with clients such as CLI, Ansible look up plugin, etc. - -Machine Identity support for the rest of the clients is planned to be released in the current quarter. - - - ## Authentication Methods -To interact with various resources in Infisical, Machine Identities are able to authenticate using: +To interact with various resources in Infisical, Machine Identities can authenticate with the Infisical API using: -- [Universal Auth](/documentation/platform/identities/universal-auth): A platform-agnostic authentication method that can be configured on an identity suitable to authenticate from any platform/environment. -- [Kubernetes Auth](/documentation/platform/identities/kubernetes-auth): A Kubernetes-native authentication method for applications (e.g. pods) to authenticate with Infisical. -- [AWS Auth](/documentation/platform/identities/aws-auth): An AWS-native authentication method for AWS services (e.g. EC2, Lambda functions, etc.) to authenticate with Infisical. -- [Azure Auth](/documentation/platform/identities/azure-auth): An Azure-native authentication method for Azure resources (e.g. Azure VMs, Azure App Services, Azure Functions, Azure Kubernetes Service, etc.) to authenticate with Infisical. -- [GCP Auth](/documentation/platform/identities/gcp-auth): A GCP-native authentication method for GCP resources (e.g. Compute Engine, App Engine, Cloud Run, Google Kubernetes Engine, IAM service accounts, etc.) to authenticate with Infisical. +- [Token Auth](/documentation/platform/identities/token-auth): A platform-agnostic, simple authentication method suitable to authenticate with Infisical using a token. +- [Universal Auth](/documentation/platform/identities/universal-auth): A platform-agnostic authentication method suitable to authenticate with Infisical using a Client ID and Client Secret. +- [Kubernetes Auth](/documentation/platform/identities/kubernetes-auth): A Kubernetes-native authentication method for applications (e.g. pods). +- [AWS Auth](/documentation/platform/identities/aws-auth): An AWS-native authentication method for AWS services (e.g. EC2, Lambda functions, etc.). +- [Azure Auth](/documentation/platform/identities/azure-auth): An Azure-native authentication method for Azure resources (e.g. Azure VMs, Azure App Services, Azure Functions, Azure Kubernetes Service, etc.). +- [GCP Auth](/documentation/platform/identities/gcp-auth): A GCP-native authentication method for GCP resources (e.g. Compute Engine, App Engine, Cloud Run, Google Kubernetes Engine, IAM service accounts, etc.). +- [OIDC Auth](/documentation/platform/identities/oidc-auth): A platform-agnostic, JWT-based authentication method for workloads using an OpenID Connect identity provider. ## FAQ @@ -59,6 +54,8 @@ You can learn more about how to do this in the CLI quickstart [here](/cli/usage) Amongst many differences, identities provide broader access over the Infisical API, utilizes the same permission system as user identities, and come with a significantly larger number of configurable authentication and security features. + + If you're looking for a simple authentication method, similar to service tokens, that can be bound onto an identity, we recommend checking out [Token Auth](/documentation/platform/identities/token-auth). 
There are a few reasons why this might happen:
+
diff --git a/docs/documentation/platform/identities/oidc-auth/circleci.mdx b/docs/documentation/platform/identities/oidc-auth/circleci.mdx
new file mode 100644
index 0000000000..ddf74e3fa5
--- /dev/null
+++ b/docs/documentation/platform/identities/oidc-auth/circleci.mdx
@@ -0,0 +1,174 @@
+---
+title: CircleCI
+description: "Learn how to authenticate CircleCI jobs with Infisical using OpenID Connect (OIDC)."
+---
+
+**OIDC Auth** is a platform-agnostic JWT-based authentication method that can be used to authenticate from any platform or environment using an identity provider with OpenID Connect.
+
+## Diagram
+
+The following sequence diagram illustrates the OIDC Auth workflow for authenticating CircleCI jobs with Infisical.
+
+```mermaid
+sequenceDiagram
+  participant Client as CircleCI Job
+  participant Idp as CircleCI Identity Provider
+  participant Infis as Infisical
+
+  Idp->>Client: Step 1: Inject JWT with verifiable claims
+
+  Note over Client,Infis: Step 2: Login Operation
+  Client->>Infis: Send signed JWT to /api/v1/auth/oidc-auth/login
+
+  Note over Infis,Idp: Step 3: Query verification
+  Infis->>Idp: Request JWT public key using OIDC Discovery
+  Idp-->>Infis: Return public key
+
+  Note over Infis: Step 4: JWT validation
+  Infis->>Client: Return short-lived access token
+
+  Note over Client,Infis: Step 5: Access Infisical API with Token
+  Client->>Infis: Make authenticated requests using the short-lived access token
+```
+
+## Concept
+
+At a high-level, Infisical authenticates a client by verifying the JWT and checking that it meets specific requirements (e.g. it is issued by a trusted identity provider) at the `/api/v1/auth/oidc-auth/login` endpoint. If successful,
+then Infisical returns a short-lived access token that can be used to make authenticated requests to the Infisical API.
+
+To be more specific:
+
+1. CircleCI provides the running job with a valid OIDC token specific to the execution.
+2. The CircleCI OIDC token is sent to Infisical at the `/api/v1/auth/oidc-auth/login` endpoint.
+3. Infisical fetches the public key that was used to sign the identity token provided by CircleCI.
+4. Infisical validates the JWT using the public key provided by the identity provider and checks that the subject, audience, and claims of the token match the set criteria.
+5. If all is well, Infisical returns a short-lived access token that CircleCI jobs can use to make authenticated requests to the Infisical API.
+
+Infisical needs network-level access to the CircleCI servers.
+
+## Guide
+
+In the following steps, we explore how to create and use identities to access the Infisical API using the OIDC Auth authentication method.
+
+
+
+ To create an identity, head to your Organization Settings > Access Control > Machine Identities and press **Create identity**.
+
+ ![identities organization](/images/platform/identities/identities-org.png)
+
+ When creating an identity, you specify an organization level [role](/documentation/platform/role-based-access-controls) for it to assume; you can configure roles in Organization Settings > Access Control > Organization Roles.
+
+ ![identities organization create](/images/platform/identities/identities-org-create.png)
+
+ Now input a few details for your new identity. Here's some guidance for each field:
+
+ - Name (required): A friendly name for the identity.
+ - Role (required): A role from the **Organization Roles** tab for the identity to assume. The organization role assigned will determine what organization level resources this identity can have access to.
+
+ Once you've created an identity, you'll be redirected to a page where you can manage the identity.
+
+ ![identities page](/images/platform/identities/identities-page.png)
+
+ Since the identity has been configured with Universal Auth by default, you should re-configure it to use OIDC Auth instead. To do this, press to edit the **Authentication** section,
+ remove the existing Universal Auth configuration, and add a new OIDC Auth configuration onto the identity.
+
+ ![identities page remove default auth](/images/platform/identities/identities-page-remove-default-auth.png)
+
+ ![identities create oidc auth method](/images/platform/identities/identities-org-create-oidc-auth-method.png)
+
+ Restrict access by configuring the Subject, Audiences, and Claims fields
+
+ Here's some more guidance on each field:
+ - OIDC Discovery URL: The URL used to retrieve the OpenID Connect configuration from the identity provider. This will be used to fetch the public key needed for verifying the provided JWT. This should be set to `https://oidc.circleci.com/org/<organization_id>`, where `organization_id` refers to the CircleCI organization where the job is being run.
+ - Issuer: The unique identifier of the identity provider issuing the JWT. This value is used to verify the iss (issuer) claim in the JWT to ensure the token is issued by a trusted provider. This should be set to `https://oidc.circleci.com/org/<organization_id>` as well.
+ - CA Certificate: The PEM-encoded CA cert for establishing secure communication with the Identity Provider endpoints. This can be left blank.
+ - Subject: The expected principal that is the subject of the JWT. The format of the sub field for CircleCI OIDC tokens is `org/<organization_id>/project/<project_id>/user/<user_id>`, where organization_id, project_id, and user_id are UUIDs that identify the CircleCI organization, project, and user, respectively. The user is the CircleCI user that caused this job to run.
+ - Audiences: A list of intended recipients. This value is checked against the aud (audience) claim in the token. Set this to the CircleCI `organization_id` corresponding to where the job is running.
+ - Claims: Additional information or attributes that should be present in the JWT for it to be valid. Refer to CircleCI's [documentation](https://circleci.com/docs/openid-connect-tokens) for the complete list of supported claims.
+ - Access Token TTL (default is `2592000` equivalent to 30 days): The lifetime for an access token in seconds. This value will be referenced at renewal time.
+ - Access Token Max TTL (default is `2592000` equivalent to 30 days): The maximum lifetime for an access token in seconds. This value will be referenced at renewal time.
+ - Access Token Max Number of Uses (default is `0`): The maximum number of times that an access token can be used; a value of `0` implies an infinite number of uses.
+ - Access Token Trusted IPs: The IPs or CIDR ranges that access tokens can be used from. By default, each token is given the `0.0.0.0/0` entry, allowing usage from any network address.
+ For more details on the appropriate values for the OIDC fields, refer to CircleCI's [documentation](https://circleci.com/docs/openid-connect-tokens).
+ The `subject`, `audiences`, and `claims` fields support glob pattern matching; however, we highly recommend using hardcoded values whenever possible.
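For orientation, the relevant claims of a CircleCI OIDC token line up with the fields above roughly as follows — a hypothetical payload with placeholder IDs, shown in the same style as the response samples elsewhere in these docs (the `oidc.circleci.com/project-id` claim name comes from CircleCI's documentation):

```bash Example claims
{
  "iss": "https://oidc.circleci.com/org/<organization_id>",
  "aud": "<organization_id>",
  "sub": "org/<organization_id>/project/<project_id>/user/<user_id>",
  "oidc.circleci.com/project-id": "<project_id>"
}
```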
+
+
+ To enable the identity to access project-level resources such as secrets within a specific project, you should add it to that project.
+
+ To do this, head over to the project you want to add the identity to and go to Project Settings > Access Control > Machine Identities and press **Add identity**.
+
+ Next, select the identity you want to add to the project and the project level role you want to allow it to assume. The project role assigned will determine what project level resources this identity can have access to.
+
+ ![identities project](/images/platform/identities/identities-project.png)
+
+ ![identities project create](/images/platform/identities/identities-project-create.png)
+
+
+ The following is an example of how to use the `$CIRCLE_OIDC_TOKEN` with the Infisical [terraform provider](https://registry.terraform.io/providers/Infisical/infisical/latest/docs) to manage resources in a CircleCI pipeline.
+
+ ```yml config.yml
+ version: 2.1
+
+ jobs:
+   terraform-apply:
+     docker:
+       - image: hashicorp/terraform:latest
+
+     steps:
+       - checkout
+       - run:
+           command: |
+             export INFISICAL_AUTH_JWT="$CIRCLE_OIDC_TOKEN"
+             terraform init
+             terraform apply -auto-approve
+
+ workflows:
+   version: 2
+   build-and-test:
+     jobs:
+       - terraform-apply
+ ```
+ The Infisical terraform provider expects the `INFISICAL_AUTH_JWT` environment variable to be set to the CircleCI OIDC token.
+ ```hcl main.tf
+ terraform {
+   required_providers {
+     infisical = {
+       source = "infisical/infisical"
+     }
+   }
+ }
+
+ provider "infisical" {
+   host = "https://app.infisical.com"
+   auth = {
+     oidc = {
+       identity_id = "f2f5ee4c-6223-461a-87c3-406a6b481462"
+     }
+   }
+ }
+
+ resource "infisical_access_approval_policy" "prod-access-approval" {
+   project_id = "09eda1f8-85a3-47a9-8a6f-e27f133b2a36"
+   name = "my-approval-policy"
+   environment_slug = "prod"
+   secret_path = "/"
+   approvers = [
+     {
+       type = "user"
+       username = "sheen+200@infisical.com"
+     },
+   ]
+   required_approvals = 1
+   enforcement_level = "soft"
+ }
+ ```
+
+ Each identity access token has a time-to-live (TTL) which you can infer from the response of the login operation;
+ the default TTL is `7200` seconds, which can be adjusted.
+
+ If an identity access token expires, it can no longer authenticate with the Infisical API. In this case,
+ a new access token should be obtained by performing another login operation.
+
+
+
diff --git a/docs/documentation/platform/identities/oidc-auth/general.mdx b/docs/documentation/platform/identities/oidc-auth/general.mdx
new file mode 100644
index 0000000000..776d175a4f
--- /dev/null
+++ b/docs/documentation/platform/identities/oidc-auth/general.mdx
@@ -0,0 +1,170 @@
+---
+title: General
+description: "Learn how to authenticate with Infisical from any platform or environment using OpenID Connect (OIDC)."
+---
+
+**OIDC Auth** is a platform-agnostic JWT-based authentication method that can be used to authenticate from any platform or environment using an identity provider with OpenID Connect.
+
+## Diagram
+
+The following sequence diagram illustrates the OIDC Auth workflow for authenticating clients with Infisical.
+
+```mermaid
+sequenceDiagram
+  participant Client as Client
+  participant Idp as Identity Provider
+  participant Infis as Infisical
+
+  Client->>Idp: Step 1: Request identity token
+  Idp-->>Client: Return JWT with verifiable claims
+
+  Note over Client,Infis: Step 2: Login Operation
+  Client->>Infis: Send signed JWT to /api/v1/auth/oidc-auth/login
+
+  Note over Infis,Idp: Step 3: Query verification
+  Infis->>Idp: Request JWT public key using OIDC Discovery
+  Idp-->>Infis: Return public key
+
+  Note over Infis: Step 4: JWT validation
+  Infis->>Client: Return short-lived access token
+
+  Note over Client,Infis: Step 5: Access Infisical API with Token
+  Client->>Infis: Make authenticated requests using the short-lived access token
+```
+
+## Concept
+
+At a high-level, Infisical authenticates a client by verifying the JWT and checking that it meets specific requirements (e.g. it is issued by a trusted identity provider) at the `/api/v1/auth/oidc-auth/login` endpoint. If successful,
+then Infisical returns a short-lived access token that can be used to make authenticated requests to the Infisical API.
+
+To be more specific:
+
+1. The client requests an identity token from its identity provider.
+2. The client sends the identity token to Infisical at the `/api/v1/auth/oidc-auth/login` endpoint.
+3. Infisical fetches the public key that was used to sign the identity token from the identity provider using OIDC Discovery.
+4. Infisical validates the JWT using the public key provided by the identity provider and checks that the subject, audience, and claims of the token match the set criteria.
+5. If all is well, Infisical returns a short-lived access token that the client can use to make authenticated requests to the Infisical API.
+
+
+ Infisical needs network-level access to the identity provider configuration
+ endpoints.
+
+
+## Guide
+
+In the following steps, we explore how to create and use identities to access the Infisical API using the OIDC Auth authentication method.
+
+
+
+ To create an identity, head to your Organization Settings > Access Control > Machine Identities and press **Create identity**.
+
+ ![identities organization](/images/platform/identities/identities-org.png)
+
+ When creating an identity, you specify an organization level [role](/documentation/platform/role-based-access-controls) for it to assume; you can configure roles in Organization Settings > Access Control > Organization Roles.
+
+ ![identities organization create](/images/platform/identities/identities-org-create.png)
+
+ Now input a few details for your new identity. Here's some guidance for each field:
+
+ - Name (required): A friendly name for the identity.
+ - Role (required): A role from the **Organization Roles** tab for the identity to assume. The organization role assigned will determine what organization level resources this identity can have access to.
+
+ Once you've created an identity, you'll be redirected to a page where you can manage the identity.
+
+ ![identities page](/images/platform/identities/identities-page.png)
+
+ Since the identity has been configured with Universal Auth by default, you should re-configure it to use OIDC Auth instead. To do this, press to edit the **Authentication** section,
+ remove the existing Universal Auth configuration, and add a new OIDC Auth configuration onto the identity.
+
+ ![identities page remove default auth](/images/platform/identities/identities-page-remove-default-auth.png)
+
+ ![identities create oidc auth method](/images/platform/identities/identities-org-create-oidc-auth-method.png)
+
+ Restrict access by configuring the Subject, Audiences, and Claims fields
+
+ Here's some more guidance on each field:
+ - OIDC Discovery URL: The URL used to retrieve the OpenID Connect configuration from the identity provider. This will be used to fetch the public key needed for verifying the provided JWT.
+ - Issuer: The unique identifier of the identity provider issuing the JWT. This value is used to verify the iss (issuer) claim in the JWT to ensure the token is issued by a trusted provider.
+ - CA Certificate: The PEM-encoded CA cert for establishing secure communication with the Identity Provider endpoints.
+ - Subject: The expected principal that is the subject of the JWT. The `sub` (subject) claim in the JWT should match this value.
+ - Audiences: A list of intended recipients. This value is checked against the aud (audience) claim in the token. The token's aud claim should match at least one of the audiences for it to be valid.
+ - Claims: Additional information or attributes that should be present in the JWT for it to be valid.
+ - Access Token TTL (default is `2592000` equivalent to 30 days): The lifetime for an access token in seconds. This value will be referenced at renewal time.
+ - Access Token Max TTL (default is `2592000` equivalent to 30 days): The maximum lifetime for an access token in seconds. This value will be referenced at renewal time.
+ - Access Token Max Number of Uses (default is `0`): The maximum number of times that an access token can be used; a value of `0` implies an infinite number of uses.
+ - Access Token Trusted IPs: The IPs or CIDR ranges that access tokens can be used from. By default, each token is given the `0.0.0.0/0` entry, allowing usage from any network address.
+
+ The `subject`, `audiences`, and `claims` fields support glob pattern matching; however, we highly recommend using hardcoded values whenever possible.
+
+
+
+
+
+ To enable the identity to access project-level resources such as secrets within a specific project, you should add it to that project.
+
+ To do this, head over to the project you want to add the identity to and go to Project Settings > Access Control > Machine Identities and press **Add identity**.
+
+ Next, select the identity you want to add to the project and the project level role you want to allow it to assume. The project role assigned will determine what project level resources this identity can have access to.
+
+ ![identities project](/images/platform/identities/identities-project.png)
+
+ ![identities project create](/images/platform/identities/identities-project-create.png)
+
+
+ To access the Infisical API as the identity, you need to fetch an identity token from an identity provider and make a request to the `/api/v1/auth/oidc-auth/login` endpoint in exchange for an access token.
+
+ We provide an example below of how authentication is done with Infisical using OIDC. It is a snippet from the [official Github secrets action](https://github.com/Infisical/secrets-action).
+
+    #### Sample usage
+    ```javascript
+    export const oidcLogin = async ({ identityId, domain, oidcAudience }) => {
+      const idToken = await core.getIDToken(oidcAudience);
+
+      const loginData = querystring.stringify({
+        identityId,
+        jwt: idToken,
+      });
+
+      try {
+        const response = await axios({
+          method: "post",
+          url: `${domain}/api/v1/auth/oidc-auth/login`,
+          headers: {
+            "Content-Type": "application/x-www-form-urlencoded",
+          },
+          data: loginData,
+        });
+
+        return response.data.accessToken;
+      } catch (err) {
+        core.error(`Error: ${err.message}`);
+        throw err;
+      }
+    };
+    ```
+
+    #### Sample OIDC login response
+
+    ```bash Response
+    {
+      "accessToken": "...",
+      "expiresIn": 7200,
+      "accessTokenMaxTTL": 43244,
+      "tokenType": "Bearer"
+    }
+    ```
+
+
+    We recommend using one of Infisical's clients like SDKs or the Infisical Agent to authenticate with Infisical using OIDC Auth as they handle the authentication process including the fetching of identity tokens for you.
+
+
+
+    Each identity access token has a time-to-live (TTL) which you can infer from the response of the login operation;
+    the default TTL is `7200` seconds which can be adjusted.
+
+    If an identity access token expires, it can no longer authenticate with the Infisical API. In this case,
+    a new access token should be obtained by performing another login operation.
+
+
+
+
diff --git a/docs/documentation/platform/identities/oidc-auth/github.mdx b/docs/documentation/platform/identities/oidc-auth/github.mdx
new file mode 100644
index 0000000000..47352a3392
--- /dev/null
+++ b/docs/documentation/platform/identities/oidc-auth/github.mdx
@@ -0,0 +1,170 @@
+---
+title: Github
+description: "Learn how to authenticate Github workflows with Infisical using OpenID Connect (OIDC)."
+---
+
+**OIDC Auth** is a platform-agnostic JWT-based authentication method that can be used to authenticate from any platform or environment using an identity provider with OpenID Connect.
+
+## Diagram
+
+The following sequence diagram illustrates the OIDC Auth workflow for authenticating Github workflows with Infisical.
+
+```mermaid
+sequenceDiagram
+  participant Client as Github Workflow
+  participant Idp as Identity Provider
+  participant Infis as Infisical
+
+  Client->>Idp: Step 1: Request identity token
+  Idp-->>Client: Return JWT with verifiable claims
+
+  Note over Client,Infis: Step 2: Login Operation
+  Client->>Infis: Send signed JWT to /api/v1/auth/oidc-auth/login
+
+  Note over Infis,Idp: Step 3: Query verification
+  Infis->>Idp: Request JWT public key using OIDC Discovery
+  Idp-->>Infis: Return public key
+
+  Note over Infis: Step 4: JWT validation
+  Infis->>Client: Return short-lived access token
+
+  Note over Client,Infis: Step 5: Access Infisical API with Token
+  Client->>Infis: Make authenticated requests using the short-lived access token
+```
+
+## Concept
+
+At a high-level, Infisical authenticates a client by verifying the JWT and checking that it meets specific requirements (e.g. it is issued by a trusted identity provider) at the `/api/v1/auth/oidc-auth/login` endpoint. If successful,
+then Infisical returns a short-lived access token that can be used to make authenticated requests to the Infisical API.
+
+To be more specific:
+
+1. The Github workflow requests an identity token from Github's identity provider.
+2. The fetched identity token is sent to Infisical at the `/api/v1/auth/oidc-auth/login` endpoint.
+3. Infisical fetches the public key that was used to sign the identity token from Github's identity provider using OIDC Discovery.
+4.
Infisical validates the JWT using the public key provided by the identity provider and checks that the subject, audience, and claims of the token match the set criteria.
+5. If all is well, Infisical returns a short-lived access token that the Github workflow can use to make authenticated requests to the Infisical API.
+
+
+  Infisical needs network-level access to Github's identity provider endpoints.
+
+
+## Guide
+
+In the following steps, we explore how to create and use identities to access the Infisical API using the OIDC Auth authentication method.
+
+
+
+    To create an identity, head to your Organization Settings > Access Control > Machine Identities and press **Create identity**.
+
+    ![identities organization](/images/platform/identities/identities-org.png)
+
+    When creating an identity, you specify an organization level [role](/documentation/platform/role-based-access-controls) for it to assume; you can configure roles in Organization Settings > Access Control > Organization Roles.
+
+    ![identities organization create](/images/platform/identities/identities-org-create.png)
+
+    Now input a few details for your new identity. Here's some guidance for each field:
+
+    - Name (required): A friendly name for the identity.
+    - Role (required): A role from the **Organization Roles** tab for the identity to assume. The organization role assigned will determine what organization level resources this identity can have access to.
+
+    Once you've created an identity, you'll be redirected to a page where you can manage the identity.
+
+    ![identities page](/images/platform/identities/identities-page.png)
+
+    Since the identity has been configured with Universal Auth by default, you should re-configure it to use OIDC Auth instead. To do this, press to edit the **Authentication** section,
+    remove the existing Universal Auth configuration, and add a new OIDC Auth configuration onto the identity.
+
+    ![identities page remove default auth](/images/platform/identities/identities-page-remove-default-auth.png)
+
+    ![identities create oidc auth method](/images/platform/identities/identities-org-create-oidc-auth-method.png)
+
+    Restrict access by configuring the Subject, Audiences, and Claims fields
+
+    Here's some more guidance on each field:
+    - OIDC Discovery URL: The URL used to retrieve the OpenID Connect configuration from the identity provider. This will be used to fetch the public key needed for verifying the provided JWT. This should be set to `https://token.actions.githubusercontent.com`.
+    - Issuer: The unique identifier of the identity provider issuing the JWT. This value is used to verify the iss (issuer) claim in the JWT to ensure the token is issued by a trusted provider. This should be set to `https://token.actions.githubusercontent.com`.
+    - CA Certificate: The PEM-encoded CA cert for establishing secure communication with the Identity Provider endpoints. For Github workflows, this can be left blank.
+    - Subject: The expected principal that is the subject of the JWT. The format of the sub field for GitHub workflow OIDC tokens is as follows: `repo:<owner>/<repo>:<environment>`. The environment can be where the GitHub workflow is running, such as `environment`, `ref`, or `job_workflow_ref`. For example, if you have a repository owned by octocat named example-repo, and the GitHub workflow is running on the main branch, the subject field might look like this: `repo:octocat/example-repo:ref:refs/heads/main`
+    - Audiences: A list of intended recipients.
This value is checked against the aud (audience) claim in the token. By default, set this to the URL of the repository owner, such as the organization that owns the repository (e.g. `https://github.com/octo-org`).
+    - Claims: Additional information or attributes that should be present in the JWT for it to be valid. You can refer to Github's [documentation](https://docs.github.com/en/actions/deployment/security-hardening-your-deployments/about-security-hardening-with-openid-connect#understanding-the-oidc-token) for the complete list of supported claims.
+    - Access Token TTL (default is `2592000` equivalent to 30 days): The lifetime for an access token in seconds. This value will be referenced at renewal time.
+    - Access Token Max TTL (default is `2592000` equivalent to 30 days): The maximum lifetime for an access token in seconds. This value will be referenced at renewal time.
+    - Access Token Max Number of Uses (default is `0`): The maximum number of times that an access token can be used; a value of `0` implies infinite number of uses.
+    - Access Token Trusted IPs: The IPs or CIDR ranges that access tokens can be used from. By default, each token is given the `0.0.0.0/0`, allowing usage from any network address.
+    If you are unsure about what to configure for the subject, audience, and claims fields, you can use [github/actions-oidc-debugger](https://github.com/github/actions-oidc-debugger) to get the appropriate values. Alternatively, you can fetch the JWT from the workflow and inspect the fields manually.
+    The `subject`, `audiences`, and `claims` fields support glob pattern matching; however, we highly recommend using hardcoded values whenever possible.
+
+
+    To enable the identity to access project-level resources such as secrets within a specific project, you should add it to that project.
+
+    To do this, head over to the project you want to add the identity to and go to Project Settings > Access Control > Machine Identities and press **Add identity**.
+
+    Next, select the identity you want to add to the project and the project level role you want to allow it to assume. The project role assigned will determine what project level resources this identity can have access to.
+
+    ![identities project](/images/platform/identities/identities-project.png)
+
+    ![identities project create](/images/platform/identities/identities-project-create.png)
+
+
+    As a prerequisite, you will need to set `id-token:write` permissions for the Github workflow. This setting allows the JWT to be requested from Github's OIDC provider.
+
+    ```yaml
+    permissions:
+      id-token: write # This is required for requesting the JWT
+      ...
+    ```
+
+    To access the Infisical API as the identity, you need to fetch an identity token from Github's identity provider and make a request to the `/api/v1/auth/oidc-auth/login` endpoint in exchange for an access token.
+    The identity token can be fetched using either of the following approaches:
+    - Using environment variables on the runner (`ACTIONS_ID_TOKEN_REQUEST_URL` and `ACTIONS_ID_TOKEN_REQUEST_TOKEN`).
+      ```yaml
+      steps:
+        - name: Request OIDC Token
+          run: |
+            echo "Requesting OIDC token..."
+            TOKEN=$(curl -s -H "Authorization: Bearer $ACTIONS_ID_TOKEN_REQUEST_TOKEN" "$ACTIONS_ID_TOKEN_REQUEST_URL" | jq -r '.value')
+            echo "TOKEN=$TOKEN" >> $GITHUB_ENV
+      ```
+
+    - Using `getIDToken()` from the Github Actions toolkit.
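+
+    Whichever approach you use to fetch the identity token, it must then be exchanged for an Infisical access token at the login endpoint. Below is a minimal sketch of this exchange using `curl` and `jq`, mirroring the form-encoded `identityId` and `jwt` parameters from the login snippet shown earlier; it assumes the token from the previous step is available as `$TOKEN` and that `<identity-id>` is replaced with your machine identity's ID.
+
+    ```bash
+    # Exchange the Github-issued OIDC token for a short-lived Infisical access token
+    ACCESS_TOKEN=$(curl -s --request POST \
+      --url "https://app.infisical.com/api/v1/auth/oidc-auth/login" \
+      --header "Content-Type: application/x-www-form-urlencoded" \
+      --data-urlencode "identityId=<identity-id>" \
+      --data-urlencode "jwt=$TOKEN" | jq -r '.accessToken')
+
+    # Expose the access token to later workflow steps as a Bearer credential
+    echo "ACCESS_TOKEN=$ACCESS_TOKEN" >> $GITHUB_ENV
+    ```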
+
+    Below is an example of how a Github workflow can be configured to fetch secrets from Infisical using the [Infisical Secrets Action](https://github.com/Infisical/secrets-action) with OIDC Auth.
+    ```yaml
+    name: Manual workflow
+
+    on:
+      workflow_dispatch:
+
+    permissions:
+      id-token: write # This is required for requesting the JWT
+
+    jobs:
+      build:
+        runs-on: ubuntu-latest
+        steps:
+          - uses: Infisical/secrets-action@v1.0.7
+            with:
+              method: "oidc"
+              env-slug: "dev"
+              project-slug: "ggggg-9-des"
+              identity-id: "6b579c00-5c85-4b44-aabe-f8a
+    ...
+    ```
+
+    Subsequent steps can then use the secret values injected into the workflow's environment.
+
+
+    We recommend using [Infisical Secrets Action](https://github.com/Infisical/secrets-action) to authenticate with Infisical using OIDC Auth as it handles the authentication process including the fetching of identity tokens for you.
+
+
+
+    Each identity access token has a time-to-live (TTL) which you can infer from the response of the login operation;
+    the default TTL is `7200` seconds which can be adjusted.
+
+    If an identity access token expires, it can no longer authenticate with the Infisical API. In this case,
+    a new access token should be obtained by performing another login operation.
+
+
+
+
diff --git a/docs/documentation/platform/identities/oidc-auth/gitlab.mdx b/docs/documentation/platform/identities/oidc-auth/gitlab.mdx
new file mode 100644
index 0000000000..228392aa6c
--- /dev/null
+++ b/docs/documentation/platform/identities/oidc-auth/gitlab.mdx
@@ -0,0 +1,145 @@
+---
+title: GitLab
+description: "Learn how to authenticate GitLab pipelines with Infisical using OpenID Connect (OIDC)."
+---
+
+**OIDC Auth** is a platform-agnostic JWT-based authentication method that can be used to authenticate from any platform or environment using an identity provider with OpenID Connect.
+
+## Diagram
+
+The following sequence diagram illustrates the OIDC Auth workflow for authenticating GitLab pipelines with Infisical.
+
+```mermaid
+sequenceDiagram
+  participant Client as GitLab Pipeline
+  participant Idp as GitLab Identity Provider
+  participant Infis as Infisical
+
+  Client->>Idp: Step 1: Request identity token
+  Idp-->>Client: Return JWT with verifiable claims
+
+  Note over Client,Infis: Step 2: Login Operation
+  Client->>Infis: Send signed JWT to /api/v1/auth/oidc-auth/login
+
+  Note over Infis,Idp: Step 3: Query verification
+  Infis->>Idp: Request JWT public key using OIDC Discovery
+  Idp-->>Infis: Return public key
+
+  Note over Infis: Step 4: JWT validation
+  Infis->>Client: Return short-lived access token
+
+  Note over Client,Infis: Step 5: Access Infisical API with Token
+  Client->>Infis: Make authenticated requests using the short-lived access token
+```
+
+## Concept
+
+At a high-level, Infisical authenticates a client by verifying the JWT and checking that it meets specific requirements (e.g. it is issued by a trusted identity provider) at the `/api/v1/auth/oidc-auth/login` endpoint. If successful,
+then Infisical returns a short-lived access token that can be used to make authenticated requests to the Infisical API.
+
+To be more specific:
+
+1. The GitLab pipeline requests an identity token from GitLab's identity provider.
+2. The fetched identity token is sent to Infisical at the `/api/v1/auth/oidc-auth/login` endpoint.
+3. Infisical fetches the public key that was used to sign the identity token from GitLab's identity provider using OIDC Discovery.
+4.
Infisical validates the JWT using the public key provided by the identity provider and checks that the subject, audience, and claims of the token match the set criteria.
+5. If all is well, Infisical returns a short-lived access token that the GitLab pipeline can use to make authenticated requests to the Infisical API.
+
+
+  Infisical needs network-level access to GitLab's identity provider endpoints.
+
+
+## Guide
+
+In the following steps, we explore how to create and use identities to access the Infisical API using the OIDC Auth authentication method.
+
+
+
+    To create an identity, head to your Organization Settings > Access Control > Machine Identities and press **Create identity**.
+
+    ![identities organization](/images/platform/identities/identities-org.png)
+
+    When creating an identity, you specify an organization level [role](/documentation/platform/role-based-access-controls) for it to assume; you can configure roles in Organization Settings > Access Control > Organization Roles.
+
+    ![identities organization create](/images/platform/identities/identities-org-create.png)
+
+    Now input a few details for your new identity. Here's some guidance for each field:
+
+    - Name (required): A friendly name for the identity.
+    - Role (required): A role from the **Organization Roles** tab for the identity to assume. The organization role assigned will determine what organization level resources this identity can have access to.
+
+    Once you've created an identity, you'll be redirected to a page where you can manage the identity.
+
+    ![identities page](/images/platform/identities/identities-page.png)
+
+    Since the identity has been configured with Universal Auth by default, you should re-configure it to use OIDC Auth instead. To do this, press to edit the **Authentication** section,
+    remove the existing Universal Auth configuration, and add a new OIDC Auth configuration onto the identity.
+
+    ![identities page remove default auth](/images/platform/identities/identities-page-remove-default-auth.png)
+
+    ![identities create oidc auth method](/images/platform/identities/identities-org-create-oidc-auth-method.png)
+
+    Restrict access by configuring the Subject, Audiences, and Claims fields
+
+    Here's some more guidance on each field:
+    - OIDC Discovery URL: The URL used to retrieve the OpenID Connect configuration from the identity provider. This will be used to fetch the public key needed for verifying the provided JWT. For GitLab SaaS (GitLab.com), this should be set to `https://gitlab.com`. For self-hosted GitLab instances, use the domain of your GitLab instance.
+    - Issuer: The unique identifier of the identity provider issuing the JWT. This value is used to verify the iss (issuer) claim in the JWT to ensure the token is issued by a trusted provider. This should also be set to the domain of the GitLab instance.
+    - CA Certificate: The PEM-encoded CA cert for establishing secure communication with the Identity Provider endpoints. For GitLab.com, this can be left blank.
+    - Subject: The expected principal that is the subject of the JWT. For GitLab pipelines, this should be set to a string that uniquely identifies the pipeline and its context, in the format `project_path:{group}/{project}:ref_type:{type}:ref:{branch_name}` (e.g., `project_path:example-group/example-project:ref_type:branch:ref:main`).
+    - Claims: Additional information or attributes that should be present in the JWT for it to be valid.
You can refer to GitLab's [documentation](https://docs.gitlab.com/ee/ci/secrets/id_token_authentication.html#token-payload) for the list of supported claims.
+    - Access Token TTL (default is `2592000` equivalent to 30 days): The lifetime for an access token in seconds. This value will be referenced at renewal time.
+    - Access Token Max TTL (default is `2592000` equivalent to 30 days): The maximum lifetime for an access token in seconds. This value will be referenced at renewal time.
+    - Access Token Max Number of Uses (default is `0`): The maximum number of times that an access token can be used; a value of `0` implies infinite number of uses.
+    - Access Token Trusted IPs: The IPs or CIDR ranges that access tokens can be used from. By default, each token is given the `0.0.0.0/0`, allowing usage from any network address.
+    For more details on the appropriate values for the OIDC fields, refer to GitLab's [documentation](https://docs.gitlab.com/ee/ci/secrets/id_token_authentication.html#token-payload).
+    The `subject`, `audiences`, and `claims` fields support glob pattern matching; however, we highly recommend using hardcoded values whenever possible.
+
+
+    To enable the identity to access project-level resources such as secrets within a specific project, you should add it to that project.
+
+    To do this, head over to the project you want to add the identity to and go to Project Settings > Access Control > Machine Identities and press **Add identity**.
+
+    Next, select the identity you want to add to the project and the project level role you want to allow it to assume. The project role assigned will determine what project level resources this identity can have access to.
+
+    ![identities project](/images/platform/identities/identities-project.png)
+
+    ![identities project create](/images/platform/identities/identities-project-create.png)
+
+
+
+    As demonstration, we will be using the Infisical CLI to fetch Infisical secrets and utilize them within a GitLab pipeline.
+
+    To access Infisical secrets as the identity, you need to use an identity token from GitLab which matches the OIDC configuration defined for the machine identity.
+    This can be done by defining the `id_tokens` property. The resulting token would then be used to log in with OIDC like the following: `infisical login --method=oidc-auth --oidc-jwt=$GITLAB_TOKEN`
+
+    Below is a complete example of how a GitLab pipeline can be configured to work with secrets from Infisical using the Infisical CLI with OIDC Auth:
+
+    ```yaml
+    image: ubuntu
+
+    stages:
+      - build
+
+    build-job:
+      stage: build
+      id_tokens:
+        INFISICAL_ID_TOKEN:
+          aud: infisical-aud-test
+      script:
+        - apt update && apt install -y curl
+        - curl -1sLf 'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.deb.sh' | bash
+        - apt-get update && apt-get install -y infisical
+        - export INFISICAL_TOKEN=$(infisical login --method=oidc-auth --machine-identity-id=4e807a78-1b1c-4bd6-9609-ef2b0cf4fd54 --oidc-jwt=$INFISICAL_ID_TOKEN --silent --plain)
+        - infisical run --projectId=1d0443c1-cd43-4b3a-91a3-9d5f81254a89 --env=dev -- npm run build
+    ```
+
+    The `id_tokens` keyword is used to request an ID token for the job. In this example, an ID token named `INFISICAL_ID_TOKEN` is requested with the audience (`aud`) claim set to "infisical-aud-test". This ID token will be used to authenticate with Infisical.
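+
+    If you prefer not to install the Infisical CLI in your pipeline image, the same login can be performed against the HTTP API directly. The sketch below reuses the `/api/v1/auth/oidc-auth/login` endpoint described earlier; `<identity-id>` and `<project-id>` are placeholders you would substitute, and the secrets route shown is illustrative, so confirm the exact path against the API reference for your Infisical version.
+
+    ```bash
+    # Exchange the GitLab-issued ID token for a short-lived Infisical access token
+    ACCESS_TOKEN=$(curl -s --request POST \
+      --url "https://app.infisical.com/api/v1/auth/oidc-auth/login" \
+      --header "Content-Type: application/x-www-form-urlencoded" \
+      --data-urlencode "identityId=<identity-id>" \
+      --data-urlencode "jwt=$INFISICAL_ID_TOKEN" | jq -r '.accessToken')
+
+    # Use the access token as a Bearer credential on subsequent API calls,
+    # e.g. reading secrets (path shown for illustration; see the API reference)
+    curl -s --request GET \
+      --url "https://app.infisical.com/api/v3/secrets/raw?workspaceId=<project-id>&environment=dev" \
+      --header "Authorization: Bearer $ACCESS_TOKEN"
+    ```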
+
+    Each identity access token has a time-to-live (TTL) which you can infer from the response of the login operation; the default TTL is `7200` seconds, which can be adjusted.
+
+    If an identity access token expires, it can no longer authenticate with the Infisical API. In this case, a new access token should be obtained by performing another login operation.
+
+
+
+
diff --git a/docs/documentation/platform/identities/overview.mdx b/docs/documentation/platform/identities/overview.mdx
index 18c173766f..f79633cb3c 100644
--- a/docs/documentation/platform/identities/overview.mdx
+++ b/docs/documentation/platform/identities/overview.mdx
@@ -4,7 +4,7 @@ sidebarTitle: "Overview"
description: "Learn more about identities to interact with resources in Infisical."
---
-To interact with secrets and resource with Infisical, it is important to undrestand the concept of identities.
+To interact with secrets and resources with Infisical, it is important to understand the concept of identities.
Identities can be of two types:
- **People** (e.g., developers, platform engineers, administrators)
- **Machines** (e.g., machine entities for managing secrets in CI/CD pipelines, production applications, and more)
diff --git a/docs/documentation/platform/identities/token-auth.mdx b/docs/documentation/platform/identities/token-auth.mdx
new file mode 100644
index 0000000000..59c5f9abf6
--- /dev/null
+++ b/docs/documentation/platform/identities/token-auth.mdx
@@ -0,0 +1,138 @@
+---
+title: Token Auth
+description: "Learn how to authenticate to Infisical from any platform or environment using an access token."
+---
+
+**Token Auth** is a platform-agnostic, simple authentication method that can be configured for a [machine identity](/documentation/platform/identities/machine-identities) to authenticate from any platform/environment using a token.
+
+## Diagram
+
+The following sequence diagram illustrates the Token Auth workflow for authenticating clients with Infisical.
+
+```mermaid
+sequenceDiagram
+  participant Client as Client
+  participant Infis as Infisical
+
+  Note over Client,Infis: Access Infisical API with Token
+  Client->>Infis: Make authenticated requests using the token
+
+```
+
+## Concept
+
+Token Auth is the simplest authentication method that a client can use to authenticate with Infisical.
+
+Unlike other authentication methods where a client must exchange credential(s) for a short-lived access token to access the Infisical API,
+Token Auth allows a client to make authenticated requests to the Infisical API directly using a token. Conceptually, this is similar to using an API Key.
+
+To be more specific:
+
+1. An operator creates an access token in the Infisical UI.
+2. The operator shares the access token with the client, which can then use it to make authenticated requests to the Infisical API.
+
+## Guide
+
+In the following steps, we explore how to create and use identities for your workloads and applications to access the Infisical API
+using the Token Auth authentication method.
+
+
+
+    To create an identity, head to your Organization Settings > Access Control > Machine Identities and press **Create identity**.
+
+    ![identities organization](/images/platform/identities/identities-org.png)
+
+    When creating an identity, you specify an organization level [role](/documentation/platform/role-based-access-controls) for it to assume; you can configure roles in Organization Settings > Access Control > Organization Roles.
+
+    ![identities organization create](/images/platform/identities/identities-org-create.png)
+
+    Now input a few details for your new identity. Here's some guidance for each field:
+
+    - Name (required): A friendly name for the identity.
+    - Role (required): A role from the **Organization Roles** tab for the identity to assume. The organization role assigned will determine what organization level resources this identity can have access to.
+
+    Once you've created an identity, you'll be redirected to a page where you can manage the identity.
+
+    ![identities page](/images/platform/identities/identities-page.png)
+
+    Since the identity has been configured with Universal Auth by default, you should re-configure it to use Token Auth instead. To do this, press to edit the **Authentication** section,
+    remove the existing Universal Auth configuration, and add a new Token Auth configuration onto the identity.
+
+    ![identities page remove default auth](/images/platform/identities/identities-page-remove-default-auth.png)
+
+    ![identities organization create token auth method](/images/platform/identities/identities-org-create-token-auth-method.png)
+
+    Here's some more guidance on each field:
+
+    - Access Token TTL (default is `2592000` equivalent to 30 days): The lifetime for an access token in seconds. This value will be referenced at renewal time.
+    - Access Token Max TTL (default is `2592000` equivalent to 30 days): The maximum lifetime for an access token in seconds. This value will be referenced at renewal time.
+    - Access Token Max Number of Uses (default is `0`): The maximum number of times that an access token can be used; a value of `0` implies infinite number of uses.
+    - Access Token Trusted IPs: The IPs or CIDR ranges that access tokens can be used from. By default, each token is given the `0.0.0.0/0`, allowing usage from any network address.
+
+
+    Restricting access token usage to specific trusted IPs is a paid feature.
+
+    If you’re using Infisical Cloud, then it is available under the Pro Tier. If you’re self-hosting Infisical, then you should contact sales@infisical.com to purchase an enterprise license to use it.
+
+
+
+
+    In order to use the identity with Token Auth, you'll need to create an (access) token; you can think of this token as akin
+    to an API Key used to authenticate with the Infisical API. With that, press **Create Token**.
+
+    ![identities client secret create](/images/platform/identities/identities-token-auth-create-1.png)
+
+    ![identities client secret create](/images/platform/identities/identities-token-auth-create-2.png)
+
+    ![identities client secret create](/images/platform/identities/identities-token-auth-create-3.png)
+
+    Copy the token and keep it handy as you'll need it to authenticate with the Infisical API.
+
+
+
+    To enable the identity to access project-level resources such as secrets within a specific project, you should add it to that project.
+
+    To do this, head over to the project you want to add the identity to and go to Project Settings > Access Control > Machine Identities and press **Add identity**.
+
+    Next, select the identity you want to add to the project and the project level role you want to allow it to assume. The project role assigned will determine what project level resources this identity can have access to.
+
+    ![identities project](/images/platform/identities/identities-project.png)
+
+    ![identities project create](/images/platform/identities/identities-project-create.png)
+
+
+
+    To access the Infisical API as the identity, you can use the generated access token from step 2
+    to authenticate with the [Infisical API](/api-reference/overview/introduction).
+
+
+    Each identity access token has a time-to-live (TTL) which you can infer from the response of the login operation;
+    the default TTL is `7200` seconds which can be adjusted in the Token Auth configuration.
+
+    If an identity access token expires, it can no longer authenticate with the Infisical API. In this case,
+    a new access token should be obtained.
+
+
+
+
+
+**FAQ**
+
+
+
+    There are a few reasons for why this might happen:
+
+    - The access token has expired. If this is the case, you should obtain a new access token or consider extending the token's TTL.
+    - The identity is insufficiently permissioned to interact with the resources you wish to access.
+    - The access token is being used from an untrusted IP.
+
+
+    An identity access token can have a time-to-live (TTL) or incremental lifetime after which it expires.
+
+    In certain cases, you may want to extend the lifespan of an access token; to do so, you must set a max TTL parameter.
+
+A token can be renewed any number of times where each call to renew it can extend the token's lifetime by increments of the access token's TTL.
+Regardless of how frequently an access token is renewed, its lifespan remains bound to the maximum TTL determined at its creation.
+
+
+
diff --git a/docs/documentation/platform/identities/universal-auth.mdx b/docs/documentation/platform/identities/universal-auth.mdx
index 09ed1cb7b1..5979780936 100644
--- a/docs/documentation/platform/identities/universal-auth.mdx
+++ b/docs/documentation/platform/identities/universal-auth.mdx
@@ -3,11 +3,11 @@ title: Universal Auth
description: "Learn how to authenticate to Infisical from any platform or environment."
---
-**Universal Auth** is a platform-agnostic authentication method that can be configured for a [machine identity](/documentation/platform/identities/machine-identities) suitable to authenticate from any platform/environment.
+**Universal Auth** is a platform-agnostic authentication method that can be configured for a [machine identity](/documentation/platform/identities/machine-identities) to authenticate from any platform/environment using a Client ID and Client Secret.
## Diagram
-The following sequence digram illustrates the Universal Auth workflow for authenticating clients with Infisical.
+The following sequence diagram illustrates the Universal Auth workflow for authenticating clients with Infisical.
```mermaid
sequenceDiagram
@@ -55,9 +55,16 @@ using the Universal Auth authentication method.
- Name (required): A friendly name for the identity.
- Role (required): A role from the **Organization Roles** tab for the identity to assume. The organization role assigned will determine what organization level resources this identity can have access to.
+ Once you've created an identity, you'll be redirected to a page where you can manage the identity.
+
+ ![identities page](/images/platform/identities/identities-page.png)
+
Once you've created an identity, you'll be prompted to configure the **Universal Auth** authentication method for it.
- ![identities organization create auth method](/images/platform/identities/identities-org-create-auth-method.png) + By default, the identity has been configured with Universal Auth. If you wish, you can edit the Universal Auth configuration + details by pressing to edit the **Authentication** section. + + ![identities organization create universal auth method](/images/platform/identities/identities-org-create-universal-auth-method.png) Here's some more guidance on each field: @@ -77,12 +84,12 @@ using the Universal Auth authentication method. In order to use the identity, you'll need the non-sensitive **Client ID** of the identity and a **Client Secret** for it; you can think of these credentials akin to a username - and password used to authenticate with the Infisical API. With that, press on the key icon on the identity to generate a **Client Secret** - for it. + and password used to authenticate with the Infisical API. + With that, press **Create Client Secret**. - ![identities client secret create](/images/platform/identities/identities-org-client-secret.png) - ![identities client secret create](/images/platform/identities/identities-org-client-secret-create-1.png) - ![identities client secret create](/images/platform/identities/identities-org-client-secret-create-2.png) + ![identities client secret create](/images/platform/identities/identities-universal-auth-create-1.png) + ![identities client secret create](/images/platform/identities/identities-universal-auth-create-2.png) + ![identities client secret create](/images/platform/identities/identities-universal-auth-create-3.png) Feel free to input any (optional) details for the **Client Secret** configuration: @@ -131,7 +138,7 @@ using the Universal Auth authentication method. Each identity access token has a time-to-live (TLL) which you can infer from the response of the login operation; - the default TTL is `7200` seconds which can be adjusted. + the default TTL is `7200` seconds which can be adjusted in the Universal Auth configuration. If an identity access token expires, it can no longer authenticate with the Infisical API. In this case, a new access token should be obtained by performing another login operation. @@ -148,7 +155,6 @@ using the Universal Auth authentication method. - The client secret or access token has expired. - The identity is insufficently permissioned to interact with the resources you wish to access. - - You are attempting to access a `/raw` secrets endpoint that requires your project to disable E2EE. - The client secret/access token is being used from an untrusted IP. @@ -156,8 +162,8 @@ using the Universal Auth authentication method. In certain cases, you may want to extend the lifespan of an access token; to do so, you must set a max TTL parameter. -A token can be renewed any number of time and each call to renew it will extend the toke life by increments of access token TTL. -Regardless of how frequently an access token is renewed, its lifespan remains bound to the maximum TTL determined at its creation +A token can be renewed any number of times where each call to renew it can extend the token's lifetime by increments of the access token's TTL. +Regardless of how frequently an access token is renewed, its lifespan remains bound to the maximum TTL determined at its creation. 
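+
+For illustration, a renewal call can be scripted ahead of expiry. This is a minimal sketch, assuming the renew endpoint lives at `/api/v1/auth/token/renew` and accepts the current token in the JSON body; confirm the exact route and payload against the API reference for your Infisical version.
+
+```bash
+# Renew an access token before it expires; each renewal extends the token's
+# lifetime by its TTL, bounded by the max TTL fixed at creation time
+curl -s --request POST \
+  --url "https://app.infisical.com/api/v1/auth/token/renew" \
+  --header "Content-Type: application/json" \
+  --data '{ "accessToken": "<current-access-token>" }'
+```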
diff --git a/docs/documentation/platform/kms-configuration/aws-hsm.mdx b/docs/documentation/platform/kms-configuration/aws-hsm.mdx
new file mode 100644
index 0000000000..e7bd03fd10
--- /dev/null
+++ b/docs/documentation/platform/kms-configuration/aws-hsm.mdx
@@ -0,0 +1,82 @@
+---
+title: "AWS CloudHSM"
+description: "Learn how to manage encryption using AWS CloudHSM"
+---
+
+This guide provides instructions on securing Infisical project secrets using AWS CloudHSM.
+Integration with AWS CloudHSM is achieved by configuring it as a custom key store for AWS KMS.
+Follow the steps below to set up AWS KMS with AWS CloudHSM as the custom key store.
+
+## Prepare AWS CloudHSM Cluster
+
+Before you get started, you'll need to configure an AWS CloudHSM cluster that meets the following criteria:
+
+- The cluster must be active.
+- The cluster must not be associated with any other AWS KMS custom key store.
+- The cluster must be configured with private subnets in at least two Availability Zones in the Region.
+- The security group for the cluster must include inbound and outbound rules that allow TCP traffic on ports 2223-2225.
+- The cluster must contain at least two active HSMs in different Availability Zones.
+
+For more details on setting up your cluster, refer to the following [AWS documentation](https://docs.aws.amazon.com/kms/latest/developerguide/create-keystore.html#before-keystore).
+
+## Set Up AWS KMS Custom Key Store
+
+To set up an AWS KMS custom key store with AWS CloudHSM, you will need the following:
+
+- The trust anchor certificate of your AWS CloudHSM cluster.
+- A `kmsuser` user in the AWS CloudHSM cluster with the crypto-user role.
+
+
+
+    In the AWS console, head over to `AWS KMS` > `AWS CloudHSM key stores` and click **Create key store**.
+
+
+    Input the custom key store name. ![Set key store name](../../../images/platform/kms/aws-hsm/create-key-store-name.png)
+
+
+    Select the AWS CloudHSM cluster. You should be able to select the cluster if it meets the required criteria mentioned above.
+    ![Set key store cluster](../../../images/platform/kms/aws-hsm/create-key-store-cluster.png)
+
+
+    Upload your CloudHSM cluster's trust anchor certificate file.
+    ![Set key store cert](../../../images/platform/kms/aws-hsm/create-key-store-cert.png)
+
+
+    Input the password of the `kmsuser` crypto-user in your cluster.
+    ![Set key store password](../../../images/platform/kms/aws-hsm/create-key-store-password.png)
+
+
+    Proceed with creating the AWS CloudHSM key store.
+
+
+
+For more details, refer to the following [AWS documentation](https://docs.aws.amazon.com/kms/latest/developerguide/create-keystore.html#create-keystore-console).
+
+## Create AWS KMS Key
+Next, you'll need to create an AWS KMS key where you will set the key store you created previously.
+
+
+
+    In your AWS console, proceed to `AWS KMS` > `Customer managed keys` and click **Create**.
+
+
+    Set Key type to `Symmetric` and Key usage to `Encrypt and decrypt`.
+    ![Set key options 1](../../../images/platform/kms/aws-hsm/create-kms-key-1.png)
+
+
+    In the advanced options, for the Key material origin field, select `AWS CloudHSM key store`. Then, click next.
+    ![Set key options 2](../../../images/platform/kms/aws-hsm/create-kms-key-2.png)
+
+
+    Select the AWS CloudHSM key store you created earlier.
+    ![Select HSM 1](../../../images/platform/kms/aws-hsm/create-kms-select-hsm.png)
+
+
+    Proceed with creating the AWS KMS Key.
+
+
+
+## Connect Infisical to AWS KMS Key
+
+You should now have an AWS KMS key that has a custom key store set to AWS CloudHSM.
+To secure project resources, you will need to add this AWS KMS key to your Infisical organization. To learn how, refer to the documentation [here](./aws-kms).
\ No newline at end of file
diff --git a/docs/documentation/platform/kms-configuration/aws-kms.mdx b/docs/documentation/platform/kms-configuration/aws-kms.mdx
new file mode 100644
index 0000000000..f9fa54b4dd
--- /dev/null
+++ b/docs/documentation/platform/kms-configuration/aws-kms.mdx
@@ -0,0 +1,151 @@
+---
+title: "AWS Key Management Service"
+description: "Learn how to manage encryption using AWS KMS"
+---
+
+To enhance the security of your Infisical projects, you can now encrypt your secrets using an external Key Management Service (KMS).
+When external KMS is configured for your project, all encryption and decryption operations will be handled by the chosen KMS.
+This guide will walk you through the steps needed to configure external KMS support with AWS KMS.
+
+## Prerequisites
+
+Before you begin, you'll first need to choose a method of authentication with AWS from below.
+
+
+
+
+
+    1. Navigate to the [Create IAM Role](https://console.aws.amazon.com/iamv2/home#/roles/create?step=selectEntities) page in your AWS Console.
+    ![IAM Role Creation](/images/integrations/aws/integration-aws-iam-assume-role.png)
+
+    2. Select **AWS Account** as the **Trusted Entity Type**.
+    3. Choose **Another AWS Account** and enter **381492033652** (Infisical AWS Account ID). This restricts the role to be assumed only by Infisical. If you are self-hosting, provide the AWS account number where Infisical is hosted.
+    4. Optionally, enable **Require external ID** and enter your Infisical **project ID** to further enhance security.
+
+
+    Use the following custom policy to grant the minimum permissions required by Infisical to integrate with AWS KMS.
+
+    ```json
+    {
+      "Version": "2012-10-17",
+      "Statement": [
+        {
+          "Sid": "AllowKMSAccess",
+          "Effect": "Allow",
+          "Action": [
+            "kms:Decrypt",
+            "kms:Encrypt",
+            "kms:DescribeKey"
+          ],
+          "Resource": "*"
+        }
+      ]
+    }
+    ```
+
+
+
+
+
+    Navigate to your IAM user and add a policy to grant the following permissions:
+    ```json
+    {
+      "Version": "2012-10-17",
+      "Statement": [
+        {
+          "Sid": "AllowKMSAccess",
+          "Effect": "Allow",
+          "Action": [
+            "kms:Decrypt",
+            "kms:Encrypt",
+            "kms:DescribeKey"
+          ],
+          "Resource": "*"
+        }
+      ]
+    }
+    ```
+
+
+
+## Set Up AWS KMS in the Organization Settings
+
+Next, you will need to follow the steps listed below to add AWS KMS for your organization.
+
+
+
+    ![Open encryption org settings](../../../images/platform/kms/aws/encryption-org-settings.png)
+
+
+    ![Add encryption org settings](../../../images/platform/kms/aws/encryption-org-settings-add.png)
+    Click the 'Add' button to begin adding a new external KMS.
+
+
+    ![Select Encryption Provider](../../../images/platform/kms/aws/encryption-modal-provider-select.png)
+    Choose 'AWS KMS' from the list of encryption providers.
+
+
+    Selecting AWS as the provider will require you to input the following fields.
+
+
+      Name for referencing the AWS KMS key within the organization.
+
+
+
+      Short description of the AWS KMS key.
+
+
+
+      Authentication mode for AWS, either "AWS Assume Role" or "Access Key".
+
+
+
+      ARN of the AWS role to assume for providing Infisical access to the AWS KMS Key (required if Authentication Mode is "AWS Assume Role").
+
+
+
+      Custom identifier for additional validation during role assumption.
+ + + + AWS IAM Access Key ID for authentication (required if Authentication Mode is "Access Key"). + + + + AWS IAM Secret Access Key for authentication (required if Authentication Mode is "Access Key"). + + + + AWS region where the AWS KMS Key is located. + + + Key ID of the AWS KMS Key. If left blank, Infisical will generate and use a new AWS KMS Key in the specified region. + ![AWS KMS key ID](../../../images/platform/kms/aws/aws-kms-key-id.png) + + + + + Save your configuration to apply the settings. + + + +You now have an AWS KMS Key configured at the organization level. You can assign these AWS KMS keys to existing Infisical projects by visiting the 'Project Settings' page. + +## Assign AWS KMS Key to an Existing Project + +To assign the AWS KMS key you added to your organization, follow the steps below. + + + + ![Open encryption project + settings](../../../images/platform/kms/aws/encryption-project-settings.png) + + + ![Select encryption project + settings](../../../images/platform/kms/aws/encryption-project-settings-select.png) + Choose the AWS KMS key you configured earlier. + + + Once you have selected the KMS of choice, click save. + + diff --git a/docs/documentation/platform/kms-configuration/overview.mdx b/docs/documentation/platform/kms-configuration/overview.mdx new file mode 100644 index 0000000000..327481bc40 --- /dev/null +++ b/docs/documentation/platform/kms-configuration/overview.mdx @@ -0,0 +1,28 @@ +--- +title: "Key Management Service (KMS) Configuration" +sidebarTitle: "Overview" +description: "Learn how to configure your project's encryption" +--- + +## Introduction + +Infisical leverages a Key Management Service (KMS) to securely encrypt and decrypt secrets in your projects. + +## Overview + +Infisical's KMS ensures the security of your project's secrets through the following mechanisms: + +- Each project is assigned a unique workspace key, which is responsible for encrypting and decrypting secret values. +- The workspace key itself is encrypted using the project's configured KMS. +- When secrets are requested, the workspace key is derived from the configured KMS. This key is then used to decrypt the secret values on-demand before sending them to the requesting client. + +## Configuration + +You can set the KMS for new projects during project creation. +![Configure KMS new](../../../images/platform/kms/configure-kms-new.png) +For existing projects, you can configure the KMS from the Project Settings page. +![Configure KMS existing](../../../images/platform/kms/configure-kms-existing.png) + +## External KMS + +Infisical supports the use of external KMS solutions to enhance security and compliance. You can configure your project to use services like [AWS Key Management Service](./aws-kms) for managing encryption. \ No newline at end of file diff --git a/docs/documentation/platform/kms/hsm-integration.mdx b/docs/documentation/platform/kms/hsm-integration.mdx new file mode 100644 index 0000000000..4f9efe49f2 --- /dev/null +++ b/docs/documentation/platform/kms/hsm-integration.mdx @@ -0,0 +1,262 @@ +--- +title: "HSM Integration" +description: "Learn more about integrating an HSM with Infisical KMS." +--- + + + Changing the encryption strategy for your instance is an Enterprise-only feature. + This section is intended for users who have obtained an Enterprise license and are on-premise. + + + Please reach out to sales@infisical.com if you have any questions. + + +## Overview + +Infisical KMS currently supports two encryption strategies: +1. 
**Standard Encryption**: This is the default encryption strategy used by Infisical KMS. It uses a software-protected encryption key to encrypt KMS keys within your Infisical instance. The root encryption key is defined by setting the `ENCRYPTION_KEY` environment variable.
+2. **Hardware Security Module (HSM)**: This encryption strategy uses a Hardware Security Module (HSM) to create a root encryption key that is stored on a physical device to encrypt the KMS keys within your instance.
+
+## Hardware Security Module (HSM)
+
+![HSM Illustration](/images/platform/kms/hsm/hsm-illustration.png)
+
+Using a hardware security module comes with the added benefit of having a secure and tamper-proof device to store your encryption keys. This ensures that your data is protected from unauthorized access.
+
+
+  All encryption keys used for cryptographic operations are stored within the HSM. This means that if the HSM is lost or destroyed, you will no longer be able to decrypt your data stored within Infisical. Most providers offer recovery options for HSM devices, which you should consider when setting up an HSM device.
+
+
+Enabling HSM encryption has a set of key benefits:
+1. **Root Key Wrapping**: The root KMS encryption key that is used to secure your Infisical instance will be encrypted using the HSM device rather than the standard software-protected key.
+2. **FIPS 140-2/3 Compliance**: Using an HSM device ensures that your Infisical instance is FIPS 140-2 or FIPS 140-3 compliant. For FIPS 140-3, ensure that your HSM is FIPS 140-3 validated.
+
+#### Caveats
+- **Performance**: Using an HSM device can have a performance impact on your Infisical instance. This is due to the additional latency introduced by the HSM device. This is, however, only noticeable when your instance(s) start up or when the encryption strategy is changed.
+- **Key Recovery**: If the HSM device is lost or destroyed, you will no longer be able to decrypt your data stored within Infisical. Most HSM providers offer recovery options, which you should consider when setting up an HSM device.
+
+### Requirements
+- An Infisical instance with a version number that is equal to or greater than `v0.91.0`.
+- If you are using Docker, your instance must be using the `infisical/infisical-fips` image.
+- An HSM device from a provider such as [Thales Luna HSM](https://cpl.thalesgroup.com/encryption/data-protection-on-demand/services/luna-cloud-hsm), [AWS CloudHSM](https://aws.amazon.com/cloudhsm/), or others.
+
+
+### FIPS Compliance
+FIPS, the Federal Information Processing Standard, is a set of standards used to accredit cryptographic modules. FIPS 140-2 and FIPS 140-3 are the two most common standards used for cryptographic modules. If your HSM uses FIPS 140-3 validated hardware, Infisical will automatically be FIPS 140-3 compliant. If your HSM uses FIPS 140-2 validated hardware, Infisical will be FIPS 140-2 compliant.
+
+HSM devices are especially useful for organizations that operate in regulated industries such as healthcare, finance, and government, where data security and compliance are of the utmost importance.
+
+For organizations that work with US government agencies, FIPS compliance is almost always a requirement when dealing with sensitive information. FIPS compliance ensures that the cryptographic modules used by the organization meet the security requirements set by the US government.
+
+## Setup Instructions
+
+
+
+
+    To set up HSM encryption, you need to configure an HSM provider and HSM key.
The HSM provider is used to connect to the HSM device, and the HSM key is used to encrypt Infisical's KMS keys. We recommend using a Cloud HSM provider such as [Thales Luna HSM](https://cpl.thalesgroup.com/encryption/data-protection-on-demand/services/luna-cloud-hsm) or [AWS CloudHSM](https://aws.amazon.com/cloudhsm/).
+
+    You need to follow the instructions provided by the HSM provider to set up the HSM device. Once the HSM device is set up, it can be used within Infisical.
+
+    After setting up the HSM from your provider, you will have a set of files that you can use to access the HSM. These files need to be present on the machine where Infisical is running.
+    If you are using containers, you will need to mount the folder where these files are stored as a volume in the container.
+
+    The setup process for an HSM device varies depending on the provider. We have created a guide for Thales Luna Cloud HSM, which you can find below.
+
+
+
+
+
+    Are you using Docker? If you are using Docker, please follow the instructions in the [Using HSMs with Docker](#using-hsms-with-docker) section.
+
+
+    Configuring the HSM on Infisical requires setting a set of environment variables:
+    - `HSM_LIB_PATH`: The path to the PKCS#11 library provided by the HSM provider. This usually comes in the form of a `.so` file for Linux and macOS, or a `.dll` file for Windows. For Docker, you need to mount the library path as a volume. Further instructions can be found below. If you are using Docker, make sure to set the `HSM_LIB_PATH` environment variable to the path where the library is mounted in the container.
+    - `HSM_PIN`: The PKCS#11 PIN to use for authentication with the HSM device.
+    - `HSM_SLOT`: The slot number to use for the HSM device. This is typically between `0` and `5` for most HSM devices.
+    - `HSM_KEY_LABEL`: The label of the key to use for encryption. **Please note that if no key is found with the provided label, the HSM will create a new key with the provided label.**
+
+    You can read more about the [default instance configurations](/self-hosting/configuration/envars) here.
+
+
+    After setting up the HSM, you need to restart the Infisical instance for the changes to take effect.
+
+
+    ![Server Admin Console](/images/platform/kms/hsm/server-admin-console.png)
+
+
+    ![Set Encryption Strategy](/images/platform/kms/hsm/encryption-strategy.png)
+
+    Once you press the 'Save' button, your Infisical instance will immediately switch to the HSM encryption strategy. This will re-encrypt your KMS key with keys from the HSM device.
+
+
+    To verify that the HSM was correctly configured, you can try creating a new secret in one of your projects. If the secret is created successfully, the HSM is now being used for encryption.
+
+
+
+## Using HSMs with Docker
+When using Docker, you need to mount the path containing the HSM client files. This section covers how to configure your Infisical instance to use an HSM with Docker.
+
+
+
+
+    When using Docker, you are able to set your HSM library path to any location on your machine. In this example, we are going to be using `/etc/luna-docker`.
+
+    ```bash
+    mkdir /etc/luna-docker
+    ```
+
+    After [setting up your Luna Cloud HSM client](https://thalesdocs.com/gphsm/luna/7/docs/network/Content/install/client_install/add_dpod.htm), you should have a set of files, referred to as the HSM client. You don't need all the files, but for simplicity we recommend copying all the files from the client.
+
+    The folder structure of a client folder will often look like this:
+    ```
+    partition-ca-certificate.pem
+    partition-certificate.pem
+    server-certificate.pem
+    Chrystoki.conf
+    /plugins
+      libcloud.plugin
+    /lock
+    /libs
+      /64
+        libCryptoki2.so
+    /jsp
+      LunaProvider.jar
+      /64
+        libLunaAPI.so
+    /etc
+      openssl.cnf
+    /bin
+      /64
+        ckdemo
+        lunacm
+        multitoken
+        vtl
+    ```
+
+    The most important parts of the client folder are the `Chrystoki.conf` file, and the `libs`, `plugins`, and `jsp` folders. You need to copy these files to the folder you created in the first step.
+
+    ```bash
+    cp -r /<path-to-client-folder>/* /etc/luna-docker
+    ```
+
+
+
+
+    The `Chrystoki.conf` file is used to configure the HSM client. You need to update the `Chrystoki.conf` file to point to the correct file paths.
+
+    In this example, we will be mounting the `/etc/luna-docker` folder to the Docker container under a different path. The path we will use in this example is `/usr/safenet/lunaclient`. This means `/etc/luna-docker` will be mounted to `/usr/safenet/lunaclient` in the Docker container.
+
+    An example config file will look like this:
+
+    ```Chrystoki.conf
+    Chrystoki2 = {
+      # This path points to the mounted path, /usr/safenet/lunaclient
+      LibUNIX64 = /usr/safenet/lunaclient/libs/64/libCryptoki2.so;
+    }
+
+    Luna = {
+      DefaultTimeOut = 500000;
+      PEDTimeout1 = 100000;
+      PEDTimeout2 = 200000;
+      PEDTimeout3 = 20000;
+      KeypairGenTimeOut = 2700000;
+      CloningCommandTimeOut = 300000;
+      CommandTimeOutPedSet = 720000;
+    }
+
+    CardReader = {
+      LunaG5Slots = 0;
+      RemoteCommand = 1;
+    }
+
+    Misc = {
+      # Update the paths to point to the mounted path if your folder structure is different from the one mentioned in the previous step.
+      PluginModuleDir = /usr/safenet/lunaclient/plugins;
+      MutexFolder = /usr/safenet/lunaclient/lock;
+      PE1746Enabled = 1;
+      ToolsDir = /usr/bin;
+
+    }
+
+    Presentation = {
+      ShowEmptySlots = no;
+    }
+
+    LunaSA Client = {
+      ReceiveTimeout = 20000;
+      # Update the paths to point to the mounted path if your folder structure is different from the one mentioned in the previous step.
+      SSLConfigFile = /usr/safenet/lunaclient/etc/openssl.cnf;
+      ClientPrivKeyFile = ./etc/ClientNameKey.pem;
+      ClientCertFile = ./etc/ClientNameCert.pem;
+      ServerCAFile = ./etc/CAFile.pem;
+      NetClient = 1;
+      TCPKeepAlive = 1;
+    }
+
+
+    REST = {
+      AppLogLevel = error
+      ServerName = ;
+      ServerPort = 443;
+      AuthTokenConfigURI = ;
+      AuthTokenClientId = ;
+      AuthTokenClientSecret = ;
+      RestClient = 1;
+      ClientTimeoutSec = 120;
+      ClientPoolSize = 32;
+      ClientEofRetryCount = 15;
+      ClientConnectRetryCount = 900;
+      ClientConnectIntervalMs = 1000;
+    }
+    XTC = {
+      Enabled = 1;
+      TimeoutSec = 600;
+    }
+    ```
+
+    Save the file after updating the paths.
+
+
+
+    Running Docker with HSM encryption requires setting the HSM-related environment variables as mentioned previously in the [HSM setup instructions](#setup-instructions). You can set these environment variables in your Docker run command.
+
+    We are setting the environment variables for Docker via the command line in this example, but you can also pass in a `.env` file to set these environment variables.
+
+
+    If no key is found with the provided key label, the HSM will create a new key with the provided label.
+    Infisical depends on an AES and HMAC key to be present in the HSM. If these keys are not present, Infisical will create them.
The AES key label will be the value of the `HSM_KEY_LABEL` environment variable, and the HMAC key label will be the value of the `HSM_KEY_LABEL` environment variable with the suffix `_HMAC`.
+
+
+    ```bash
+    # Environment variables from ENCRYPTION_KEY onward are unrelated to HSM setup
+    docker run -p 80:8080 \
+    -v /etc/luna-docker:/usr/safenet/lunaclient \
+    -e HSM_LIB_PATH="/usr/safenet/lunaclient/libs/64/libCryptoki2.so" \
+    -e HSM_PIN="<your-hsm-pin>" \
+    -e HSM_SLOT=<hsm-slot-number> \
+    -e HSM_KEY_LABEL="<your-key-label>" \
+    -e ENCRYPTION_KEY="<your-encryption-key>" \
+    -e AUTH_SECRET="<your-auth-secret>" \
+    -e DB_CONNECTION_URI="<your-db-connection-uri>" \
+    -e REDIS_URL="<your-redis-url>" \
+    -e SITE_URL="<your-site-url>" \
+    infisical/infisical-fips:<version> # Replace <version> with the version you want to use
+    ```
+
+    We recommend reading further about [using Infisical with Docker](/self-hosting/deployment-options/standalone-infisical).
+
+
+
+After following these steps, your Docker setup will be ready to use HSM encryption.
+
+
+
+## Disabling HSM Encryption
+
+To disable HSM encryption, navigate to Infisical's Server Admin Console and set the KMS encryption strategy to `Software-based Encryption`. This will revert the encryption strategy back to the default software-based encryption.
+
+
+  In order to disable HSM encryption, the Infisical instance must be able to access the HSM device. If the HSM device is no longer accessible, you will not be able to disable HSM encryption.
+
\ No newline at end of file
diff --git a/docs/documentation/platform/kms/kubernetes-encryption.mdx b/docs/documentation/platform/kms/kubernetes-encryption.mdx
new file mode 100644
index 0000000000..748ac8c810
--- /dev/null
+++ b/docs/documentation/platform/kms/kubernetes-encryption.mdx
@@ -0,0 +1,5 @@
+---
+title: "Kubernetes Encryption with KMS"
+sidebarTitle: "Kubernetes Encryption"
+url: "https://github.com/Infisical/k8-kms-plugin"
+---
diff --git a/docs/documentation/platform/kms/overview.mdx b/docs/documentation/platform/kms/overview.mdx
new file mode 100644
index 0000000000..8991646cf3
--- /dev/null
+++ b/docs/documentation/platform/kms/overview.mdx
@@ -0,0 +1,212 @@
+---
+title: "Key Management Service (KMS)"
+sidebarTitle: "Overview"
+description: "Learn how to manage and use cryptographic keys with Infisical."
+---
+
+## Concept
+
+Infisical can be used as a Key Management System (KMS), referred to as Infisical KMS, to centralize management of keys to be used for cryptographic operations like encryption/decryption.
+
+By default, your Infisical data such as projects and the data within them are encrypted at rest using Infisical's own KMS. This ensures that your data is secure and protected from unauthorized access.
+
+If you are on-premise, your KMS root key will be created at random with the `ROOT_ENCRYPTION_KEY` environment variable. You can also use a Hardware Security Module (HSM) to create the root key. Read more about [HSM](/docs/documentation/platform/kms/encryption-strategies).
+
+
+  Keys managed in KMS are not extractable from the platform. Additionally, data
+  is never stored when performing cryptographic operations.
+
+
+## Workflow
+
+The typical workflow for using Infisical KMS consists of the following steps:
+
+1. Creating a KMS key. As part of this step, you specify a name for the key and the encryption algorithm meant to be used for it (e.g. `AES-GCM-128`, `AES-GCM-256`).
+2. Encryption: To encrypt data, you would make a request to the Infisical KMS API endpoint, specifying the base64-encoded plaintext and the intended key to use for encryption; the API would return the base64-encoded ciphertext.
+3.
+
+
+  Note that this workflow can be executed via the Infisical UI or manually, such
+  as via the API.
+
+
+## Guide to Encrypting Data
+
+In the following steps, we explore how to generate a key and use it to encrypt data.
+
+
+
+
+
+      Navigate to Project > Key Management and tap on the **Add Key** button.
+      ![kms add key button](/images/platform/kms/infisical-kms/kms-add-key.png)
+
+      Specify your key details. Here's some guidance on each field:
+
+      - Name: A slug-friendly name for the key.
+      - Type: The encryption algorithm associated with the key (e.g. `AES-256-GCM`).
+      - Description: An optional description of what the intended usage is for the key.
+
+      ![kms add key modal](/images/platform/kms/infisical-kms/kms-add-key-modal.png)
+
+
+      Once your key is generated, open the options menu for the newly created key and select encrypt data.
+      ![kms key options](/images/platform/kms/infisical-kms/kms-key-options.png)
+
+      Populate the text area with your data and tap on the Encrypt button.
+      ![kms encrypt data](/images/platform/kms/infisical-kms/kms-encrypt-data.png)
+
+
+        If your data is already Base64 encoded, make sure to toggle the respective switch on to avoid
+        redundant encoding.
+
+
+      Copy and store the encrypted data.
+      ![kms encrypted data](/images/platform/kms/infisical-kms/kms-encrypted-data.png)
+
+
+
+
+
+
+      To create a cryptographic key, make an API request to the [Create KMS
+      Key](/api-reference/endpoints/kms/keys/create) API endpoint.
+
+      ### Sample request
+
+      ```bash Request
+      curl --request POST \
+        --url https://app.infisical.com/api/v1/kms/keys \
+        --header 'Content-Type: application/json' \
+        --data '{
+          "projectId": "",
+          "name": "my-secret-key",
+          "description": "...",
+          "encryptionAlgorithm": "aes-256-gcm"
+        }'
+      ```
+
+      ### Sample response
+
+      ```bash Response
+      {
+        "key": {
+          "id": "",
+          "description": "...",
+          "isDisabled": false,
+          "isReserved": false,
+          "orgId": "",
+          "name": "my-secret-key",
+          "createdAt": "2023-11-07T05:31:56Z",
+          "updatedAt": "2023-11-07T05:31:56Z",
+          "projectId": ""
+        }
+      }
+      ```
+
+
+      To encrypt data, make an API request to the [Encrypt
+      Data](/api-reference/endpoints/kms/keys/encrypt) API endpoint,
+      specifying the key to use.
+
+
+        Make sure your data is Base64 encoded.
+
+
+      ### Sample request
+
+      ```bash Request
+      curl --request POST \
+        --url https://app.infisical.com/api/v1/kms/keys//encrypt \
+        --header 'Content-Type: application/json' \
+        --data '{
+          "plaintext": "lUFHM5Ggwo6TOfpuN1S==" // base64 encoded plaintext
+        }'
+      ```
+
+      ### Sample response
+
+      ```bash Response
+      {
+        "ciphertext": "HwFHwSFHwlMF6TOfp==" // base64 encoded ciphertext
+      }
+      ```
+
+
+
+
+
+## Guide to Decrypting Data
+
+In the following steps, we explore how to decrypt data using an existing key in Infisical KMS.
+
+
+
+
+
+      Navigate to Project > Key Management and open the options menu for the key used to encrypt the data
+      you want to decrypt.
+      ![kms key options](/images/platform/kms/infisical-kms/kms-decrypt-options.png)
+
+
+
+      Paste your encrypted data into the text area and tap on the Decrypt button. Optionally, if your data was
+      originally plain text, enable the decode Base64 switch.
+      ![kms decrypt data](/images/platform/kms/infisical-kms/kms-decrypt-data.png)
+
+      Your decrypted data will be displayed and can be copied for use.
+ ![kms decrypted data](/images/platform/kms/infisical-kms/kms-decrypted-data.png) + + + + + + + + To decrypt data, make an API request to the [Decrypt + Data](/api-reference/endpoints/kms/keys/decrypt) API endpoint, + specifying the key to use. + + ### Sample request + + ```bash Request + curl --request POST \ + --url https://app.infisical.com/api/v1/kms/keys//decrypt \ + --header 'Content-Type: application/json' \ + --data '{ + "ciphertext": "HwFHwSFHwlMF6TOfp==" // base64 encoded ciphertext + }' + ``` + + ### Sample response + + ```bash Response + { + "plaintext": "lUFHM5Ggwo6TOfpuN1S==" // base64 encoded plaintext + } + ``` + + + + + + + +## FAQ + + + + No. Infisical's KMS only provides cryptographic services and does not store + any encrypted or decrypted data. + + + No. Infisical's KMS will never expose your keys, encrypted or decrypted, to + external sources. + + + Currently, Infisical only supports `AES-128-GCM` and `AES-256-GCM` for + encryption operations. We anticipate supporting more algorithms and + cryptographic operations in the coming months. + + diff --git a/docs/documentation/platform/ldap/general.mdx b/docs/documentation/platform/ldap/general.mdx index 5e4253a344..939eaa7270 100644 --- a/docs/documentation/platform/ldap/general.mdx +++ b/docs/documentation/platform/ldap/general.mdx @@ -30,6 +30,7 @@ Prerequisites: - Bind DN: The distinguished name of object to bind when performing the user search such as `cn=infisical,ou=Users,dc=acme,dc=com`. - Bind Pass: The password to use along with `Bind DN` when performing the user search. - User Search Base / User DN: Base DN under which to perform user search such as `ou=Users,dc=acme,dc=com`. + - Unique User Attribute: The attribute to use as the unique identifier of LDAP users such as `sAMAccountName`, `cn`, `uid`, `objectGUID` ... If left blank, defaults to `uidNumber` - User Search Filter (optional): Template used to construct the LDAP user search filter such as `(uid={{username}})`; use literal `{{username}}` to have the given username used in the search. The default is `(uid={{username}})` which is compatible with several common directory schemas. - Group Search Base / Group DN (optional): LDAP search base to use for group membership search such as `ou=Groups,dc=acme,dc=com`. - Group Filter (optional): Template used when constructing the group membership query such as `(&(objectClass=posixGroup)(memberUid={{.Username}}))`. The template can access the following context variables: [`UserDN`, `UserName`]. The default is `(|(memberUid={{.Username}})(member={{.UserDN}})(uniqueMember={{.UserDN}}))` which is compatible with several common directory schemas. diff --git a/docs/documentation/platform/ldap/jumpcloud.mdx b/docs/documentation/platform/ldap/jumpcloud.mdx index b92b52bb90..39579b7850 100644 --- a/docs/documentation/platform/ldap/jumpcloud.mdx +++ b/docs/documentation/platform/ldap/jumpcloud.mdx @@ -39,6 +39,7 @@ Prerequisites: - Bind DN: The distinguished name of object to bind when performing the user search (`uid=,ou=Users,o=,dc=jumpcloud,dc=com`). - Bind Pass: The password to use along with `Bind DN` when performing the user search. - User Search Base / User DN: Base DN under which to perform user search (`ou=Users,o=,dc=jumpcloud,dc=com`). + - Unique User Attribute: The attribute to use as the unique identifier of LDAP users such as `sAMAccountName`, `cn`, `uid`, `objectGUID` ... 
If left blank, defaults to `uidNumber`.
   - User Search Filter (optional): Template used to construct the LDAP user search filter (`(uid={{username}})`).
   - Group Search Base / Group DN (optional): LDAP search base to use for group membership search (`ou=Users,o=,dc=jumpcloud,dc=com`).
   - Group Filter (optional): Template used when constructing the group membership query (`(&(objectClass=groupOfNames)(member=uid={{.Username}},ou=Users,o=,dc=jumpcloud,dc=com))`).
diff --git a/docs/documentation/platform/ldap/overview.mdx b/docs/documentation/platform/ldap/overview.mdx
index 4d6c75e153..c1a980b041 100644
--- a/docs/documentation/platform/ldap/overview.mdx
+++ b/docs/documentation/platform/ldap/overview.mdx
@@ -14,8 +14,6 @@ then you should contact sales@infisical.com to purchase an enterprise license to
 
 You can configure your organization in Infisical to have members authenticate with the platform via [LDAP](https://en.wikipedia.org/wiki/Lightweight_Directory_Access_Protocol).
 
-To note, configuring LDAP retains the end-to-end encrypted nature of authentication in Infisical because we decouple the authentication and decryption steps; the LDAP server cannot and will not have access to the decryption key needed to decrypt your secrets.
-
 LDAP providers:
 
 - Active Directory
@@ -38,7 +36,7 @@ If the documentation for your required identity provider is not shown in the lis
 verification step upon their first login.
 
 If you're running a self-hosted instance of Infisical and would like it to trust emails from external identity providers,
-you can configure this behavior in the admin panel.
+you can configure this behavior in the Server Admin Console.
 
diff --git a/docs/documentation/platform/mfa.mdx b/docs/documentation/platform/mfa.mdx
index 3ca9c5dff7..1f61fae5c8 100644
--- a/docs/documentation/platform/mfa.mdx
+++ b/docs/documentation/platform/mfa.mdx
@@ -4,16 +4,59 @@ sidebarTitle: "MFA"
 description: "Learn how to secure your Infisical account with MFA."
 ---
 
-MFA requires users to provide multiple forms of identification to access their account. Currently, this means logging in with your password and a 6-digit code sent to your email.
+MFA requires users to provide multiple forms of identification to access their account.
 
 ## Email 2FA
 
-Check the box in Personal Settings > Two-factor Authentication to enable email-based 2FA.
+If 2-factor authentication is enabled in the Personal settings page, email will be used for MFA by default.
 
-![Email-based MFA](../../images/mfa-email.png)
+![Email-based MFA](/images/mfa-email.png)
+
+## Mobile Authenticator 2FA
+
+You can use any mobile authenticator app (Authy, Google Authenticator, Duo, etc.) to secure your account. After registering with an authenticator, select **Mobile Authenticator** as your 2FA method.
+![Authenticator-based MFA](/images/mfa-authenticator.png)
+
+## Entra ID / Azure AD MFA
 
-  Infisical currently supports email-based 2FA. We're actively working on
-  building support for other forms of identification via SMS and Authenticator
-  App.
+  Before proceeding, make sure you've enabled [SAML SSO for Entra ID / Azure AD](./sso/azure).
+
+  We also encourage you to have your team download and set up the
+  [Microsoft Authenticator App](https://www.microsoft.com/en-us/security/mobile-authenticator-app) prior to enabling MFA.
+
+
+
+
+    ![Entra Infisical app](/images/platform/mfa/entra/mfa_entra_infisical_app.png)
+
+
+    ![conditional access](/images/platform/mfa/entra/mfa_entra_conditional_access.png)
+
+
+    ![create policy](/images/platform/mfa/entra/mfa_entra_create_policy.png)
+
+
+    ![require MFA and review policy](/images/platform/mfa/entra/mfa_entra_review_policy.png)
+
+    By default, all users except the configuring admin will be set up to require
+    MFA. Microsoft encourages keeping at least one admin excluded from MFA to
+    prevent accidental lockout.
+
+
+
+    ![enable policy and confirm](/images/platform/mfa/entra/mfa_entra_confirm_policy.png)
+
+
+    ![mfa login](/images/platform/mfa/entra/mfa_entra_login.png)
+
+    If users have not set up MFA for Entra / Azure, they will be prompted to do
+    so at this time.
+
+
+
diff --git a/docs/documentation/platform/organization.mdx b/docs/documentation/platform/organization.mdx
index d45bb6d4f8..f1c62ff37f 100644
--- a/docs/documentation/platform/organization.mdx
+++ b/docs/documentation/platform/organization.mdx
@@ -16,25 +16,26 @@ as well as create a new project.
 
 The **Settings** page lets you manage information about your organization including:
 
-- Name: The name of your organization.
-- Incident contacts: Emails that should be alerted if anything abnormal is detected within the organization.
+- **Name**: The name of your organization.
+- **Slug**: The slug of your organization.
+- **Default Organization Member Role**: The role assigned to users when joining your organization unless otherwise specified.
+- **Incident Contacts**: Emails that should be alerted if anything abnormal is detected within the organization.
 
 ![organization settings general](../../images/platform/organization/organization-settings-general.png)
 
-
-- Security and Authentication: A set of setting to enforce or manage [SAML](/documentation/platform/sso/overview), [SCIM](/documentation/platform/scim/overview), [LDAP](/documentation/platform/ldap/overview), and other authentication configurations.
+- Security and Authentication: A set of settings to enforce or manage [SAML](/documentation/platform/sso/overview), [OIDC](/documentation/platform/sso/overview), [SCIM](/documentation/platform/scim/overview), [LDAP](/documentation/platform/ldap/overview), and other authentication configurations.
 
 ![organization settings auth](../../images/platform/organization/organization-settings-auth.png)
 
 ## Access Control
 
-The **Access Control** page is where you can manage identities (both people and machines) that are part of your organization.
+The **Access Control** page is where you can manage identities (both people and machines) that are part of your organization. You can add or remove additional members as well as modify their permissions.
 
 ![organization members](../../images/platform/organization/organization-members.png)
 
 ![organization identities](../../images/platform/organization/organization-machine-identities.png)
 
-In the **Organization Roles** tab, you can edit current or create new custom roles for members within the organization.
+In the **Organization Roles** tab, you can edit existing roles or create new custom roles for members within the organization. Note that Role-Based Access Control (RBAC) is partly a paid feature.
 
@@ -42,13 +43,14 @@ In the **Organization Roles** tab, you can edit current or create new custom rol
 
 Infisical provides immutable roles like `admin`, `member`, etc. at the organization and project level for free.
- If you're using Infisical Cloud, the ability to create custom roles is available under the **Pro Tier**. - If you're self-hosting Infisical, then you should contact sales@infisical.com to purchase an enterprise license to use it. +If you're using Infisical Cloud, the ability to create custom roles is available under the **Pro Tier**. +If you're self-hosting Infisical, then you should contact sales@infisical.com to purchase an enterprise license to use it. + ![organization roles](../../images/platform/organization/organization-members-roles.png) -As you can see next, Infisical supports granular permissions that you can tailor to each role. +As you can see next, Infisical supports granular permissions that you can tailor to each role. If you need certain members to only be able to access billing details, for example, then you can assign them that permission only. @@ -66,4 +68,4 @@ This includes the following items: - Receipts: The receipts of monthly/annual invoices. - Billing: The billing details of your organization including payment methods on file, tax IDs (if applicable), etc. -![organization usage and billing](../../images/platform/organization/organization-usage-billing.png) \ No newline at end of file +![organization usage and billing](../../images/platform/organization/organization-usage-billing.png) diff --git a/docs/documentation/platform/pki/alerting.mdx b/docs/documentation/platform/pki/alerting.mdx new file mode 100644 index 0000000000..158c478770 --- /dev/null +++ b/docs/documentation/platform/pki/alerting.mdx @@ -0,0 +1,149 @@ +--- +title: "Alerting" +description: "Learn how to set up alerting for expiring certificates with Infisical" +--- + +## Concept + +In order to ensure that your certificates are always up-to-date and not expired, you can set up alerting for expiring CA and leaf certificates in Infisical. + +## Workflow + +A typical alerting workflow for expiring certificates consists of the following steps: + +1. Creating a PKI/Certificate collection and adding certificates that you wish to monitor for expiration to it. +2. Creating an alert and binding it to the PKI/Certificate collection. As part of the configuration, you specify when the alert should trigger based on the number of days before certificate expiration and the email addresses of the recipients to notify. + +## Guide to Creating an Alert + + + + + + To create a PKI/Certificate collection, head to your Project > Internal + PKI > Alerting > Certificate Collection and press **Create**. + + ![pki create collection](/images/platform/pki/alerting/collection-create.png) + + Give the collection a name and proceed to create the empty collection. + + ![pki create collection](/images/platform/pki/alerting/collection-create-2.png) + + Next, in the Collection Page, add the certificate authorities and leaf certificates + that you wish to monitor for expiration to the collection. + + ![pki add cert to collection](/images/platform/pki/alerting/collection-add-cert.png) + + + To create an alert, head to your Project > Internal PKI > Alerting > Alerts and press **Create**. + + ![pki create alert](/images/platform/pki/alerting/alert-create.png) + + Here, set the **Certificate Collection** to the PKI/Certificate collection you created in the previous step and fill out details for the alert. + + ![pki create alert](/images/platform/pki/alerting/alert-create-2.png) + + Here's some guidance on each field: + + - Name: A name for the alert. 
+      - Certificate Collection: The PKI/Certificate collection to bind the alert to from the previous step.
+      - Alert Before / Unit: The time before certificate expiration at which the alert should trigger.
+      - Emails to Alert: A comma-delimited list of email addresses to notify when the alert triggers.
+
+      Finally, press **Create** to create the alert.
+
+      ![pki alerts](/images/platform/pki/alerting/alerts.png)
+
+      Great! You've successfully created a PKI/Certificate collection and an alert to monitor the expiring certificates in the collection. Once the alert triggers, the specified email addresses will be notified.
+
+
+
+
+
+
+
+      1.1. To create a PKI/Certificate collection, make an API request to the [Create PKI Collection](/api-reference/endpoints/pki-collections/create) API endpoint.
+
+      ### Sample request
+
+      ```bash Request
+      curl --location --request POST 'https://app.infisical.com/api/v1/pki/collections' \
+        --header 'Authorization: Bearer ' \
+        --header 'Content-Type: application/json' \
+        --data-raw '{
+          "projectId": "",
+          "name": "My Certificate Collection"
+        }'
+      ```
+
+      ### Sample response
+
+      ```bash Response
+      {
+        id: "",
+        name: "My Certificate Collection",
+        ...
+      }
+      ```
+
+      1.2. Next, make an API request to the [Add Collection Item](/api-reference/endpoints/pki-collections/add-item) API endpoint to add a certificate to the collection.
+
+      ### Sample request
+
+      ```bash Request
+      curl --location --request POST 'https://app.infisical.com/api/v1/pki/collections//items' \
+        --header 'Authorization: Bearer ' \
+        --header 'Content-Type: application/json' \
+        --data-raw '{
+          "type": "certificate",
+          "itemId": "id-of-certificate"
+        }'
+      ```
+
+      ### Sample response
+
+      ```bash Response
+      {
+        id: "",
+        type: "certificate",
+        itemId: "id-of-certificate",
+        ...
+      }
+      ```
+
+
+      To create an alert, make an API request to the [Create Alert](/api-reference/endpoints/pki-alerts/create) API endpoint, specifying the PKI/Certificate collection to bind the alert to, the alert configuration, and the email addresses to notify.
+
+      ### Sample request
+
+      ```bash Request
+      curl --location --request POST 'https://app.infisical.com/api/v1/pki/alerts' \
+        --header 'Authorization: Bearer ' \
+        --header 'Content-Type: application/json' \
+        --data-raw '{
+          "projectId": "",
+          "pkiCollectionId": "",
+          "name": "My Alert",
+          "alertBeforeDays": 30,
+          "emails": ["johndoe@gmail.com", "janedoe@gmail.com"]
+        }'
+      ```
+
+      ### Sample response
+
+      ```bash Response
+      {
+        id: "",
+        name: "My Alert",
+        alertBeforeDays: 30,
+        recipientEmails: "johndoe@gmail.com,janedoe@gmail.com",
+        ...
+      }
+      ```
+
+      Great! You've successfully created a PKI/Certificate collection and an alert to monitor the expiring certificates in the collection. Once the alert triggers, the specified email addresses will be notified.
+
+
+
+
diff --git a/docs/documentation/platform/pki/certificates.mdx b/docs/documentation/platform/pki/certificates.mdx
new file mode 100644
index 0000000000..4976b5e184
--- /dev/null
+++ b/docs/documentation/platform/pki/certificates.mdx
@@ -0,0 +1,324 @@
+---
+title: "Certificates"
+sidebarTitle: "Certificates"
+description: "Learn how to issue X.509 certificates with Infisical."
+---
+
+## Concept
+
+Assuming that you've created a Private CA hierarchy with a root CA and an intermediate CA, you can now issue/revoke X.509 certificates using the intermediate CA.
+
+ +```mermaid +graph TD + A[Root CA] + A --> B[Intermediate CA] + A --> C[Intermediate CA] + B --> D[Leaf Certificate] + C --> E[Leaf Certificate] +``` + +
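+
+For example, once a leaf certificate has been issued from this hierarchy, you can verify its chain of trust with OpenSSL. This is a minimal sketch; `root-ca.pem`, `intermediate-ca.pem`, and `leaf.pem` are placeholder file names for the PEM-encoded root CA certificate, intermediate CA certificate, and issued leaf certificate.
+
+```bash
+# Verify that the leaf certificate chains up to the root CA via the
+# intermediate CA (file names are placeholders)
+openssl verify -CAfile root-ca.pem -untrusted intermediate-ca.pem leaf.pem
+```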
+
+## Workflow
+
+The typical workflow for managing certificates consists of the following steps:
+
+1. Issuing a certificate under an intermediate CA with details like name and validity period. As part of certificate issuance, you can either issue a certificate directly from a CA or do it via a certificate template.
+2. Managing certificate lifecycle events such as certificate renewal and revocation. As part of the certificate revocation flow,
+   you can also query for a Certificate Revocation List [CRL](https://en.wikipedia.org/wiki/Certificate_revocation_list): a time-stamped, signed
+   data structure issued by a CA containing a list of revoked certificates, which you can use to check whether a certificate has been revoked.
+
+
+  Note that this workflow can be executed via the Infisical UI or manually, such
+  as via the API.
+
+
+## Guide to Issuing Certificates
+
+In the following steps, we explore how to issue an X.509 certificate under a CA.
+
+
+
+
+
+
+      A certificate template is a set of policies for certificates issued under that template; each template is bound to a specific CA and can also be bound to a certificate collection for alerting, such that any certificate issued under the template is automatically added to the collection.
+
+      With certificate templates, you can specify, for example, that issued certificates must have a common name (CN) adhering to a specific format like `.*.acme.com` or perhaps that the max TTL cannot be more than 1 year.
+
+      Head to your Project > Certificate Authorities > Your Issuing CA and create a certificate template.
+
+      ![pki certificate template modal](/images/platform/pki/certificate/cert-template-modal.png)
+
+      Here's some guidance on each field:
+
+      - Template Name: A name for the certificate template.
+      - Issuing CA: The Certificate Authority (CA) that will issue certificates based on this template.
+      - Certificate Collection (Optional): The certificate collection that certificates should be added to when issued under the template.
+      - Common Name (CN): A regular expression used to validate the common name in certificate requests.
+      - Alternative Names (SANs): A regular expression used to validate subject alternative names in certificate requests.
+      - TTL: The maximum Time-to-Live (TTL) for certificates issued using this template.
+      - Key Usage: The key usage constraint or default value for certificates issued using this template.
+      - Extended Key Usage: The extended key usage constraint or default value for certificates issued using this template.
+
+
+      To create a certificate, head to your Project > Internal PKI > Certificates and press **Issue** under the Certificates section.
+
+      ![pki issue certificate](/images/platform/pki/certificate/cert-issue.png)
+
+      Here, set the **Certificate Template** to the template from step 1 and fill out the rest of the details for the certificate to be issued.
+
+      ![pki issue certificate modal](/images/platform/pki/certificate/cert-issue-modal.png)
+
+      Here's some guidance on each field:
+
+      - Friendly Name: A friendly name for the certificate; this is only for display and defaults to the common name of the certificate if left empty.
+      - Common Name (CN): The (common) name for the certificate like `service.acme.com`.
+      - Alternative Names (SANs): A comma-delimited list of Subject Alternative Names (SANs) for the certificate; these can be host names or email addresses like `app1.acme.com, app2.acme.com`.
+      - TTL: The lifetime of the certificate in seconds.
+      - Key Usage: The key usage extension of the certificate.
+      - Extended Key Usage: The extended key usage extension of the certificate.
+
+
+        Note that Infisical PKI supports issuing certificates without certificate templates as well. If this is desired, then you can set the **Certificate Template** field to **None**
+        and specify the **Issuing CA** and optional **Certificate Collection** fields; the rest of the fields for the issued certificate remain the same.
+
+        That said, we recommend using certificate templates to enforce policies and attach expiration monitoring on issued certificates.
+
+
+
+      Once you have created the certificate in step 2, you'll be presented with the certificate details including the **Certificate Body**, **Certificate Chain**, and **Private Key**.
+
+      ![pki certificate body](/images/platform/pki/certificate/cert-body.png)
+
+
+        Make sure to download and store the **Private Key** in a secure location as it will only be displayed once at the time of certificate issuance.
+        The **Certificate Body** and **Certificate Chain** will remain accessible and can be copied at any time.
+
+
+
+
+
+
+
+
+      A certificate template is a set of policies for certificates issued under that template; each template is bound to a specific CA and can also be bound to a certificate collection for alerting, such that any certificate issued under the template is automatically added to the collection.
+
+      With certificate templates, you can specify, for example, that issued certificates must have a common name (CN) adhering to a specific format like `.*.acme.com` or perhaps that the max TTL cannot be more than 1 year.
+
+      To create a certificate template, make an API request to the [Create Certificate Template](/api-reference/endpoints/certificate-templates/create) API endpoint, specifying the issuing CA.
+
+      ### Sample request
+
+      ```bash Request
+      curl --location --request POST 'https://app.infisical.com/api/v1/pki/certificate-templates' \
+        --header 'Content-Type: application/json' \
+        --data-raw '{
+          "caId": "",
+          "name": "My Certificate Template",
+          "commonName": ".*.acme.com",
+          "subjectAlternativeName": ".*.acme.com",
+          "ttl": "1y"
+        }'
+      ```
+
+      ### Sample response
+
+      ```bash Response
+      {
+        id: "...",
+        caId: "...",
+        name: "...",
+        commonName: "...",
+        subjectAlternativeName: "...",
+        ttl: "..."
+      }
+      ```
+
+
+      To create a certificate under the certificate template, make an API request to the [Issue Certificate](/api-reference/endpoints/certificates/issue-cert) API endpoint,
+      specifying the certificate template to issue against.
+
+      ### Sample request
+
+      ```bash Request
+      curl --location --request POST 'https://app.infisical.com/api/v1/pki/certificates/issue-certificate' \
+        --header 'Content-Type: application/json' \
+        --data-raw '{
+          "certificateTemplateId": "",
+          "commonName": "service.acme.com",
+          "ttl": "1y"
+        }'
+      ```
+
+      ### Sample response
+
+      ```bash Response
+      {
+        certificate: "...",
+        certificateChain: "...",
+        issuingCaCertificate: "...",
+        privateKey: "...",
+        serialNumber: "..."
+      }
+      ```
+
+
+        Note that Infisical PKI supports issuing certificates without certificate templates as well. If this is desired, then you can set the **Certificate Template** field to **None**
+        and specify the **Issuing CA** and optional **Certificate Collection** fields; the rest of the fields for the issued certificate remain the same.
+
+        That said, we recommend using certificate templates to enforce policies and attach expiration monitoring on issued certificates.
+
+
+
+        Make sure to store the `privateKey` as it is only returned once here at the time of certificate issuance. The `certificate` and `certificateChain` will remain accessible and can be retrieved at any time.
+
+
+      If you have an external private key, you can also create a certificate by making an API request containing a PEM-encoded CSR (Certificate Signing Request) to the [Sign Certificate](/api-reference/endpoints/certificates/sign-certificate) API endpoint, specifying the certificate template to issue against. Since the private key stays with you, no `privateKey` is returned in this flow.
+
+      ### Sample request
+
+      ```bash Request
+      curl --location --request POST 'https://app.infisical.com/api/v1/pki/certificates/sign-certificate' \
+        --header 'Content-Type: application/json' \
+        --data-raw '{
+          "certificateTemplateId": "",
+          "csr": "...",
+          "ttl": "1y"
+        }'
+      ```
+
+      ### Sample response
+
+      ```bash Response
+      {
+        certificate: "...",
+        certificateChain: "...",
+        issuingCaCertificate: "...",
+        serialNumber: "..."
+      }
+      ```
+
+
+
+
+
+## Guide to Revoking Certificates
+
+In the following steps, we explore how to revoke an X.509 certificate under a CA and obtain a Certificate Revocation List (CRL) for a CA.
+
+
+
+
+
+      Assuming that you've issued a certificate under a CA, you can revoke it by
+      selecting the **Revoke Certificate** option for it and specifying the reason
+      for revocation.
+
+      ![pki revoke certificate](/images/platform/pki/cert-revoke.png)
+
+      ![pki revoke certificate modal](/images/platform/pki/cert-revoke-modal.png)
+
+
+
+      In order to check the revocation status of a certificate, you can check it
+      against the CRL of a CA by heading to its Issuing CA and downloading the CRL.
+
+      ![pki view crl](/images/platform/pki/ca-crl.png)
+
+      To verify a certificate against the downloaded CRL with OpenSSL, you can use the following command:
+
+      ```bash
+      openssl verify -crl_check -CAfile chain.pem -CRLfile crl.pem cert.pem
+      ```
+
+      Note that you can also obtain the CRL by referencing the CRL distribution point extension on the certificate itself.
+
+      To check a certificate against the CRL distribution point specified within it with OpenSSL, you can use the following command:
+
+      ```bash
+      openssl verify -verbose -crl_check -crl_download -CAfile chain.pem cert.pem
+      ```
+
+
+
+
+
+
+      Assuming that you've issued a certificate under a CA, you can revoke it by making an API request to the [Revoke Certificate](/api-reference/endpoints/certificate-authorities/revoke) API endpoint,
+      specifying the serial number of the certificate and the reason for revocation.
+
+      ### Sample request
+
+      ```bash Request
+      curl --location --request POST 'https://app.infisical.com/api/v1/pki/certificates//revoke' \
+        --header 'Authorization: Bearer ' \
+        --header 'Content-Type: application/json' \
+        --data-raw '{
+          "revocationReason": "UNSPECIFIED"
+        }'
+      ```
+
+      ### Sample response
+
+      ```bash Response
+      {
+        message: "Successfully revoked certificate",
+        serialNumber: "...",
+        revokedAt: "..."
+      }
+      ```
+
+
+      In order to check the revocation status of a certificate, you can check it against the CRL of the issuing CA.
+      To obtain the CRLs of the CA, make an API request to the [List CRLs](/api-reference/endpoints/certificate-authorities/crls) API endpoint.
+
+      ### Sample request
+
+      ```bash Request
+      curl --location --request GET 'https://app.infisical.com/api/v1/pki/ca//crls' \
+        --header 'Authorization: Bearer '
+      ```
+
+      ### Sample response
+
+      ```bash Response
+      [
+        {
+          id: "...",
+          crl: "..."
+        },
+        ...
+      ]
+      ```
+
+      To verify a certificate against the CRL with OpenSSL, you can use the following command:
+
+      ```bash
+      openssl verify -crl_check -CAfile chain.pem -CRLfile crl.pem cert.pem
+      ```
+
+
+
+
+
+## FAQ
+
+
+
+    To renew a certificate, you have to issue a new certificate from the same CA
+    with the same common name as the old certificate. The original certificate
+    will continue to be valid through its original TTL unless explicitly
+    revoked.
+
+
diff --git a/docs/documentation/platform/pki/est.mdx b/docs/documentation/platform/pki/est.mdx
new file mode 100644
index 0000000000..ecbd98dd9c
--- /dev/null
+++ b/docs/documentation/platform/pki/est.mdx
@@ -0,0 +1,59 @@
+---
+title: "Enrollment over Secure Transport (EST)"
+sidebarTitle: "Enrollment over Secure Transport (EST)"
+description: "Learn how to manage certificate enrollment of clients using EST"
+---
+
+## Concept
+
+Enrollment over Secure Transport (EST) is a protocol used to automate the secure provisioning of digital certificates for devices and applications over a secure HTTPS connection. It is primarily used when a client device needs to obtain or renew a certificate from a Certificate Authority (CA) on Infisical in a secure and standardized manner. EST is commonly employed in environments requiring strong authentication and encrypted communication, such as in IoT, enterprise networks, and secure web services.
+
+Infisical's EST service is based on [RFC 7030](https://datatracker.ietf.org/doc/html/rfc7030) and implements the following endpoints:
+
+- **cacerts** - provides the necessary CA chain for the client to validate certificates issued by the CA.
+- **simpleenroll** - allows an EST client to request a new certificate from Infisical's EST server.
+- **simplereenroll** - similar to the `/simpleenroll` endpoint, but is used for renewing an existing certificate.
+
+These endpoints are exposed on port 8443 under the `.well-known/est` path, e.g.
+`https://app.infisical.com:8443/.well-known/est/estLabel/cacerts`
+
+## Prerequisites
+
+- You need to have an existing [CA hierarchy](/documentation/platform/pki/private-ca).
+- The client devices need to have a bootstrap/pre-installed certificate.
+- The client devices must trust the server certificates used by Infisical's EST server. If the devices are new or lack existing trust configurations, you need to manually establish trust for the appropriate certificates.
+  - For Infisical Cloud users, the devices must be configured to trust the [Amazon root CA certificates](https://www.amazontrust.com/repository).
+
+## Guide to configuring EST
+
+1. Set up a certificate template with your selected issuing CA. This template will define the policies and parameters for certificates issued through EST. For detailed instructions on configuring a certificate template, refer to the certificate templates [documentation](/documentation/platform/pki/certificates#guide-to-issuing-certificates).
+
+2. Proceed to the certificate template's enrollment settings.
+   ![est enrollment dashboard](/images/platform/pki/est/template-enroll-hover.png)
+
+3. Select **EST** as the client enrollment method and fill in the remaining fields.
+
+   ![est enrollment modal create](/images/platform/pki/est/template-enrollment-modal.png)
+
+   - **Disable Bootstrap Certificate Validation** - Enable this if your devices are not configured with a bootstrap certificate.
+   - **Certificate Authority Chain** - This is the certificate chain used to validate your devices' manufacturing/pre-installed certificates.
This will be used to authenticate your devices with Infisical's EST server.
+   - **Passphrase** - This is also used to authenticate your devices with Infisical's EST server. When configuring the clients, use the value defined here as the EST password.
+
+   For security reasons, Infisical authenticates EST clients using both a client certificate and a passphrase.
+
+4. Once the configuration of enrollment options is completed, a new **EST Label** field appears in the enrollment settings. This is the value to use as the label in the URL when configuring the connection of EST clients to Infisical.
+   ![est enrollment modal create](/images/platform/pki/est/template-enrollment-est-label.png)
+
+   The complete URL of the supported EST endpoints will look like the following:
+
+   - https://app.infisical.com:8443/.well-known/est/f110f308-9888-40ab-b228-237b12de8b96/cacerts
+   - https://app.infisical.com:8443/.well-known/est/f110f308-9888-40ab-b228-237b12de8b96/simpleenroll
+   - https://app.infisical.com:8443/.well-known/est/f110f308-9888-40ab-b228-237b12de8b96/simplereenroll
+
+## Setting up EST clients
+
+- To use the EST passphrase in your clients, configure it as the EST password. The EST username can be set to any arbitrary value.
+- Use the appropriate client certificates for invoking the EST endpoints.
+  - For `simpleenroll`, use the bootstrapped/manufacturer client certificate.
+  - For `simplereenroll`, use a valid EST-issued client certificate.
+- When configuring the PKCS#12 objects for the client certificates, only include the leaf certificate and the private key.
diff --git a/docs/documentation/platform/pki/overview.mdx b/docs/documentation/platform/pki/overview.mdx
new file mode 100644
index 0000000000..259f15a5d6
--- /dev/null
+++ b/docs/documentation/platform/pki/overview.mdx
@@ -0,0 +1,12 @@
+---
+title: "Internal PKI"
+sidebarTitle: "Overview"
+description: "Learn how to create a Private CA hierarchy and issue X.509 certificates."
+---
+
+Infisical can be used to create a Private Certificate Authority (CA) hierarchy and issue X.509 certificates for internal use. This allows you to manage your own PKI infrastructure and issue digital certificates for services, applications, and devices.
+
+Infisical's internal PKI offering is split into two modules:
+
+- [Private CA](/documentation/platform/pki/private-ca): Infisical lets you create private CAs, including root and intermediary CAs.
+- [Certificates](/documentation/platform/pki/certificates): Infisical allows you to issue X.509 certificates using the private CAs you create.
diff --git a/docs/documentation/platform/pki/pki-issuer.mdx b/docs/documentation/platform/pki/pki-issuer.mdx
new file mode 100644
index 0000000000..c02e477c6d
--- /dev/null
+++ b/docs/documentation/platform/pki/pki-issuer.mdx
@@ -0,0 +1,250 @@
+---
+title: "Kubernetes Issuer"
+sidebarTitle: "Certificates for Kubernetes"
+description: "Learn how to automatically provision and manage TLS certificates in Kubernetes using Infisical PKI"
+---
+
+## Concept
+
+The Infisical PKI Issuer is an installable Kubernetes [cert-manager](https://cert-manager.io/) controller that uses Infisical PKI to sign certificate requests. The issuer is perfect for getting X.509 certificates for ingresses and other Kubernetes resources and is capable of automatically renewing certificates as needed.
+
+As part of the workflow, you install `cert-manager` and the Infisical PKI Issuer, and configure resources to represent the connection details to your Infisical PKI and the certificates you wish to issue.
Each issued certificate and corresponding private key is made available in a Kubernetes secret. + +We recommend reading the [cert-manager documentation](https://cert-manager.io/docs/) for a fuller understanding of all the moving parts. + +## Workflow + +A typical workflow for using the Infisical PKI Issuer to issue certificates for your Kubernetes resources consists of the following steps: + +1. Creating a machine identity in Infisical. +2. Creating a Kubernetes secret to store the credentials of the machine identity. +3. Installing `cert-manager` into your Kubernetes cluster. +4. Installing the Infisical PKI Issuer controller into your Kubernetes cluster. +5. Creating an `Issuer` or `ClusterIssuer` resource in your Kubernetes cluster to represent the Infisical PKI issuer you wish to use. +6. Creating a `Certificate` resource in your Kubernetes cluster to represent a certificate you wish to issue. As part of this step, you specify the Kubernetes `Secret` to create and store the issued certificate and private key. +7. Consuming the issued certificate across your Kubernetes resources from the specified Kubernetes `Secret`. + +## Guide + +In the following steps, we explore how to install the Infisical PKI Issuer using [kubectl](https://github.com/kubernetes/kubectl) and use it to obtain certificates for your Kubernetes resources. + + + + + Follow the instructions [here](/documentation/platform/identities/universal-auth) to configure a [machine identity](/documentation/platform/identities/machine-identities) in Infisical with Universal Auth. + + By the end of this step, you should have a **Client ID** and **Client Secret** on hand as part of the Universal Auth configuration for the Infisical PKI Issuer to authenticate with Infisical; this will be useful in steps 4 and 5. + + + Currently, the Infisical PKI Issuer only supports authenticating with Infisical via the [Universal Auth](/documentation/platform/identities/universal-auth) authentication method. + + We're planning to add support for [Kubernetes Auth](/documentation/platform/identities/kubernetes-auth) in the near future. + + + + Install `cert-manager` into your Kubernetes cluster by following the instructions [here](https://cert-manager.io/docs/installation/) or by running the following command: + + ```bash + kubectl apply -f https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml + ``` + + + Install the Infisical PKI Issuer controller into your Kubernetes cluster by running the following command: + + ```bash + kubectl apply -f https://raw.githubusercontent.com/Infisical/infisical-issuer/main/build/install.yaml + ``` + + + Start by creating a Kubernetes `Secret` containing the **Client Secret** from step 1. As mentioned previously, this will be used by the Infisical PKI issuer to authenticate with Infisical. + + + + ```bash + kubectl create secret generic issuer-infisical-client-secret \ + --namespace \ + --from-literal=clientSecret= + ``` + + + ```yaml secret-issuer.yaml + apiVersion: v1 + kind: Secret + metadata: + name: issuer-infisical-client-secret + namespace: + data: + clientSecret: + ``` + + ```bash + kubectl apply -f secret-issuer.yaml + ``` + + + + + Next, create the Infisical PKI Issuer by filling out `url`, `clientId`, either `caId` or `certificateTemplateId`, and applying the following configuration file for the `Issuer` resource. + This configuration file specifies the connection details to your Infisical PKI CA to be used for issuing certificates. 
+
+      ```yaml infisical-issuer.yaml
+      apiVersion: infisical-issuer.infisical.com/v1alpha1
+      kind: Issuer
+      metadata:
+        name: issuer-infisical
+        namespace: 
+      spec:
+        url: "https://app.infisical.com" # the URL of your Infisical instance
+        caId:  # the ID of the CA you want to use to issue certificates
+        certificateTemplateId:  # the ID of the certificate template you want to use to issue certificates against
+        authentication:
+          universalAuth:
+            clientId:  # the Client ID from step 1
+            secretRef: # reference to the Secret created in step 4
+              name: "issuer-infisical-client-secret"
+              key: "clientSecret"
+      ```
+
+      ```bash
+      kubectl apply -f infisical-issuer.yaml
+      ```
+
+
+        The Infisical PKI Issuer supports issuing certificates against a specific CA or a specific certificate template.
+
+        For this reason, you should only fill in the `caId` or the `certificateTemplateId` field, but not both.
+
+        We recommend using the `certificateTemplateId` field to issue certificates against a specific [certificate template](/documentation/platform/pki/certificate-templates)
+        since templates let you enforce constraints on issued certificates and may have alerting policies bound to them.
+
+
+      You can check that the issuer was created successfully by running the following command:
+
+      ```bash
+      kubectl get issuers.infisical-issuer.infisical.com -n  -o wide
+      ```
+
+      ```bash
+      NAME               AGE
+      issuer-infisical   21h
+      ```
+
+
+        An `Issuer` is a namespaced resource, and it is not possible to issue certificates from an `Issuer` in a different namespace.
+        This means you will need to create an `Issuer` in each namespace you wish to obtain `Certificates` in.
+
+        If you want to create a single `Issuer` that can be consumed in multiple namespaces, you should consider creating a `ClusterIssuer` resource. This is almost identical to the `Issuer` resource; however, it is non-namespaced, so it can be used to issue `Certificates` across all namespaces.
+
+        You can read more about the `Issuer` and `ClusterIssuer` resources [here](https://cert-manager.io/docs/configuration/).
+
+
+
+
+      Finally, create a `Certificate` by applying the following configuration file.
+      This configuration file specifies the details of the (end-entity/leaf) certificate to be issued.
+
+      ```yaml certificate-issuer.yaml
+      apiVersion: cert-manager.io/v1
+      kind: Certificate
+      metadata:
+        name: certificate-by-issuer
+        namespace: 
+      spec:
+        commonName: certificate-by-issuer.example.com # the common name for the certificate
+        secretName: certificate-by-issuer # the name of the Kubernetes Secret to create and store the certificate and private key in
+        issuerRef:
+          name: issuer-infisical
+          group: infisical-issuer.infisical.com
+          kind: Issuer
+        privateKey: # the algorithm and key size to use
+          algorithm: ECDSA
+          size: 256
+        duration: 48h # the ttl for the certificate
+        renewBefore: 12h # the time before the certificate expiry that the certificate should be automatically renewed
+      ```
+
+      The above sample configuration file specifies a certificate to be issued with the common name `certificate-by-issuer.example.com` and an ECDSA private key using the P-256 curve, valid for 48 hours; the certificate will be automatically renewed by `cert-manager` 12 hours before expiry.
+      The certificate is issued by the issuer `issuer-infisical` created in the previous step, and the resulting certificate and private key will be stored in a secret named `certificate-by-issuer`.
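+
+      Once the `Certificate` is ready, the issued certificate and key can be consumed from the `certificate-by-issuer` secret like any other `kubernetes.io/tls` secret. As a minimal sketch (the ingress name, service name, and port below are hypothetical), you could terminate TLS on an Ingress with it:
+
+      ```bash
+      # Hypothetical example: expose a service over TLS using the issued secret.
+      # demo-ingress, demo-service:8080, and <namespace> are placeholder names.
+      kubectl create ingress demo-ingress \
+        --namespace <namespace> \
+        --rule="certificate-by-issuer.example.com/*=demo-service:8080,tls=certificate-by-issuer"
+      ```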
+
+      Note that the full list of the fields supported on the `Certificate` resource can be found in the API reference documentation [here](https://cert-manager.io/docs/reference/api-docs/#cert-manager.io/v1.CertificateSpec).
+
+      You can check that the certificate was created successfully by running the following command:
+
+      ```bash
+      kubectl get certificates -n  -o wide
+      ```
+
+      ```bash
+      NAME                    READY   SECRET                  ISSUER             STATUS                                          AGE
+      certificate-by-issuer   True    certificate-by-issuer   issuer-infisical   Certificate is up to date and has not expired   20h
+      ```
+
+
+      Since the actual certificate and private key are stored in a Kubernetes secret, we can check that the secret was created successfully by running the following command:
+
+      ```bash
+      kubectl get secret certificate-by-issuer -n 
+      ```
+
+      ```bash
+      NAME                    TYPE                DATA   AGE
+      certificate-by-issuer   kubernetes.io/tls   2      26h
+      ```
+
+      We can `describe` the secret to get more information about it:
+
+      ```bash
+      kubectl describe secret certificate-by-issuer -n default
+      ```
+
+      ```bash
+      Name:         certificate-by-issuer
+      Namespace:    default
+      Labels:       controller.cert-manager.io/fao=true
+      Annotations:  cert-manager.io/alt-names:
+                    cert-manager.io/certificate-name: certificate-by-issuer
+                    cert-manager.io/common-name: certificate-by-issuer.example.com
+                    cert-manager.io/ip-sans:
+                    cert-manager.io/issuer-group: infisical-issuer.infisical.com
+                    cert-manager.io/issuer-kind: Issuer
+                    cert-manager.io/issuer-name: issuer-infisical
+                    cert-manager.io/uri-sans:
+
+      Type:  kubernetes.io/tls
+
+      Data
+      ====
+      ca.crt:   1306 bytes
+      tls.crt:  2380 bytes
+      tls.key:  227 bytes
+      ```
+
+      Here, `ca.crt` is the Root CA certificate, `tls.crt` is the requested certificate followed by the certificate chain, and `tls.key` is the private key for the certificate.
+
+      We can decode the certificate and print it out using `openssl`:
+
+      ```bash
+      kubectl get secret certificate-by-issuer -n default -o jsonpath='{.data.tls\.crt}' | base64 --decode | openssl x509 -text -noout
+      ```
+
+      In any case, the certificate is ready to be used as a Kubernetes Secret by your Kubernetes resources.
+
+
+
+## FAQ
+
+
+
+    The full list of the fields supported on the `Certificate` resource can be found in the API reference documentation [here](https://cert-manager.io/docs/reference/api-docs/#cert-manager.io/v1.CertificateSpec).
+
+
+      Currently, not all fields are supported by the Infisical PKI Issuer.
+
+
+
+    Yes. `cert-manager` will automatically renew certificates according to the `renewBefore` threshold of expiry as
+    specified in the corresponding `Certificate` resource.
+
+    You can read more about the `renewBefore` field [here](https://cert-manager.io/docs/reference/api-docs/#cert-manager.io/v1.CertificateSpec).
+
+
\ No newline at end of file
diff --git a/docs/documentation/platform/pki/private-ca.mdx b/docs/documentation/platform/pki/private-ca.mdx
new file mode 100644
index 0000000000..d7f3f896c4
--- /dev/null
+++ b/docs/documentation/platform/pki/private-ca.mdx
@@ -0,0 +1,337 @@
+---
+title: "Private CA"
+sidebarTitle: "Private CA"
+description: "Learn how to create a Private CA hierarchy with Infisical."
+---
+
+## Concept
+
+The first step to creating your Internal PKI is to create a Private Certificate Authority (CA) hierarchy, a structure of entities
+used to issue digital certificates for services, applications, and devices.
+
+ +```mermaid +graph TD + A[Root CA] + A --> B[Intermediate CA] + A --> C[Intermediate CA] +``` + +
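+
+Once the hierarchy is in place, you can confirm how each CA is constrained by inspecting the Basic Constraints extension of its certificate. A minimal sketch, assuming you have downloaded a CA certificate as `ca.pem` (a placeholder file name):
+
+```bash
+# Print the CA flag and path length constraint of a CA certificate
+openssl x509 -in ca.pem -noout -text | grep -A 1 "Basic Constraints"
+```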
+
+## Workflow
+
+A typical workflow for setting up a Private CA hierarchy consists of the following steps:
+
+1. Configuring an Infisical root CA with details like name, validity period, and path length; this step is optional if you wish to use an external root CA.
+2. Configuring and chaining intermediate CA(s) with details like name, validity period, path length, and imported certificate to your Root CA.
+3. Managing the CA lifecycle events such as CA succession.
+
+
+  Note that this workflow can be executed via the Infisical UI or manually, such
+  as via the API. If manually executing the workflow, you may have to create a
+  Certificate Signing Request (CSR) for the intermediate CA, create an
+  intermediate certificate using the root CA private key and CSR, and import the
+  intermediate certificate back to the intermediate CA as part of Step 2.
+
+
+## Guide to Creating a CA Hierarchy
+
+In the following steps, we explore how to create a simple Private CA hierarchy
+consisting of an (optional) root CA and an intermediate CA.
+
+
+
+
+
+      If you wish to use an external root CA, you can skip this step and head to step 2 to create an intermediate CA.
+
+      To create a root CA, head to your Project > Internal PKI > Certificate Authorities and press **Create CA**.
+
+      ![pki create ca](/images/platform/pki/ca/ca-create.png)
+
+      Here, set the **CA Type** to **Root** and fill out details for the root CA.
+
+      ![pki create root ca](/images/platform/pki/ca/ca-create-root.png)
+
+      Here's some guidance on each field:
+
+      - Valid Until: The date until which the CA is valid in the date time string format specified [here](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date#date_time_string_format). For example, the following formats would be valid: `YYYY`, `YYYY-MM`, `YYYY-MM-DD`, `YYYY-MM-DDTHH:mm:ss.sssZ`.
+      - Path Length: The maximum number of intermediate CAs that can be chained to this CA. A path of `-1` implies no limit; a path of `0` implies no intermediate CAs can be chained.
+      - Key Algorithm: The type of public key algorithm and size, in bits, of the key pair that the CA creates when it issues a certificate. Supported key algorithms are `RSA 2048`, `RSA 4096`, `ECDSA P-256`, and `ECDSA P-384` with the default being `RSA 2048`.
+      - Friendly Name: A friendly name for the CA; this is only for display and defaults to the subject of the CA if left empty.
+      - Organization (O): The organization name.
+      - Country (C): The country code.
+      - State or Province Name: The state or province.
+      - Locality Name: The city or locality.
+      - Common Name: The name of the CA.
+      - Require Template for Certificate Issuance: Whether or not certificates for this CA can only be issued through certificate templates (recommended).
+
+
+        The Organization, Country, State or Province Name, Locality Name, and Common Name make up the **Distinguished Name (DN)** or **subject** of the CA.
+        At least one of these fields must be filled out.
+
+
+
+      2.1. To create an intermediate CA, press **Create CA** again, but this time specify the **CA Type** as **Intermediate**. Fill out the details for the intermediate CA.
+
+      ![pki create intermediate ca](/images/platform/pki/ca/ca-create-intermediate.png)
+
+      2.2. Next, press the **Install Certificate** option on the intermediate CA from step 2.1.
+
+      ![pki install cert opt](/images/platform/pki/ca/ca-install-intermediate-opt.png)
+
+      2.3a. If you created a root CA in step 1, select **Infisical CA** for the **Parent CA Type** field.
+
+      Next, set the **Parent CA** to the root CA created in step 1 and configure the intended **Valid Until** and **Path Length** fields on the intermediate CA; feel free to use the prefilled values.
+
+      ![pki install cert](/images/platform/pki/ca/ca-install-intermediate.png)
+
+      Here's some guidance on each field:
+
+      - Parent CA: The parent CA to which this intermediate CA will be chained. In this case, it should be the root CA created in step 1.
+      - Valid Until: The date until which the CA is valid in the date time string format specified [here](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date#date_time_string_format). The date must be within the validity period of the parent CA.
+      - Path Length: The maximum number of intermediate CAs that can be chained to this CA. The path length must be less than the path length of the parent CA.
+
+      Finally, press **Install** to chain the intermediate CA to the root CA; this creates a Certificate Signing Request (CSR) for the intermediate CA, creates an intermediate certificate using the root CA private key and CSR, and imports the signed certificate back to the intermediate CA.
+
+      ![pki cas](/images/platform/pki/ca/cas.png)
+
+      Great! You've successfully created a Private CA hierarchy with a root CA and an intermediate CA.
+      Now check out the [Certificates](/documentation/platform/pki/certificates) page to learn more about how to issue X.509 certificates using the intermediate CA.
+
+      2.3b. If you have an external root CA, select **External CA** for the **Parent CA Type** field.
+
+      Next, use the provided intermediate CSR to generate a certificate from your external root CA and paste the PEM-encoded certificate back into the **Certificate Body** field; the PEM-encoded external root CA certificate should be pasted under the **Certificate Chain** field.
+
+      ![pki ca csr](/images/platform/pki/ca/ca-install-intermediate-csr.png)
+
+      Finally, press **Install** to import the certificate and certificate chain as part of the installation step for the intermediate CA.
+
+      Great! You've successfully created a Private CA hierarchy with an intermediate CA chained to an external root CA.
+      Now check out the [Certificates](/documentation/platform/pki/certificates) page to learn more about how to issue X.509 certificates using the intermediate CA.
+
+
+
+
+
+
+
+      If you wish to use an external root CA, you can skip this step and head to step 2 to create an intermediate CA.
+
+      To create a root CA, make an API request to the [Create CA](/api-reference/endpoints/certificate-authorities/create) API endpoint, specifying the `type` as `root`.
+
+      ### Sample request
+
+      ```bash Request
+      curl --location --request POST 'https://app.infisical.com/api/v1/pki/ca' \
+        --header 'Authorization: Bearer ' \
+        --header 'Content-Type: application/json' \
+        --data-raw '{
+          "projectSlug": "",
+          "type": "root",
+          "commonName": "My Root CA"
+        }'
+      ```
+
+      ### Sample response
+
+      ```bash Response
+      {
+        ca: {
+          id: "",
+          type: "root",
+          commonName: "My Root CA",
+          ...
+        }
+      }
+      ```
+
+      By default, Infisical creates a root CA with the `RSA_2048` key algorithm, a validity period of 10 years, and no restrictions on path length;
+      you may override these defaults by specifying your own options when making the API request.
+
+
+      2.1. To create an intermediate CA, make an API request to the [Create CA](/api-reference/endpoints/certificate-authorities/create) API endpoint, specifying the `type` as `intermediate`.
+
+      ### Sample request
+
+      ```bash Request
+      curl --location --request POST 'https://app.infisical.com/api/v1/pki/ca' \
+        --header 'Authorization: Bearer ' \
+        --header 'Content-Type: application/json' \
+        --data-raw '{
+          "projectSlug": "",
+          "type": "intermediate",
+          "commonName": "My Intermediate CA"
+        }'
+      ```
+
+      ### Sample response
+
+      ```bash Response
+      {
+        ca: {
+          id: "",
+          type: "intermediate",
+          commonName: "My Intermediate CA",
+          ...
+        }
+      }
+      ```
+
+      2.2. Next, get a certificate signing request from the intermediate CA by making an API request to the [Get CSR](/api-reference/endpoints/certificate-authorities/csr) API endpoint.
+
+      ### Sample request
+
+      ```bash Request
+      curl --location --request GET 'https://app.infisical.com/api/v1/pki/ca//csr' \
+        --header 'Authorization: Bearer ' \
+        --data-raw ''
+      ```
+
+      ### Sample response
+
+      ```bash Response
+      {
+        csr: "..."
+      }
+      ```
+
+      If using an external root CA, then use the CSR to generate a certificate for the intermediate CA using your external root CA and skip to step 2.4.
+
+      2.3. Next, create an intermediate certificate by making an API request to the [Sign Intermediate](/api-reference/endpoints/certificate-authorities/sign-intermediate) API endpoint
+      containing the CSR from step 2.2, referencing the root CA created in step 1.
+
+      ### Sample request
+
+      ```bash Request
+      curl --location --request POST 'https://app.infisical.com/api/v1/pki/ca//sign-intermediate' \
+        --header 'Content-Type: application/json' \
+        --data-raw '{
+          "csr": "",
+          "notAfter": "2029-06-12"
+        }'
+      ```
+
+      ### Sample response
+
+      ```bash Response
+      {
+        certificate: "...",
+        certificateChain: "...",
+        issuingCaCertificate: "...",
+        serialNumber: "..."
+      }
+      ```
+
+
+        The `notAfter` value must be within the validity period of the root CA; that is, if the root CA is valid until `2029-06-12`, the intermediate CA must be valid until a date before `2029-06-12`.
+
+
+      2.4. Finally, import the intermediate certificate and certificate chain from step 2.3 back to the intermediate CA by making an API request to the [Import Certificate](/api-reference/endpoints/certificate-authorities/import-cert) API endpoint.
+
+      If using an external root CA, then import the generated certificate, with the root CA certificate as the certificate chain, back into the intermediate CA.
+
+      ### Sample request
+
+      ```bash Request
+      curl --location --request POST 'https://app.infisical.com/api/v1/pki/ca//import-certificate' \
+        --header 'Authorization: Bearer ' \
+        --header 'Content-Type: application/json' \
+        --data-raw '{
+          "certificate": "",
+          "certificateChain": ""
+        }'
+      ```
+
+      ### Sample response
+
+      ```bash Response
+      {
+        message: "Successfully imported certificate to CA",
+        caId: "..."
+      }
+      ```
+
+      Great! You've successfully created a Private CA hierarchy with a root CA and an intermediate CA. Now check out the [Certificates](/documentation/platform/pki/certificates) page to learn more about how to issue X.509 certificates using the intermediate CA.
+
+
+
+
+
+## Guide to CA Renewal
+
+In the following steps, we explore how to renew a CA certificate.
+
+
+  If renewing an intermediate CA chained to an Infisical CA, then Infisical will
+  automate the process of generating a new certificate for the intermediate CA for you.
+
+If renewing an intermediate CA chained to an external parent CA, you'll be
+required to generate a new certificate from the external parent CA and manually import
+the certificate back to the intermediate CA.
+
+
+
+
+    Head to the CA Page of the CA you wish to renew and press **Renew CA** on
+    the left side. ![pki ca renewal
+    page](/images/platform/pki/ca-renewal-page.png) Input a new **Valid Until**
+    date to be used for the renewed CA certificate and press **Renew** to renew
+    the CA. ![pki ca renewal
+    modal](/images/platform/pki/ca-renewal-modal.png)
+
+    The new **Valid Until** date must be within the validity period of the
+    parent CA.
+
+
+    To renew a CA certificate, make an API request to the [Renew CA](/api-reference/endpoints/certificate-authorities/renew) API endpoint, specifying the new `notAfter` date for the CA.
+
+    ### Sample request
+
+    ```bash Request
+    curl --location --request POST 'https://app.infisical.com/api/v1/pki/ca//renew' \
+    --header 'Authorization: Bearer ' \
+    --header 'Content-Type: application/json' \
+    --data-raw '{
+        "type": "existing",
+        "notAfter": "2029-06-12"
+    }'
+    ```
+
+    ### Sample response
+
+    ```bash Response
+    {
+        certificate: "...",
+        certificateChain: "...",
+        serialNumber: "..."
+    }
+    ```
+
+
+
+## FAQ
+
+
+    Infisical supports `RSA 2048`, `RSA 4096`, `ECDSA P-256`, and `ECDSA P-384` key
+    algorithms specified at the time of creating a CA.
+
+
+    At the moment, Infisical only supports CA renewal via the same key pair. We
+    anticipate supporting CA renewal via a new key pair in the coming month.
+
+
+    Yes. You may obtain a CSR from the Intermediate CA and use it to generate a
+    certificate from your external CA. The certificate, along with the external
+    CA certificate chain, can be imported back to the Intermediate CA as part of
+    the CA installation step.
+
+
diff --git a/docs/documentation/platform/pr-workflows.mdx b/docs/documentation/platform/pr-workflows.mdx
index 9df1236127..d582c838bf 100644
--- a/docs/documentation/platform/pr-workflows.mdx
+++ b/docs/documentation/platform/pr-workflows.mdx
@@ -18,16 +18,26 @@ In a similar way, to solve the above-mentioned issues, Infisical provides a feat
 
 ### Setting a policy
 
-First, you would need to create a set of policies for a certain environment. In the example below, a generic policy for a production environment is shown. In this case, any user who submits a change to `prod` would first have to get an approval by a predefined approver (or multiple approvers).
+First, you would need to create a set of policies for a certain environment. In the example below, a generic change policy for a production environment is shown. In this case, any user who submits a change to `prod` would first have to get an approval by a predefined approver (or multiple approvers).
 
 ![create secret update policy](../../images/platform/pr-workflows/secret-update-policy.png)
 
+### Policy enforcement levels
+
+The enforcement level determines how strict the policy is. A **Hard** enforcement level means that any change that matches the policy will need full approval prior to merging. A **Soft** enforcement level allows for break-glass functionality on the request. If a change request is bypassed, the approvers will be notified via email.
+
+### Example of creating a change policy
+
+When creating a policy, you can choose the type of policy you want to create. In this case, we will be creating a `Change Policy`. Other types of policies include `Access Policy`, which creates policies for **[Access Requests](/documentation/platform/access-controls/access-requests)**.
+
+![create panel secret update policy](../../images/platform/pr-workflows/create-change-policy.png)
+
 ### Example of updating secrets with Approval workflows
 
 When a user submits a change to an environment that is under a particular policy, a corresponding change request will go to a predefined approver (or multiple approvers).
 
 ![secret update change requests](../../images/platform/pr-workflows/secret-update-request.png)
 
-An approver is notified by email and/or Slack as soon as the request is initiated. In the Infisical Dashboard, they will be able to `approve` and `merge` (or `deny`) a request for a change in a particular environment. After that, depending on the workflows setup, the change will be automatically propagated to the right applications (e.g., using [Infisical Kubernetes Operator](https://infisical.com/docs/integrations/platforms/kubernetes)).
+Approvers are notified by email and/or Slack as soon as the request is initiated. In the Infisical Dashboard, they will be able to `approve` and `merge` (or `deny`) a request for a change in a particular environment. After that, depending on the workflow setup, the change will be automatically propagated to the right applications (e.g., using [Infisical Kubernetes Operator](https://infisical.com/docs/integrations/platforms/kubernetes)).
 
 ![secrets update pull request](../../images/platform/pr-workflows/secret-update-pr.png)
\ No newline at end of file
diff --git a/docs/documentation/platform/project-templates.mdx b/docs/documentation/platform/project-templates.mdx
new file mode 100644
index 0000000000..d84c6bdc1c
--- /dev/null
+++ b/docs/documentation/platform/project-templates.mdx
@@ -0,0 +1,147 @@
+---
+title: "Project Templates"
+sidebarTitle: "Project Templates"
+description: "Learn how to manage and apply project templates"
+---
+
+## Concept
+
+Project Templates streamline project setup by providing customizable templates that quickly configure projects with a predefined set of environments and roles.
+
+
+  Project Templates is a paid feature.
+  If you're using Infisical Cloud, then it is available under the **Enterprise Tier**. If you're self-hosting Infisical,
+  then you should contact team@infisical.com to purchase an enterprise license to use it.
+
+
+## Workflow
+
+The typical workflow for using Project Templates consists of the following steps:
+
+1. Creating a project template: As part of this step, you will configure a set of environments and roles to be created when applying this template to a project.
+2. Using a project template: When creating new projects, optionally specify a project template to provision the project with the configured roles and environments.
+
+
+  Note that this workflow can be executed via the Infisical UI or through the API.
+
+
+## Guide to Creating a Project Template
+
+In the following steps, we'll explore how to set up a project template.
+
+
+
+    Navigate to the Project Templates tab on the Organization Settings page and tap on the **Add Template** button.
+    ![project template add button](/images/platform/project-templates/project-template-add-button.png)
+
+    Specify your template details. Here's some guidance on each field:
+
+    - Name: A slug-friendly name for the template.
+    - Description: An optional description of the intended usage of this template.
+
+    ![project template create modal](/images/platform/project-templates/project-template-create.png)
+
+    Once your template is created, you'll be directed to the configuration section.
+    ![project template edit form](/images/platform/project-templates/project-template-edit-form.png)
+
+    Customize the environments and roles to your needs.
+    ![project template customized](/images/platform/project-templates/project-template-customized.png)
+
+
+      Be sure to save your environment and role changes.
+
+
+
+    To create a project template, make an API request to the [Create Project Template](/api-reference/endpoints/project-templates/create) API endpoint.
+
+    ### Sample request
+
+    ```bash Request
+    curl --request POST \
+    --url https://app.infisical.com/api/v1/project-templates \
+    --header 'Content-Type: application/json' \
+    --data '{
+        "name": "my-project-template",
+        "description": "...",
+        "environments": "[...]",
+        "roles": "[...]"
+    }'
+    ```
+
+    ### Sample response
+
+    ```bash Response
+    {
+      "projectTemplate": {
+        "id": "",
+        "name": "my-project-template",
+        "description": "...",
+        "environments": "[...]",
+        "roles": "[...]",
+        "orgId": "",
+        "createdAt": "2023-11-07T05:31:56Z",
+        "updatedAt": "2023-11-07T05:31:56Z"
+      }
+    }
+    ```
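+
+    To make the elided `environments` and `roles` fields above concrete, here's a hypothetical fully-populated request. The array item shapes (`name`, `slug`, `position`, `permissions`) and the bearer token placeholder are illustrative assumptions; consult the [Create Project Template](/api-reference/endpoints/project-templates/create) API reference for the authoritative schema.
+
+    ```bash
+    # Hypothetical example payload; field shapes are assumptions, not the
+    # authoritative schema.
+    curl --request POST \
+    --url https://app.infisical.com/api/v1/project-templates \
+    --header 'Authorization: Bearer <access-token>' \
+    --header 'Content-Type: application/json' \
+    --data '{
+        "name": "backend-service",
+        "description": "Baseline template for backend services",
+        "environments": [
+            { "name": "Development", "slug": "dev", "position": 1 },
+            { "name": "Production", "slug": "prod", "position": 2 }
+        ],
+        "roles": [
+            { "name": "Developer", "slug": "developer", "permissions": [] }
+        ]
+    }'
+    ```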
+
+
+## Guide to Using a Project Template
+
+In the following steps, we'll explore how to use a project template when creating a project.
+
+
+    When creating a new project, select the desired template from the dropdown menu in the create project modal.
+    ![project template apply](/images/platform/project-templates/project-template-apply.png)
+
+    Your project will be provisioned with the configured template roles and environments.
+
+
+    To use a project template, make an API request to the [Create Project](/api-reference/endpoints/workspaces/create-workspace) API endpoint with the specified template name included.
+
+    ### Sample request
+
+    ```bash Request
+    curl --request POST \
+    --url https://app.infisical.com/api/v2/workspace \
+    --header 'Content-Type: application/json' \
+    --data '{
+        "projectName": "My Project",
+        "template": "" // defaults to "default"
+    }'
+    ```
+
+    ### Sample response
+
+    ```bash Response
+    {
+      "project": {
+        "id": "",
+        "environments": "[...]", // configured environments
+        ...
+      }
+    }
+    ```
+
+
+      Note that configured roles are not included in the project response.
+
+
+
+## FAQ
+
+
+    No. Project templates only apply at the time of project creation.
+
+
diff --git a/docs/documentation/platform/scim/azure.mdx b/docs/documentation/platform/scim/azure.mdx
index ff46fe4e7e..74a6c20308 100644
--- a/docs/documentation/platform/scim/azure.mdx
+++ b/docs/documentation/platform/scim/azure.mdx
@@ -28,6 +28,13 @@ Prerequisites:
 
 ![SCIM copy token](/images/platform/scim/scim-copy-token.png)
+ + In Azure, navigate to Enterprise Application > Users and Groups. Add any users and/or groups to your application that you would like + to be provisioned over to Infisical. + + ![SCIM Azure Users and Groups](/images/platform/scim/azure/scim-azure-add-users-and-groups.png) + + In Azure, head to your Enterprise Application > Provisioning > Overview and press **Get started**. @@ -39,7 +46,7 @@ Prerequisites: - Tenant URL: Input **SCIM URL** from Step 1. - Secret Token: Input the **New SCIM Token** from Step 1. - Afterwards, press the **Test Connection** button to check that SCIM is configured properly. + Afterwards, click **Enable SCIM** and press the **Test Connection** button to check that SCIM is configured properly. ![SCIM Azure](/images/platform/scim/azure/scim-azure-config.png) @@ -71,4 +78,4 @@ Prerequisites: For this reason, SCIM-provisioned users are initialized but must finish setting up their account when logging in the first time by creating a master encryption/decryption key. With this implementation, IdPs and SCIM providers cannot and will not have access to the decryption key needed to decrypt your secrets. - \ No newline at end of file + diff --git a/docs/documentation/platform/scim/group-mappings.mdx b/docs/documentation/platform/scim/group-mappings.mdx new file mode 100644 index 0000000000..acce52e1e2 --- /dev/null +++ b/docs/documentation/platform/scim/group-mappings.mdx @@ -0,0 +1,26 @@ +--- +title: "SCIM Group Mappings" +description: "Learn how to enhance your SCIM implementation using group mappings" +--- + + + SCIM provisioning, and by extension group mapping, is a paid feature. + + If you're using Infisical Cloud, then it is available under the **Enterprise Tier**. If you're self-hosting Infisical, + then you should contact sales@infisical.com to purchase an enterprise license to use it. + + +## SCIM Group to Organization Role Mapping + +By default, when users are provisioned via SCIM, they will be assigned the default organization role configured in [Organization General Settings](/documentation/platform/organization#settings). + +For more precise control over membership roles, you can set up SCIM Group to Organization Role Mappings. This enables you to assign specific roles based on the group from which a user is provisioned. + +![SCIM Group Mapping](/images/platform/scim/scim-group-mapping.png) + +To configure a mapping, simply enter the SCIM group's name and select the role you would like users to be assigned from this group. Be sure +to tap **Update Mappings** once complete. + + + SCIM Group Mappings only apply when users are first provisioned. Previously provisioned users will not be affected, allowing you to customize user roles after they are added. + diff --git a/docs/documentation/platform/secret-rotation/mssql.mdx b/docs/documentation/platform/secret-rotation/mssql.mdx new file mode 100644 index 0000000000..c34bb9034b --- /dev/null +++ b/docs/documentation/platform/secret-rotation/mssql.mdx @@ -0,0 +1,139 @@ +--- +title: "Microsoft SQL Server" +description: "Learn how to automatically rotate Microsoft SQL Server user passwords." +--- + +The Infisical SQL Server secret rotation allows you to automatically rotate your database users' passwords at a predefined interval. + +## Prerequisites + +1. Create two SQL Server logins and database users with the required permissions. We'll refer to them as `user-a` and `user-b`. +2. Create another SQL Server login with permissions to alter logins for `user-a` and `user-b`. We'll refer to this as the `admin` login. 
+
+Here's how to set up the prerequisites:
+
+```sql
+-- Create the logins (at server level)
+CREATE LOGIN [user-a] WITH PASSWORD = 'ComplexPassword1';
+CREATE LOGIN [user-b] WITH PASSWORD = 'ComplexPassword2';
+
+-- Create database users for the logins (in your specific database)
+USE [YourDatabase];
+CREATE USER [user-a] FOR LOGIN [user-a];
+CREATE USER [user-b] FOR LOGIN [user-b];
+
+-- Grant necessary permissions to the users
+GRANT SELECT, INSERT, UPDATE, DELETE ON SCHEMA::dbo TO [user-a];
+GRANT SELECT, INSERT, UPDATE, DELETE ON SCHEMA::dbo TO [user-b];
+
+-- Create admin login with permission to alter other logins
+CREATE LOGIN [admin] WITH PASSWORD = 'AdminComplexPassword';
+CREATE USER [admin] FOR LOGIN [admin];
+
+-- Grant permission to alter any login
+GRANT ALTER ANY LOGIN TO [admin];
+```
+
+To learn more about SQL Server's permission system, please visit this [documentation](https://learn.microsoft.com/en-us/sql/relational-databases/security/authentication-access/getting-started-with-database-engine-permissions).
+
+## How it works
+
+1. Infisical connects to your database using the provided `admin` login credentials.
+2. A random value is generated and the password for `user-a` is updated with the new value (see the sketch after this list).
+3. The new password is then tested by logging into the database.
+4. If the test is successful, it's saved to the output secret mappings so that the rest of the system gets the newly rotated value(s).
+5. The process is then repeated for `user-b` on the next rotation.
+6. The cycle repeats until secret rotation is deleted/stopped.
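+
+Conceptually, one rotation cycle boils down to something like the following sketch. This is illustrative only (Infisical generates its own random value and manages the connection internally); it uses the `sqlcmd` CLI together with the example values from the prerequisites above.
+
+```bash
+# Illustrative sketch of one rotation cycle for user-a (not Infisical's actual code).
+# Generate a random password, apply it via the admin login, then test it.
+NEW_PASSWORD="$(openssl rand -base64 24)"
+
+sqlcmd -S your-server.database.windows.net,1433 -d master -U admin -P 'AdminComplexPassword' \
+  -Q "ALTER LOGIN [user-a] WITH PASSWORD = '${NEW_PASSWORD}';"
+
+# Verify the new credentials by logging in as the rotated user.
+sqlcmd -S your-server.database.windows.net,1433 -d master -U user-a -P "${NEW_PASSWORD}" -Q "SELECT 1;"
+```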
+
+## Rotation Configuration
+
+
+    Head over to the Secret Rotation configuration page of your project by clicking on `Secret Rotation` in the left sidebar.
+
+
+      SQL Server admin username
+
+
+      SQL Server admin password
+
+
+      SQL Server host URL (e.g., your-server.database.windows.net)
+
+
+      Database port number (default: 1433)
+
+
+      Database name (default: master)
+
+
+      The first login name to rotate: `user-a`
+
+
+      The second login name to rotate: `user-b`
+
+
+      Optional database certificate used to connect to the database
+
+
+
+    When a secret rotation is successful, the updated values need to be saved to existing key(s) in your project.
+
+
+      The environment where the rotated credentials should be mapped to.
+
+
+      The secret path where the rotated credentials should be mapped to.
+
+
+      The interval, in days, at which the credentials should be rotated.
+
+
+      Select an existing secret key where the rotated database username value should be saved.
+
+
+      Select an existing secret key where the rotated database password value should be saved.
+
+
+
+## FAQ
+
+
+    When a system is scaled horizontally across multiple nodes, redeployment doesn't happen instantly.
+
+    This means that when the secrets are rotated, and the redeployment is triggered, the existing system will still be using the old credentials until the change rolls out.
+
+    To avoid causing failure for them, the old credentials are not removed. Instead, in the next rotation, the previous user's credentials are updated.
+
+
+    The admin account is used by Infisical to update the credentials for `user-a` and `user-b`.
+
+    You don't need to grant all permissions for your admin account, just the permission to alter logins (`ALTER ANY LOGIN`).
+
+
+    When using Azure SQL Database, you'll need to:
+
+    1. Use the full server name as your host (e.g., your-server.database.windows.net)
+    2. Ensure your admin account is either the Azure SQL Server admin or an Azure AD account with appropriate permissions
+    3. Configure your Azure SQL Server firewall rules to allow connections from Infisical's IP addresses
+
+
diff --git a/docs/documentation/platform/secret-sharing.mdx b/docs/documentation/platform/secret-sharing.mdx
index 680751820a..4ff3a326b2 100644
--- a/docs/documentation/platform/secret-sharing.mdx
+++ b/docs/documentation/platform/secret-sharing.mdx
@@ -5,7 +5,7 @@ description: "Learn how to share time & view-count bound secrets securely with a
 ---
 
 Developers frequently need to share secrets with team members, contractors, or other third parties, which can be risky due to potential leaks or misuse.
-Infisical offers a secure solution for sharing secrets over the internet in a time and view count bound manner.
+Infisical offers a secure solution for sharing secrets over the internet in a time and view count bound manner. It is possible to share secrets without signing up via [share.infisical.com](https://share.infisical.com) or via the Infisical Dashboard (which has more advanced functionality).
 
 With its zero-knowledge architecture, secrets shared via Infisical remain unreadable even to Infisical itself.
 
@@ -21,7 +21,8 @@ With its zero-knowledge architecture, secrets shared via Infisical remain unread
    zero knowledge architecture.
 
-3. Click on the **Share Secret** button. Set the secret, its expiration time as well as the number of views allowed. It expires as soon as any of the conditions are met.
+3. Click on the **Share Secret** button. Set the secret, its expiration time, and specify if the secret can be viewed only once. It expires as soon as any of the conditions are met.
+Also, specify if the secret can be accessed by anyone or only by people within your organization.
 
 ![Add View-Bound Sharing Secret](../../images/platform/secret-sharing/create-new-secret.png)
 
diff --git a/docs/documentation/platform/sso/auth0-oidc.mdx b/docs/documentation/platform/sso/auth0-oidc.mdx
new file mode 100644
index 0000000000..9419d0976b
--- /dev/null
+++ b/docs/documentation/platform/sso/auth0-oidc.mdx
@@ -0,0 +1,86 @@
+---
+title: "Auth0 OIDC"
+description: "Learn how to configure Auth0 OIDC for Infisical SSO."
+---
+
+
+  Auth0 OIDC SSO is a paid feature. If you're using Infisical Cloud, then it is
+  available under the **Pro Tier**. If you're self-hosting Infisical, then you
+  should contact sales@infisical.com to purchase an enterprise license to use
+  it.
+
+
+
+    1.1. From the Application's Page, navigate to the settings tab of the Auth0 application you want to integrate with Infisical.
+    ![OIDC auth0 list of applications](../../../images/sso/auth0-oidc/application-settings.png)
+
+    1.2. In the Application URIs section, set the **Application Login URI** and **Allowed Web Origins** fields to `https://app.infisical.com` and the **Allowed Callback URL** field to `https://app.infisical.com/api/v1/sso/oidc/callback`.
+    ![OIDC auth0 create application uris](../../../images/sso/auth0-oidc/application-uris.png)
+    ![OIDC auth0 create application origin](../../../images/sso/auth0-oidc/application-origin.png)
+
+    If you’re self-hosting Infisical, then you will want to replace https://app.infisical.com with your own domain.
+
+    Once done, click **Save Changes**.
+
+    1.3. Proceed to the Connections Tab and enable desired connections.
+    ![OIDC auth0 application connections](../../../images/sso/auth0-oidc/application-connections.png)
+
+
+    2.1. From the application settings page, retrieve the **Client ID** and **Client Secret**.
+    ![OIDC auth0 application credential](../../../images/sso/auth0-oidc/application-credential.png)
+
+    2.2. In the advanced settings (bottom-most section), retrieve the **OpenID Configuration URL** from the Endpoints tab.
+    ![OIDC auth0 application oidc url](../../../images/sso/auth0-oidc/application-urls.png)
+
+    Keep these values handy as we will need them in the next steps.
+
+
+    3.1. Back in Infisical, in the Organization settings > Security > OIDC, click **Connect**.
+    ![OIDC auth0 manage org Infisical](../../../images/sso/auth0-oidc/org-oidc-overview.png)
+
+    3.2. For configuration type, select **Discovery URL**. Then, set **Discovery Document URL**, **Client ID**, and **Client Secret** from steps 2.1 and 2.2.
+    ![OIDC auth0 paste values into Infisical](../../../images/sso/auth0-oidc/org-update-oidc.png)
+
+    Once you've done that, press **Update** to complete the required configuration.
+
+
+    Enabling OIDC allows members in your organization to log into Infisical via Auth0.
+
+    ![OIDC auth0 enable OIDC](../../../images/sso/auth0-oidc/enable-oidc.png)
+
+
+    Enforcing OIDC SSO ensures that members in your organization can only access Infisical
+    by logging into the organization via Auth0.
+
+    To enforce OIDC SSO, you're required to test out the OpenID connection by successfully authenticating at least one Auth0 user with Infisical.
+    Once you've completed this requirement, you can toggle the **Enforce OIDC SSO** button to enforce OIDC SSO.
+
+
+    We recommend ensuring that your account is provisioned using the application in Auth0
+    prior to enforcing OIDC SSO to prevent any unintended issues.
+
+
+
+  If you are only using one organization on your Infisical instance, you can configure a default organization in the [Server Admin Console](../admin-panel/server-admin#default-organization) to expedite OIDC login.
+
+
+  If you're configuring OIDC SSO on a self-hosted instance of Infisical, make
+  sure to set the `AUTH_SECRET` and `SITE_URL` environment variables for it to
+  work:
+ - `AUTH_SECRET`: A secret key used for signing and verifying JWT. This + can be a random 32-byte base64 string generated with `openssl rand -base64 + 32`. +
+  - `SITE_URL`: The absolute URL of your self-hosted instance of Infisical including the protocol (e.g. https://app.infisical.com)
+
diff --git a/docs/documentation/platform/sso/azure.mdx b/docs/documentation/platform/sso/azure.mdx
index cbd5a7d0e6..185d5fcfb0 100644
--- a/docs/documentation/platform/sso/azure.mdx
+++ b/docs/documentation/platform/sso/azure.mdx
@@ -49,8 +49,8 @@ description: "Learn how to configure Microsoft Entra ID for Infisical SSO."
 Back in the **Set up Single Sign-On with SAML** screen, select **Edit** in the **Attributes & Claims** section and configure the following map:
 
 - `email -> user.userprincipalname`
-  - `firstName -> user.firstName`
-  - `lastName -> user.lastName`
+  - `firstName -> user.givenname`
+  - `lastName -> user.surname`
 
 ![Azure SAML edit attributes and claims](../../../images/sso/azure/edit-attributes-claims.png)
 
@@ -62,7 +62,7 @@ description: "Learn how to configure Microsoft Entra ID for Infisical SSO."
 
 ![Azure SAML edit certificate signing option](../../../images/sso/azure/edit-saml-certificate-2.png)
 
-
+
 In the **Set up Single Sign-On with SAML** screen, copy the **Login URL** and **SAML Certificate** to use when finishing configuring Azure SAML in Infisical.
 
 ![Azure SAML identity provider values 1](../../../images/sso/azure/idp-values.png)
 
@@ -109,10 +109,22 @@ description: "Learn how to configure Microsoft Entra ID for Infisical SSO."
 
+
+  If you are only using one organization on your Infisical instance, you can configure a default organization in the [Server Admin Console](../admin-panel/server-admin#default-organization) to expedite SAML login.
+
+
-  If you're configuring SAML SSO on a self-hosted instance of Infisical, make sure to
-  set the `AUTH_SECRET` and `SITE_URL` environment variable for it to work:
-
-  - `AUTH_SECRET`: A secret key used for signing and verifying JWT. This can be a random 32-byte base64 string generated with `openssl rand -base64 32`.
-  - `SITE_URL`: The URL of your self-hosted instance of Infisical - should be an absolute URL including the protocol (e.g. https://app.infisical.com)
-
\ No newline at end of file
+  If you're configuring SAML SSO on a self-hosted instance of Infisical, make
+  sure to set the `AUTH_SECRET` and `SITE_URL` environment variables for it to
+  work:
+ - `AUTH_SECRET`: A secret key used for signing and verifying JWT. This + can be a random 32-byte base64 string generated with `openssl rand -base64 + 32`. +
+  - `SITE_URL`: The absolute URL of your self-hosted instance of Infisical including the protocol (e.g. https://app.infisical.com)
+
+
+  If you'd like to require Multi-factor Authentication for your team members to access Infisical, check out our [Entra ID / Azure AD MFA](../mfa#entra-id-azure-ad-mfa) guide.
+
diff --git a/docs/documentation/platform/sso/general-oidc.mdx b/docs/documentation/platform/sso/general-oidc.mdx
new file mode 100644
index 0000000000..7e3a76ff00
--- /dev/null
+++ b/docs/documentation/platform/sso/general-oidc.mdx
@@ -0,0 +1,88 @@
+---
+title: "General OIDC"
+description: "Learn how to configure OIDC for Infisical SSO with any OIDC-compliant identity provider"
+---
+
+
+  OIDC SSO is a paid feature. If you're using Infisical Cloud, then it is
+  available under the **Pro Tier**. If you're self-hosting Infisical, then you
+  should contact sales@infisical.com to purchase an enterprise license to use
+  it.
+
+
+You can configure your organization in Infisical to have members authenticate with the platform through identity providers via [OpenID Connect](https://openid.net/specs/openid-connect-core-1_0.html).
+
+Prerequisites:
+
+- The identity provider (Okta, Google, Azure AD, etc.) should support OIDC.
+- Users in the IdP should have a configured `email` and `given_name`.
+
+
+    1.1. Register your application with the IdP to obtain a **Client ID** and **Client Secret**. These credentials are used by Infisical to authenticate with your IdP.
+
+    1.2. Configure **Redirect URL** to be `https://app.infisical.com/api/v1/sso/oidc/callback`. If you're self-hosting Infisical, replace the domain with your own.
+
+    1.3. Configure the scopes needed by Infisical (email, profile, openid) and ensure that they are mapped to the ID token claims.
+
+    1.4. Access the IdP’s OIDC discovery document (usually located at `https:///.well-known/openid-configuration`). This document contains important endpoints such as authorization, token, userinfo, and keys.
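+
+    Since Infisical will read your IdP's endpoints from this document, it can help to sanity-check it before continuing. Here's a minimal sketch using `curl` and `jq`; the issuer URL is a hypothetical placeholder for your IdP's domain.
+
+    ```bash
+    # Fetch the OIDC discovery document and list the endpoints Infisical relies on.
+    # https://idp.example.com is an illustrative issuer URL, not a real IdP.
+    curl -s https://idp.example.com/.well-known/openid-configuration \
+      | jq '{issuer, authorization_endpoint, token_endpoint, userinfo_endpoint, jwks_uri}'
+    ```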
+
+    2.1. Back in Infisical, in the Organization settings > Security > OIDC, click Connect.
+    ![OIDC general manage org Infisical](../../../images/sso/general-oidc/org-oidc-manage.png)
+
+    2.2. You can configure OIDC either through the Discovery URL (Recommended) or by inputting custom endpoints.
+
+    To configure OIDC via Discovery URL, set the **Configuration Type** field to **Discovery URL** and fill out the **Discovery Document URL** field.
+
+
+      Note that the Discovery Document URL typically takes the form: `https:///.well-known/openid-configuration`.
+
+
+    ![OIDC general discovery config](../../../images/sso/general-oidc/discovery-oidc-form.png)
+
+    To configure OIDC via the custom endpoints, set the **Configuration Type** field to **Custom** and input the required endpoint fields.
+    ![OIDC general custom config](../../../images/sso/general-oidc/custom-oidc-form.png)
+
+    2.3. Optionally, you can define a whitelist of allowed email domains.
+
+    Finally, fill out the **Client ID** and **Client Secret** fields and press **Update** to complete the required configuration.
+
+
+    Enabling OIDC SSO allows members in your organization to log into Infisical via the configured identity provider.
+
+    ![OIDC general enable OIDC](../../../images/sso/general-oidc/org-oidc-enable.png)
+
+
+    Enforcing OIDC SSO ensures that members in your organization can only access Infisical
+    by logging into the organization via the identity provider.
+
+    To enforce OIDC SSO, you're required to test out the OpenID connection by successfully authenticating at least one IdP user with Infisical.
+    Once you've completed this requirement, you can toggle the **Enforce OIDC SSO** button to enforce OIDC SSO.
+
+
+      We recommend ensuring that your account is provisioned using the identity provider prior to enforcing OIDC SSO to prevent any unintended issues.
+
+
+
+  If you are only using one organization on your Infisical instance, you can configure a default organization in the [Server Admin Console](../admin-panel/server-admin#default-organization) to expedite OIDC login.
+
+
+  If you're configuring OIDC SSO on a self-hosted instance of Infisical, make
+  sure to set the `AUTH_SECRET` and `SITE_URL` environment variables for it to
+  work:
+ - `AUTH_SECRET`: A secret key used for signing and verifying JWT. This + can be a random 32-byte base64 string generated with `openssl rand -base64 + 32`. +
+ - `SITE_URL`: The absolute URL of your self-hosted instance of Infisical including the protocol (e.g. https://app.infisical.com) + diff --git a/docs/documentation/platform/sso/google-saml.mdx b/docs/documentation/platform/sso/google-saml.mdx index 1897a651a7..4f31bffb1f 100644 --- a/docs/documentation/platform/sso/google-saml.mdx +++ b/docs/documentation/platform/sso/google-saml.mdx @@ -4,10 +4,10 @@ description: "Learn how to configure Google SAML for Infisical SSO." --- - Google SAML SSO feature is a paid feature. - - If you're using Infisical Cloud, then it is available under the **Pro Tier**. If you're self-hosting Infisical, - then you should contact sales@infisical.com to purchase an enterprise license to use it. + Google SAML SSO feature is a paid feature. If you're using Infisical Cloud, + then it is available under the **Pro Tier**. If you're self-hosting Infisical, + then you should contact sales@infisical.com to purchase an enterprise license + to use it. @@ -15,8 +15,9 @@ description: "Learn how to configure Google SAML for Infisical SSO." In Infisical, head to your Organization Settings > Authentication > SAML SSO Configuration and select **Set up SAML SSO**. Next, note the **ACS URL** and **SP Entity ID** to use when configuring the Google SAML application. - + ![Google SAML initial configuration](../../../images/sso/google-saml/init-config.png) + 2.1. In your [Google Admin console](https://support.google.com/a/answer/182076), head to Menu > Apps > Web and mobile apps and @@ -32,7 +33,7 @@ description: "Learn how to configure Google SAML for Infisical SSO." ![Google SAML custom app details](../../../images/sso/google-saml/custom-saml-app-config.png) - 2.4. Back in Infisical, set **SSO URL**, **IdP Entity ID**, and **Certificate** to the corresponding items from step 2.3. + 2.4. Back in Infisical, set **SSO URL** and **Certificate** to the corresponding items from step 2.3. ![Google SAML Infisical config](../../../images/sso/google-saml/infisical-config.png) @@ -41,7 +42,7 @@ description: "Learn how to configure Google SAML for Infisical SSO." Also, check the **Signed response** checkbox. ![Google SAML app config 2](../../../images/sso/google-saml/custom-saml-app-config-2.png) - + 2.6. In the **Attribute mapping** tab, configure the following map: - **First name** -> **firstName** @@ -49,7 +50,7 @@ description: "Learn how to configure Google SAML for Infisical SSO." - **Primary email** -> **email** ![Google SAML attribute mapping](../../../images/sso/google-saml/attribute-mapping.png) - + Click **Finish**. @@ -57,11 +58,11 @@ description: "Learn how to configure Google SAML for Infisical SSO." and press on **User access**. ![Google SAML user access](../../../images/sso/google-saml/user-access.png) - + To assign everyone in your organization to the application, click **On for everyone** or **Off for everyone** and then click **Save**. - + You can also assign an organizational unit or set of users to an application; you can learn more about that [here](https://support.google.com/a/answer/6087519?hl=en#add_custom_saml&turn_on&verify_sso&&zippy=%2Cstep-add-the-custom-saml-app%2Cstep-turn-on-your-saml-app%2Cstep-verify-that-sso-is-working-with-your-custom-app). - + ![Google SAML user access assignment](../../../images/sso/google-saml/user-access-assign.png) @@ -75,21 +76,31 @@ description: "Learn how to configure Google SAML for Infisical SSO." 
 To enforce SAML SSO, you're required to test out the SAML connection by successfully authenticating at least one Google user with Infisical;
 once you've completed this requirement, you can toggle the **Enforce SAML SSO** button to enforce SAML SSO.
-
+
 We recommend ensuring that your account is provisioned using the application in Google
 prior to enforcing SAML SSO to prevent any unintended issues.
 
+
+  If you are only using one organization on your Infisical instance, you can configure a default organization in the [Server Admin Console](../admin-panel/server-admin#default-organization) to expedite SAML login.
+
+
-  If you're configuring SAML SSO on a self-hosted instance of Infisical, make sure to
-  set the `AUTH_SECRET` and `SITE_URL` environment variable for it to work:
-
-  - `AUTH_SECRET`: A secret key used for signing and verifying JWT. This can be a random 32-byte base64 string generated with `openssl rand -base64 32`.
-  - `SITE_URL`: The URL of your self-hosted instance of Infisical - should be an absolute URL including the protocol (e.g. https://app.infisical.com)
+  If you're configuring SAML SSO on a self-hosted instance of Infisical, make
+  sure to set the `AUTH_SECRET` and `SITE_URL` environment variables for it to
+  work:
+ - `AUTH_SECRET`: A secret key used for signing and verifying JWT. This + can be a random 32-byte base64 string generated with `openssl rand -base64 + 32`. +
+ - `SITE_URL`: The absolute URL of your self-hosted instance of Infisical including the protocol (e.g. https://app.infisical.com) References: -- Google's guide to [set up your own custom SAML app](https://support.google.com/a/answer/6087519?hl=en#add_custom_saml&turn_on&verify_sso&&zippy=%2Cstep-add-the-custom-saml-app%2Cstep-turn-on-your-saml-app%2Cstep-verify-that-sso-is-working-with-your-custom-app). \ No newline at end of file + +- Google's guide to [set up your own custom SAML app](https://support.google.com/a/answer/6087519?hl=en#add_custom_saml&turn_on&verify_sso&&zippy=%2Cstep-add-the-custom-saml-app%2Cstep-turn-on-your-saml-app%2Cstep-verify-that-sso-is-working-with-your-custom-app). diff --git a/docs/documentation/platform/sso/jumpcloud.mdx b/docs/documentation/platform/sso/jumpcloud.mdx index 781f5224a7..ce89b8e0dd 100644 --- a/docs/documentation/platform/sso/jumpcloud.mdx +++ b/docs/documentation/platform/sso/jumpcloud.mdx @@ -89,10 +89,18 @@ description: "Learn how to configure JumpCloud SAML for Infisical SSO." + + If you are only using one organization on your Infisical instance, you can configure a default organization in the [Server Admin Console](../admin-panel/server-admin#default-organization) to expedite SAML login. + + - If you're configuring SAML SSO on a self-hosted instance of Infisical, make sure to - set the `AUTH_SECRET` and `SITE_URL` environment variable for it to work: - - - `AUTH_SECRET`: A secret key used for signing and verifying JWT. This can be a random 32-byte base64 string generated with `openssl rand -base64 32`. - - `SITE_URL`: The URL of your self-hosted instance of Infisical - should be an absolute URL including the protocol (e.g. https://app.infisical.com) + If you're configuring SAML SSO on a self-hosted instance of Infisical, make + sure to set the `AUTH_SECRET` and `SITE_URL` environment variable for it to + work: +
+ - `AUTH_SECRET`: A secret key used for signing and verifying JWT. This + can be a random 32-byte base64 string generated with `openssl rand -base64 + 32`. +
+  - `SITE_URL`: The absolute URL of your self-hosted instance of Infisical including the protocol (e.g. https://app.infisical.com)
+
diff --git a/docs/documentation/platform/sso/keycloak-oidc.mdx b/docs/documentation/platform/sso/keycloak-oidc.mdx
new file mode 100644
index 0000000000..cb774a014b
--- /dev/null
+++ b/docs/documentation/platform/sso/keycloak-oidc.mdx
@@ -0,0 +1,112 @@
+---
+title: "Keycloak OIDC"
+description: "Learn how to configure Keycloak OIDC for Infisical SSO."
+---
+
+
+  Keycloak OIDC SSO is a paid feature. If you're using Infisical Cloud, then it
+  is available under the **Pro Tier**. If you're self-hosting Infisical, then
+  you should contact sales@infisical.com to purchase an enterprise license to
+  use it.
+
+
+
+    1.1. In your realm, navigate to the **Clients** tab and click **Create client** to create a new client application.
+
+    ![OIDC keycloak list of clients](../../../images/sso/keycloak-oidc/clients-list.png)
+
+
+      You don’t typically need to make a realm dedicated to Infisical. We recommend adding Infisical as a client to your primary realm.
+
+
+    1.2. In the General Settings step, set **Client type** to **OpenID Connect**, the **Client ID** field to an appropriate identifier, and the **Name** field to a friendly name like **Infisical**.
+
+    ![OIDC keycloak create client general settings](../../../images/sso/keycloak-oidc/create-client-general-settings.png)
+
+    1.3. Next, in the Capability Config step, ensure that **Client Authentication** is set to On and that **Standard flow** is enabled in the Authentication flow section.
+
+    ![OIDC keycloak create client capability config settings](../../../images/sso/keycloak-oidc/create-client-capability.png)
+
+    1.4. In the Login Settings step, set the following values:
+    - Root URL: `https://app.infisical.com`.
+    - Home URL: `https://app.infisical.com`.
+    - Valid Redirect URIs: `https://app.infisical.com/api/v1/sso/oidc/callback`.
+    - Web origins: `https://app.infisical.com`.
+
+    ![OIDC keycloak create client login settings](../../../images/sso/keycloak-oidc/create-client-login-settings.png)
+
+    If you’re self-hosting Infisical, then you will want to replace https://app.infisical.com (base URL) with your own domain.
+
+    1.5. Next, navigate to the **Client scopes** tab and select the client's dedicated scope.
+
+    ![OIDC keycloak client scopes list](../../../images/sso/keycloak-oidc/client-scope-list.png)
+
+    1.6. Next, click **Add predefined mapper**.
+
+    ![OIDC keycloak client mappers empty](../../../images/sso/keycloak-oidc/client-scope-mapper-menu.png)
+
+    1.7. Select the **email**, **given name**, and **family name** attributes and click **Add**.
+
+    ![OIDC keycloak client mappers predefined 1](../../../images/sso/keycloak-oidc/scope-predefined-mapper-1.png)
+    ![OIDC keycloak client mappers predefined 2](../../../images/sso/keycloak-oidc/scope-predefined-mapper-2.png)
+
+    Once you've completed the above steps, the list of mappers should look like the following:
+    ![OIDC keycloak client mappers completed](../../../images/sso/keycloak-oidc/client-scope-complete-overview.png)
+
+
+    2.1. Back in Keycloak, navigate to Configure > Realm settings > General tab > Endpoints > OpenID Endpoint Configuration and copy the opened URL. This is what is referred to as the Discovery Document URL, and it takes the form: `https://keycloak-mysite.com/realms/myrealm/.well-known/openid-configuration`.
+    ![OIDC keycloak realm OIDC metadata](../../../images/sso/keycloak-oidc/realm-setting-oidc-config.png)
+
+    2.2. From the Clients page, navigate to the Credentials tab and copy the **Client Secret** to be used in the next steps.
+    ![OIDC keycloak realm OIDC secret](../../../images/sso/keycloak-oidc/client-secret.png)
+
+
+    3.1. Back in Infisical, in the Organization settings > Security > OIDC, click Connect.
+    ![OIDC keycloak manage org Infisical](../../../images/sso/keycloak-oidc/manage-org-oidc.png)
+
+    3.2. For configuration type, select Discovery URL. Then, set the appropriate values for **Discovery Document URL**, **Client ID**, and **Client Secret**.
+    ![OIDC keycloak paste values into Infisical](../../../images/sso/keycloak-oidc/create-oidc.png)
+
+    Once you've done that, press **Update** to complete the required configuration.
+
+
+    Enabling OIDC SSO allows members in your organization to log into Infisical via Keycloak.
+
+    ![OIDC keycloak enable OIDC](../../../images/sso/keycloak-oidc/enable-oidc.png)
+
+
+    Enforcing OIDC SSO ensures that members in your organization can only access Infisical
+    by logging into the organization via Keycloak.
+
+    To enforce OIDC SSO, you're required to test out the OpenID connection by successfully authenticating at least one Keycloak user with Infisical.
+    Once you've completed this requirement, you can toggle the **Enforce OIDC SSO** button to enforce OIDC SSO.
+
+
+      We recommend ensuring that your account is provisioned using the application in Keycloak
+      prior to enforcing OIDC SSO to prevent any unintended issues.
+
+
+
+  If you are only using one organization on your Infisical instance, you can configure a default organization in the [Server Admin Console](../admin-panel/server-admin#default-organization) to expedite OIDC login.
+
+
+  If you're configuring OIDC SSO on a self-hosted instance of Infisical, make
+  sure to set the `AUTH_SECRET` and `SITE_URL` environment variables for it to
+  work:
+ - `AUTH_SECRET`: A secret key used for signing and verifying JWT. This + can be a random 32-byte base64 string generated with `openssl rand -base64 + 32`. +
+ - `SITE_URL`: The absolute URL of your self-hosted instance of Infisical including the protocol (e.g. https://app.infisical.com) + diff --git a/docs/documentation/platform/sso/keycloak-saml.mdx b/docs/documentation/platform/sso/keycloak-saml.mdx index 9817397117..53f47f1ae0 100644 --- a/docs/documentation/platform/sso/keycloak-saml.mdx +++ b/docs/documentation/platform/sso/keycloak-saml.mdx @@ -130,10 +130,18 @@ description: "Learn how to configure Keycloak SAML for Infisical SSO." + + If you are only using one organization on your Infisical instance, you can configure a default organization in the [Server Admin Console](../admin-panel/server-admin#default-organization) to expedite SAML login. + + - If you're configuring SAML SSO on a self-hosted instance of Infisical, make sure to - set the `AUTH_SECRET` and `SITE_URL` environment variable for it to work: - - - `AUTH_SECRET`: A secret key used for signing and verifying JWT. This can be a random 32-byte base64 string generated with `openssl rand -base64 32`. - - `SITE_URL`: The URL of your self-hosted instance of Infisical - should be an absolute URL including the protocol (e.g. https://app.infisical.com) + If you're configuring SAML SSO on a self-hosted instance of Infisical, make + sure to set the `AUTH_SECRET` and `SITE_URL` environment variable for it to + work: +
+ - `AUTH_SECRET`: A secret key used for signing and verifying JWT. This + can be a random 32-byte base64 string generated with `openssl rand -base64 + 32`. +
+ - `SITE_URL`: The absolute URL of your self-hosted instance of Infisical including the protocol (e.g. https://app.infisical.com) \ No newline at end of file diff --git a/docs/documentation/platform/sso/okta.mdx b/docs/documentation/platform/sso/okta.mdx index b0ac046d03..9a1d4aa2f6 100644 --- a/docs/documentation/platform/sso/okta.mdx +++ b/docs/documentation/platform/sso/okta.mdx @@ -98,11 +98,18 @@ description: "Learn how to configure Okta SAML 2.0 for Infisical SSO." + + If you are only using one organization on your Infisical instance, you can configure a default organization in the [Server Admin Console](../admin-panel/server-admin#default-organization) to expedite SAML login. + + - If you're configuring SAML SSO on a self-hosted instance of Infisical, make - sure to set the `AUTH_SECRET` and `SITE_URL` environment variable for it to - work: - `AUTH_SECRET`: A secret key used for signing and verifying JWT. This - can be a random 32-byte base64 string generated with `openssl rand -base64 - 32`. - `SITE_URL`: The URL of your self-hosted instance of Infisical - should - be an absolute URL including the protocol (e.g. https://app.infisical.com) + If you're configuring SAML SSO on a self-hosted instance of Infisical, make + sure to set the `AUTH_SECRET` and `SITE_URL` environment variable for it to + work: +
+ - `AUTH_SECRET`: A secret key used for signing and verifying JWT. This + can be a random 32-byte base64 string generated with `openssl rand -base64 + 32`. +
+ - `SITE_URL`: The absolute URL of your self-hosted instance of Infisical including the protocol (e.g. https://app.infisical.com) diff --git a/docs/documentation/platform/sso/overview.mdx b/docs/documentation/platform/sso/overview.mdx index 9ab0acc3ae..227a7502f3 100644 --- a/docs/documentation/platform/sso/overview.mdx +++ b/docs/documentation/platform/sso/overview.mdx @@ -7,16 +7,14 @@ description: "Learn how to log in to Infisical via SSO protocols." Infisical offers Google SSO and GitHub SSO for free across both Infisical Cloud and Infisical Self-hosted. Infisical also offers SAML SSO authentication - but as paid features that can be unlocked on Infisical Cloud's **Pro** tier or - via enterprise license on self-hosted instances of Infisical. On this front, - we support industry-leading providers including Okta, Azure AD, and JumpCloud; - with any questions, please reach out to team@infisical.com. + and OpenID Connect (OIDC) but as paid features that can be unlocked on + Infisical Cloud's **Pro** tier or via enterprise license on self-hosted + instances of Infisical. On this front, we support industry-leading providers + including Okta, Azure AD, and JumpCloud; with any questions, please reach out + to team@infisical.com. -You can configure your organization in Infisical to have members authenticate with the platform via protocols like [SAML 2.0](https://en.wikipedia.org/wiki/SAML_2.0). - -To note, Infisical's SSO implementation decouples the **authentication** and **decryption** steps – which implies that no -Identity Provider can have access to the decryption key needed to decrypt your secrets (this also implies that Infisical requires entering the user's Master Password on top of authenticating with SSO). +You can configure your organization in Infisical to have members authenticate with the platform via protocols like [SAML 2.0](https://en.wikipedia.org/wiki/SAML_2.0) or [OpenID Connect](https://openid.net/specs/openid-connect-core-1_0.html). ## Identity providers @@ -30,6 +28,9 @@ Infisical supports these and many other identity providers: - [JumpCloud SAML](/documentation/platform/sso/jumpcloud) - [Keycloak SAML](/documentation/platform/sso/keycloak-saml) - [Google SAML](/documentation/platform/sso/google-saml) +- [Keycloak OIDC](/documentation/platform/sso/keycloak-oidc) +- [Auth0 OIDC](/documentation/platform/sso/auth0-oidc) +- [General OIDC](/documentation/platform/sso/general-oidc) If your required identity provider is not shown in the list above, please reach out to [team@infisical.com](mailto:team@infisical.com) for assistance. @@ -44,7 +45,7 @@ If your required identity provider is not shown in the list above, please reach verification step upon their first login. If you're running a self-hosted instance of Infisical and would like it to trust emails from external identity providers, - you can configure this behavior in the admin panel. + you can configure this behavior in the Server Admin Console. diff --git a/docs/documentation/platform/token.mdx b/docs/documentation/platform/token.mdx index 13bd4cd191..af31f5a4a0 100644 --- a/docs/documentation/platform/token.mdx +++ b/docs/documentation/platform/token.mdx @@ -3,13 +3,6 @@ title: "Service Token" description: "Infisical service tokens allow users to programmatically interact with Infisical." --- - - Service tokens are being deprecated in favor of [machine identities](/documentation/platform/identities/machine-identities). 
-
-They will be removed in the future in accordance with the deprecation notice and timeline stated [here](https://infisical.com/blog/deprecating-api-keys).
-
-
 Service tokens are authentication credentials that services can use to access designated endpoints in the Infisical API to manage project resources like secrets.
 Each service token can be provisioned scoped access to select environment(s) and path(s) within them.
diff --git a/docs/documentation/platform/webhooks.mdx b/docs/documentation/platform/webhooks.mdx
index 22277dd8c7..dc3a71b27f 100644
--- a/docs/documentation/platform/webhooks.mdx
+++ b/docs/documentation/platform/webhooks.mdx
@@ -9,7 +9,9 @@ Webhooks can be used to trigger changes to your integrations when secrets are mo
 
 To create a webhook for a particular project, go to `Project Settings > Webhooks`.
 
-When creating a webhook, you can specify an environment and folder path (using glob patterns) to trigger only specific integrations.
+Infisical supports two webhook types: General and Slack. If you need to integrate with Slack, use the Slack type with an [Incoming Webhook](https://api.slack.com/messaging/webhooks). When creating a webhook, you can specify an environment and folder path to trigger only specific integrations.
+
+![webhook-create](../../images/webhook-create.png)
 
 ## Secret Key Verification
 
@@ -27,7 +29,7 @@ If the signature in the header matches the signature that you generated, then yo
 {
   "event": "secret.modified",
   "project": {
-    "workspaceId":"the workspace id",
+    "workspaceId": "the workspace id",
     "environment": "project environment",
     "secretPath": "project folder path"
   },
diff --git a/docs/documentation/platform/workflow-integrations/slack-integration.mdx b/docs/documentation/platform/workflow-integrations/slack-integration.mdx
new file mode 100644
index 0000000000..92b3feecab
--- /dev/null
+++ b/docs/documentation/platform/workflow-integrations/slack-integration.mdx
@@ -0,0 +1,164 @@
+---
+title: "Slack integration"
+description: "Learn how to set up Slack integration"
+---
+
+This guide will provide step-by-step instructions on how to configure Slack integration for your Infisical projects.
+
+## Setting up Slack integration in your projects
+
+
+
+  ### Create Slack workflow integration
+
+
+    In order to use Slack integration in your projects, you will first have to
+    configure a Slack workflow integration in your organization.
+    ![org-slack-overview](/images/platform/workflow-integrations/slack-integration/org-slack-integration-overview.png)
+
+    Press "Add" and select "Slack" as the platform.
+    ![org-slack-initial-add](/images/platform/workflow-integrations/slack-integration/org-slack-integration-initial-add.png)
+
+    Give your Slack integration a descriptive alias. You will use this to select the Slack integration for your project.
+    ![org-slack-add-form](/images/platform/workflow-integrations/slack-integration/org-slack-integration-add-form.png)
+
+    Press **Connect Slack**. This opens up the Slack app installation flow. Select the Slack workspace you want to install the custom Slack app to and press **Allow**.
+    ![org-slack-authenticate](/images/platform/workflow-integrations/slack-integration/cloud-org-slack-integration-authenticate.png)
+
+    This completes the workflow integration creation flow. The projects in your organization can now use this Slack integration to send real-time updates to your Slack workspace.
+    ![org-slack-workspace](/images/platform/workflow-integrations/slack-integration/cloud-org-slack-integration-workspace.png)
+    ![org-slack-created](/images/platform/workflow-integrations/slack-integration/org-slack-integration-created.png)
+
+
+  ### Configure project to use Slack workflow integration
+
+
+    ![project-slack-overview](/images/platform/workflow-integrations/slack-integration/project-slack-integration-overview.png)
+
+    Your project will send notifications to the connected Slack workspace of the
+    selected Slack integration when the configured events are triggered.
+    ![project-slack-select](/images/platform/workflow-integrations/slack-integration/project-slack-integration-select.png)
+
+    ![project-slack-select](/images/platform/workflow-integrations/slack-integration/project-slack-integration-config.png)
+
+    To enable notifications in private Slack channels, you need to invite the Infisical Slack bot to join those channels.
+
+    You now have a working native integration with Slack!
+
+
+
+
+  ### Configure admin settings
+  Note that this step only has to be done once for the entire instance.
+
+
+    Before anything else, you need to set up the Slack app to be used by
+    your Infisical instance. Because you're self-hosting, you will need to
+    create this Slack application as demonstrated in the preceding step.
+    ![admin-settings-slack-overview](/images/platform/workflow-integrations/slack-integration/admin-slack-integration-overview.png)
+
+    Click the "Create Slack app" button. This will open up a new window with the
+    custom app creation flow on Slack.
+    ![admin-slack-create-app](/images/platform/workflow-integrations/slack-integration/admin-slack-integration-create-app.png)
+
+    Select the Slack workspace you want to integrate with Infisical.
+
+    ![admin-slack-app-workspace-select](/images/platform/workflow-integrations/slack-integration/admin-slack-integration-app-workspace-select.png)
+
+    The configuration values of your custom Slack app will be pre-filled for you. You can view or edit the app manifest by clicking **Edit Configurations**.
+    ![admin-slack-app-summary](/images/platform/workflow-integrations/slack-integration/admin-slack-integration-app-summary.png)
+
+    Once everything's confirmed, press Create.
+
+    Copy the Client ID and Client Secret values from your newly created custom Slack app and add them to Infisical.
+    ![admin-slack-app-credentials](/images/platform/workflow-integrations/slack-integration/admin-slack-integration-app-credentials.png)
+    ![admin-slack-app-credentials-form](/images/platform/workflow-integrations/slack-integration/admin-slack-integration-app-credential-form.png)
+    Complete the admin setup by pressing Save.
+
+
+  ### Create Slack workflow integration
+
+
+    In order to use Slack integration in your projects, you will first have to
+    configure a Slack workflow integration in your organization.
+    ![org-slack-overview](/images/platform/workflow-integrations/slack-integration/org-slack-integration-overview.png)
+
+    Press "Add" and select "Slack" as the platform.
+    ![org-slack-initial-add](/images/platform/workflow-integrations/slack-integration/org-slack-integration-initial-add.png)
+
+    Give your Slack integration a descriptive alias. You will use this to select the Slack integration for your project.
+    ![org-slack-add-form](/images/platform/workflow-integrations/slack-integration/org-slack-integration-add-form.png)
+
+    Press **Connect Slack**. This opens up the Slack app installation flow.
Select the Slack workspace you want to install the custom Slack app to and press **Allow**. + ![org-slack-authenticate](/images/platform/workflow-integrations/slack-integration/org-slack-integration-authenticate.png) + + Your Slack bot will then be added to your selected Slack workspace. This completes the workflow integration creation flow. Your projects in the organization can now use this Slack integration to send real-time updates to your Slack workspace. + ![org-slack-workspace](/images/platform/workflow-integrations/slack-integration/org-slack-integration-workspace.png) + ![org-slack-created](/images/platform/workflow-integrations/slack-integration/org-slack-integration-created.png) + + + + + + ### Configure project to use Slack workflow integration + + + + ![project-slack-overview](/images/platform/workflow-integrations/slack-integration/project-slack-integration-overview.png) + + + Your project will send notifications to the connected Slack workspace of the + selected Slack integration when the configured events are triggered. + ![project-slack-select](/images/platform/workflow-integrations/slack-integration/project-slack-integration-select.png) + + + ![project-slack-select](/images/platform/workflow-integrations/slack-integration/project-slack-integration-config.png) + + To enable notifications in private Slack channels, you need to invite your Slack bot to join those channels. + + You now have a working native integration with Slack! + + + + + + + + +## Using the Slack integration in your private channels + + + + ![private slack setup + menu](/images/platform/workflow-integrations/slack-integration/private-slack-setup-menu.png) + + + ![private slack setup + add](/images/platform/workflow-integrations/slack-integration/private-slack-setup-add.png) + + + ![private slack setup + form](/images/platform/workflow-integrations/slack-integration/private-slack-setup-form.png) + You can now view the private channels in the Slack channel selection fields! 
diff --git a/docs/images/guides/import-envkey/copy-encryption-key.png b/docs/images/guides/import-envkey/copy-encryption-key.png new file mode 100644 index 0000000000..df0c7b4599 Binary files /dev/null and b/docs/images/guides/import-envkey/copy-encryption-key.png differ diff --git a/docs/images/guides/import-envkey/envkey-dashboard.png b/docs/images/guides/import-envkey/envkey-dashboard.png new file mode 100644 index 0000000000..0170e00fbe Binary files /dev/null and b/docs/images/guides/import-envkey/envkey-dashboard.png differ diff --git a/docs/images/guides/import-envkey/envkey-export.png b/docs/images/guides/import-envkey/envkey-export.png new file mode 100644 index 0000000000..68d55ed148 Binary files /dev/null and b/docs/images/guides/import-envkey/envkey-export.png differ diff --git a/docs/images/guides/import-envkey/infisical-import-dashboard.png b/docs/images/guides/import-envkey/infisical-import-dashboard.png new file mode 100644 index 0000000000..ab197f1263 Binary files /dev/null and b/docs/images/guides/import-envkey/infisical-import-dashboard.png differ diff --git a/docs/images/guides/import-envkey/infisical-import-envkey.png b/docs/images/guides/import-envkey/infisical-import-envkey.png new file mode 100644 index 0000000000..c504c98243 Binary files /dev/null and b/docs/images/guides/import-envkey/infisical-import-envkey.png differ diff --git a/docs/images/integrations/aws/integration-aws-iam-assume-arn.png b/docs/images/integrations/aws/integration-aws-iam-assume-arn.png new file mode 100644 index 0000000000..1c36fc1517 Binary files /dev/null and b/docs/images/integrations/aws/integration-aws-iam-assume-arn.png differ diff --git a/docs/images/integrations/aws/integration-aws-iam-assume-permission.png b/docs/images/integrations/aws/integration-aws-iam-assume-permission.png new file mode 100644 index 0000000000..0fb8d493db Binary files /dev/null and b/docs/images/integrations/aws/integration-aws-iam-assume-permission.png differ diff --git a/docs/images/integrations/aws/integration-aws-iam-assume-role.png b/docs/images/integrations/aws/integration-aws-iam-assume-role.png new file mode 100644 index 0000000000..29094b060c Binary files /dev/null and b/docs/images/integrations/aws/integration-aws-iam-assume-role.png differ diff --git a/docs/images/integrations/aws/integration-aws-iam-assume-select.png b/docs/images/integrations/aws/integration-aws-iam-assume-select.png new file mode 100644 index 0000000000..63c5d2b01c Binary files /dev/null and b/docs/images/integrations/aws/integration-aws-iam-assume-select.png differ diff --git a/docs/images/integrations/aws/integration-aws-parameter-store-iam-assume-select.png b/docs/images/integrations/aws/integration-aws-parameter-store-iam-assume-select.png new file mode 100644 index 0000000000..2e070c9a83 Binary files /dev/null and b/docs/images/integrations/aws/integration-aws-parameter-store-iam-assume-select.png differ diff --git a/docs/images/integrations/aws/integrations-aws-parameter-store-auth.png b/docs/images/integrations/aws/integrations-aws-parameter-store-auth.png index 14d54c9a6c..297ba66cdf 100644 Binary files a/docs/images/integrations/aws/integrations-aws-parameter-store-auth.png and b/docs/images/integrations/aws/integrations-aws-parameter-store-auth.png differ diff --git a/docs/images/integrations/aws/integrations-aws-secret-manager-auth.png
b/docs/images/integrations/aws/integrations-aws-secret-manager-auth.png index cc17097e16..ae83fcf9a4 100644 Binary files a/docs/images/integrations/aws/integrations-aws-secret-manager-auth.png and b/docs/images/integrations/aws/integrations-aws-secret-manager-auth.png differ diff --git a/docs/images/integrations/azure-app-configuration/app-api-permissions.png b/docs/images/integrations/azure-app-configuration/app-api-permissions.png new file mode 100644 index 0000000000..174d5ff980 Binary files /dev/null and b/docs/images/integrations/azure-app-configuration/app-api-permissions.png differ diff --git a/docs/images/integrations/azure-app-configuration/app-registration-redirect.png b/docs/images/integrations/azure-app-configuration/app-registration-redirect.png new file mode 100644 index 0000000000..bd1edb3d75 Binary files /dev/null and b/docs/images/integrations/azure-app-configuration/app-registration-redirect.png differ diff --git a/docs/images/integrations/azure-app-configuration/azure-app-config-endpoint.png b/docs/images/integrations/azure-app-configuration/azure-app-config-endpoint.png new file mode 100644 index 0000000000..7f80575781 Binary files /dev/null and b/docs/images/integrations/azure-app-configuration/azure-app-config-endpoint.png differ diff --git a/docs/images/integrations/azure-app-configuration/config-aad.png b/docs/images/integrations/azure-app-configuration/config-aad.png new file mode 100644 index 0000000000..58c20c536b Binary files /dev/null and b/docs/images/integrations/azure-app-configuration/config-aad.png differ diff --git a/docs/images/integrations/azure-app-configuration/config-credentials-1.png b/docs/images/integrations/azure-app-configuration/config-credentials-1.png new file mode 100644 index 0000000000..264c483413 Binary files /dev/null and b/docs/images/integrations/azure-app-configuration/config-credentials-1.png differ diff --git a/docs/images/integrations/azure-app-configuration/config-credentials-2.png b/docs/images/integrations/azure-app-configuration/config-credentials-2.png new file mode 100644 index 0000000000..087e00468d Binary files /dev/null and b/docs/images/integrations/azure-app-configuration/config-credentials-2.png differ diff --git a/docs/images/integrations/azure-app-configuration/config-credentials-3.png b/docs/images/integrations/azure-app-configuration/config-credentials-3.png new file mode 100644 index 0000000000..08481bb842 Binary files /dev/null and b/docs/images/integrations/azure-app-configuration/config-credentials-3.png differ diff --git a/docs/images/integrations/azure-app-configuration/config-new-app.png b/docs/images/integrations/azure-app-configuration/config-new-app.png new file mode 100644 index 0000000000..fa9abd08c3 Binary files /dev/null and b/docs/images/integrations/azure-app-configuration/config-new-app.png differ diff --git a/docs/images/integrations/azure-app-configuration/create-integration-form.png b/docs/images/integrations/azure-app-configuration/create-integration-form.png new file mode 100644 index 0000000000..58a935d8f8 Binary files /dev/null and b/docs/images/integrations/azure-app-configuration/create-integration-form.png differ diff --git a/docs/images/integrations/azure-app-configuration/new-infisical-integration.png b/docs/images/integrations/azure-app-configuration/new-infisical-integration.png new file mode 100644 index 0000000000..f33ee55692 Binary files /dev/null and b/docs/images/integrations/azure-app-configuration/new-infisical-integration.png differ diff --git 
a/docs/images/integrations/azure-devops/create-new-token.png b/docs/images/integrations/azure-devops/create-new-token.png new file mode 100644 index 0000000000..d00ce1000c Binary files /dev/null and b/docs/images/integrations/azure-devops/create-new-token.png differ diff --git a/docs/images/integrations/azure-devops/new-infiscial-integration-step-1.png b/docs/images/integrations/azure-devops/new-infiscial-integration-step-1.png new file mode 100644 index 0000000000..9977f8dc0c Binary files /dev/null and b/docs/images/integrations/azure-devops/new-infiscial-integration-step-1.png differ diff --git a/docs/images/integrations/azure-devops/new-infiscial-integration-step-2.png b/docs/images/integrations/azure-devops/new-infiscial-integration-step-2.png new file mode 100644 index 0000000000..0bd5db3ac1 Binary files /dev/null and b/docs/images/integrations/azure-devops/new-infiscial-integration-step-2.png differ diff --git a/docs/images/integrations/azure-devops/new-token-created.png b/docs/images/integrations/azure-devops/new-token-created.png new file mode 100644 index 0000000000..55502f9d3b Binary files /dev/null and b/docs/images/integrations/azure-devops/new-token-created.png differ diff --git a/docs/images/integrations/azure-devops/overview-page.png b/docs/images/integrations/azure-devops/overview-page.png new file mode 100644 index 0000000000..cf0d85fe89 Binary files /dev/null and b/docs/images/integrations/azure-devops/overview-page.png differ diff --git a/docs/images/integrations/azure-key-vault/integrations-azure-key-vault-create.png b/docs/images/integrations/azure-key-vault/integrations-azure-key-vault-create.png index d8fb24c6e9..13bfe0dfca 100644 Binary files a/docs/images/integrations/azure-key-vault/integrations-azure-key-vault-create.png and b/docs/images/integrations/azure-key-vault/integrations-azure-key-vault-create.png differ diff --git a/docs/images/integrations/azure-key-vault/integrations-azure-key-vault.png b/docs/images/integrations/azure-key-vault/integrations-azure-key-vault.png index 700c3c7edb..0dd158fdcc 100644 Binary files a/docs/images/integrations/azure-key-vault/integrations-azure-key-vault.png and b/docs/images/integrations/azure-key-vault/integrations-azure-key-vault.png differ diff --git a/docs/images/integrations/bitbucket/integrations-bitbucket-configuration.png b/docs/images/integrations/bitbucket/integrations-bitbucket-configuration.png new file mode 100644 index 0000000000..658cefc7bd Binary files /dev/null and b/docs/images/integrations/bitbucket/integrations-bitbucket-configuration.png differ diff --git a/docs/images/integrations/bitbucket/integrations-bitbucket-env.png b/docs/images/integrations/bitbucket/integrations-bitbucket-env.png new file mode 100644 index 0000000000..8c683a1001 Binary files /dev/null and b/docs/images/integrations/bitbucket/integrations-bitbucket-env.png differ diff --git a/docs/images/integrations/cloudflare/integrations-cloudflare-workers-permission.png b/docs/images/integrations/cloudflare/integrations-cloudflare-workers-permission.png new file mode 100644 index 0000000000..88899670a5 Binary files /dev/null and b/docs/images/integrations/cloudflare/integrations-cloudflare-workers-permission.png differ diff --git a/docs/images/integrations/databricks/integrations-databricks-auth.png b/docs/images/integrations/databricks/integrations-databricks-auth.png new file mode 100644 index 0000000000..de17cfd5c4 Binary files /dev/null and b/docs/images/integrations/databricks/integrations-databricks-auth.png differ diff --git 
a/docs/images/integrations/databricks/integrations-databricks-create.png b/docs/images/integrations/databricks/integrations-databricks-create.png new file mode 100644 index 0000000000..058f08369a Binary files /dev/null and b/docs/images/integrations/databricks/integrations-databricks-create.png differ diff --git a/docs/images/integrations/databricks/integrations-databricks.png b/docs/images/integrations/databricks/integrations-databricks.png new file mode 100644 index 0000000000..f48f6b95a2 Binary files /dev/null and b/docs/images/integrations/databricks/integrations-databricks.png differ diff --git a/docs/images/integrations/databricks/pat-token.png b/docs/images/integrations/databricks/pat-token.png new file mode 100644 index 0000000000..02a264710c Binary files /dev/null and b/docs/images/integrations/databricks/pat-token.png differ diff --git a/docs/images/integrations/github/app/github-app-installation.png b/docs/images/integrations/github/app/github-app-installation.png new file mode 100644 index 0000000000..60a2ec4fcd Binary files /dev/null and b/docs/images/integrations/github/app/github-app-installation.png differ diff --git a/docs/images/integrations/github/app/github-app-method-selection.png b/docs/images/integrations/github/app/github-app-method-selection.png new file mode 100644 index 0000000000..3f66a396e1 Binary files /dev/null and b/docs/images/integrations/github/app/github-app-method-selection.png differ diff --git a/docs/images/integrations/github/app/integration-overview.png b/docs/images/integrations/github/app/integration-overview.png new file mode 100644 index 0000000000..1dad2fb645 Binary files /dev/null and b/docs/images/integrations/github/app/integration-overview.png differ diff --git a/docs/images/integrations/github/app/self-hosted-github-app-basic-details.png b/docs/images/integrations/github/app/self-hosted-github-app-basic-details.png new file mode 100644 index 0000000000..463adabd8c Binary files /dev/null and b/docs/images/integrations/github/app/self-hosted-github-app-basic-details.png differ diff --git a/docs/images/integrations/github/app/self-hosted-github-app-create-confirm.png b/docs/images/integrations/github/app/self-hosted-github-app-create-confirm.png new file mode 100644 index 0000000000..15dc7f9d68 Binary files /dev/null and b/docs/images/integrations/github/app/self-hosted-github-app-create-confirm.png differ diff --git a/docs/images/integrations/github/app/self-hosted-github-app-create.png b/docs/images/integrations/github/app/self-hosted-github-app-create.png new file mode 100644 index 0000000000..d55a49b668 Binary files /dev/null and b/docs/images/integrations/github/app/self-hosted-github-app-create.png differ diff --git a/docs/images/integrations/github/app/self-hosted-github-app-credentials.png b/docs/images/integrations/github/app/self-hosted-github-app-credentials.png new file mode 100644 index 0000000000..6e4480bc74 Binary files /dev/null and b/docs/images/integrations/github/app/self-hosted-github-app-credentials.png differ diff --git a/docs/images/integrations/github/app/self-hosted-github-app-enable-oauth.png b/docs/images/integrations/github/app/self-hosted-github-app-enable-oauth.png new file mode 100644 index 0000000000..45d50c7b29 Binary files /dev/null and b/docs/images/integrations/github/app/self-hosted-github-app-enable-oauth.png differ diff --git a/docs/images/integrations/github/app/self-hosted-github-app-organization.png b/docs/images/integrations/github/app/self-hosted-github-app-organization.png new file mode 100644 
index 0000000000..60ba84151c Binary files /dev/null and b/docs/images/integrations/github/app/self-hosted-github-app-organization.png differ diff --git a/docs/images/integrations/github/app/self-hosted-github-app-private-key.png b/docs/images/integrations/github/app/self-hosted-github-app-private-key.png new file mode 100644 index 0000000000..ce03f740e2 Binary files /dev/null and b/docs/images/integrations/github/app/self-hosted-github-app-private-key.png differ diff --git a/docs/images/integrations/github/app/self-hosted-github-app-repository.png b/docs/images/integrations/github/app/self-hosted-github-app-repository.png new file mode 100644 index 0000000000..edf1d10870 Binary files /dev/null and b/docs/images/integrations/github/app/self-hosted-github-app-repository.png differ diff --git a/docs/images/integrations/github/app/self-hosted-github-app-secret.png b/docs/images/integrations/github/app/self-hosted-github-app-secret.png new file mode 100644 index 0000000000..8c99184044 Binary files /dev/null and b/docs/images/integrations/github/app/self-hosted-github-app-secret.png differ diff --git a/docs/images/integrations/github/app/self-hosted-github-app-webhook.png b/docs/images/integrations/github/app/self-hosted-github-app-webhook.png new file mode 100644 index 0000000000..2b7493fbf9 Binary files /dev/null and b/docs/images/integrations/github/app/self-hosted-github-app-webhook.png differ diff --git a/docs/images/integrations/github/github-oauth-method-selection.png b/docs/images/integrations/github/github-oauth-method-selection.png new file mode 100644 index 0000000000..eb1d00cfce Binary files /dev/null and b/docs/images/integrations/github/github-oauth-method-selection.png differ diff --git a/docs/images/integrations/github/integration-overview.png b/docs/images/integrations/github/integration-overview.png new file mode 100644 index 0000000000..1dad2fb645 Binary files /dev/null and b/docs/images/integrations/github/integration-overview.png differ diff --git a/docs/images/integrations/github/integrations-github-scope-env.png b/docs/images/integrations/github/integrations-github-scope-env.png index e38874bd38..3c2592908d 100644 Binary files a/docs/images/integrations/github/integrations-github-scope-env.png and b/docs/images/integrations/github/integrations-github-scope-env.png differ diff --git a/docs/images/integrations/github/integrations-github-scope-org.png b/docs/images/integrations/github/integrations-github-scope-org.png index d5ef76a2bf..9f56e9e9f7 100644 Binary files a/docs/images/integrations/github/integrations-github-scope-org.png and b/docs/images/integrations/github/integrations-github-scope-org.png differ diff --git a/docs/images/integrations/github/integrations-github-scope-repo.png b/docs/images/integrations/github/integrations-github-scope-repo.png index 353527c788..4b1b223f0c 100644 Binary files a/docs/images/integrations/github/integrations-github-scope-repo.png and b/docs/images/integrations/github/integrations-github-scope-repo.png differ diff --git a/docs/images/mfa-authenticator.png b/docs/images/mfa-authenticator.png new file mode 100644 index 0000000000..2a72042edf Binary files /dev/null and b/docs/images/mfa-authenticator.png differ diff --git a/docs/images/mfa-email.png b/docs/images/mfa-email.png index f592ee239e..01f3a23f8a 100644 Binary files a/docs/images/mfa-email.png and b/docs/images/mfa-email.png differ diff --git a/docs/images/platform/access-controls/access-request-bypass.png b/docs/images/platform/access-controls/access-request-bypass.png new file 
mode 100644 index 0000000000..2481505742 Binary files /dev/null and b/docs/images/platform/access-controls/access-request-bypass.png differ diff --git a/docs/images/platform/access-controls/access-request-policies.png b/docs/images/platform/access-controls/access-request-policies.png index d7ea4829c9..a0eca9dfde 100644 Binary files a/docs/images/platform/access-controls/access-request-policies.png and b/docs/images/platform/access-controls/access-request-policies.png differ diff --git a/docs/images/platform/access-controls/add-metadata-step1.png b/docs/images/platform/access-controls/add-metadata-step1.png new file mode 100644 index 0000000000..db74a75baf Binary files /dev/null and b/docs/images/platform/access-controls/add-metadata-step1.png differ diff --git a/docs/images/platform/access-controls/add-metadata-step2.png b/docs/images/platform/access-controls/add-metadata-step2.png new file mode 100644 index 0000000000..4fd24b150b Binary files /dev/null and b/docs/images/platform/access-controls/add-metadata-step2.png differ diff --git a/docs/images/platform/access-controls/add-metadata-step3.png b/docs/images/platform/access-controls/add-metadata-step3.png new file mode 100644 index 0000000000..6571628c5c Binary files /dev/null and b/docs/images/platform/access-controls/add-metadata-step3.png differ diff --git a/docs/images/platform/access-controls/create-access-request-policy.png b/docs/images/platform/access-controls/create-access-request-policy.png index 6593fd7334..56f9840cf0 100644 Binary files a/docs/images/platform/access-controls/create-access-request-policy.png and b/docs/images/platform/access-controls/create-access-request-policy.png differ diff --git a/docs/images/platform/access-controls/example-abac-1.png b/docs/images/platform/access-controls/example-abac-1.png new file mode 100644 index 0000000000..244f78d788 Binary files /dev/null and b/docs/images/platform/access-controls/example-abac-1.png differ diff --git a/docs/images/platform/admin-panels/access-org-admin-console.png b/docs/images/platform/admin-panels/access-org-admin-console.png new file mode 100644 index 0000000000..057c829440 Binary files /dev/null and b/docs/images/platform/admin-panels/access-org-admin-console.png differ diff --git a/docs/images/platform/admin-panels/access-server-admin-panel.png b/docs/images/platform/admin-panels/access-server-admin-panel.png new file mode 100644 index 0000000000..a27735de07 Binary files /dev/null and b/docs/images/platform/admin-panels/access-server-admin-panel.png differ diff --git a/docs/images/platform/admin-panels/admin-panel-auths.png b/docs/images/platform/admin-panels/admin-panel-auths.png new file mode 100644 index 0000000000..a0abd5d9a6 Binary files /dev/null and b/docs/images/platform/admin-panels/admin-panel-auths.png differ diff --git a/docs/images/platform/admin-panels/admin-panel-general.png b/docs/images/platform/admin-panels/admin-panel-general.png new file mode 100644 index 0000000000..bce175cf0d Binary files /dev/null and b/docs/images/platform/admin-panels/admin-panel-general.png differ diff --git a/docs/images/platform/admin-panels/admin-panel-integration.png b/docs/images/platform/admin-panels/admin-panel-integration.png new file mode 100644 index 0000000000..43bedd17ec Binary files /dev/null and b/docs/images/platform/admin-panels/admin-panel-integration.png differ diff --git a/docs/images/platform/admin-panels/admin-panel-rate-limits.png b/docs/images/platform/admin-panels/admin-panel-rate-limits.png new file mode 100644 index 0000000000..d8f689f1ea 
Binary files /dev/null and b/docs/images/platform/admin-panels/admin-panel-rate-limits.png differ diff --git a/docs/images/platform/admin-panels/admin-panel-users.png b/docs/images/platform/admin-panels/admin-panel-users.png new file mode 100644 index 0000000000..94add6d857 Binary files /dev/null and b/docs/images/platform/admin-panels/admin-panel-users.png differ diff --git a/docs/images/platform/admin-panels/org-admin-console-access.png b/docs/images/platform/admin-panels/org-admin-console-access.png new file mode 100644 index 0000000000..6aba5b21aa Binary files /dev/null and b/docs/images/platform/admin-panels/org-admin-console-access.png differ diff --git a/docs/images/platform/admin-panels/org-admin-console-projects.png b/docs/images/platform/admin-panels/org-admin-console-projects.png new file mode 100644 index 0000000000..13b8bcfce9 Binary files /dev/null and b/docs/images/platform/admin-panels/org-admin-console-projects.png differ diff --git a/docs/images/platform/dynamic-secrets/add-dynamic-secret-button.png b/docs/images/platform/dynamic-secrets/add-dynamic-secret-button.png index 8d0fd3ecc5..537f20e73a 100644 Binary files a/docs/images/platform/dynamic-secrets/add-dynamic-secret-button.png and b/docs/images/platform/dynamic-secrets/add-dynamic-secret-button.png differ diff --git a/docs/images/platform/dynamic-secrets/advanced-option-atlas.png b/docs/images/platform/dynamic-secrets/advanced-option-atlas.png new file mode 100644 index 0000000000..50c9f89bd8 Binary files /dev/null and b/docs/images/platform/dynamic-secrets/advanced-option-atlas.png differ diff --git a/docs/images/platform/dynamic-secrets/dynamic-secret-ad-add-assignments.png b/docs/images/platform/dynamic-secrets/dynamic-secret-ad-add-assignments.png new file mode 100644 index 0000000000..561639de07 Binary files /dev/null and b/docs/images/platform/dynamic-secrets/dynamic-secret-ad-add-assignments.png differ diff --git a/docs/images/platform/dynamic-secrets/dynamic-secret-ad-add-client-secret.png b/docs/images/platform/dynamic-secrets/dynamic-secret-ad-add-client-secret.png new file mode 100644 index 0000000000..358fc53c17 Binary files /dev/null and b/docs/images/platform/dynamic-secrets/dynamic-secret-ad-add-client-secret.png differ diff --git a/docs/images/platform/dynamic-secrets/dynamic-secret-ad-add-permission.png b/docs/images/platform/dynamic-secrets/dynamic-secret-ad-add-permission.png new file mode 100644 index 0000000000..20614f9dcc Binary files /dev/null and b/docs/images/platform/dynamic-secrets/dynamic-secret-ad-add-permission.png differ diff --git a/docs/images/platform/dynamic-secrets/dynamic-secret-ad-admin-consent.png b/docs/images/platform/dynamic-secrets/dynamic-secret-ad-admin-consent.png new file mode 100644 index 0000000000..5c8102450a Binary files /dev/null and b/docs/images/platform/dynamic-secrets/dynamic-secret-ad-admin-consent.png differ diff --git a/docs/images/platform/dynamic-secrets/dynamic-secret-ad-copy-app-id.png b/docs/images/platform/dynamic-secrets/dynamic-secret-ad-copy-app-id.png new file mode 100644 index 0000000000..aa36ee39dc Binary files /dev/null and b/docs/images/platform/dynamic-secrets/dynamic-secret-ad-copy-app-id.png differ diff --git a/docs/images/platform/dynamic-secrets/dynamic-secret-ad-lease.png b/docs/images/platform/dynamic-secrets/dynamic-secret-ad-lease.png new file mode 100644 index 0000000000..4740062db9 Binary files /dev/null and b/docs/images/platform/dynamic-secrets/dynamic-secret-ad-lease.png differ diff --git 
a/docs/images/platform/dynamic-secrets/dynamic-secret-ad-modal.png b/docs/images/platform/dynamic-secrets/dynamic-secret-ad-modal.png new file mode 100644 index 0000000000..481c789237 Binary files /dev/null and b/docs/images/platform/dynamic-secrets/dynamic-secret-ad-modal.png differ diff --git a/docs/images/platform/dynamic-secrets/dynamic-secret-ad-new-registration.png b/docs/images/platform/dynamic-secrets/dynamic-secret-ad-new-registration.png new file mode 100644 index 0000000000..285df9d77c Binary files /dev/null and b/docs/images/platform/dynamic-secrets/dynamic-secret-ad-new-registration.png differ diff --git a/docs/images/platform/dynamic-secrets/dynamic-secret-ad-select-graph.png b/docs/images/platform/dynamic-secrets/dynamic-secret-ad-select-graph.png new file mode 100644 index 0000000000..98f23c0846 Binary files /dev/null and b/docs/images/platform/dynamic-secrets/dynamic-secret-ad-select-graph.png differ diff --git a/docs/images/platform/dynamic-secrets/dynamic-secret-ad-select-perms.png b/docs/images/platform/dynamic-secrets/dynamic-secret-ad-select-perms.png new file mode 100644 index 0000000000..45abaa7380 Binary files /dev/null and b/docs/images/platform/dynamic-secrets/dynamic-secret-ad-select-perms.png differ diff --git a/docs/images/platform/dynamic-secrets/dynamic-secret-ad-show-more.png b/docs/images/platform/dynamic-secrets/dynamic-secret-ad-show-more.png new file mode 100644 index 0000000000..df4dc95675 Binary files /dev/null and b/docs/images/platform/dynamic-secrets/dynamic-secret-ad-show-more.png differ diff --git a/docs/images/platform/dynamic-secrets/dynamic-secret-ad-tenant-id.png b/docs/images/platform/dynamic-secrets/dynamic-secret-ad-tenant-id.png new file mode 100644 index 0000000000..5b0cb47633 Binary files /dev/null and b/docs/images/platform/dynamic-secrets/dynamic-secret-ad-tenant-id.png differ diff --git a/docs/images/platform/dynamic-secrets/dynamic-secret-ad-user-admin.png b/docs/images/platform/dynamic-secrets/dynamic-secret-ad-user-admin.png new file mode 100644 index 0000000000..dfc1deadd0 Binary files /dev/null and b/docs/images/platform/dynamic-secrets/dynamic-secret-ad-user-admin.png differ diff --git a/docs/images/platform/dynamic-secrets/dynamic-secret-atlas-modal.png b/docs/images/platform/dynamic-secrets/dynamic-secret-atlas-modal.png new file mode 100644 index 0000000000..f18ee35477 Binary files /dev/null and b/docs/images/platform/dynamic-secrets/dynamic-secret-atlas-modal.png differ diff --git a/docs/images/platform/dynamic-secrets/dynamic-secret-generate-redis.png b/docs/images/platform/dynamic-secrets/dynamic-secret-generate-redis.png new file mode 100644 index 0000000000..db7f8be355 Binary files /dev/null and b/docs/images/platform/dynamic-secrets/dynamic-secret-generate-redis.png differ diff --git a/docs/images/platform/dynamic-secrets/dynamic-secret-input-modal-elastic-search.png b/docs/images/platform/dynamic-secrets/dynamic-secret-input-modal-elastic-search.png new file mode 100644 index 0000000000..14d2d48b20 Binary files /dev/null and b/docs/images/platform/dynamic-secrets/dynamic-secret-input-modal-elastic-search.png differ diff --git a/docs/images/platform/dynamic-secrets/dynamic-secret-input-modal-rabbit-mq.png b/docs/images/platform/dynamic-secrets/dynamic-secret-input-modal-rabbit-mq.png new file mode 100644 index 0000000000..41508b4655 Binary files /dev/null and b/docs/images/platform/dynamic-secrets/dynamic-secret-input-modal-rabbit-mq.png differ diff --git 
a/docs/images/platform/dynamic-secrets/dynamic-secret-ldap-lease.png b/docs/images/platform/dynamic-secrets/dynamic-secret-ldap-lease.png new file mode 100644 index 0000000000..e053a8ddca Binary files /dev/null and b/docs/images/platform/dynamic-secrets/dynamic-secret-ldap-lease.png differ diff --git a/docs/images/platform/dynamic-secrets/dynamic-secret-ldap-select.png b/docs/images/platform/dynamic-secrets/dynamic-secret-ldap-select.png new file mode 100644 index 0000000000..7694a3fef5 Binary files /dev/null and b/docs/images/platform/dynamic-secrets/dynamic-secret-ldap-select.png differ diff --git a/docs/images/platform/dynamic-secrets/dynamic-secret-lease-empty-redis.png b/docs/images/platform/dynamic-secrets/dynamic-secret-lease-empty-redis.png new file mode 100644 index 0000000000..a7842b2988 Binary files /dev/null and b/docs/images/platform/dynamic-secrets/dynamic-secret-lease-empty-redis.png differ diff --git a/docs/images/platform/dynamic-secrets/dynamic-secret-lease-renew.png b/docs/images/platform/dynamic-secrets/dynamic-secret-lease-renew.png index e975544153..3d2e32e8a7 100644 Binary files a/docs/images/platform/dynamic-secrets/dynamic-secret-lease-renew.png and b/docs/images/platform/dynamic-secrets/dynamic-secret-lease-renew.png differ diff --git a/docs/images/platform/dynamic-secrets/dynamic-secret-modal-atlas.png b/docs/images/platform/dynamic-secrets/dynamic-secret-modal-atlas.png new file mode 100644 index 0000000000..e3989b00ff Binary files /dev/null and b/docs/images/platform/dynamic-secrets/dynamic-secret-modal-atlas.png differ diff --git a/docs/images/platform/dynamic-secrets/dynamic-secret-modal-aws-elasti-cache.png b/docs/images/platform/dynamic-secrets/dynamic-secret-modal-aws-elasti-cache.png new file mode 100644 index 0000000000..bf461c6a61 Binary files /dev/null and b/docs/images/platform/dynamic-secrets/dynamic-secret-modal-aws-elasti-cache.png differ diff --git a/docs/images/platform/dynamic-secrets/dynamic-secret-modal-elastic-search.png b/docs/images/platform/dynamic-secrets/dynamic-secret-modal-elastic-search.png new file mode 100644 index 0000000000..bb21f628cd Binary files /dev/null and b/docs/images/platform/dynamic-secrets/dynamic-secret-modal-elastic-search.png differ diff --git a/docs/images/platform/dynamic-secrets/dynamic-secret-modal-mongodb.png b/docs/images/platform/dynamic-secrets/dynamic-secret-modal-mongodb.png new file mode 100644 index 0000000000..be000f7536 Binary files /dev/null and b/docs/images/platform/dynamic-secrets/dynamic-secret-modal-mongodb.png differ diff --git a/docs/images/platform/dynamic-secrets/dynamic-secret-modal-redis.png b/docs/images/platform/dynamic-secrets/dynamic-secret-modal-redis.png new file mode 100644 index 0000000000..a4835e517d Binary files /dev/null and b/docs/images/platform/dynamic-secrets/dynamic-secret-modal-redis.png differ diff --git a/docs/images/platform/dynamic-secrets/dynamic-secret-modal-sap-hana.png b/docs/images/platform/dynamic-secrets/dynamic-secret-modal-sap-hana.png new file mode 100644 index 0000000000..202431cc21 Binary files /dev/null and b/docs/images/platform/dynamic-secrets/dynamic-secret-modal-sap-hana.png differ diff --git a/docs/images/platform/dynamic-secrets/dynamic-secret-mongodb.png b/docs/images/platform/dynamic-secrets/dynamic-secret-mongodb.png new file mode 100644 index 0000000000..d3a804f8f0 Binary files /dev/null and b/docs/images/platform/dynamic-secrets/dynamic-secret-mongodb.png differ diff --git a/docs/images/platform/dynamic-secrets/dynamic-secret-rabbit-mq-modal.png 
b/docs/images/platform/dynamic-secrets/dynamic-secret-rabbit-mq-modal.png new file mode 100644 index 0000000000..885a17a88e Binary files /dev/null and b/docs/images/platform/dynamic-secrets/dynamic-secret-rabbit-mq-modal.png differ diff --git a/docs/images/platform/dynamic-secrets/dynamic-secret-setup-modal-mssql.png b/docs/images/platform/dynamic-secrets/dynamic-secret-setup-modal-mssql.png new file mode 100644 index 0000000000..7f296a4414 Binary files /dev/null and b/docs/images/platform/dynamic-secrets/dynamic-secret-setup-modal-mssql.png differ diff --git a/docs/images/platform/dynamic-secrets/dynamic-secret-setup-modal-sap-hana.png b/docs/images/platform/dynamic-secrets/dynamic-secret-setup-modal-sap-hana.png new file mode 100644 index 0000000000..1c97efe1ef Binary files /dev/null and b/docs/images/platform/dynamic-secrets/dynamic-secret-setup-modal-sap-hana.png differ diff --git a/docs/images/platform/dynamic-secrets/lease-data.png b/docs/images/platform/dynamic-secrets/lease-data.png index aecd8c11d2..9562da1b5f 100644 Binary files a/docs/images/platform/dynamic-secrets/lease-data.png and b/docs/images/platform/dynamic-secrets/lease-data.png differ diff --git a/docs/images/platform/dynamic-secrets/lease-values.png b/docs/images/platform/dynamic-secrets/lease-values.png index d552845f80..962bd76ec4 100644 Binary files a/docs/images/platform/dynamic-secrets/lease-values.png and b/docs/images/platform/dynamic-secrets/lease-values.png differ diff --git a/docs/images/platform/dynamic-secrets/modify-elastic-search-statement.png b/docs/images/platform/dynamic-secrets/modify-elastic-search-statement.png new file mode 100644 index 0000000000..9315a40779 Binary files /dev/null and b/docs/images/platform/dynamic-secrets/modify-elastic-search-statement.png differ diff --git a/docs/images/platform/dynamic-secrets/modify-elasticache-statement.png b/docs/images/platform/dynamic-secrets/modify-elasticache-statement.png new file mode 100644 index 0000000000..c8cd662d0e Binary files /dev/null and b/docs/images/platform/dynamic-secrets/modify-elasticache-statement.png differ diff --git a/docs/images/platform/dynamic-secrets/modify-redis-statement.png b/docs/images/platform/dynamic-secrets/modify-redis-statement.png new file mode 100644 index 0000000000..c9726f752a Binary files /dev/null and b/docs/images/platform/dynamic-secrets/modify-redis-statement.png differ diff --git a/docs/images/platform/dynamic-secrets/modify-sap-hana-sql-statements.png b/docs/images/platform/dynamic-secrets/modify-sap-hana-sql-statements.png new file mode 100644 index 0000000000..973fcf731b Binary files /dev/null and b/docs/images/platform/dynamic-secrets/modify-sap-hana-sql-statements.png differ diff --git a/docs/images/platform/dynamic-secrets/modify-sql-statements-mssql.png b/docs/images/platform/dynamic-secrets/modify-sql-statements-mssql.png new file mode 100644 index 0000000000..e399db47da Binary files /dev/null and b/docs/images/platform/dynamic-secrets/modify-sql-statements-mssql.png differ diff --git a/docs/images/platform/dynamic-secrets/provision-lease.png b/docs/images/platform/dynamic-secrets/provision-lease.png index f144a5ae21..96b0505b92 100644 Binary files a/docs/images/platform/dynamic-secrets/provision-lease.png and b/docs/images/platform/dynamic-secrets/provision-lease.png differ diff --git a/docs/images/platform/dynamic-secrets/snowflake/dynamic-secret-snowflake-create-service-user.png b/docs/images/platform/dynamic-secrets/snowflake/dynamic-secret-snowflake-create-service-user.png new file mode 100644 
index 0000000000..b0285614c8 Binary files /dev/null and b/docs/images/platform/dynamic-secrets/snowflake/dynamic-secret-snowflake-create-service-user.png differ diff --git a/docs/images/platform/dynamic-secrets/snowflake/dynamic-secret-snowflake-identifiers.png b/docs/images/platform/dynamic-secrets/snowflake/dynamic-secret-snowflake-identifiers.png new file mode 100644 index 0000000000..21c2c6c0ee Binary files /dev/null and b/docs/images/platform/dynamic-secrets/snowflake/dynamic-secret-snowflake-identifiers.png differ diff --git a/docs/images/platform/dynamic-secrets/snowflake/dynamic-secret-snowflake-modal.png b/docs/images/platform/dynamic-secrets/snowflake/dynamic-secret-snowflake-modal.png new file mode 100644 index 0000000000..1c767ae5b4 Binary files /dev/null and b/docs/images/platform/dynamic-secrets/snowflake/dynamic-secret-snowflake-modal.png differ diff --git a/docs/images/platform/dynamic-secrets/snowflake/dynamic-secret-snowflake-setup-modal.png b/docs/images/platform/dynamic-secrets/snowflake/dynamic-secret-snowflake-setup-modal.png new file mode 100644 index 0000000000..cd28e459c5 Binary files /dev/null and b/docs/images/platform/dynamic-secrets/snowflake/dynamic-secret-snowflake-setup-modal.png differ diff --git a/docs/images/platform/dynamic-secrets/snowflake/dynamic-secret-snowflake-sql-statements.png b/docs/images/platform/dynamic-secrets/snowflake/dynamic-secret-snowflake-sql-statements.png new file mode 100644 index 0000000000..44c41bd520 Binary files /dev/null and b/docs/images/platform/dynamic-secrets/snowflake/dynamic-secret-snowflake-sql-statements.png differ diff --git a/docs/images/platform/dynamic-secrets/snowflake/dynamic-secret-snowflake-users-page.png b/docs/images/platform/dynamic-secrets/snowflake/dynamic-secret-snowflake-users-page.png new file mode 100644 index 0000000000..d3759924cf Binary files /dev/null and b/docs/images/platform/dynamic-secrets/snowflake/dynamic-secret-snowflake-users-page.png differ diff --git a/docs/images/platform/identities/identities-org-create-aws-auth-method.png b/docs/images/platform/identities/identities-org-create-aws-auth-method.png index 4b902c048a..c2bb447e17 100644 Binary files a/docs/images/platform/identities/identities-org-create-aws-auth-method.png and b/docs/images/platform/identities/identities-org-create-aws-auth-method.png differ diff --git a/docs/images/platform/identities/identities-org-create-azure-auth-method.png b/docs/images/platform/identities/identities-org-create-azure-auth-method.png index fc0fd16657..fe57021bb9 100644 Binary files a/docs/images/platform/identities/identities-org-create-azure-auth-method.png and b/docs/images/platform/identities/identities-org-create-azure-auth-method.png differ diff --git a/docs/images/platform/identities/identities-org-create-gcp-gce-auth-method.png b/docs/images/platform/identities/identities-org-create-gcp-gce-auth-method.png index 899130c428..6a7100d300 100644 Binary files a/docs/images/platform/identities/identities-org-create-gcp-gce-auth-method.png and b/docs/images/platform/identities/identities-org-create-gcp-gce-auth-method.png differ diff --git a/docs/images/platform/identities/identities-org-create-gcp-iam-auth-method.png b/docs/images/platform/identities/identities-org-create-gcp-iam-auth-method.png index 9dacf9f89f..f871b4ba48 100644 Binary files a/docs/images/platform/identities/identities-org-create-gcp-iam-auth-method.png and b/docs/images/platform/identities/identities-org-create-gcp-iam-auth-method.png differ diff --git 
a/docs/images/platform/identities/identities-org-create-kubernetes-auth-method.png b/docs/images/platform/identities/identities-org-create-kubernetes-auth-method.png index 0c2fe072d2..c79e71ba37 100644 Binary files a/docs/images/platform/identities/identities-org-create-kubernetes-auth-method.png and b/docs/images/platform/identities/identities-org-create-kubernetes-auth-method.png differ diff --git a/docs/images/platform/identities/identities-org-create-oidc-auth-method.png b/docs/images/platform/identities/identities-org-create-oidc-auth-method.png new file mode 100644 index 0000000000..ae43737527 Binary files /dev/null and b/docs/images/platform/identities/identities-org-create-oidc-auth-method.png differ diff --git a/docs/images/platform/identities/identities-org-create-token-auth-method.png b/docs/images/platform/identities/identities-org-create-token-auth-method.png new file mode 100644 index 0000000000..a897aac58a Binary files /dev/null and b/docs/images/platform/identities/identities-org-create-token-auth-method.png differ diff --git a/docs/images/platform/identities/identities-org-create-universal-auth-method.png b/docs/images/platform/identities/identities-org-create-universal-auth-method.png new file mode 100644 index 0000000000..a3a0a8d9cd Binary files /dev/null and b/docs/images/platform/identities/identities-org-create-universal-auth-method.png differ diff --git a/docs/images/platform/identities/identities-page-remove-default-auth.png b/docs/images/platform/identities/identities-page-remove-default-auth.png new file mode 100644 index 0000000000..5b8f22fa2c Binary files /dev/null and b/docs/images/platform/identities/identities-page-remove-default-auth.png differ diff --git a/docs/images/platform/identities/identities-page.png b/docs/images/platform/identities/identities-page.png new file mode 100644 index 0000000000..35b8af658d Binary files /dev/null and b/docs/images/platform/identities/identities-page.png differ diff --git a/docs/images/platform/identities/identities-token-auth-create-1.png b/docs/images/platform/identities/identities-token-auth-create-1.png new file mode 100644 index 0000000000..c4d689fb8e Binary files /dev/null and b/docs/images/platform/identities/identities-token-auth-create-1.png differ diff --git a/docs/images/platform/identities/identities-token-auth-create-2.png b/docs/images/platform/identities/identities-token-auth-create-2.png new file mode 100644 index 0000000000..b812cb1c03 Binary files /dev/null and b/docs/images/platform/identities/identities-token-auth-create-2.png differ diff --git a/docs/images/platform/identities/identities-token-auth-create-3.png b/docs/images/platform/identities/identities-token-auth-create-3.png new file mode 100644 index 0000000000..8a18973cca Binary files /dev/null and b/docs/images/platform/identities/identities-token-auth-create-3.png differ diff --git a/docs/images/platform/identities/identities-universal-auth-create-1.png b/docs/images/platform/identities/identities-universal-auth-create-1.png new file mode 100644 index 0000000000..f12789a3c6 Binary files /dev/null and b/docs/images/platform/identities/identities-universal-auth-create-1.png differ diff --git a/docs/images/platform/identities/identities-universal-auth-create-2.png b/docs/images/platform/identities/identities-universal-auth-create-2.png new file mode 100644 index 0000000000..4706c21b8c Binary files /dev/null and b/docs/images/platform/identities/identities-universal-auth-create-2.png differ diff --git 
a/docs/images/platform/identities/identities-universal-auth-create-3.png b/docs/images/platform/identities/identities-universal-auth-create-3.png new file mode 100644 index 0000000000..ddb9483357 Binary files /dev/null and b/docs/images/platform/identities/identities-universal-auth-create-3.png differ diff --git a/docs/images/platform/kms/aws-hsm/create-key-store-cert.png b/docs/images/platform/kms/aws-hsm/create-key-store-cert.png new file mode 100644 index 0000000000..c07c2a8952 Binary files /dev/null and b/docs/images/platform/kms/aws-hsm/create-key-store-cert.png differ diff --git a/docs/images/platform/kms/aws-hsm/create-key-store-cluster.png b/docs/images/platform/kms/aws-hsm/create-key-store-cluster.png new file mode 100644 index 0000000000..245b11d98c Binary files /dev/null and b/docs/images/platform/kms/aws-hsm/create-key-store-cluster.png differ diff --git a/docs/images/platform/kms/aws-hsm/create-key-store-name.png b/docs/images/platform/kms/aws-hsm/create-key-store-name.png new file mode 100644 index 0000000000..1b47604b85 Binary files /dev/null and b/docs/images/platform/kms/aws-hsm/create-key-store-name.png differ diff --git a/docs/images/platform/kms/aws-hsm/create-key-store-password.png b/docs/images/platform/kms/aws-hsm/create-key-store-password.png new file mode 100644 index 0000000000..5ae84394d6 Binary files /dev/null and b/docs/images/platform/kms/aws-hsm/create-key-store-password.png differ diff --git a/docs/images/platform/kms/aws-hsm/create-kms-key-1.png b/docs/images/platform/kms/aws-hsm/create-kms-key-1.png new file mode 100644 index 0000000000..a5bb700c90 Binary files /dev/null and b/docs/images/platform/kms/aws-hsm/create-kms-key-1.png differ diff --git a/docs/images/platform/kms/aws-hsm/create-kms-key-2.png b/docs/images/platform/kms/aws-hsm/create-kms-key-2.png new file mode 100644 index 0000000000..78f3926d87 Binary files /dev/null and b/docs/images/platform/kms/aws-hsm/create-kms-key-2.png differ diff --git a/docs/images/platform/kms/aws-hsm/create-kms-select-hsm.png b/docs/images/platform/kms/aws-hsm/create-kms-select-hsm.png new file mode 100644 index 0000000000..925bf6928b Binary files /dev/null and b/docs/images/platform/kms/aws-hsm/create-kms-select-hsm.png differ diff --git a/docs/images/platform/kms/aws/aws-kms-key-id.png b/docs/images/platform/kms/aws/aws-kms-key-id.png new file mode 100644 index 0000000000..ddeac50934 Binary files /dev/null and b/docs/images/platform/kms/aws/aws-kms-key-id.png differ diff --git a/docs/images/platform/kms/aws/encryption-modal-provider-select.png b/docs/images/platform/kms/aws/encryption-modal-provider-select.png new file mode 100644 index 0000000000..704043a74b Binary files /dev/null and b/docs/images/platform/kms/aws/encryption-modal-provider-select.png differ diff --git a/docs/images/platform/kms/aws/encryption-org-settings-add.png b/docs/images/platform/kms/aws/encryption-org-settings-add.png new file mode 100644 index 0000000000..03c9a3eaa0 Binary files /dev/null and b/docs/images/platform/kms/aws/encryption-org-settings-add.png differ diff --git a/docs/images/platform/kms/aws/encryption-org-settings.png b/docs/images/platform/kms/aws/encryption-org-settings.png new file mode 100644 index 0000000000..b413cd321f Binary files /dev/null and b/docs/images/platform/kms/aws/encryption-org-settings.png differ diff --git a/docs/images/platform/kms/aws/encryption-project-settings-select.png b/docs/images/platform/kms/aws/encryption-project-settings-select.png new file mode 100644 index 0000000000..a6323bcb3f Binary files 
/dev/null and b/docs/images/platform/kms/aws/encryption-project-settings-select.png differ diff --git a/docs/images/platform/kms/aws/encryption-project-settings.png b/docs/images/platform/kms/aws/encryption-project-settings.png new file mode 100644 index 0000000000..1df97abaea Binary files /dev/null and b/docs/images/platform/kms/aws/encryption-project-settings.png differ diff --git a/docs/images/platform/kms/configure-kms-existing.png b/docs/images/platform/kms/configure-kms-existing.png new file mode 100644 index 0000000000..8d26724aab Binary files /dev/null and b/docs/images/platform/kms/configure-kms-existing.png differ diff --git a/docs/images/platform/kms/configure-kms-new.png b/docs/images/platform/kms/configure-kms-new.png new file mode 100644 index 0000000000..e18bb227c8 Binary files /dev/null and b/docs/images/platform/kms/configure-kms-new.png differ diff --git a/docs/images/platform/kms/hsm/encryption-strategy.png b/docs/images/platform/kms/hsm/encryption-strategy.png new file mode 100644 index 0000000000..18eab934dd Binary files /dev/null and b/docs/images/platform/kms/hsm/encryption-strategy.png differ diff --git a/docs/images/platform/kms/hsm/hsm-illustration.png b/docs/images/platform/kms/hsm/hsm-illustration.png new file mode 100644 index 0000000000..b35c3f10a1 Binary files /dev/null and b/docs/images/platform/kms/hsm/hsm-illustration.png differ diff --git a/docs/images/platform/kms/hsm/server-admin-console.png b/docs/images/platform/kms/hsm/server-admin-console.png new file mode 100644 index 0000000000..661cfc2d20 Binary files /dev/null and b/docs/images/platform/kms/hsm/server-admin-console.png differ diff --git a/docs/images/platform/kms/infisical-kms/kms-add-key-modal.png b/docs/images/platform/kms/infisical-kms/kms-add-key-modal.png new file mode 100644 index 0000000000..c1738eb437 Binary files /dev/null and b/docs/images/platform/kms/infisical-kms/kms-add-key-modal.png differ diff --git a/docs/images/platform/kms/infisical-kms/kms-add-key.png b/docs/images/platform/kms/infisical-kms/kms-add-key.png new file mode 100644 index 0000000000..4fdc448986 Binary files /dev/null and b/docs/images/platform/kms/infisical-kms/kms-add-key.png differ diff --git a/docs/images/platform/kms/infisical-kms/kms-decrypt-data.png b/docs/images/platform/kms/infisical-kms/kms-decrypt-data.png new file mode 100644 index 0000000000..63d4e2aee7 Binary files /dev/null and b/docs/images/platform/kms/infisical-kms/kms-decrypt-data.png differ diff --git a/docs/images/platform/kms/infisical-kms/kms-decrypt-options.png b/docs/images/platform/kms/infisical-kms/kms-decrypt-options.png new file mode 100644 index 0000000000..e07a701962 Binary files /dev/null and b/docs/images/platform/kms/infisical-kms/kms-decrypt-options.png differ diff --git a/docs/images/platform/kms/infisical-kms/kms-decrypted-data.png b/docs/images/platform/kms/infisical-kms/kms-decrypted-data.png new file mode 100644 index 0000000000..0d9ad82b8f Binary files /dev/null and b/docs/images/platform/kms/infisical-kms/kms-decrypted-data.png differ diff --git a/docs/images/platform/kms/infisical-kms/kms-encrypt-data.png b/docs/images/platform/kms/infisical-kms/kms-encrypt-data.png new file mode 100644 index 0000000000..d73f1cc3b6 Binary files /dev/null and b/docs/images/platform/kms/infisical-kms/kms-encrypt-data.png differ diff --git a/docs/images/platform/kms/infisical-kms/kms-encrypted-data.png b/docs/images/platform/kms/infisical-kms/kms-encrypted-data.png new file mode 100644 index 0000000000..33f896183f Binary files /dev/null and 
b/docs/images/platform/kms/infisical-kms/kms-encrypted-data.png differ diff --git a/docs/images/platform/kms/infisical-kms/kms-key-options.png b/docs/images/platform/kms/infisical-kms/kms-key-options.png new file mode 100644 index 0000000000..b1e45e3569 Binary files /dev/null and b/docs/images/platform/kms/infisical-kms/kms-key-options.png differ diff --git a/docs/images/platform/ldap/ldap-config.png b/docs/images/platform/ldap/ldap-config.png index 2cd711dd18..0ba0b57728 100644 Binary files a/docs/images/platform/ldap/ldap-config.png and b/docs/images/platform/ldap/ldap-config.png differ diff --git a/docs/images/platform/ldap/ldap-test-connection.png b/docs/images/platform/ldap/ldap-test-connection.png index 9f1a3896c7..7400aafd51 100644 Binary files a/docs/images/platform/ldap/ldap-test-connection.png and b/docs/images/platform/ldap/ldap-test-connection.png differ diff --git a/docs/images/platform/mfa/entra/mfa_entra_conditional_access.png b/docs/images/platform/mfa/entra/mfa_entra_conditional_access.png new file mode 100644 index 0000000000..a1ac6732f9 Binary files /dev/null and b/docs/images/platform/mfa/entra/mfa_entra_conditional_access.png differ diff --git a/docs/images/platform/mfa/entra/mfa_entra_confirm_policy.png b/docs/images/platform/mfa/entra/mfa_entra_confirm_policy.png new file mode 100644 index 0000000000..7f5432f003 Binary files /dev/null and b/docs/images/platform/mfa/entra/mfa_entra_confirm_policy.png differ diff --git a/docs/images/platform/mfa/entra/mfa_entra_create_policy.png b/docs/images/platform/mfa/entra/mfa_entra_create_policy.png new file mode 100644 index 0000000000..54a64b4e3b Binary files /dev/null and b/docs/images/platform/mfa/entra/mfa_entra_create_policy.png differ diff --git a/docs/images/platform/mfa/entra/mfa_entra_infisical_app.png b/docs/images/platform/mfa/entra/mfa_entra_infisical_app.png new file mode 100644 index 0000000000..e9fb4f6253 Binary files /dev/null and b/docs/images/platform/mfa/entra/mfa_entra_infisical_app.png differ diff --git a/docs/images/platform/mfa/entra/mfa_entra_login.png b/docs/images/platform/mfa/entra/mfa_entra_login.png new file mode 100644 index 0000000000..a5dd09330c Binary files /dev/null and b/docs/images/platform/mfa/entra/mfa_entra_login.png differ diff --git a/docs/images/platform/mfa/entra/mfa_entra_review_policy.png b/docs/images/platform/mfa/entra/mfa_entra_review_policy.png new file mode 100644 index 0000000000..c53feb0fb0 Binary files /dev/null and b/docs/images/platform/mfa/entra/mfa_entra_review_policy.png differ diff --git a/docs/images/platform/organization/organization-machine-identities.png b/docs/images/platform/organization/organization-machine-identities.png index 17bea6e9bf..4400b9f62f 100644 Binary files a/docs/images/platform/organization/organization-machine-identities.png and b/docs/images/platform/organization/organization-machine-identities.png differ diff --git a/docs/images/platform/organization/organization-settings-general.png b/docs/images/platform/organization/organization-settings-general.png index 2c60090feb..affcf32ff0 100644 Binary files a/docs/images/platform/organization/organization-settings-general.png and b/docs/images/platform/organization/organization-settings-general.png differ diff --git a/docs/images/platform/pki/alerting/alert-create-2.png b/docs/images/platform/pki/alerting/alert-create-2.png new file mode 100644 index 0000000000..812f253f81 Binary files /dev/null and b/docs/images/platform/pki/alerting/alert-create-2.png differ diff --git 
a/docs/images/platform/pki/alerting/alert-create.png b/docs/images/platform/pki/alerting/alert-create.png new file mode 100644 index 0000000000..4a7b3227fd Binary files /dev/null and b/docs/images/platform/pki/alerting/alert-create.png differ diff --git a/docs/images/platform/pki/alerting/alerts.png b/docs/images/platform/pki/alerting/alerts.png new file mode 100644 index 0000000000..c7a5096ceb Binary files /dev/null and b/docs/images/platform/pki/alerting/alerts.png differ diff --git a/docs/images/platform/pki/alerting/collection-add-cert.png b/docs/images/platform/pki/alerting/collection-add-cert.png new file mode 100644 index 0000000000..6300cc8923 Binary files /dev/null and b/docs/images/platform/pki/alerting/collection-add-cert.png differ diff --git a/docs/images/platform/pki/alerting/collection-create-2.png b/docs/images/platform/pki/alerting/collection-create-2.png new file mode 100644 index 0000000000..53378ef053 Binary files /dev/null and b/docs/images/platform/pki/alerting/collection-create-2.png differ diff --git a/docs/images/platform/pki/alerting/collection-create.png b/docs/images/platform/pki/alerting/collection-create.png new file mode 100644 index 0000000000..7c4883201f Binary files /dev/null and b/docs/images/platform/pki/alerting/collection-create.png differ diff --git a/docs/images/platform/pki/ca-crl.png b/docs/images/platform/pki/ca-crl.png new file mode 100644 index 0000000000..efe7d3b4ab Binary files /dev/null and b/docs/images/platform/pki/ca-crl.png differ diff --git a/docs/images/platform/pki/ca-renewal-modal.png b/docs/images/platform/pki/ca-renewal-modal.png new file mode 100644 index 0000000000..c86d944f33 Binary files /dev/null and b/docs/images/platform/pki/ca-renewal-modal.png differ diff --git a/docs/images/platform/pki/ca-renewal-page.png b/docs/images/platform/pki/ca-renewal-page.png new file mode 100644 index 0000000000..43c690ae77 Binary files /dev/null and b/docs/images/platform/pki/ca-renewal-page.png differ diff --git a/docs/images/platform/pki/ca/ca-create-intermediate.png b/docs/images/platform/pki/ca/ca-create-intermediate.png new file mode 100644 index 0000000000..ac83db3e98 Binary files /dev/null and b/docs/images/platform/pki/ca/ca-create-intermediate.png differ diff --git a/docs/images/platform/pki/ca/ca-create-root.png b/docs/images/platform/pki/ca/ca-create-root.png new file mode 100644 index 0000000000..a8bf936a3c Binary files /dev/null and b/docs/images/platform/pki/ca/ca-create-root.png differ diff --git a/docs/images/platform/pki/ca/ca-create.png b/docs/images/platform/pki/ca/ca-create.png new file mode 100644 index 0000000000..915ed684c0 Binary files /dev/null and b/docs/images/platform/pki/ca/ca-create.png differ diff --git a/docs/images/platform/pki/ca/ca-install-intermediate-csr.png b/docs/images/platform/pki/ca/ca-install-intermediate-csr.png new file mode 100644 index 0000000000..77c7df0b97 Binary files /dev/null and b/docs/images/platform/pki/ca/ca-install-intermediate-csr.png differ diff --git a/docs/images/platform/pki/ca/ca-install-intermediate-opt.png b/docs/images/platform/pki/ca/ca-install-intermediate-opt.png new file mode 100644 index 0000000000..16afd2f0b8 Binary files /dev/null and b/docs/images/platform/pki/ca/ca-install-intermediate-opt.png differ diff --git a/docs/images/platform/pki/ca/ca-install-intermediate.png b/docs/images/platform/pki/ca/ca-install-intermediate.png new file mode 100644 index 0000000000..10c9424ff6 Binary files /dev/null and b/docs/images/platform/pki/ca/ca-install-intermediate.png differ diff 
--git a/docs/images/platform/pki/ca/cas.png b/docs/images/platform/pki/ca/cas.png new file mode 100644 index 0000000000..d3189fd1af Binary files /dev/null and b/docs/images/platform/pki/ca/cas.png differ diff --git a/docs/images/platform/pki/cert-revoke-modal.png b/docs/images/platform/pki/cert-revoke-modal.png new file mode 100644 index 0000000000..07bc7fce86 Binary files /dev/null and b/docs/images/platform/pki/cert-revoke-modal.png differ diff --git a/docs/images/platform/pki/cert-revoke.png b/docs/images/platform/pki/cert-revoke.png new file mode 100644 index 0000000000..ff7fcc5978 Binary files /dev/null and b/docs/images/platform/pki/cert-revoke.png differ diff --git a/docs/images/platform/pki/certificate/cert-body.png b/docs/images/platform/pki/certificate/cert-body.png new file mode 100644 index 0000000000..8c1433b54e Binary files /dev/null and b/docs/images/platform/pki/certificate/cert-body.png differ diff --git a/docs/images/platform/pki/certificate/cert-issue-modal.png b/docs/images/platform/pki/certificate/cert-issue-modal.png new file mode 100644 index 0000000000..352c4979c5 Binary files /dev/null and b/docs/images/platform/pki/certificate/cert-issue-modal.png differ diff --git a/docs/images/platform/pki/certificate/cert-issue.png b/docs/images/platform/pki/certificate/cert-issue.png new file mode 100644 index 0000000000..614271d193 Binary files /dev/null and b/docs/images/platform/pki/certificate/cert-issue.png differ diff --git a/docs/images/platform/pki/certificate/cert-template-modal.png b/docs/images/platform/pki/certificate/cert-template-modal.png new file mode 100644 index 0000000000..2f6c881668 Binary files /dev/null and b/docs/images/platform/pki/certificate/cert-template-modal.png differ diff --git a/docs/images/platform/pki/est/template-enroll-hover.png b/docs/images/platform/pki/est/template-enroll-hover.png new file mode 100644 index 0000000000..cc0f6f6583 Binary files /dev/null and b/docs/images/platform/pki/est/template-enroll-hover.png differ diff --git a/docs/images/platform/pki/est/template-enrollment-est-label.png b/docs/images/platform/pki/est/template-enrollment-est-label.png new file mode 100644 index 0000000000..8a13beec9b Binary files /dev/null and b/docs/images/platform/pki/est/template-enrollment-est-label.png differ diff --git a/docs/images/platform/pki/est/template-enrollment-modal.png b/docs/images/platform/pki/est/template-enrollment-modal.png new file mode 100644 index 0000000000..60ed273d7b Binary files /dev/null and b/docs/images/platform/pki/est/template-enrollment-modal.png differ diff --git a/docs/images/platform/pr-workflows/create-change-policy.png b/docs/images/platform/pr-workflows/create-change-policy.png new file mode 100644 index 0000000000..4ff1ad884b Binary files /dev/null and b/docs/images/platform/pr-workflows/create-change-policy.png differ diff --git a/docs/images/platform/pr-workflows/secret-update-policy.png b/docs/images/platform/pr-workflows/secret-update-policy.png index 45e6322f18..53a4e92ca0 100644 Binary files a/docs/images/platform/pr-workflows/secret-update-policy.png and b/docs/images/platform/pr-workflows/secret-update-policy.png differ diff --git a/docs/images/platform/project-templates/project-template-add-button.png b/docs/images/platform/project-templates/project-template-add-button.png new file mode 100644 index 0000000000..965de1e9a4 Binary files /dev/null and b/docs/images/platform/project-templates/project-template-add-button.png differ diff --git 
a/docs/images/platform/project-templates/project-template-apply.png b/docs/images/platform/project-templates/project-template-apply.png new file mode 100644 index 0000000000..1ec49cb436 Binary files /dev/null and b/docs/images/platform/project-templates/project-template-apply.png differ diff --git a/docs/images/platform/project-templates/project-template-create.png b/docs/images/platform/project-templates/project-template-create.png new file mode 100644 index 0000000000..6cd1090495 Binary files /dev/null and b/docs/images/platform/project-templates/project-template-create.png differ diff --git a/docs/images/platform/project-templates/project-template-customized.png b/docs/images/platform/project-templates/project-template-customized.png new file mode 100644 index 0000000000..f21717326e Binary files /dev/null and b/docs/images/platform/project-templates/project-template-customized.png differ diff --git a/docs/images/platform/project-templates/project-template-edit-form.png b/docs/images/platform/project-templates/project-template-edit-form.png new file mode 100644 index 0000000000..c4e29297f1 Binary files /dev/null and b/docs/images/platform/project-templates/project-template-edit-form.png differ diff --git a/docs/images/platform/scim/azure/scim-azure-add-users-and-groups.png b/docs/images/platform/scim/azure/scim-azure-add-users-and-groups.png new file mode 100644 index 0000000000..ec8b4428ad Binary files /dev/null and b/docs/images/platform/scim/azure/scim-azure-add-users-and-groups.png differ diff --git a/docs/images/platform/scim/scim-group-mapping.png b/docs/images/platform/scim/scim-group-mapping.png new file mode 100644 index 0000000000..76baa8d8d4 Binary files /dev/null and b/docs/images/platform/scim/scim-group-mapping.png differ diff --git a/docs/images/platform/secret-sharing/create-new-secret.png b/docs/images/platform/secret-sharing/create-new-secret.png index 335fca2b25..03a34e19df 100644 Binary files a/docs/images/platform/secret-sharing/create-new-secret.png and b/docs/images/platform/secret-sharing/create-new-secret.png differ diff --git a/docs/images/platform/secret-sharing/public-view.png b/docs/images/platform/secret-sharing/public-view.png index 8b4077c652..9673fcd377 100644 Binary files a/docs/images/platform/secret-sharing/public-view.png and b/docs/images/platform/secret-sharing/public-view.png differ diff --git a/docs/images/platform/workflow-integrations/slack-integration/admin-slack-integration-app-credential-form.png b/docs/images/platform/workflow-integrations/slack-integration/admin-slack-integration-app-credential-form.png new file mode 100644 index 0000000000..0b97be58a4 Binary files /dev/null and b/docs/images/platform/workflow-integrations/slack-integration/admin-slack-integration-app-credential-form.png differ diff --git a/docs/images/platform/workflow-integrations/slack-integration/admin-slack-integration-app-credentials.png b/docs/images/platform/workflow-integrations/slack-integration/admin-slack-integration-app-credentials.png new file mode 100644 index 0000000000..ddf245effa Binary files /dev/null and b/docs/images/platform/workflow-integrations/slack-integration/admin-slack-integration-app-credentials.png differ diff --git a/docs/images/platform/workflow-integrations/slack-integration/admin-slack-integration-app-summary.png b/docs/images/platform/workflow-integrations/slack-integration/admin-slack-integration-app-summary.png new file mode 100644 index 0000000000..95e9fe4ba3 Binary files /dev/null and 
b/docs/images/platform/workflow-integrations/slack-integration/admin-slack-integration-app-summary.png differ diff --git a/docs/images/platform/workflow-integrations/slack-integration/admin-slack-integration-app-workspace-select.png b/docs/images/platform/workflow-integrations/slack-integration/admin-slack-integration-app-workspace-select.png new file mode 100644 index 0000000000..0c047ab1aa Binary files /dev/null and b/docs/images/platform/workflow-integrations/slack-integration/admin-slack-integration-app-workspace-select.png differ diff --git a/docs/images/platform/workflow-integrations/slack-integration/admin-slack-integration-create-app.png b/docs/images/platform/workflow-integrations/slack-integration/admin-slack-integration-create-app.png new file mode 100644 index 0000000000..502dbde0a6 Binary files /dev/null and b/docs/images/platform/workflow-integrations/slack-integration/admin-slack-integration-create-app.png differ diff --git a/docs/images/platform/workflow-integrations/slack-integration/admin-slack-integration-overview.png b/docs/images/platform/workflow-integrations/slack-integration/admin-slack-integration-overview.png new file mode 100644 index 0000000000..54ad6ca6ed Binary files /dev/null and b/docs/images/platform/workflow-integrations/slack-integration/admin-slack-integration-overview.png differ diff --git a/docs/images/platform/workflow-integrations/slack-integration/cloud-org-slack-integration-authenticate.png b/docs/images/platform/workflow-integrations/slack-integration/cloud-org-slack-integration-authenticate.png new file mode 100644 index 0000000000..048e91f853 Binary files /dev/null and b/docs/images/platform/workflow-integrations/slack-integration/cloud-org-slack-integration-authenticate.png differ diff --git a/docs/images/platform/workflow-integrations/slack-integration/cloud-org-slack-integration-workspace.png b/docs/images/platform/workflow-integrations/slack-integration/cloud-org-slack-integration-workspace.png new file mode 100644 index 0000000000..416247775f Binary files /dev/null and b/docs/images/platform/workflow-integrations/slack-integration/cloud-org-slack-integration-workspace.png differ diff --git a/docs/images/platform/workflow-integrations/slack-integration/org-slack-integration-add-form.png b/docs/images/platform/workflow-integrations/slack-integration/org-slack-integration-add-form.png new file mode 100644 index 0000000000..97b38d6e45 Binary files /dev/null and b/docs/images/platform/workflow-integrations/slack-integration/org-slack-integration-add-form.png differ diff --git a/docs/images/platform/workflow-integrations/slack-integration/org-slack-integration-authenticate.png b/docs/images/platform/workflow-integrations/slack-integration/org-slack-integration-authenticate.png new file mode 100644 index 0000000000..166e5849f8 Binary files /dev/null and b/docs/images/platform/workflow-integrations/slack-integration/org-slack-integration-authenticate.png differ diff --git a/docs/images/platform/workflow-integrations/slack-integration/org-slack-integration-created.png b/docs/images/platform/workflow-integrations/slack-integration/org-slack-integration-created.png new file mode 100644 index 0000000000..f46f61d894 Binary files /dev/null and b/docs/images/platform/workflow-integrations/slack-integration/org-slack-integration-created.png differ diff --git a/docs/images/platform/workflow-integrations/slack-integration/org-slack-integration-initial-add.png b/docs/images/platform/workflow-integrations/slack-integration/org-slack-integration-initial-add.png 
new file mode 100644 index 0000000000..5bd66d9327 Binary files /dev/null and b/docs/images/platform/workflow-integrations/slack-integration/org-slack-integration-initial-add.png differ diff --git a/docs/images/platform/workflow-integrations/slack-integration/org-slack-integration-overview.png b/docs/images/platform/workflow-integrations/slack-integration/org-slack-integration-overview.png new file mode 100644 index 0000000000..1f73865595 Binary files /dev/null and b/docs/images/platform/workflow-integrations/slack-integration/org-slack-integration-overview.png differ diff --git a/docs/images/platform/workflow-integrations/slack-integration/org-slack-integration-workspace.png b/docs/images/platform/workflow-integrations/slack-integration/org-slack-integration-workspace.png new file mode 100644 index 0000000000..042ad1b77d Binary files /dev/null and b/docs/images/platform/workflow-integrations/slack-integration/org-slack-integration-workspace.png differ diff --git a/docs/images/platform/workflow-integrations/slack-integration/private-slack-setup-add.png b/docs/images/platform/workflow-integrations/slack-integration/private-slack-setup-add.png new file mode 100644 index 0000000000..1945313e88 Binary files /dev/null and b/docs/images/platform/workflow-integrations/slack-integration/private-slack-setup-add.png differ diff --git a/docs/images/platform/workflow-integrations/slack-integration/private-slack-setup-channel-field.png b/docs/images/platform/workflow-integrations/slack-integration/private-slack-setup-channel-field.png new file mode 100644 index 0000000000..3f6bd0d1d5 Binary files /dev/null and b/docs/images/platform/workflow-integrations/slack-integration/private-slack-setup-channel-field.png differ diff --git a/docs/images/platform/workflow-integrations/slack-integration/private-slack-setup-form.png b/docs/images/platform/workflow-integrations/slack-integration/private-slack-setup-form.png new file mode 100644 index 0000000000..7d5ac05605 Binary files /dev/null and b/docs/images/platform/workflow-integrations/slack-integration/private-slack-setup-form.png differ diff --git a/docs/images/platform/workflow-integrations/slack-integration/private-slack-setup-menu.png b/docs/images/platform/workflow-integrations/slack-integration/private-slack-setup-menu.png new file mode 100644 index 0000000000..087f31483d Binary files /dev/null and b/docs/images/platform/workflow-integrations/slack-integration/private-slack-setup-menu.png differ diff --git a/docs/images/platform/workflow-integrations/slack-integration/project-slack-integration-config.png b/docs/images/platform/workflow-integrations/slack-integration/project-slack-integration-config.png new file mode 100644 index 0000000000..c6dd70ad73 Binary files /dev/null and b/docs/images/platform/workflow-integrations/slack-integration/project-slack-integration-config.png differ diff --git a/docs/images/platform/workflow-integrations/slack-integration/project-slack-integration-overview.png b/docs/images/platform/workflow-integrations/slack-integration/project-slack-integration-overview.png new file mode 100644 index 0000000000..944db9ccac Binary files /dev/null and b/docs/images/platform/workflow-integrations/slack-integration/project-slack-integration-overview.png differ diff --git a/docs/images/platform/workflow-integrations/slack-integration/project-slack-integration-select.png b/docs/images/platform/workflow-integrations/slack-integration/project-slack-integration-select.png new file mode 100644 index 0000000000..073d3fd93c Binary files /dev/null 
and b/docs/images/platform/workflow-integrations/slack-integration/project-slack-integration-select.png differ diff --git a/docs/images/self-hosting/deployment-options/native/ha-stack.png b/docs/images/self-hosting/deployment-options/native/ha-stack.png new file mode 100644 index 0000000000..1f71dc9e2c Binary files /dev/null and b/docs/images/self-hosting/deployment-options/native/ha-stack.png differ diff --git a/docs/images/self-hosting/deployment-options/native/haproxy-stats.png b/docs/images/self-hosting/deployment-options/native/haproxy-stats.png new file mode 100644 index 0000000000..d8cb38d603 Binary files /dev/null and b/docs/images/self-hosting/deployment-options/native/haproxy-stats.png differ diff --git a/docs/images/sso/auth0-oidc/application-connections.png b/docs/images/sso/auth0-oidc/application-connections.png new file mode 100644 index 0000000000..8307f3b248 Binary files /dev/null and b/docs/images/sso/auth0-oidc/application-connections.png differ diff --git a/docs/images/sso/auth0-oidc/application-credential.png b/docs/images/sso/auth0-oidc/application-credential.png new file mode 100644 index 0000000000..157d824156 Binary files /dev/null and b/docs/images/sso/auth0-oidc/application-credential.png differ diff --git a/docs/images/sso/auth0-oidc/application-origin.png b/docs/images/sso/auth0-oidc/application-origin.png new file mode 100644 index 0000000000..82394c6fd5 Binary files /dev/null and b/docs/images/sso/auth0-oidc/application-origin.png differ diff --git a/docs/images/sso/auth0-oidc/application-settings.png b/docs/images/sso/auth0-oidc/application-settings.png new file mode 100644 index 0000000000..5f708ba2af Binary files /dev/null and b/docs/images/sso/auth0-oidc/application-settings.png differ diff --git a/docs/images/sso/auth0-oidc/application-uris.png b/docs/images/sso/auth0-oidc/application-uris.png new file mode 100644 index 0000000000..dadc6ce00e Binary files /dev/null and b/docs/images/sso/auth0-oidc/application-uris.png differ diff --git a/docs/images/sso/auth0-oidc/application-urls.png b/docs/images/sso/auth0-oidc/application-urls.png new file mode 100644 index 0000000000..b467d54c3b Binary files /dev/null and b/docs/images/sso/auth0-oidc/application-urls.png differ diff --git a/docs/images/sso/auth0-oidc/enable-oidc.png b/docs/images/sso/auth0-oidc/enable-oidc.png new file mode 100644 index 0000000000..0a43f22ede Binary files /dev/null and b/docs/images/sso/auth0-oidc/enable-oidc.png differ diff --git a/docs/images/sso/auth0-oidc/org-oidc-overview.png b/docs/images/sso/auth0-oidc/org-oidc-overview.png new file mode 100644 index 0000000000..f5778b97a7 Binary files /dev/null and b/docs/images/sso/auth0-oidc/org-oidc-overview.png differ diff --git a/docs/images/sso/auth0-oidc/org-update-oidc.png b/docs/images/sso/auth0-oidc/org-update-oidc.png new file mode 100644 index 0000000000..0b9e96b5b8 Binary files /dev/null and b/docs/images/sso/auth0-oidc/org-update-oidc.png differ diff --git a/docs/images/sso/general-oidc/custom-oidc-form.png b/docs/images/sso/general-oidc/custom-oidc-form.png new file mode 100644 index 0000000000..2aee026801 Binary files /dev/null and b/docs/images/sso/general-oidc/custom-oidc-form.png differ diff --git a/docs/images/sso/general-oidc/discovery-oidc-form.png b/docs/images/sso/general-oidc/discovery-oidc-form.png new file mode 100644 index 0000000000..ae99b35b25 Binary files /dev/null and b/docs/images/sso/general-oidc/discovery-oidc-form.png differ diff --git a/docs/images/sso/general-oidc/org-oidc-enable.png 
b/docs/images/sso/general-oidc/org-oidc-enable.png new file mode 100644 index 0000000000..0a43f22ede Binary files /dev/null and b/docs/images/sso/general-oidc/org-oidc-enable.png differ diff --git a/docs/images/sso/general-oidc/org-oidc-manage.png b/docs/images/sso/general-oidc/org-oidc-manage.png new file mode 100644 index 0000000000..f5778b97a7 Binary files /dev/null and b/docs/images/sso/general-oidc/org-oidc-manage.png differ diff --git a/docs/images/sso/google-saml/infisical-config.png b/docs/images/sso/google-saml/infisical-config.png index 250b4ed37c..31943df81b 100644 Binary files a/docs/images/sso/google-saml/infisical-config.png and b/docs/images/sso/google-saml/infisical-config.png differ diff --git a/docs/images/sso/keycloak-oidc/client-scope-complete-overview.png b/docs/images/sso/keycloak-oidc/client-scope-complete-overview.png new file mode 100644 index 0000000000..a0965db0b1 Binary files /dev/null and b/docs/images/sso/keycloak-oidc/client-scope-complete-overview.png differ diff --git a/docs/images/sso/keycloak-oidc/client-scope-list.png b/docs/images/sso/keycloak-oidc/client-scope-list.png new file mode 100644 index 0000000000..c35a7691f2 Binary files /dev/null and b/docs/images/sso/keycloak-oidc/client-scope-list.png differ diff --git a/docs/images/sso/keycloak-oidc/client-scope-mapper-menu.png b/docs/images/sso/keycloak-oidc/client-scope-mapper-menu.png new file mode 100644 index 0000000000..141bc5dd0b Binary files /dev/null and b/docs/images/sso/keycloak-oidc/client-scope-mapper-menu.png differ diff --git a/docs/images/sso/keycloak-oidc/client-secret.png b/docs/images/sso/keycloak-oidc/client-secret.png new file mode 100644 index 0000000000..c91ddb164b Binary files /dev/null and b/docs/images/sso/keycloak-oidc/client-secret.png differ diff --git a/docs/images/sso/keycloak-oidc/clients-list.png b/docs/images/sso/keycloak-oidc/clients-list.png new file mode 100644 index 0000000000..50e4e49cb9 Binary files /dev/null and b/docs/images/sso/keycloak-oidc/clients-list.png differ diff --git a/docs/images/sso/keycloak-oidc/create-client-capability.png b/docs/images/sso/keycloak-oidc/create-client-capability.png new file mode 100644 index 0000000000..72aa508505 Binary files /dev/null and b/docs/images/sso/keycloak-oidc/create-client-capability.png differ diff --git a/docs/images/sso/keycloak-oidc/create-client-general-settings.png b/docs/images/sso/keycloak-oidc/create-client-general-settings.png new file mode 100644 index 0000000000..87ec8d8370 Binary files /dev/null and b/docs/images/sso/keycloak-oidc/create-client-general-settings.png differ diff --git a/docs/images/sso/keycloak-oidc/create-client-login-settings.png b/docs/images/sso/keycloak-oidc/create-client-login-settings.png new file mode 100644 index 0000000000..1c839f8d46 Binary files /dev/null and b/docs/images/sso/keycloak-oidc/create-client-login-settings.png differ diff --git a/docs/images/sso/keycloak-oidc/create-oidc.png b/docs/images/sso/keycloak-oidc/create-oidc.png new file mode 100644 index 0000000000..358af1330a Binary files /dev/null and b/docs/images/sso/keycloak-oidc/create-oidc.png differ diff --git a/docs/images/sso/keycloak-oidc/enable-oidc.png b/docs/images/sso/keycloak-oidc/enable-oidc.png new file mode 100644 index 0000000000..0a43f22ede Binary files /dev/null and b/docs/images/sso/keycloak-oidc/enable-oidc.png differ diff --git a/docs/images/sso/keycloak-oidc/manage-org-oidc.png b/docs/images/sso/keycloak-oidc/manage-org-oidc.png new file mode 100644 index 0000000000..f5778b97a7 Binary files 
/dev/null and b/docs/images/sso/keycloak-oidc/manage-org-oidc.png differ diff --git a/docs/images/sso/keycloak-oidc/realm-setting-oidc-config.png b/docs/images/sso/keycloak-oidc/realm-setting-oidc-config.png new file mode 100644 index 0000000000..9d3866c7e7 Binary files /dev/null and b/docs/images/sso/keycloak-oidc/realm-setting-oidc-config.png differ diff --git a/docs/images/sso/keycloak-oidc/scope-predefined-mapper-1.png b/docs/images/sso/keycloak-oidc/scope-predefined-mapper-1.png new file mode 100644 index 0000000000..8b1cb16c48 Binary files /dev/null and b/docs/images/sso/keycloak-oidc/scope-predefined-mapper-1.png differ diff --git a/docs/images/sso/keycloak-oidc/scope-predefined-mapper-2.png b/docs/images/sso/keycloak-oidc/scope-predefined-mapper-2.png new file mode 100644 index 0000000000..189cb34b27 Binary files /dev/null and b/docs/images/sso/keycloak-oidc/scope-predefined-mapper-2.png differ diff --git a/docs/images/webhook-create.png b/docs/images/webhook-create.png new file mode 100644 index 0000000000..e73f14767c Binary files /dev/null and b/docs/images/webhook-create.png differ diff --git a/docs/integrations/cicd/bitbucket.mdx b/docs/integrations/cicd/bitbucket.mdx index 8f29e43a5f..3c1330308d 100644 --- a/docs/integrations/cicd/bitbucket.mdx +++ b/docs/integrations/cicd/bitbucket.mdx @@ -3,30 +3,74 @@ title: "Bitbucket" description: "How to sync secrets from Infisical to Bitbucket" --- +Infisical lets you sync secrets to Bitbucket at the repository-level and deployment environment-level. + + Prerequisites: - Set up and add envars to [Infisical Cloud](https://app.infisical.com) - - - Navigate to your project's integrations tab in Infisical. + + + + + Navigate to your project's integrations tab in Infisical. - ![integrations](../../images/integrations.png) + ![integrations](/images/integrations.png) - Press on the Bitbucket tile and grant Infisical access to your Bitbucket account. + Press on the Bitbucket tile and grant Infisical access to your Bitbucket account. - ![integrations bitbucket authorization](../../images/integrations/bitbucket/integrations-bitbucket-auth.png) + ![integrations bitbucket authorization](/images/integrations/bitbucket/integrations-bitbucket.png) + + + Select which workspace, repository, and optionally, deployment environment, you'd like to sync your secrets + to. + ![integrations configure + bitbucket](/images/integrations/bitbucket/integrations-bitbucket-configuration.png) - - If this is your project's first cloud integration, then you'll have to grant - Infisical access to your project's environment variables. Although this step - breaks E2EE, it's necessary for Infisical to sync the environment variables to - the cloud platform. - - - - Select which Infisical environment secrets you want to sync to which Bitbucket repo and press start integration to start syncing secrets to the repo. + Once created, your integration will begin syncing secrets to the configured repository or deployment + environment. - ![integrations bitbucket](../../images/integrations/bitbucket/integrations-bitbucket.png) - - \ No newline at end of file + ![integrations bitbucket](/images/integrations/bitbucket/integrations-bitbucket.png) + + + + + + + + Configure a [Machine Identity](https://infisical.com/docs/documentation/platform/identities/universal-auth) for your project and give it permissions to read secrets from your desired Infisical projects and environments. 
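Before wiring the identity into Bitbucket, you can optionally verify that its credentials authenticate by running the same universal-auth login that the pipeline example below relies on. A minimal local sketch (the client ID and secret are placeholders for your machine identity's credentials):

```bash
# Log in with the machine identity's universal-auth credentials (placeholder values)
export INFISICAL_TOKEN=$(infisical login --method=universal-auth \
  --client-id=<your-machine-identity-client-id> \
  --client-secret=<your-machine-identity-client-secret> \
  --silent --plain)

# A non-empty token indicates the identity authenticated successfully
[ -n "$INFISICAL_TOKEN" ] && echo "machine identity OK"
```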
+ + + Create Bitbucket variables (can be either workspace, repository, or deployment-level) to store Machine Identity Client ID and Client Secret. + + ![integrations bitbucket](/images/integrations/bitbucket/integrations-bitbucket-env.png) + + + Edit your Bitbucket pipeline YAML file to include the use of the Infisical CLI to fetch and inject secrets into any script or command within the pipeline. + + #### Example + + ```yaml + image: atlassian/default-image:3 + + pipelines: + default: + - step: + name: Build application with secrets from Infisical + script: + - apt update && apt install -y curl + - curl -1sLf 'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.deb.sh' | bash + - apt-get update && apt-get install -y infisical + - export INFISICAL_TOKEN=$(infisical login --method=universal-auth --client-id=$INFISICAL_CLIENT_ID --client-secret=$INFISICAL_CLIENT_SECRET --silent --plain) + - infisical run --projectId=1d0443c1-cd43-4b3a-91a3-9d5f81254a89 --env=dev -- npm run build + ``` + + + Set the values of `projectId` and `env` flags in the `infisical run` command to your intended source path. For more options, refer to the CLI command reference [here](https://infisical.com/docs/cli/commands/run). + + + + + + diff --git a/docs/integrations/cicd/circleci.mdx b/docs/integrations/cicd/circleci.mdx index ed14b891ea..0753f40f7b 100644 --- a/docs/integrations/cicd/circleci.mdx +++ b/docs/integrations/cicd/circleci.mdx @@ -21,12 +21,6 @@ Prerequisites: ![integrations circleci authorization](../../images/integrations/circleci/integrations-circleci-auth.png) - - If this is your project's first cloud integration, then you'll have to grant - Infisical access to your project's environment variables. Although this step - breaks E2EE, it's necessary for Infisical to sync the environment variables to - the cloud platform. - Select which Infisical environment secrets you want to sync to which CircleCI project and press create integration to start syncing secrets to CircleCI. diff --git a/docs/integrations/cicd/codefresh.mdx b/docs/integrations/cicd/codefresh.mdx index e41e00f50c..cf69ae04d3 100644 --- a/docs/integrations/cicd/codefresh.mdx +++ b/docs/integrations/cicd/codefresh.mdx @@ -22,12 +22,6 @@ Prerequisites: ![integrations codefresh authorization](../../images/integrations/codefresh/integrations-codefresh-auth.png) - - If this is your project's first cloud integration, then you'll have to grant - Infisical access to your project's environment variables. Although this step - breaks E2EE, it's necessary for Infisical to sync the environment variables to - the cloud platform. - Select which Infisical environment secrets you want to sync to which Codefresh service and press create integration to start syncing secrets to Codefresh. diff --git a/docs/integrations/cicd/githubactions.mdx b/docs/integrations/cicd/githubactions.mdx index 936c8974ac..82076874c8 100644 --- a/docs/integrations/cicd/githubactions.mdx +++ b/docs/integrations/cicd/githubactions.mdx @@ -4,33 +4,32 @@ description: "How to sync secrets from Infisical to GitHub Actions" --- - Alternatively, you can use Infisical's official Github Action + Alternatively, you can use Infisical's official GitHub Action [here](https://github.com/Infisical/secrets-action). Infisical lets you sync secrets to GitHub at the organization-level, repository-level, and repository environment-level. 
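If you'd rather fetch secrets at workflow runtime than sync them into GitHub, the Infisical CLI works in a GitHub Actions workflow much as it does in the Bitbucket pipeline shown earlier. A minimal sketch, assuming a machine identity whose credentials are stored as GitHub Actions secrets and a placeholder project ID:

```yaml
name: Build with secrets from Infisical
on: push

jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - name: Fetch secrets and build
        env:
          INFISICAL_CLIENT_ID: ${{ secrets.INFISICAL_CLIENT_ID }}
          INFISICAL_CLIENT_SECRET: ${{ secrets.INFISICAL_CLIENT_SECRET }}
        run: |
          # Install the Infisical CLI from the Cloudsmith repository
          curl -1sLf 'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.deb.sh' | sudo -E bash
          sudo apt-get update && sudo apt-get install -y infisical
          # Authenticate via universal auth and inject secrets into the build command
          export INFISICAL_TOKEN=$(infisical login --method=universal-auth --client-id=$INFISICAL_CLIENT_ID --client-secret=$INFISICAL_CLIENT_SECRET --silent --plain)
          infisical run --projectId=<your-project-id> --env=dev -- npm run build
```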
-Prerequisites: - -- Set up and add envars to [Infisical Cloud](https://app.infisical.com) -- Ensure that you have admin privileges to the repository you want to sync secrets to. +## Connecting with GitHub App (Recommended) - - Navigate to your project's integrations tab in Infisical. + + Navigate to your project's integrations tab in Infisical and press on the GitHub tile. - ![integrations](../../images/integrations.png) + ![integrations](../../images/integrations/github/app/integration-overview.png) - Press on the GitHub tile and grant Infisical access to your GitHub account (repo privileges only). + Select GitHub App as the authentication method and click **Connect to GitHub**. - ![integrations github authorization](../../images/integrations/github/integrations-github-auth.png) + ![integrations github app auth selection](../../images/integrations/github/app/github-app-method-selection.png) + + You will then be redirected to the GitHub app installation page. + + ![integrations github app installation](../../images/integrations/github/app/github-app-installation.png) + + Install and authorize the GitHub application. This will redirect you back to the Infisical integration page. - - If this is your project's first cloud integration, then you'll have to grant Infisical access to your project's environment variables. - Although this step breaks E2EE, it's necessary for Infisical to sync the environment variables to the cloud platform. - Select which Infisical environment secrets you want to sync to which GitHub organization, repository, or repository environment. @@ -41,6 +40,121 @@ Prerequisites: ![integrations github](../../images/integrations/github/integrations-github-scope-org.png) + + When using the organization scope, your secrets will be saved at the top level of your GitHub organization. + + You can choose the visibility, which defines which repositories can access the secrets. The options are: + - **All public repositories**: All public repositories in the organization can access the secrets. + - **All private repositories**: All private repositories in the organization can access the secrets. + - **Selected repositories**: Only the selected repositories can access the secrets. This gives more fine-grained control over which repositories can access the secrets. You can select _both_ private and public repositories with this option. + + + ![integrations github](../../images/integrations/github/integrations-github-scope-env.png) + + + + Finally, press create integration to start syncing secrets to GitHub. + + ![integrations github](../../images/integrations/github/integrations-github.png) + + + + + + Using the GitHub integration with app authentication on a self-hosted instance of Infisical requires configuring an application on GitHub + and registering your instance with it. + + + Navigate to the GitHub app settings [here](https://github.com/settings/apps). Click **New GitHub App**. + + ![integrations github app create](../../images/integrations/github/app/self-hosted-github-app-create.png) + + Give the application a name, a homepage URL (your self-hosted domain, e.g. `https://your-domain.com`), and a callback URL (e.g. `https://your-domain.com/integrations/github/oauth2/callback`). + + ![integrations github app basic details](../../images/integrations/github/app/self-hosted-github-app-basic-details.png) + + Enable request user authorization during app installation.
+ ![integrations github app enable auth](../../images/integrations/github/app/self-hosted-github-app-enable-oauth.png) + + Disable the webhook by unchecking the Active checkbox. + ![integrations github app webhook](../../images/integrations/github/app/self-hosted-github-app-webhook.png) + + Set the repository permissions as follows: Metadata: Read-only, Secrets: Read and write, Environments: Read and write, Actions: Read. + ![integrations github app repository](../../images/integrations/github/app/self-hosted-github-app-repository.png) + + Similarly, set the organization permissions as follows: Secrets: Read and write. + ![integrations github app organization](../../images/integrations/github/app/self-hosted-github-app-organization.png) + + Create the GitHub application. + ![integrations github app create confirm](../../images/integrations/github/app/self-hosted-github-app-create-confirm.png) + + + If you have a GitHub organization, you can create an application under it + in your organization Settings > Developer settings > GitHub Apps > New GitHub App. + + + + Generate a new **Client Secret** for your GitHub application. + ![integrations github app create secret](../../images/integrations/github/app/self-hosted-github-app-secret.png) + + Generate a new **Private Key** for your GitHub application. + ![integrations github app create private key](../../images/integrations/github/app/self-hosted-github-app-private-key.png) + + Obtain the necessary GitHub application credentials. These are the application slug, client ID, app ID, client secret, and private key. + ![integrations github app credentials](../../images/integrations/github/app/self-hosted-github-app-credentials.png) + + Back in your Infisical instance, add five new environment variables for the credentials of your GitHub application: + + - `CLIENT_ID_GITHUB_APP`: The **Client ID** of your GitHub application. + - `CLIENT_SECRET_GITHUB_APP`: The **Client Secret** of your GitHub application. + - `CLIENT_SLUG_GITHUB_APP`: The **Slug** of your GitHub application. This is the slug found in the app's URL. + - `CLIENT_APP_ID_GITHUB_APP`: The **App ID** of your GitHub application. + - `CLIENT_PRIVATE_KEY_GITHUB_APP`: The **Private Key** of your GitHub application. + + Once added, restart your Infisical instance and use the GitHub integration via app authentication. + + + + + + +## Connecting with GitHub OAuth + +Prerequisites: + +- Set up and add envars to [Infisical Cloud](https://app.infisical.com) +- Ensure that you have admin privileges to the repository you want to sync secrets to. + + + + + + Navigate to your project's integrations tab in Infisical and press on the GitHub tile. + ![integrations](../../images/integrations/github/integration-overview.png) + + Select OAuth as the authentication method and click **Connect to GitHub**. + ![integrations github oauth auth selection](../../images/integrations/github/github-oauth-method-selection.png) + + Grant Infisical access to your GitHub account (organization and repo privileges). + ![integrations github authorization](../../images/integrations/github/integrations-github-auth.png) + + + + Select which Infisical environment secrets you want to sync to which GitHub organization, repository, or repository environment.
+ + + + ![integrations github](../../images/integrations/github/integrations-github-scope-repo.png) + + + ![integrations github](../../images/integrations/github/integrations-github-scope-org.png) + + When using the organization scope, your secrets will be saved at the top level of your GitHub organization. + + You can choose the visibility, which defines which repositories can access the secrets. The options are: + - **All public repositories**: All public repositories in the organization can access the secrets. + - **All private repositories**: All private repositories in the organization can access the secrets. + - **Selected repositories**: Only the selected repositories can access the secrets. This gives more fine-grained control over which repositories can access the secrets. You can select _both_ private and public repositories with this option. ![integrations github](../../images/integrations/github/integrations-github-scope-env.png) diff --git a/docs/integrations/cicd/gitlab.mdx b/docs/integrations/cicd/gitlab.mdx index 976c81a6d3..2da61ef770 100644 --- a/docs/integrations/cicd/gitlab.mdx +++ b/docs/integrations/cicd/gitlab.mdx @@ -20,12 +20,6 @@ description: "How to sync secrets from Infisical to GitLab" ![integrations gitlab authorization](../../images/integrations/gitlab/integrations-gitlab-auth.png) - - If this is your project's first cloud integration, then you'll have to grant - Infisical access to your project's environment variables. Although this step - breaks E2EE, it's necessary for Infisical to sync the environment variables to - the cloud platform. - Select which Infisical environment secrets you want to sync to which GitLab repository and press create integration to start syncing secrets to GitLab. @@ -77,11 +71,12 @@ description: "How to sync secrets from Infisical to GitLab" + Using the GitLab integration on a self-hosted instance of Infisical requires configuring an application in GitLab and registering your instance with it. - + If you're self-hosting GitLab with custom certificates, you will have to configure your Infisical instance to trust these certificates. To learn how, please follow [this guide](../../self-hosting/guides/custom-certificates). Navigate to your user Settings > Applications to create a new GitLab application. @@ -91,8 +86,8 @@ description: "How to sync secrets from Infisical to GitLab" Create the application. As part of the form, set the **Redirect URI** to `https://your-domain.com/integrations/gitlab/oauth2/callback`. - ![integrations gitlab config](../../images/integrations/gitlab/integrations-gitlab-config-new-app-form.png) - + ![integrations gitlab config](../../images/integrations/gitlab/integrations-gitlab-config-new-app-form.png) + If you have a GitLab group, you can create an OAuth application under it in your group Settings > Applications. @@ -100,17 +95,17 @@ description: "How to sync secrets from Infisical to GitLab" Obtain the **Application ID** and **Secret** for your GitLab application. - - ![integrations gitlab config](../../images/integrations/gitlab/integrations-gitlab-config-credentials.png) - + + ![integrations gitlab config](../../images/integrations/gitlab/integrations-gitlab-config-credentials.png) + Back in your Infisical instance, add two new environment variables for the credentials of your GitLab application: - `CLIENT_ID_GITLAB`: The **Client ID** of your GitLab application. - `CLIENT_SECRET_GITLAB`: The **Secret** of your GitLab application.
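For example, if your self-hosted instance reads configuration from an `.env` file, the two entries might look like this (placeholder values):

```bash
# Credentials of the GitLab OAuth application created above (placeholder values)
CLIENT_ID_GITLAB=<application-id-from-gitlab>
CLIENT_SECRET_GITLAB=<secret-from-gitlab>
```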
- + Once added, restart your Infisical instance and use the GitLab integration. + - diff --git a/docs/integrations/cicd/jenkins.mdx b/docs/integrations/cicd/jenkins.mdx index a83d90700d..518237fc3a 100644 --- a/docs/integrations/cicd/jenkins.mdx +++ b/docs/integrations/cicd/jenkins.mdx @@ -138,16 +138,9 @@ Prerequisites: - + ## Add Infisical Service Token to Jenkins - - Service tokens are being deprecated in favor of [machine identities](/documentation/platform/identities/machine-identities). - They will be removed in the future in accordance with the deprecation notice and timeline stated [here](https://infisical.com/blog/deprecating-api-keys). - - **Please use our Jenkins Plugin instead!** - - After setting up your project in Infisical and installing the Infisical CLI to the environment where your Jenkins builds will run, you will need to add the Infisical Service Token to Jenkins. To generate a Infisical service token, follow the guide [here](/documentation/platform/token). diff --git a/docs/integrations/cicd/rundeck.mdx b/docs/integrations/cicd/rundeck.mdx index a0743fd01f..bda7d81627 100644 --- a/docs/integrations/cicd/rundeck.mdx +++ b/docs/integrations/cicd/rundeck.mdx @@ -21,13 +21,6 @@ Prerequisites: ![integrations rundeck authorization](../../images/integrations/rundeck/integrations-rundeck-auth.png) - - If this is your project's first cloud integration, then you'll have to grant - Infisical access to your project's environment variables. Although this step - breaks E2EE, it's necessary for Infisical to sync the environment variables to - the cloud platform. - - Select which Infisical environment secrets you want to sync to a Rundeck Key Storage Path and press create integration to start syncing secrets to Rundeck. diff --git a/docs/integrations/cicd/travisci.mdx b/docs/integrations/cicd/travisci.mdx index 4e70c76962..873c371b60 100644 --- a/docs/integrations/cicd/travisci.mdx +++ b/docs/integrations/cicd/travisci.mdx @@ -21,12 +21,6 @@ Prerequisites: ![integrations travis ci authorization](../../images/integrations/travis-ci/integrations-travisci-auth.png) - - If this is your project's first cloud integration, then you'll have to grant - Infisical access to your project's environment variables. Although this step - breaks E2EE, it's necessary for Infisical to sync the environment variables to - the cloud platform. - Select which Infisical environment secrets you want to sync to which Travis CI repository and press create integration to start syncing secrets to Travis CI. diff --git a/docs/integrations/cloud/aws-amplify.mdx b/docs/integrations/cloud/aws-amplify.mdx index 7619710250..6d3123b10f 100644 --- a/docs/integrations/cloud/aws-amplify.mdx +++ b/docs/integrations/cloud/aws-amplify.mdx @@ -62,12 +62,6 @@ This approach enables you to fetch secrets from Infisical during Amplify build t - - - Service tokens are being deprecated in favor of [machine identities](/documentation/platform/identities/machine-identities). - - They will be removed in the future in accordance with the deprecation notice and timeline stated [here](https://infisical.com/blog/deprecating-api-keys). - diff --git a/docs/integrations/cloud/aws-parameter-store.mdx b/docs/integrations/cloud/aws-parameter-store.mdx index d53c557fb3..80b35b8fca 100644 --- a/docs/integrations/cloud/aws-parameter-store.mdx +++ b/docs/integrations/cloud/aws-parameter-store.mdx @@ -3,82 +3,197 @@ title: "AWS Parameter Store" description: "Learn how to sync secrets from Infisical to AWS Parameter Store." 
--- -Prerequisites: + + + Infisical will assume the provided role in your AWS account securely, without the need to share any credentials. -- Set up and add envars to [Infisical Cloud](https://app.infisical.com) -- Set up AWS and have/create an IAM user + Prerequisites: - - - Navigate to your IAM user permissions and add a permission policy to grant access to AWS Parameter Store. + - Set up and add envars to [Infisical Cloud](https://app.infisical.com) - ![integration IAM 1](../../images/integrations/aws/integrations-aws-iam-1.png) - ![integration IAM 2](../../images/integrations/aws/integrations-aws-parameter-store-iam-2.png) - ![integrations IAM 3](../../images/integrations/aws/integrations-aws-parameter-store-iam-3.png) + + To connect your Infisical instance with AWS, you need to set up an AWS IAM User account that can assume the AWS IAM Role for the integration. - For enhanced security, here's a custom policy containing the minimum permissions required by Infisical to sync secrets to AWS Parameter Store for the IAM user that you can use: + If your instance is deployed on AWS, the aws-sdk will automatically retrieve the credentials. Ensure that you assign the provided permission policy to your deployed instance, such as ECS or EC2. + The following steps are for instances not deployed on AWS + + + Navigate to [Create IAM User](https://console.aws.amazon.com/iamv2/home#/users/create) in your AWS Console. + + + Attach the following inline permission policy to the IAM User to allow it to assume any IAM Roles: ```json { "Version": "2012-10-17", "Statement": [ { - "Sid": "AllowSSMAccess", + "Sid": "AllowAssumeAnyRole", "Effect": "Allow", - "Action": [ - "ssm:PutParameter", - "ssm:DeleteParameter", - "ssm:GetParameters", - "ssm:GetParametersByPath", - "ssm:DeleteParameters", - "ssm:AddTagsToResource", // if you need to add tags to secrets - "kms:ListKeys", // if you need to specify the KMS key - "kms:ListAliases", // if you need to specify the KMS key - "kms:Encrypt", // if you need to specify the KMS key - "kms:Decrypt" // if you need to specify the KMS key - ], - "Resource": "*" + "Action": "sts:AssumeRole", + "Resource": "arn:aws:iam::*:role/*" } ] } ``` + + + Obtain the AWS access key ID and secret access key for your IAM User by navigating to IAM > Users > [Your User] > Security credentials > Access keys. - - - Obtain a AWS access key ID and secret access key for your IAM user in IAM > Users > User > Security credentials > Access keys + ![Access Key Step 1](../../images/integrations/aws/integrations-aws-access-key-1.png) + ![Access Key Step 2](../../images/integrations/aws/integrations-aws-access-key-2.png) + ![Access Key Step 3](../../images/integrations/aws/integrations-aws-access-key-3.png) + + + 1. Set the access key as **CLIENT_ID_AWS_INTEGRATION**. + 2. Set the secret key as **CLIENT_SECRET_AWS_INTEGRATION**. + + + - ![access key 1](../../images/integrations/aws/integrations-aws-access-key-1.png) - ![access key 2](../../images/integrations/aws/integrations-aws-access-key-2.png) - ![access key 3](../../images/integrations/aws/integrations-aws-access-key-3.png) + + + 1. Navigate to the [Create IAM Role](https://console.aws.amazon.com/iamv2/home#/roles/create?step=selectEntities) page in your AWS Console. + ![IAM Role Creation](../../images/integrations/aws/integration-aws-iam-assume-role.png) - Navigate to your project's integrations tab in Infisical. + 2. Select **AWS Account** as the **Trusted Entity Type**. + 3. 
Choose **Another AWS Account** and enter **381492033652** (Infisical AWS Account ID). This restricts the role to be assumed only by Infisical. If self-hosting, provide your AWS account number instead. + 4. Optionally, enable **Require external ID** and enter your **project ID** to further enhance security. + - ![integrations](../../images/integrations.png) + + ![IAM Role Permissions](../../images/integrations/aws/integration-aws-iam-assume-permission.png) + Use the following custom policy to grant the minimum permissions required by Infisical to sync secrets to AWS Parameter Store: - Press on the AWS Parameter Store tile and input your AWS access key ID and secret access key from the previous step. + ```json + { + "Version": "2012-10-17", + "Statement": [ + { + "Sid": "AllowSSMAccess", + "Effect": "Allow", + "Action": [ + "ssm:PutParameter", + "ssm:DeleteParameter", + "ssm:GetParameters", + "ssm:GetParametersByPath", + "ssm:DescribeParameters", + "ssm:DeleteParameters", + "ssm:AddTagsToResource", // if you need to add tags to secrets + "kms:ListKeys", // if you need to specify the KMS key + "kms:ListAliases", // if you need to specify the KMS key + "kms:Encrypt", // if you need to specify the KMS key + "kms:Decrypt" // if you need to specify the KMS key + ], + "Resource": "*" + } + ] + } + ``` + - ![integration auth](../../images/integrations/aws/integrations-aws-parameter-store-auth.png) + + ![Copy IAM Role ARN](../../images/integrations/aws/integration-aws-iam-assume-arn.png) + - - If this is your project's first cloud integration, then you'll have to grant - Infisical access to your project's environment variables. Although this step - breaks E2EE, it's necessary for Infisical to sync the environment variables to - the cloud platform. - + + 1. Navigate to your project's integrations tab in Infisical. + 2. Click on the **AWS Parameter Store** tile. + ![Select AWS Parameter Store](../../images/integrations.png) - - - Select which Infisical environment secrets you want to sync to which AWS Parameter Store region and indicate the path for your secrets. Then, press create integration to start syncing secrets to AWS Parameter Store. + 3. Select the **AWS Assume Role** option. + ![Select Assume Role](../../images/integrations/aws/integration-aws-parameter-store-iam-assume-select.png) - ![integration create](../../images/integrations/aws/integrations-aws-parameter-store-create.png) + 4. Provide the **AWS IAM Role ARN** obtained from the previous step and press connect. + + + Select which Infisical environment secrets you want to sync to which AWS Parameter Store region and indicate the path for your secrets. Then, press create integration to start syncing secrets to AWS Parameter Store. - - Infisical requires you to add a path for your secrets to be stored in AWS - Parameter Store and recommends setting the path structure to - `/[project_name]/[environment]/` according to best practices. This enables a - secret like `TEST` to be stored as `/[project_name]/[environment]/TEST` in AWS - Parameter Store. - + ![integration create](../../images/integrations/aws/integrations-aws-parameter-store-create.png) - - + + Infisical requires you to add a path for your secrets to be stored in AWS + Parameter Store and recommends setting the path structure to + `/[project_name]/[environment]/` according to best practices. This enables a + secret like `TEST` to be stored as `/[project_name]/[environment]/TEST` in AWS + Parameter Store. 
+ + + + + + + Prerequisites: + + - Set up and add envars to [Infisical Cloud](https://app.infisical.com) + + + + Navigate to your IAM user permissions and add a permission policy to grant access to AWS Parameter Store. + + ![integration IAM 1](../../images/integrations/aws/integrations-aws-iam-1.png) + ![integration IAM 2](../../images/integrations/aws/integrations-aws-parameter-store-iam-2.png) + ![integrations IAM 3](../../images/integrations/aws/integrations-aws-parameter-store-iam-3.png) + + For enhanced security, here's a custom policy containing the minimum permissions required by Infisical to sync secrets to AWS Parameter Store for the IAM user that you can use: + + ```json + { + "Version": "2012-10-17", + "Statement": [ + { + "Sid": "AllowSSMAccess", + "Effect": "Allow", + "Action": [ + "ssm:PutParameter", + "ssm:DeleteParameter", + "ssm:GetParameters", + "ssm:GetParametersByPath", + "ssm:DescribeParameters", + "ssm:DeleteParameters", + "ssm:AddTagsToResource", // if you need to add tags to secrets + "kms:ListKeys", // if you need to specify the KMS key + "kms:ListAliases", // if you need to specify the KMS key + "kms:Encrypt", // if you need to specify the KMS key + "kms:Decrypt" // if you need to specify the KMS key + ], + "Resource": "*" + } + ] + } + ``` + + + + Obtain a AWS access key ID and secret access key for your IAM user in IAM > Users > User > Security credentials > Access keys + + ![access key 1](../../images/integrations/aws/integrations-aws-access-key-1.png) + ![access key 2](../../images/integrations/aws/integrations-aws-access-key-2.png) + ![access key 3](../../images/integrations/aws/integrations-aws-access-key-3.png) + + Navigate to your project's integrations tab in Infisical. + + ![integrations](../../images/integrations.png) + + Press on the AWS Parameter Store tile and select Access Key as the authentication mode. Input your AWS access key ID and secret access key from the previous step. + + ![integration auth](../../images/integrations/aws/integrations-aws-parameter-store-auth.png) + + + + Select which Infisical environment secrets you want to sync to which AWS Parameter Store region and indicate the path for your secrets. Then, press create integration to start syncing secrets to AWS Parameter Store. + + ![integration create](../../images/integrations/aws/integrations-aws-parameter-store-create.png) + + + Infisical requires you to add a path for your secrets to be stored in AWS + Parameter Store and recommends setting the path structure to + `/[project_name]/[environment]/` according to best practices. This enables a + secret like `TEST` to be stored as `/[project_name]/[environment]/TEST` in AWS + Parameter Store. + + + + + + diff --git a/docs/integrations/cloud/aws-secret-manager.mdx b/docs/integrations/cloud/aws-secret-manager.mdx index 9b3a8a2f83..64df1df321 100644 --- a/docs/integrations/cloud/aws-secret-manager.mdx +++ b/docs/integrations/cloud/aws-secret-manager.mdx @@ -3,6 +3,156 @@ title: "AWS Secrets Manager" description: "Learn how to sync secrets from Infisical to AWS Secrets Manager." --- + + +Infisical will assume the provided role in your AWS account securely, without the need to share any credentials. + +Prerequisites: + +- Set up and add envars to [Infisical Cloud](https://app.infisical.com) + + + To connect your Infisical instance with AWS, you need to set up an AWS IAM User account that can assume the AWS IAM Role for the integration. + +If your instance is deployed on AWS, the aws-sdk will automatically retrieve the credentials. 
Ensure that you assign the provided permission policy to your deployed instance, such as ECS or EC2. + +The following steps are for instances not deployed on AWS. + + + Navigate to [Create IAM User](https://console.aws.amazon.com/iamv2/home#/users/create) in your AWS Console. + + + Attach the following inline permission policy to the IAM User to allow it to assume any IAM Roles: +```json +{ + "Version": "2012-10-17", + "Statement": [ + { + "Sid": "AllowAssumeAnyRole", + "Effect": "Allow", + "Action": "sts:AssumeRole", + "Resource": "arn:aws:iam::*:role/*" + } + ] +} +``` + + + Obtain the AWS access key ID and secret access key for your IAM User by navigating to IAM > Users > [Your User] > Security credentials > Access keys. + + ![Access Key Step 1](../../images/integrations/aws/integrations-aws-access-key-1.png) + ![Access Key Step 2](../../images/integrations/aws/integrations-aws-access-key-2.png) + ![Access Key Step 3](../../images/integrations/aws/integrations-aws-access-key-3.png) + + + 1. Set the access key as **CLIENT_ID_AWS_INTEGRATION**. + 2. Set the secret key as **CLIENT_SECRET_AWS_INTEGRATION**. + + + + + + + 1. Navigate to the [Create IAM Role](https://console.aws.amazon.com/iamv2/home#/roles/create?step=selectEntities) page in your AWS Console. + ![IAM Role Creation](../../images/integrations/aws/integration-aws-iam-assume-role.png) + + 2. Select **AWS Account** as the **Trusted Entity Type**. + 3. Choose **Another AWS Account** and enter **381492033652** (Infisical AWS Account ID). This restricts the role so that it can be assumed only by Infisical. If self-hosting, provide your AWS account number instead. + 4. Optionally, enable **Require external ID** and enter your **project ID** to further enhance security. + + + + ![IAM Role Permissions](../../images/integrations/aws/integration-aws-iam-assume-permission.png) + Use the following custom policy to grant the minimum permissions required by Infisical to sync secrets to AWS Secrets Manager: + + ```json + { + "Version": "2012-10-17", + "Statement": [ + { + "Sid": "AllowSecretsManagerAccess", + "Effect": "Allow", + "Action": [ + "secretsmanager:GetSecretValue", + "secretsmanager:CreateSecret", + "secretsmanager:UpdateSecret", + "secretsmanager:DescribeSecret", + "secretsmanager:TagResource", + "secretsmanager:UntagResource", + "kms:ListKeys", + "kms:ListAliases", + "kms:Encrypt", + "kms:Decrypt" + ], + "Resource": "*" + } + ] + } + ``` + + + + ![Copy IAM Role ARN](../../images/integrations/aws/integration-aws-iam-assume-arn.png) + + + + 1. Navigate to your project's integrations tab in Infisical. + 2. Click on the **AWS Secrets Manager** tile. + ![Select AWS Secrets Manager](../../images/integrations.png) + + 3. Select the **AWS Assume Role** option. + ![Select Assume Role](../../images/integrations/aws/integration-aws-iam-assume-select.png) + + 4. Provide the **AWS IAM Role ARN** obtained from the previous step. + + Select how you want the integration to work by specifying a number of parameters: + + + The environment in Infisical from which you want to sync secrets to AWS Secrets Manager. + + + The path within the preselected environment from which you want to sync secrets to AWS Secrets Manager. + + + The region that you want to integrate with in AWS Secrets Manager. + + + How you want the integration to map the secrets. The selected value could be either one to one or one to many. + + + The secret name/path in AWS into which you want to sync the secrets from Infisical.
+ + + ![integration create](../../images/integrations/aws/integrations-aws-secret-manager-create.png) + + Optionally, you can add tags or specify the encryption key of all the secrets created via this integration: + + + The Key/Value of a tag that will be added to secrets in AWS. Please note that it is possible to add multiple tags via API. + + + The alias/ID of the AWS KMS key used for encryption. Please note that the key must be enabled in order for the integration to work, and the IAM user must have access to it. + + ![integration options](../../images/integrations/aws/integrations-aws-secret-manager-options.png) + + Then, press `Create Integration` to start syncing secrets to AWS Secrets Manager. + + + Infisical currently syncs environment variables to AWS Secrets Manager as + key-value pairs under one secret. We're actively exploring ways to help users + group environment variable key-pairs under multiple secrets for greater + control. + + + Please note that upon deleting secrets in Infisical, AWS Secrets Manager immediately makes the secrets inaccessible but only schedules them for deletion after at least 7 days. + + + + + + +Infisical will access your account using the provided AWS access key and secret key. + Prerequisites: - Set up and add envars to [Infisical Cloud](https://app.infisical.com) @@ -51,13 +201,13 @@ Prerequisites: ![access key 2](../../images/integrations/aws/integrations-aws-access-key-2.png) ![access key 3](../../images/integrations/aws/integrations-aws-access-key-3.png) - Navigate to your project's integrations tab in Infisical. + 1. Navigate to your project's integrations tab in Infisical. + 2. Click on the **AWS Secrets Manager** tile. + ![Select AWS Secrets Manager](../../images/integrations.png) - ![integrations](../../images/integrations.png) - - Press on the AWS Secrets Manager tile and input your AWS access key ID and secret access key from the previous step. - - ![integration auth](../../images/integrations/aws/integrations-aws-secret-manager-auth.png) + 3. Select the **Access Key** option for Authentication Mode. + ![Select Access Key](../../images/integrations/aws/integrations-aws-secret-manager-auth.png) + 4. Provide the **access key** and **secret key** for the AWS IAM user. @@ -105,3 +255,5 @@ Prerequisites: + + diff --git a/docs/integrations/cloud/azure-app-configuration.mdx b/docs/integrations/cloud/azure-app-configuration.mdx new file mode 100644 index 0000000000..2493257048 --- /dev/null +++ b/docs/integrations/cloud/azure-app-configuration.mdx @@ -0,0 +1,82 @@ +--- +title: "Azure App Configuration" +description: "How to sync secrets from Infisical to Azure App Configuration" +--- + + + + **Prerequisites:** + + - Set up and add envars to [Infisical Cloud](https://app.infisical.com). + - Set up Azure and have an existing App Configuration instance. + - The user setting up the integration on Infisical must have the `App Configuration Data Owner` role for the intended Azure App Configuration instance. + - The Azure App Configuration instance must be reachable by Infisical. + + + + Navigate to your project's integrations tab. + + ![integrations](../../images/integrations/azure-app-configuration/new-infisical-integration.png) + + Press on the Azure App Configuration tile and grant Infisical access to App Configuration. + + + Obtain the Azure App Configuration endpoint from the overview tab.
+ ![integrations](../../images/integrations/azure-app-configuration/azure-app-config-endpoint.png) + + Select which Infisical environment secrets you want to sync to your Azure App Configuration. Then, input your App Configuration instance endpoint. Optionally, you can define a prefix for your secrets, which will be prepended to the keys upon syncing. + + ![integrations](../../images/integrations/azure-app-configuration/create-integration-form.png) + + Press create integration to start syncing secrets to Azure App Configuration. + + + + + + Using the Azure App Configuration integration on a self-hosted instance of Infisical requires configuring an application in Azure + and registering your instance with it. + + **Prerequisites:** + + - Set up Azure and have an existing App Configuration instance. + + + + Navigate to Azure Active Directory > App registrations to create a new application. + + + Azure Active Directory is now Microsoft Entra ID. + + ![integrations Azure app config](../../images/integrations/azure-app-configuration/config-aad.png) + ![integrations Azure app config](../../images/integrations/azure-app-configuration/config-new-app.png) + + Create the application. As part of the form, set the **Redirect URI** to `https://your-domain.com/integrations/azure-app-configuration/oauth2/callback`. + + The domain you defined in the Redirect URI should match the `SITE_URL` configured in your Infisical instance. + + + ![integrations Azure app config](../../images/integrations/azure-app-configuration/app-registration-redirect.png) + + After registration, set the API permissions of the app to include the following Azure App Configuration permissions: KeyValue.Delete, KeyValue.Read, and KeyValue.Write. + ![integrations Azure app config](../../images/integrations/azure-app-configuration/app-api-permissions.png) + + + + Obtain the **Application (Client) ID** in Overview and generate a **Client Secret** in Certificates & secrets for your Azure application. + + ![integrations Azure app config](../../images/integrations/azure-app-configuration/config-credentials-1.png) + ![integrations Azure app config](../../images/integrations/azure-app-configuration/config-credentials-2.png) + ![integrations Azure app config](../../images/integrations/azure-app-configuration/config-credentials-3.png) + + Back in your Infisical instance, add two new environment variables for the credentials of your Azure application. + + - `CLIENT_ID_AZURE`: The **Application (Client) ID** of your Azure application. + - `CLIENT_SECRET_AZURE`: The **Client Secret** of your Azure application. + + Once added, restart your Infisical instance and use the Azure App Configuration integration. + + + + + diff --git a/docs/integrations/cloud/azure-devops.mdx b/docs/integrations/cloud/azure-devops.mdx new file mode 100644 index 0000000000..6d1ba6b178 --- /dev/null +++ b/docs/integrations/cloud/azure-devops.mdx @@ -0,0 +1,55 @@ +--- +title: "Azure DevOps" +description: "How to sync secrets from Infisical to Azure DevOps" +--- + +### Usage +Prerequisites: + +- Set up and add envars to [Infisical Cloud](https://app.infisical.com). +- Create a new [Azure DevOps](https://dev.azure.com) project if you don't have one already. + + +#### Create a new Azure DevOps personal access token (PAT) +You'll need to create a new personal access token (PAT) in order to authenticate Infisical with Azure DevOps.
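Once you have a token (created in the steps below), you can optionally sanity-check it against the Azure DevOps REST API before handing it to Infisical. A sketch assuming a hypothetical organization name `my-org`, with the PAT exported as `AZURE_DEVOPS_PAT`:

```bash
# PATs authenticate over HTTP Basic auth with an empty username
export AZURE_DEVOPS_PAT=<your-personal-access-token>
curl -s -u ":$AZURE_DEVOPS_PAT" \
  "https://dev.azure.com/my-org/_apis/projects?api-version=7.0"
# A JSON project list (rather than a 401 or a sign-in redirect) means the PAT is valid
```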
+ + + ![integrations](../../images/integrations/azure-devops/overview-page.png) + + + Make sure the newly created token has Read/Write access to the Release scope. + ![integrations](../../images/integrations/azure-devops/create-new-token.png) + + + Please make sure that the token has access to the following scopes: Variable Groups _(read/write)_, Release _(read/write)_, Project and Team _(read)_, Service Connections _(read & query)_ + + + + Copy the newly created token as this will be used to authenticate Infisical with Azure DevOps. + ![integrations](../../images/integrations/azure-devops/new-token-created.png) + + + +#### Setup the Infisical Azure DevOps integration +Navigate to your project's integrations tab and select the 'Azure DevOps' integration. +![integrations](../../images/integrations.png) + + + + Enter your credentials that you obtained from the previous step. + + 1. Azure DevOps API token is the personal access token (PAT) you created in the previous step. + 2. Azure DevOps organization name is the name of your Azure DevOps organization. + + ![integrations](../../images/integrations/azure-devops/new-infiscial-integration-step-1.png) + + + Select Infisical project and secret path you want to sync into Azure DevOps. + Finally, press create integration to start syncing secrets to Azure DevOps. + + ![integrations](../../images/integrations/azure-devops/new-infiscial-integration-step-2.png) + + + +Now you have successfully integrated Infisical with Azure DevOps. Your existing and future secret changes will automatically sync to Azure DevOps. +You can view your secrets by navigating to your Azure DevOps project and selecting the 'Library' tab under 'Pipelines' in the 'Library' section. diff --git a/docs/integrations/cloud/azure-key-vault.mdx b/docs/integrations/cloud/azure-key-vault.mdx index 3a4b68e394..d04d90b4fb 100644 --- a/docs/integrations/cloud/azure-key-vault.mdx +++ b/docs/integrations/cloud/azure-key-vault.mdx @@ -29,14 +29,15 @@ description: "How to sync secrets from Infisical to Azure Key Vault" ![integrations](../../images/integrations/azure-key-vault/integrations-azure-key-vault.png) - - If this is your project's first cloud integration, then you'll have to grant - Infisical access to your project's environment variables. Although this step - breaks E2EE, it's necessary for Infisical to sync the environment variables to - the cloud platform. - + + The Azure Key Vault integration requires the following secrets permissions to be set on the user / service principal + for Infisical to sync secrets to Azure Key Vault: `secrets/list`, `secrets/get`, `secrets/set`, `secrets/recover`. + + Any role with these permissions would work such as the **Key Vault Secrets Officer** role. + + Using the Azure KV integration on a self-hosted instance of Infisical requires configuring an application in Azure @@ -45,29 +46,32 @@ description: "How to sync secrets from Infisical to Azure Key Vault" Navigate to Azure Active Directory > App registrations to create a new application. - - ![integrations Azure KV config](../../images/integrations/azure-key-vault/integrations-azure-key-vault-config-aad.png) - ![integrations Azure KV config](../../images/integrations/azure-key-vault/integrations-azure-key-vault-config-new-app.png) + + + Azure Active Directory is now Microsoft Entra ID. 
+ + ![integrations Azure KV config](../../images/integrations/azure-key-vault/integrations-azure-key-vault-config-aad.png) + ![integrations Azure KV config](../../images/integrations/azure-key-vault/integrations-azure-key-vault-config-new-app.png) Create the application. As part of the form, set the **Redirect URI** to `https://your-domain.com/integrations/azure-key-vault/oauth2/callback`. - - ![integrations Azure KV config](../../images/integrations/azure-key-vault/integrations-azure-key-vault-config-new-app-form.png) + + ![integrations Azure KV config](../../images/integrations/azure-key-vault/integrations-azure-key-vault-config-new-app-form.png) Obtain the **Application (Client) ID** in Overview and generate a **Client Secret** in Certificate & secrets for your Azure application. - ![integrations Azure KV config](../../images/integrations/azure-key-vault/integrations-azure-key-vault-config-credentials-1.png) - ![integrations Azure KV config](../../images/integrations/azure-key-vault/integrations-azure-key-vault-config-credentials-2.png) - ![integrations Azure KV config](../../images/integrations/azure-key-vault/integrations-azure-key-vault-config-credentials-3.png) - + ![integrations Azure KV config](../../images/integrations/azure-key-vault/integrations-azure-key-vault-config-credentials-1.png) + ![integrations Azure KV config](../../images/integrations/azure-key-vault/integrations-azure-key-vault-config-credentials-2.png) + ![integrations Azure KV config](../../images/integrations/azure-key-vault/integrations-azure-key-vault-config-credentials-3.png) + Back in your Infisical instance, add two new environment variables for the credentials of your Azure application. - `CLIENT_ID_AZURE`: The **Application (Client) ID** of your Azure application. - `CLIENT_SECRET_AZURE`: The **Client Secret** of your Azure application. - + Once added, restart your Infisical instance and use the Azure KV integration. + - diff --git a/docs/integrations/cloud/checkly.mdx b/docs/integrations/cloud/checkly.mdx index 618082a3b7..00ec38d2f1 100644 --- a/docs/integrations/cloud/checkly.mdx +++ b/docs/integrations/cloud/checkly.mdx @@ -22,12 +22,6 @@ Prerequisites: ![integrations checkly authorization](../../images/integrations/checkly/integrations-checkly-auth.png) - - If this is your project's first cloud integration, then you'll have to grant - Infisical access to your project's environment variables. Although this step - breaks E2EE, it's necessary for Infisical to sync the environment variables to - the cloud platform. - Select which Infisical environment secrets you want to sync to Checkly and press create integration to start syncing secrets. diff --git a/docs/integrations/cloud/cloud-66.mdx b/docs/integrations/cloud/cloud-66.mdx index ab362137cb..c087f65644 100644 --- a/docs/integrations/cloud/cloud-66.mdx +++ b/docs/integrations/cloud/cloud-66.mdx @@ -31,13 +31,6 @@ Copy and save your token. Click on the Cloud 66 tile and enter your API token to grant Infisical access to your Cloud 66 account. ![integrations cloud 66 tile in infisical dashboard](../../images/integrations/cloud-66/integrations-cloud-66-infisical-dashboard.png) - - If this is your project's first cloud integration, then you'll have to grant - Infisical access to your project's environment variables. Although this step - breaks E2EE, it's necessary for Infisical to sync the environment variables to - the cloud platform. - - Enter your Cloud 66 Personal Access Token here. Then click "Connect to Cloud 66". 
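 Before pasting the token, you can optionally confirm that it works. A rough sketch, assuming Cloud 66 personal access tokens are accepted as a bearer token by the v3 API:

 ```bash
 # List stacks; a successful JSON response confirms the token is accepted
 curl -H "Authorization: Bearer $CLOUD66_TOKEN" \
   "https://app.cloud66.com/api/3/stacks"
 ```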
![integrations cloud 66 tile in infisical dashboard](../../images/integrations/cloud-66/integrations-cloud-66-paste-pat.png) diff --git a/docs/integrations/cloud/cloudflare-pages.mdx b/docs/integrations/cloud/cloudflare-pages.mdx index 4d28cc5740..addba4fcdf 100644 --- a/docs/integrations/cloud/cloudflare-pages.mdx +++ b/docs/integrations/cloud/cloudflare-pages.mdx @@ -29,12 +29,6 @@ Prerequisites: ![integrations cloudflare authorization](../../images/integrations/cloudflare/integrations-cloudflare-auth.png) - - If this is your project's first cloud integration, then you'll have to grant - Infisical access to your project's environment variables. Although this step - breaks E2EE, it's necessary for Infisical to sync the environment variables to - the cloud platform. - Select which Infisical environment secrets you want to sync to Cloudflare and press create integration to start syncing secrets. diff --git a/docs/integrations/cloud/cloudflare-workers.mdx b/docs/integrations/cloud/cloudflare-workers.mdx index de81ca0835..10a5797016 100644 --- a/docs/integrations/cloud/cloudflare-workers.mdx +++ b/docs/integrations/cloud/cloudflare-workers.mdx @@ -15,7 +15,7 @@ Prerequisites: ![integrations cloudflare credentials 1](../../images/integrations/cloudflare/integrations-cloudflare-credentials-1.png) ![integrations cloudflare credentials 2](../../images/integrations/cloudflare/integrations-cloudflare-credentials-2.png) - ![integrations cloudflare credentials 3](../../images/integrations/cloudflare/integrations-cloudflare-credentials-3.png) + ![integrations cloudflare credentials 3](../../images/integrations/cloudflare/integrations-cloudflare-workers-permission.png) Copy your [Account ID](https://developers.cloudflare.com/fundamentals/get-started/basic-tasks/find-account-and-zone-ids/) from Account > Workers & Pages > Overview @@ -29,16 +29,11 @@ Prerequisites: ![integrations cloudflare authorization](../../images/integrations/cloudflare/integration-cloudflare-workers-connect.png) - - If this is your project's first cloud integration, then you'll have to grant - Infisical access to your project's environment variables. Although this step - breaks E2EE, it's necessary for Infisical to sync the environment variables to - the cloud platform. - Select which Infisical environment secrets you want to sync to Cloudflare Workers and press create integration to start syncing secrets. ![integrations cloudflare](../../images/integrations/cloudflare/integration-cloudflare-workers-create.png) + - \ No newline at end of file + diff --git a/docs/integrations/cloud/databricks.mdx b/docs/integrations/cloud/databricks.mdx new file mode 100644 index 0000000000..7fee3acd3d --- /dev/null +++ b/docs/integrations/cloud/databricks.mdx @@ -0,0 +1,31 @@ +--- +title: "Databricks" +description: "Learn how to sync secrets from Infisical to Databricks." +--- + +Prerequisites: + +- Set up and add secrets to [Infisical Cloud](https://app.infisical.com) + + + + Obtain a Personal Access Token in **User Settings** > **Developer** > **Access Tokens**. + + ![integrations databricks token](../../images/integrations/databricks/pat-token.png) + + Navigate to your project's integrations tab in Infisical. + + ![integrations](../../images/integrations.png) + + Press on the Databricks tile and enter your Databricks instance URL in the following format: `https://xxx.cloud.databricks.com`. Then, input your Databricks Access Token to grant Infisical the necessary permissions in your Databricks account. 
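 Optionally, you can first verify that the instance URL and token are correct before connecting. A sketch against the Databricks REST API; replace `xxx.cloud.databricks.com` with your own instance:

 ```bash
 # List existing secret scopes; a 200 response confirms the token works
 curl -H "Authorization: Bearer $DATABRICKS_TOKEN" \
   "https://xxx.cloud.databricks.com/api/2.0/secrets/scopes/list"
 ```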
+ + ![integrations databricks authorization](../../images/integrations/databricks/integrations-databricks-auth.png) + + + + Select which Infisical environment and secret path you want to sync to which Databricks scope. Then, press create integration to start syncing secrets to Databricks. + + ![create integration Databricks](../../images/integrations/databricks/integrations-databricks-create.png) + ![integrations Databricks](../../images/integrations/databricks/integrations-databricks.png) + + \ No newline at end of file diff --git a/docs/integrations/cloud/digital-ocean-app-platform.mdx b/docs/integrations/cloud/digital-ocean-app-platform.mdx index 1ed255a489..a0ed545cc4 100644 --- a/docs/integrations/cloud/digital-ocean-app-platform.mdx +++ b/docs/integrations/cloud/digital-ocean-app-platform.mdx @@ -20,13 +20,6 @@ Name it **infisical**, choose **No expiry**, and make sure to check **Write (opt Click on the **Digital Ocean App Platform** tile and enter your API token to grant Infisical access to your Digital Ocean account. ![integrations](../../images/integrations.png) - - If this is your project's first cloud integration, then you'll have to grant - Infisical access to your project's environment variables. Although this step - breaks E2EE, it's necessary for Infisical to sync the environment variables to - the cloud platform. - - Then enter your Digital Ocean Personal Access Token here. Then click "Connect to Digital Ocean App Platform". ![integrations infisical dashboard digital ocean integration](../../images/integrations/digital-ocean/integrations-do-enter-token.png) diff --git a/docs/integrations/cloud/flyio.mdx b/docs/integrations/cloud/flyio.mdx index 71b24e2a95..2aa14a9197 100644 --- a/docs/integrations/cloud/flyio.mdx +++ b/docs/integrations/cloud/flyio.mdx @@ -22,12 +22,6 @@ Prerequisites: ![integrations fly authorization](../../images/integrations/flyio/integrations-flyio-auth.png) - - If this is your project's first cloud integration, then you'll have to grant - Infisical access to your project's environment variables. Although this step - breaks E2EE, it's necessary for Infisical to sync the environment variables to - the cloud platform. - Select which Infisical environment secrets you want to sync to which Fly.io app and press create integration to start syncing secrets to Fly.io. diff --git a/docs/integrations/cloud/gcp-secret-manager.mdx b/docs/integrations/cloud/gcp-secret-manager.mdx index 99edcd115a..e57a976f0b 100644 --- a/docs/integrations/cloud/gcp-secret-manager.mdx +++ b/docs/integrations/cloud/gcp-secret-manager.mdx @@ -24,12 +24,6 @@ description: "How to sync secrets from Infisical to GCP Secret Manager" ![integrations GCP authorization](../../images/integrations/gcp-secret-manager/integrations-gcp-secret-manager-auth.png) - - If this is your project's first cloud integration, then you'll have to grant - Infisical access to your project's environment variables. Although this step - breaks E2EE, it's necessary for Infisical to sync the environment variables to - the cloud platform. - In the **Connection** tab, select which Infisical environment secrets you want to sync to which GCP secret manager project. Lastly, press create integration to start syncing secrets to GCP secret manager. 
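 After the first sync completes, you can confirm the result from the command line. A small sketch using the gcloud CLI; `<your-project-id>` is a placeholder:

 ```bash
 # List secrets in the target project to confirm the sync landed
 gcloud secrets list --project <your-project-id>
 ```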
@@ -85,12 +79,6 @@ description: "How to sync secrets from Infisical to GCP Secret Manager" ![integrations GCP authorization options](../../images/integrations/gcp-secret-manager/integrations-gcp-secret-manager-auth-options.png) - - If this is your project's first cloud integration, then you'll have to grant - Infisical access to your project's environment variables. Although this step - breaks E2EE, it's necessary for Infisical to sync the environment variables to - the cloud platform. - In the **Connection** tab, select which Infisical environment secrets you want to sync to the GCP secret manager project. Lastly, press create integration to start syncing secrets to GCP secret manager. diff --git a/docs/integrations/cloud/hasura-cloud.mdx b/docs/integrations/cloud/hasura-cloud.mdx index 48d9d301b9..f88c1eb508 100644 --- a/docs/integrations/cloud/hasura-cloud.mdx +++ b/docs/integrations/cloud/hasura-cloud.mdx @@ -21,12 +21,6 @@ Prerequisites: ![integrations hasura cloud authorization](../../images/integrations/hasura-cloud/integrations-hasura-cloud-auth.png) - - If this is your project's first cloud integration, then you'll have to grant - Infisical access to your project's environment variables. Although this step - breaks E2EE, it's necessary for Infisical to sync the environment variables to - the cloud platform. - Select which Infisical environment secrets you want to sync to which Hasura Cloud project and press create integration to start syncing secrets to Hasura Cloud. diff --git a/docs/integrations/cloud/heroku.mdx b/docs/integrations/cloud/heroku.mdx index 903ab82701..a63c3f3819 100644 --- a/docs/integrations/cloud/heroku.mdx +++ b/docs/integrations/cloud/heroku.mdx @@ -19,12 +19,6 @@ description: "How to sync secrets from Infisical to Heroku" ![integrations heroku authorization](../../images/integrations/heroku/integrations-heroku-auth.png) - - If this is your project's first cloud integration, then you'll have to grant - Infisical access to your project's environment variables. Although this step - breaks E2EE, it's necessary for Infisical to sync the environment variables to - the cloud platform. - Select which Infisical environment secrets you want to sync to which Heroku app and press create integration to start syncing secrets to Heroku. diff --git a/docs/integrations/cloud/laravel-forge.mdx b/docs/integrations/cloud/laravel-forge.mdx index 5797692ea3..c58c4a7be7 100644 --- a/docs/integrations/cloud/laravel-forge.mdx +++ b/docs/integrations/cloud/laravel-forge.mdx @@ -27,12 +27,6 @@ Prerequisites: ![integrations laravel forge authorization](../../images/integrations/laravel-forge/integrations-laravelforge-auth.png) - - If this is your project's first cloud integration, then you'll have to grant - Infisical access to your project's environment variables. Although this step - breaks E2EE, it's necessary for Infisical to sync the environment variables to - the cloud platform. - Select which Infisical environment secrets you want to sync to which Laravel Forge site and press create integration to start syncing secrets to Laravel Forge. 
diff --git a/docs/integrations/cloud/netlify.mdx b/docs/integrations/cloud/netlify.mdx index f9b20abda1..f793aae100 100644 --- a/docs/integrations/cloud/netlify.mdx +++ b/docs/integrations/cloud/netlify.mdx @@ -25,12 +25,6 @@ description: "How to sync secrets from Infisical to Netlify" ![integrations netlify authorization](../../images/integrations/netlify/integrations-netlify-auth.png) - - If this is your project's first cloud integration, then you'll have to grant - Infisical access to your project's environment variables. Although this step - breaks E2EE, it's necessary for Infisical to sync the environment variables to - the cloud platform. - Select which Infisical environment secrets you want to sync to which Netlify app and context. Lastly, press create integration to start syncing secrets to Netlify. diff --git a/docs/integrations/cloud/northflank.mdx b/docs/integrations/cloud/northflank.mdx index 117ac73a8a..10dcb288eb 100644 --- a/docs/integrations/cloud/northflank.mdx +++ b/docs/integrations/cloud/northflank.mdx @@ -23,12 +23,6 @@ Prerequisites: ![integrations northflank authorization](../../images/integrations/northflank/integrations-northflank-auth.png) - - If this is your project's first cloud integration, then you'll have to grant - Infisical access to your project's environment variables. Although this step - breaks E2EE, it's necessary for Infisical to sync the environment variables to - the cloud platform. - Select which Infisical environment secrets you want to sync to which Northflank project and secret group. Finally, press create integration to start syncing secrets to Northflank. diff --git a/docs/integrations/cloud/qovery.mdx b/docs/integrations/cloud/qovery.mdx index 6eae9b954e..13aa6af464 100644 --- a/docs/integrations/cloud/qovery.mdx +++ b/docs/integrations/cloud/qovery.mdx @@ -21,12 +21,6 @@ Prerequisites: ![integrations qovery authorization](../../images/integrations/qovery/integrations-qovery-auth.png) - - If this is your project's first cloud integration, then you'll have to grant - Infisical access to your project's environment variables. Although this step - breaks E2EE, it is necessary for Infisical to sync the environment variables to - the cloud platform. - Select which Infisical environment secrets you want to sync to Qovery and press create integration to start syncing secrets. diff --git a/docs/integrations/cloud/railway.mdx b/docs/integrations/cloud/railway.mdx index 3f08d75a09..77b3155170 100644 --- a/docs/integrations/cloud/railway.mdx +++ b/docs/integrations/cloud/railway.mdx @@ -30,12 +30,6 @@ Prerequisites: ![integrations railway authorization](../../images/integrations/railway/integrations-railway-authorization.png) - - If this is your project's first cloud integration, then you'll have to grant - Infisical access to your project's environment variables. Although this step - breaks E2EE, it's necessary for Infisical to sync the environment variables to - the cloud platform. - Select which Infisical environment secrets you want to sync to which Railway project and environment (and optionally service). Lastly, press create integration to start syncing secrets to Railway. 
diff --git a/docs/integrations/cloud/render.mdx b/docs/integrations/cloud/render.mdx index 970789a31c..3663161711 100644 --- a/docs/integrations/cloud/render.mdx +++ b/docs/integrations/cloud/render.mdx @@ -22,12 +22,6 @@ Prerequisites: ![integrations render authorization](../../images/integrations/render/integrations-render-auth.png) - - If this is your project's first cloud integration, then you'll have to grant - Infisical access to your project's environment variables. Although this step - breaks E2EE, it's necessary for Infisical to sync the environment variables to - the cloud platform. - Select which Infisical environment secrets you want to sync to which Render service and press create integration to start syncing secrets to Render. diff --git a/docs/integrations/cloud/supabase.mdx b/docs/integrations/cloud/supabase.mdx index 3e94c0f51e..b5179c45fc 100644 --- a/docs/integrations/cloud/supabase.mdx +++ b/docs/integrations/cloud/supabase.mdx @@ -28,12 +28,6 @@ Prerequisites: ![integrations supabase authorization](../../images/integrations/supabase/integrations-supabase-authorization.png) - - If this is your project's first cloud integration, then you'll have to grant - Infisical access to your project's environment variables. Although this step - breaks E2EE, it's necessary for Infisical to sync the environment variables to - the cloud platform. - Select which Infisical environment secrets you want to sync to which Supabase project. Lastly, press create integration to start syncing secrets to Supabase. diff --git a/docs/integrations/cloud/teamcity.mdx b/docs/integrations/cloud/teamcity.mdx index 20a9dfbea1..3e713cc6a7 100644 --- a/docs/integrations/cloud/teamcity.mdx +++ b/docs/integrations/cloud/teamcity.mdx @@ -28,12 +28,6 @@ Prerequisites: ![integrations teamcity authorization](../../images/integrations/teamcity/integrations-teamcity-auth.png) - - If this is your project's first cloud integration, then you'll have to grant - Infisical access to your project's environment variables. Although this step - breaks E2EE, it's necessary for Infisical to sync the environment variables to - the cloud platform. - Select which Infisical environment secrets you want to sync to which TeamCity project (and optionally build configuration) and press create integration to start syncing secrets to TeamCity. diff --git a/docs/integrations/cloud/terraform-cloud.mdx b/docs/integrations/cloud/terraform-cloud.mdx index 3fd70df73f..d68e8a14fd 100644 --- a/docs/integrations/cloud/terraform-cloud.mdx +++ b/docs/integrations/cloud/terraform-cloud.mdx @@ -27,12 +27,6 @@ Prerequisites: ![integrations terraform cloud authorization](../../images/integrations/terraform/integrations-terraformcloud-auth.png) - - If this is your project's first cloud integration, then you'll have to grant - Infisical access to your project's environment variables. Although this step - breaks E2EE, it's necessary for Infisical to sync the environment variables to - the cloud platform. - Select which Infisical environment secrets and Terraform Cloud variable type you want to sync to which Terraform Cloud workspace/project and press create integration to start syncing secrets to Terraform Cloud. 
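 Once synced, secrets pushed as Terraform variables can be consumed like any other variable in the workspace's configuration. A minimal sketch, assuming a secret named `DB_PASSWORD` was synced as a Terraform variable:

 ```hcl
 # Declare the synced variable so Terraform Cloud can inject its value at plan/apply time
 variable "DB_PASSWORD" {
   type      = string
   sensitive = true # keeps the value out of plan output
 }
 ```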
@@ -40,4 +34,4 @@ Prerequisites: ![integrations terraform cloud](../../images/integrations/terraform/integrations-terraformcloud-create.png) ![integrations terraform cloud](../../images/integrations/terraform/integrations-terraformcloud.png) - \ No newline at end of file + diff --git a/docs/integrations/cloud/vercel.mdx b/docs/integrations/cloud/vercel.mdx index a88c6616bd..1cb1c06c38 100644 --- a/docs/integrations/cloud/vercel.mdx +++ b/docs/integrations/cloud/vercel.mdx @@ -17,13 +17,6 @@ description: "How to sync secrets from Infisical to Vercel" Press on the Vercel tile and grant Infisical access to your Vercel account. ![integrations vercel authorization](../../images/integrations/vercel/integrations-vercel-auth.png) - - - If this is your project's first cloud integration, then you'll have to grant - Infisical access to your project's environment variables. Although this step - breaks E2EE, it's necessary for Infisical to sync the environment variables to - the cloud platform. - Select which Infisical environment secrets you want to sync to which Vercel app and environment. Lastly, press create integration to start syncing secrets to Vercel. diff --git a/docs/integrations/cloud/windmill.mdx b/docs/integrations/cloud/windmill.mdx index 0fc6fddeb7..d0b2b9643a 100644 --- a/docs/integrations/cloud/windmill.mdx +++ b/docs/integrations/cloud/windmill.mdx @@ -22,12 +22,6 @@ Prerequisites: ![integrations windmill authorization](../../images/integrations/windmill/integrations-windmill-auth.png) - - If this is your project's first cloud integration, then you'll have to grant - Infisical access to your project's environment variables. Although this step - breaks E2EE, it's necessary for Infisical to sync the environment variables to - the cloud platform. - Select which Infisical environment secrets you want to sync to which Windmill workspace and press create integration to start syncing secrets to Windmill. diff --git a/docs/integrations/frameworks/terraform.mdx b/docs/integrations/frameworks/terraform.mdx index dfacfbc57b..e7a60ac7e4 100644 --- a/docs/integrations/frameworks/terraform.mdx +++ b/docs/integrations/frameworks/terraform.mdx @@ -33,7 +33,7 @@ Set up the Infisical provider by specifying the `host` and `service_token`. Repl ```hcl main.tf provider "infisical" { - host = "https://app.infisical.com" # Only required if using self hosted instance of Infisical, default is https://app.infisical.com + host = "https://app.infisical.com" # Only required if using a self-hosted instance of Infisical, default is https://app.infisical.com client_id = "<>" client_secret = "<>" service_token = "<>" # DEPRECATED, USE MACHINE IDENTITY AUTH INSTEAD diff --git a/docs/integrations/overview.mdx b/docs/integrations/overview.mdx index b29db8420c..f138ee8ec9 100644 --- a/docs/integrations/overview.mdx +++ b/docs/integrations/overview.mdx @@ -57,4 +57,4 @@ Missing an integration? 
[Throw in a request](https://github.com/Infisical/infisi
| [Flask](/integrations/frameworks/flask) | Framework | Available |
| [Laravel](/integrations/frameworks/laravel) | Framework | Available |
| [Ruby on Rails](/integrations/frameworks/rails) | Framework | Available |
-| Jenkins | CI/CD | Coming soon |
+| Jenkins | CI/CD | Available |
diff --git a/docs/integrations/platforms/ansible.mdx b/docs/integrations/platforms/ansible.mdx
index ad95d0d5dc..321dbec6ea 100644
--- a/docs/integrations/platforms/ansible.mdx
+++ b/docs/integrations/platforms/ansible.mdx
@@ -3,7 +3,48 @@ title: "Ansible"
 description: "Learn how to use Infisical for secret management in Ansible."
 ---

-The documentation for using Infisical to manage secrets in Ansible is currently available [here](https://galaxy.ansible.com/ui/repo/published/infisical/vault/).
+You can find the Infisical Ansible collection on [Ansible Galaxy](https://galaxy.ansible.com/ui/repo/published/infisical/vault/).
+
+This Ansible Infisical collection includes a variety of Ansible content to help automate the management of Infisical services. It is maintained by the Infisical team.
+
+## Ansible version compatibility
+Tested with Ansible Core >= 2.12.0 and the current development version of Ansible. Ansible Core versions prior to 2.12.0 have not been tested.
+
+## Python version compatibility
+This collection depends on the Infisical SDK for Python.
+
+Requires Python 3.7 or greater.
+
+## Installing this collection
+You can install the Infisical collection with the Ansible Galaxy CLI:
+
+```bash
+$ ansible-galaxy collection install infisical.vault
+```
+
+The Python module dependencies are not installed by ansible-galaxy. They can be installed manually using pip:
+
+```bash
+$ pip install infisical-python
+```
+
+## Using this collection
+
+You can either call modules by their Fully Qualified Collection Name (FQCN), such as `infisical.vault.read_secrets`, or you can call modules by their short name if you list the `infisical.vault` collection in the playbook's collections keyword:
+
+```yaml
+---
+vars:
+  read_all_secrets_within_scope: "{{ lookup('infisical.vault.read_secrets', universal_auth_client_id='<>', universal_auth_client_secret='<>', project_id='<>', path='/', env_slug='dev', url='https://spotify.infisical.com') }}"
+  # [{ "key": "HOST", "value": "google.com" }, { "key": "SMTP", "value": "gmail.smtp.edu" }]
+
+  read_secret_by_name_within_scope: "{{ lookup('infisical.vault.read_secrets', universal_auth_client_id='<>', universal_auth_client_secret='<>', project_id='<>', path='/', env_slug='dev', secret_name='HOST', url='https://spotify.infisical.com') }}"
+  # [{ "key": "HOST", "value": "google.com" }]
+```

 ## Troubleshoot
diff --git a/docs/integrations/platforms/docker-compose.mdx b/docs/integrations/platforms/docker-compose.mdx
index 47715eb942..09bd82ff7d 100644
--- a/docs/integrations/platforms/docker-compose.mdx
+++ b/docs/integrations/platforms/docker-compose.mdx
@@ -17,13 +17,7 @@ Follow this [guide](./docker) to configure the Infisical CLI for each service th

Generate a machine identity for each service you want to inject secrets into. You can do this by following the steps in the [Machine Identity](/documentation/platform/identities/machine-identities) guide.
### Set the machine identity client ID and client secret as environment variables - For each service you want to inject secrets into, set two environment variable called `INFISICAL_MACHINE_IDENTITY_CLIENT_ID`, and `INFISICAL_MACHINE_IDENTITY_CLIENT_SECRET` equal to the client ID and client secret of the machine identity(s) you created in the previous step. - - In the example below, we set two sets of client ID and client secret for the services. - - For the web service we set `INFISICAL_MACHINE_IDENTITY_CLIENT_ID_FOR_WEB` and `INFISICAL_MACHINE_IDENTITY_CLIENT_SECRET_FOR_WEB` as the client ID and client secret respectively. - - For the API service we set `INFISICAL_MACHINE_IDENTITY_CLIENT_ID_FOR_API` and `INFISICAL_MACHINE_IDENTITY_CLIENT_SECRET_FOR_API` as the client ID and client secret respectively. + For each service you want to inject secrets into, generate the required `INFISICAL_TOKEN_SERVICE_A` and `INFISICAL_TOKEN_SERVICE_B`. ```yaml # Example Docker Compose file @@ -32,45 +26,32 @@ Follow this [guide](./docker) to configure the Infisical CLI for each service th build: . image: example-service-1 environment: - - INFISICAL_MACHINE_IDENTITY_CLIENT_ID=${INFISICAL_MACHINE_IDENTITY_CLIENT_ID_FOR_WEB} - - INFISICAL_MACHINE_IDENTITY_CLIENT_SECRET=${INFISICAL_MACHINE_IDENTITY_CLIENT_SECRET_FOR_WEB} + - INFISICAL_TOKEN=${INFISICAL_TOKEN_SERVICE_A} api: build: . image: example-service-2 environment: - - INFISICAL_MACHINE_IDENTITY_CLIENT_ID=${INFISICAL_MACHINE_IDENTITY_CLIENT_ID_FOR_API} - - INFISICAL_MACHINE_IDENTITY_CLIENT_SECRET=${INFISICAL_MACHINE_IDENTITY_CLIENT_SECRET_FOR_API} + - INFISICAL_TOKEN=${INFISICAL_TOKEN_SERVICE_B} ``` ### Export shell variables - Next, set the shell variables you defined in your compose file. This can be done manually or via your CI/CD environment. Once done, it will be used to populate the corresponding `INFISICAL_MACHINE_IDENTITY_CLIENT_ID` and `INFISICAL_MACHINE_IDENTITY_CLIENT_SECRET` in your Docker Compose file. + Next, set the shell variables you defined in your compose file. This can be done manually or via your CI/CD environment. Once done, it will be used to populate the corresponding `INFISICAL_TOKEN_SERVICE_A` and `INFISICAL_TOKEN_SERVICE_B` in your Docker Compose file. ```bash #Example # Token refers to the token we generated in step 2 for this service - export INFISICAL_MACHINE_IDENTITY_CLIENT_ID_FOR_WEB= - export INFISICAL_MACHINE_IDENTITY_CLIENT_SECRET_FOR_WEB= - - # Token refers to the token we generated in step 2 for this service - export INFISICAL_MACHINE_IDENTITY_CLIENT_ID_FOR_API= - export INFISICAL_MACHINE_IDENTITY_CLIENT_SECRET_FOR_API= + export INFISICAL_TOKEN_SERVICE_A=$(infisical login --method=universal-auth --client-id= --client-secret= --silent --plain) + export INFISICAL_TOKEN_SERVICE_B=$(infisical login --method=universal-auth --client-id= --client-secret= --silent --plain) # Then run your compose file in the same terminal. docker-compose ... ``` - - - - Service tokens are being deprecated in favor of [machine identities](/documentation/platform/identities/machine-identities). - -They will be removed in the future in accordance with the deprecation notice and timeline stated [here](https://infisical.com/blog/deprecating-api-keys). - - + ## Generate service token Generate a unique [Service Token](/documentation/platform/token) for each service. 
diff --git a/docs/integrations/platforms/docker.mdx b/docs/integrations/platforms/docker.mdx index e858ddad01..8e429461a2 100644 --- a/docs/integrations/platforms/docker.mdx +++ b/docs/integrations/platforms/docker.mdx @@ -81,14 +81,46 @@ CMD ["infisical", "run", "--projectId", "", "--command", "npm r +### Using a Starting Script + +The drawback of the previous method is that you would have to generate the `INFISICAL_TOKEN` manually. To automate this process, you can use a shell script as your starting command. + + + + Create a machine identity for your project by following the steps in the [Machine Identity](/documentation/platform/identities/machine-identities) guide. This identity will enable authentication and secret retrieval from Infisical. + + + + Create a shell script to obtain an access token for the machine identity: + + ```bash script.sh + #!/bin/sh + export INFISICAL_TOKEN=$(infisical login --method=universal-auth --client-id=$INFISICAL_MACHINE_CLIENT_ID --client-secret=$INFISICAL_MACHINE_CLIENT_SECRET --plain --silent) + exec infisical run --token $INFISICAL_TOKEN --projectId $PROJECT_ID --env $INFISICAL_SECRET_ENV --domain $INFISICAL_API_URL -- + ``` + + > **Note:** The access token has a limited lifespan. Use the [infisical token renew](/cli/commands/token) CLI command to renew it when necessary. + + Caution: Implementing this directly in your Dockerfile presents two key issues: + + 1. Lack of persistence: Variables set in one build step are not automatically carried over to subsequent steps, complicating the process. + 2. Security risk: It exposes sensitive credentials inside your container, potentially allowing anyone with container access to retrieve them. + + + + + Grant the Infisical CLI access to the access token, inside your Docker container. This allows the CLI to fetch and inject secrets into your application. + + Add the following line to your Dockerfile: + + ```dockerfile + CMD ["./script.sh"] + ``` + + + - -Service tokens are being deprecated in favor of [machine identities](/documentation/platform/identities/machine-identities). - -They will be removed in the future in accordance with the deprecation notice and timeline stated [here](https://infisical.com/blog/deprecating-api-keys). - - ```dockerfile CMD ["infisical", "run", "--", "[your service start command]"] diff --git a/docs/integrations/platforms/ecs-with-agent.mdx b/docs/integrations/platforms/ecs-with-agent.mdx index 43760d2bb8..31c5a982b1 100644 --- a/docs/integrations/platforms/ecs-with-agent.mdx +++ b/docs/integrations/platforms/ecs-with-agent.mdx @@ -1,27 +1,30 @@ --- -title: 'Amazon ECS' +title: "Amazon ECS" description: "Learn how to deliver secrets to Amazon Elastic Container Service." --- ![ecs diagram](/images/guides/agent-with-ecs/ecs-diagram.png) -This guide will go over the steps needed to access secrets stored in Infisical from Amazon Elastic Container Service (ECS). +This guide will go over the steps needed to access secrets stored in Infisical from Amazon Elastic Container Service (ECS). -At a high level, the steps involve setting up an ECS task with a [Infisical Agent](/infisical-agent/overview) as a sidecar container. This sidecar container uses [Universal Auth](/documentation/platform/identities/universal-auth) to authenticate with Infisical to fetch secrets/access tokens. +At a high level, the steps involve setting up an ECS task with an [Infisical Agent](/infisical-agent/overview) as a sidecar container. 
This sidecar container uses [AWS Auth](/documentation/platform/identities/aws-auth) to authenticate with Infisical to fetch secrets/access tokens. Once the secrets/access tokens are retrieved, they are then stored in a shared [Amazon Elastic File System](https://aws.amazon.com/efs/) (EFS) volume. This volume is then made accessible to your application and all of its replicas. This guide primarily focuses on integrating Infisical Cloud with Amazon ECS on AWS Fargate and Amazon EFS. However, the principles and steps can be adapted for use with any instance of Infisical (on premise or cloud) and different ECS launch configurations. ## Prerequisites + This guide requires the following prerequisites: -- Infisical account + +- Infisical account - Git installed - Terraform v1.0 or later installed - Access to AWS credentials - Understanding of [Infisical Agent](/infisical-agent/overview) ## What we will deploy + For this demonstration, we'll deploy the [File Browser](https://github.com/filebrowser/filebrowser) application on our ECS cluster. Although this guide focuses on File Browser, the principles outlined here can be applied to any application of your choice. @@ -29,20 +32,20 @@ File Browser plays a key role in this context because it enables us to view all This feature is important for our demonstration, as it allows us to verify whether the Infisical agent is depositing the expected files into the designated file volume and if those files are accessible to the application. -Volumes that contain sensitive secrets should not be publicly accessible. The use of File Browser here is solely for demonstration and verification purposes. + Volumes that contain sensitive secrets should not be publicly accessible. The + use of File Browser here is solely for demonstration and verification + purposes. - ## Configure Authentication with Infisical -In order for the Infisical agent to fetch credentials from Infisical, we'll first need to authenticate with Infisical. -While Infisical supports various authentication methods, this guide focuses on using Universal Auth to authenticate the agent with Infisical. -Follow the documentation to configure and generate a client id and client secret with Universal auth [here](/documentation/platform/identities/universal-auth). -Make sure to save these credentials somewhere handy because you'll need them soon. +In order for the Infisical agent to fetch credentials from Infisical, we'll first need to authenticate with Infisical. Follow the documentation to configure a machine identity with AWS Auth [here](/documentation/platform/identities/aws-auth). +Take note of the Machine Identity ID as you will be needing this in the preceding steps. ## Clone guide assets repository + To help you quickly deploy the example application, please clone the guide assets from this [Github repository](https://github.com/Infisical/infisical-guides.git). -This repository contains assets for all Infisical guides. The content for this guide can be found within a sub directory called `aws-ecs-with-agent`. +This repository contains assets for all Infisical guides. The content for this guide can be found within a sub directory called `aws-ecs-with-agent`. The guide will assume that `aws-ecs-with-agent` is your working directory going forward. 
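For example, assuming the guide assets sit at the root of that repository:

```bash
# Clone the guide assets and switch to this guide's directory
git clone https://github.com/Infisical/infisical-guides.git
cd infisical-guides/aws-ecs-with-agent
```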
## Deploy example application @@ -50,95 +53,84 @@ The guide will assume that `aws-ecs-with-agent` is your working directory going Before we can deploy our full application and its related infrastructure with Terraform, we'll need to first configure our Infisical agent. ### Agent configuration overview + The agent config file defines what authentication method will be used when connecting with Infisical along with where the fetched secrets/access tokens should be saved to. -Since the Infisical agent will be deployed as a sidecar, the agent configuration file and any secret template files will need to be encoded in base64. -This encoding step is necessary as it allows these files to be added into our Terraform configuration file without needing to upload them first. - -#### Secret template file -The Infisical agent accepts one or more optional template files. If provided, the agent will fetch secrets using the set authentication method and format the fetched secrets according to the given template file. - -For demonstration purposes, we will create the following secret template file. -This template will transform our secrets from Infisical project with the ID `62fd92aa8b63973fee23dec7`, in the `dev` environment, and secrets located in the path `/`, into a `KEY=VALUE` format. - - - Remember to update the project id, environment slug and secret path to one that exists within your Infisical project - - -```secrets.template secrets.template -{{- with secret "62fd92aa8b63973fee23dec7" "dev" "/" }} -{{- range . }} -{{ .Key }}={{ .Value }} -{{- end }} -{{- end }} -``` - -Next, we need encode this template file in `base64` so it can be set in the agent configuration file. - -```bash -cat secrets.template | base64 -Cnt7LSB3aXRoIHNlY3JldCAiMWVkMjk2MWQtNDM5NS00MmNlLTlkNzQtYjk2ZGQwYmYzMDg0IiAiZGV2IiAiLyIgfX0Ke3stIHJhbmdlIC4gfX0Ke3sgLktleSB9fT17eyAuVmFsdWUgfX0Ke3stIGVuZCB9fQp7ey0gZW5kIH19 -``` +Since the Infisical agent will be deployed as a sidecar, the agent configuration file will need to be encoded in base64. +This encoding step is necessary as it allows the agent configuration file to be added into our Terraform configuration without needing to upload it first. #### Full agent configuration file -This agent config file will connect with Infisical Cloud using Universal Auth and deposit access tokens at path `/infisical-agent/access-token` and render secrets to file `/infisical-agent/secrets`. -You'll notice that instead of passing the path to the secret template file as we normally would, we set the base64 encoded template from the previous step under `base64-template-content` property. +Inside the `aws-ecs-with-agent` directory, you will find a sample `agent-config.yaml` file. This agent config file will connect with Infisical Cloud using AWS Auth and deposit access tokens at path `/infisical-agent/access-token` and render secrets to file `/infisical-agent/secrets`. ```yaml agent-config.yaml infisical: address: https://app.infisical.com exit-after-auth: true auth: - type: universal-auth - config: - remove_client_secret_on_read: false + type: aws-iam sinks: - type: file config: path: /infisical-agent/access-token templates: - - base64-template-content: Cnt7LSB3aXRoIHNlY3JldCAiMWVkMjk2MWQtNDM5NS00MmNlLTlkNzQtYjk2ZGQwYmYzMDg0IiAiZGV2IiAiLyIgfX0Ke3stIHJhbmdlIC4gfX0Ke3sgLktleSB9fT17eyAuVmFsdWUgfX0Ke3stIGVuZCB9fQp7ey0gZW5kIH19 + - template-content: | + {{- with secret "202f04d7-e4cb-43d4-a292-e893712d61fc" "dev" "/" }} + {{- range . 
}} + {{ .Key }}={{ .Value }} + {{- end }} + {{- end }} destination-path: /infisical-agent/secrets ``` -Again, we'll need to encode the full configuration file in `base64` so it can be easily delivered via Terraform. +#### Secret template -```bash -cat agent-config.yaml | base64 -aW5maXNpY2FsOgogIGFkZHJlc3M6IGh0dHBzOi8vYXBwLmluZmlzaWNhbC5jb20KICBleGl0LWFmdGVyLWF1dGg6IHRydWUKYXV0aDoKICB0eXBlOiB1bml2ZXJzYWwtYXV0aAogIGNvbmZpZzoKICAgIHJlbW92ZV9jbGllbnRfc2VjcmV0X29uX3JlYWQ6IGZhbHNlCnNpbmtzOgogIC0gdHlwZTogZmlsZQogICAgY29uZmlnOgogICAgICBwYXRoOiAvaW5maXNpY2FsLWFnZW50L2FjY2Vzcy10b2tlbgp0ZW1wbGF0ZXM6CiAgLSBiYXNlNjQtdGVtcGxhdGUtY29udGVudDogQ250N0xTQjNhWFJvSUhObFkzSmxkQ0FpTVdWa01qazJNV1F0TkRNNU5TMDBNbU5sTFRsa056UXRZamsyWkdRd1ltWXpNRGcwSWlBaVpHVjJJaUFpTHlJZ2ZYMEtlM3N0SUhKaGJtZGxJQzRnZlgwS2Uzc2dMa3RsZVNCOWZUMTdleUF1Vm1Gc2RXVWdmWDBLZTNzdElHVnVaQ0I5ZlFwN2V5MGdaVzVrSUgxOQogICAgZGVzdGluYXRpb24tcGF0aDogL2luZmlzaWNhbC1hZ2VudC9zZWNyZXRzCg== -``` +The Infisical agent accepts one or more optional templates. If provided, the agent will fetch secrets using the set authentication method and format the fetched secrets according to the given template. +Typically, these templates are passed in to the agent configuration file via file reference using the `source-path` property but for simplicity we define them inline. -## Add auth credentials & agent config -With the base64 encoded agent config file and Universal Auth credentials in hand, it's time to assign them as values in our Terraform config file. +In the agent configuration above, the template defined will transform the secrets from Infisical project with the ID `202f04d7-e4cb-43d4-a292-e893712d61fc`, in the `dev` environment, and secrets located in the path `/`, into a `KEY=VALUE` format. -To configure these values, navigate to the `ecs.tf` file in your preferred code editor and assign values to `auth_client_id`, `auth_client_secret`, and `agent_config`. + + Remember to update the project id, environment slug and secret path to one + that exists within your Infisical project + + +## Configure app on terraform + +Navigate to the `ecs.tf` file in your preferred code editor. In the container_definitions section, assign the values to the `machine_identity_id` and `agent_config` properties. +The `agent_config` property expects the base64-encoded agent configuration file. In order to get this, we use the `base64encode` and `file` functions of HCL. ```hcl ecs.tf ...snip... 
-data "template_file" "cb_app" { - template = file("./templates/ecs/cb_app.json.tpl") - - vars = { - app_image = var.app_image - sidecar_image = var.sidecar_image - app_port = var.app_port - fargate_cpu = var.fargate_cpu - fargate_memory = var.fargate_memory - aws_region = var.aws_region - auth_client_id = "" - auth_client_secret = "" - agent_config = "" +resource "aws_ecs_task_definition" "app" { + family = "cb-app-task" + execution_role_arn = aws_iam_role.ecs_task_execution_role.arn + task_role_arn = aws_iam_role.ecs_task_role.arn + network_mode = "awsvpc" + requires_compatibilities = ["FARGATE"] + cpu = 4096 + memory = 8192 + container_definitions = templatefile("./templates/ecs/cb_app.json.tpl", { + app_image = var.app_image + sidecar_image = var.sidecar_image + app_port = var.app_port + fargate_cpu = var.fargate_cpu + fargate_memory = var.fargate_memory + aws_region = var.aws_region + machine_identity_id = "5655f4f5-332b-45f9-af06-8f493edff36f" + agent_config = base64encode(file("../agent-config.yaml")) + }) + volume { + name = "infisical-efs" + efs_volume_configuration { + file_system_id = aws_efs_file_system.infisical_efs.id + root_directory = "/" + } } } ...snip... ``` - - To keep this guide simple, `auth_client_id`, `auth_client_secret` have been added directly into the ECS container definition. - However, in production, you should securely fetch these values from AWS Secrets Manager or AWS Parameter store and feed them directly to agent sidecar. - - After these values have been set, they will be passed to the Infisical agent during startup through environment variables, as configured in the `infisical-sidecar` container below. ```terraform templates/ecs/cb_app.json.tpl @@ -169,12 +161,8 @@ After these values have been set, they will be passed to the Infisical agent dur }, "environment": [ { - "name": "INFISICAL_UNIVERSAL_AUTH_CLIENT_ID", - "value": "${auth_client_id}" - }, - { - "name": "INFISICAL_UNIVERSAL_CLIENT_SECRET", - "value": "${auth_client_secret}" + "name": "INFISICAL_MACHINE_IDENTITY_ID", + "value": "${machine_identity_id}" }, { "name": "INFISICAL_AGENT_CONFIG_BASE64", @@ -191,9 +179,9 @@ After these values have been set, they will be passed to the Infisical agent dur ] ``` -In the above container definition, you'll notice that that the Infisical agent has a `mountPoints` defined. -This mount point is referencing to the already configured EFS volume as shown below. -`containerPath` is set to `/infisical-agent` because that is that the folder we have instructed the agent to deposit the credentials to. +In the above container definition, you'll notice that that the Infisical agent has a `mountPoints` defined. +This mount point is referencing to the already configured EFS volume as shown below. +`containerPath` is set to `/infisical-agent` because that is that the folder we have instructed the agent to deposit the credentials to. ```hcl terraform/efs.tf resource "aws_efs_file_system" "infisical_efs" { @@ -211,8 +199,9 @@ resource "aws_efs_mount_target" "mount" { ``` ## Configure AWS credentials + Because we'll be deploying the example file browser application to AWS via Terraform, you will need to obtain a set of `AWS Access Key` and `Secret Key`. -Once you have generated these credentials, export them to your terminal. +Once you have generated these credentials, export them to your terminal. 1. Export the AWS Access Key ID: @@ -227,26 +216,31 @@ Once you have generated these credentials, export them to your terminal. 
``` ## Deploy terraform configuration + With the agent's sidecar configuration complete, we can now deploy our changes to AWS via Terraform. 1. Change your directory to `terraform` -```sh + +```sh cd terraform ``` 2. Initialize Terraform + ``` -$ terraform init +$ terraform init ``` -3. Preview resources that will be created +3. Preview resources that will be created + ``` $ terraform plan ``` 4. Trigger resource creation + ```bash -$ terraform apply +$ terraform apply Do you want to perform these actions? Terraform will perform the actions described above. @@ -264,24 +258,24 @@ Outputs: alb_hostname = "cb-load-balancer-1675475779.us-east-1.elb.amazonaws.com:8080" ``` -Once the resources have been successfully deployed, Terrafrom will output the host address where the file browser application will be accessible. -It may take a few minutes for the application to become fully ready. - +Once the resources have been successfully deployed, Terraform will output the host address where the file browser application will be accessible. +It may take a few minutes for the application to become fully ready. ## Verify secrets/tokens in EFS volume + To verify that the agent is depositing access tokens and rendering secrets to the paths specified in the agent config, navigate to the web address from the previous step. Once you visit the address, you'll be prompted to login. Enter the credentials shown below. ![file browser main login page](/images/guides/agent-with-ecs/file_browser_main.png) -Since our EFS volume is mounted to the path of the file browser application, we should see the access token and rendered secret file we defined via the agent config file. +Since our EFS volume is mounted to the path of the file browser application, we should see the access token and rendered secret file we defined via the agent config file. ![file browswer dashbaord](/images/guides/agent-with-ecs/filebrowser_afterlogin.png) -As expected, two files are present: `access-token` and `secrets`. -The `access-token` file should hold a valid `Bearer` token, which can be used to make HTTP requests to Infisical. +As expected, two files are present: `access-token` and `secrets`. +The `access-token` file should hold a valid `Bearer` token, which can be used to make HTTP requests to Infisical. The `secrets` file should contain secrets, formatted according to the specifications in our secret template file (presented in key=value format). ![file browser access token deposit](/images/guides/agent-with-ecs/access-token-deposit.png) -![file browser secrets render](/images/guides/agent-with-ecs/secrets-deposit.png) \ No newline at end of file +![file browser secrets render](/images/guides/agent-with-ecs/secrets-deposit.png) diff --git a/docs/integrations/platforms/infisical-agent.mdx b/docs/integrations/platforms/infisical-agent.mdx index 1516ae045a..e0ff5b6432 100644 --- a/docs/integrations/platforms/infisical-agent.mdx +++ b/docs/integrations/platforms/infisical-agent.mdx @@ -9,60 +9,264 @@ It eliminates the need to modify application logic by enabling clients to decide ![agent diagram](/images/agent/infisical-agent-diagram.png) ### Key features: + - Token renewal: Automatically authenticates with Infisical and deposits renewed access tokens at specified path for applications to consume - Templating: Renders secrets via user provided templates to desired formats for applications to consume ### Token renewal + The Infisical agent can help manage the life cycle of access tokens. 
The token renewal process is split into two main components: a `Method`, which is the authentication process suitable for your current setup, and `Sinks`, which are the places where the agent deposits the new access token whenever it receives updates.

-When the Infisical Agent is started, it will attempt to obtain a valid access token using the authentication method you have configured. If the agent is unable to fetch a valid token, the agent will keep trying, increasing the time between each attempt.
+When the Infisical Agent is started, it will attempt to obtain a valid access token using the authentication method you have configured. If the agent is unable to fetch a valid token, the agent will keep trying, increasing the time between each attempt.

Once an access token is successfully fetched, the agent will make sure the access token stays valid, continuing to renew it before it expires.

Every time the agent successfully retrieves a new access token, it writes the new token to the Sinks you've configured.

- Access tokens can be utilized with Infisical SDKs or directly in API requests to retrieve secrets from Infisical
+ Access tokens can be utilized with Infisical SDKs or directly in API requests
+ to retrieve secrets from Infisical

### Templating

-The Infisical agent can help deliver formatted secrets to your application in a variety of environments. To achieve this, the agent will retrieve secrets from Infisical, format them using a specified template, and then save these formatted secrets to a designated file path.
-Templating process is done through the use of Go language's [text/template feature](https://pkg.go.dev/text/template). Multiple template definitions can be set in the agent configuration file to generate a variety of formatted secret files.
+The Infisical agent can help deliver formatted secrets to your application in a variety of environments. To achieve this, the agent will retrieve secrets from Infisical, format them using a specified template, and then save these formatted secrets to a designated file path.

-When the agent is started and templates are defined in the agent configuration file, the agent will attempt to acquire a valid access token using the set authentication method outlined in the agent's configuration.
+The templating process is done through the use of the Go language's [text/template feature](https://pkg.go.dev/text/template). You can refer to the available secret template functions [here](#available-secret-template-functions). Multiple template definitions can be set in the agent configuration file to generate a variety of formatted secret files.
+
+When the agent is started and templates are defined in the agent configuration file, the agent will attempt to acquire a valid access token using the set authentication method outlined in the agent's configuration.
If this initial attempt is unsuccessful, the agent will pause momentarily before continuing to make more attempts.

-Once the agent successfully obtains a valid access token, the agent proceeds to fetch the secrets from Infisical using it.
+Once the agent successfully obtains a valid access token, the agent proceeds to fetch the secrets from Infisical using it.
It then formats these secrets using the user-provided templates and writes the formatted data to configured file paths.
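For instance, a minimal secret template like the one below renders the fetched secrets in a dotenv-style `KEY=VALUE` format; the project ID, environment slug, and secret path are placeholders to replace with your own:

```
{{- with secret "<project-id>" "dev" "/" }}
{{- range . }}
{{ .Key }}={{ .Value }}
{{- end }}
{{- end }}
```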
-## Agent configuration file +## Agent configuration file -To set up the authentication method for token renewal and to define secret templates, the Infisical agent requires a YAML configuration file containing properties defined below. +To set up the authentication method for token renewal and to define secret templates, the Infisical agent requires a YAML configuration file containing properties defined below. While specifying an authentication method is mandatory to start the agent, configuring sinks and secret templates are optional. -| Field | Description | -| ---------------------------- | ----------- | -| `infisical.address` | The URL of the Infisical service. Default: `"https://app.infisical.com"`. | -| `auth.type` | The type of authentication method used. Only `"universal-auth"` type is currently available | -| `auth.config.client-id` | The file path where the universal-auth client id is stored. | -| `auth.config.client-secret` | The file path where the universal-auth client secret is stored. | -| `auth.config.remove_client_secret_on_read` | This will instruct the agent to remove the client secret from disk. | -| `sinks[].type` | The type of sink in a list of sinks. Each item specifies a sink type. Currently, only `"file"` type is available. | -| `sinks[].config.path` | The file path where the access token should be stored for each sink in the list. | -| `templates[].source-path` | The path to the template file that should be used to render secrets. | -| `templates[].destination-path` | The path where the rendered secrets from the source template will be saved to. | -| `templates[].config.polling-interval` | How frequently to check for secret changes. Default: `5 minutes` (optional) | -| `templates[].config.execute.command` | The command to execute when secret change is detected (optional) | -| `templates[].config.execute.timeout` | How long in seconds to wait for command to execute before timing out (optional) | +| Field | Description | +| ------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `infisical.address` | The URL of the Infisical service. Default: `"https://app.infisical.com"`. | +| `auth.type` | The type of authentication method used. Available options: `universal-auth`, `kubernetes`, `azure`, `gcp-id-token`, `gcp-iam`, `aws-iam` | +| `auth.config.identity-id` | The file path where the machine identity id is stored

This field is required when using any of the following auth types: `kubernetes`, `azure`, `gcp-id-token`, `gcp-iam`, or `aws-iam`. | +| `auth.config.service-account-token` | Path to the Kubernetes service account token to use (optional)

Default: `/var/run/secrets/kubernetes.io/serviceaccount/token` | +| `auth.config.service-account-key` | Path to your GCP service account key file. This field is required when using `gcp-iam` auth type.

Please note that the file should be in JSON format. | +| `auth.config.client-id` | The file path where the universal-auth client id is stored. | +| `auth.config.client-secret` | The file path where the universal-auth client secret is stored. | +| `auth.config.remove_client_secret_on_read` | This will instruct the agent to remove the client secret from disk. | +| `sinks[].type` | The type of sink in a list of sinks. Each item specifies a sink type. Currently, only `"file"` type is available. | +| `sinks[].config.path` | The file path where the access token should be stored for each sink in the list. | +| `templates[].source-path` | The path to the template file that should be used to render secrets. | +| `templates[].template-content` | The inline secret template to be used for rendering the secrets. | +| `templates[].destination-path` | The path where the rendered secrets from the source template will be saved to. | +| `templates[].config.polling-interval` | How frequently to check for secret changes. Default: `5 minutes` (optional) | +| `templates[].config.execute.command` | The command to execute when secret change is detected (optional) | +| `templates[].config.execute.timeout` | How long in seconds to wait for command to execute before timing out (optional) | +## Authentication + +The Infisical agent supports multiple authentication methods. Below are the available authentication methods, with their respective configurations. + + + + The Universal Auth method is a simple and secure way to authenticate with Infisical. It requires a client ID and a client secret to authenticate with Infisical. + + + + + Path to the file containing the universal auth client ID. + + + Path to the file containing the universal auth client secret. + + + Instructs the agent to remove the client secret from disk after reading + it. + + + + + + + To create a universal auth machine identity, follow the step by step guide outlined [here](/documentation/platform/identities/universal-auth). + + + Update the agent configuration file with the specified auth method, client ID, and client secret. In the snippet below you can see a sample configuration of the `auth` field when using the Universal Auth method. + + ```yaml example-auth-config.yaml + auth: + type: "universal-auth" + config: + client-id: "./client-id" # Path to the file containing the client ID + client-secret: "./client" # Path to the file containing the client secret + remove_client_secret_on_read: false # Optional field, instructs the agent to remove the client secret from disk after reading it + ``` + + + + + + The Native Kubernetes method is used to authenticate with Infisical when running in a Kubernetes environment. It requires a service account token to authenticate with Infisical. + +{" "} + + + + + Path to the file containing the machine identity ID. + + + Path to the Kubernetes service account token to use. Default: + `/var/run/secrets/kubernetes.io/serviceaccount/token`. + + + + + + + To create a Kubernetes machine identity, follow the step by step guide outlined [here](/documentation/platform/identities/kubernetes-auth). + + + Update the agent configuration file with the specified auth method, identity ID, and service account token. In the snippet below you can see a sample configuration of the `auth` field when using the Kubernetes method. 
+ + ```yaml example-auth-config.yaml + auth: + type: "kubernetes" + config: + identity-id: "./identity-id" # Path to the file containing the machine identity ID + service-account-token: "/var/run/secrets/kubernetes.io/serviceaccount/token" # Optional field, custom path to the Kubernetes service account token to use + ``` + + + + + + + The Native Azure method is used to authenticate with Infisical when running in an Azure environment. + + + + + Path to the file containing the machine identity ID. + + + + + + + To create an Azure machine identity, follow the step by step guide outlined [here](/documentation/platform/identities/azure-auth). + + + Update the agent configuration file with the specified auth method and identity ID. In the snippet below you can see a sample configuration of the `auth` field when using the Azure method. + + ```yaml example-auth-config.yaml + auth: + type: "azure" + config: + identity-id: "./identity-id" # Path to the file containing the machine identity ID + ``` + + + + + + The Native GCP ID Token method is used to authenticate with Infisical when running in a GCP environment. + + + + + Path to the file containing the machine identity ID. + + + + + + + To create a GCP machine identity, follow the step by step guide outlined [here](/documentation/platform/identities/gcp-auth). + + + Update the agent configuration file with the specified auth method and identity ID. In the snippet below you can see a sample configuration of the `auth` field when using the GCP ID Token method. + + ```yaml example-auth-config.yaml + auth: + type: "gcp-id-token" + config: + identity-id: "./identity-id" # Path to the file containing the machine identity ID + ``` + + + + + + The GCP IAM method is used to authenticate with Infisical with a GCP service account key. + + + + + Path to the file containing the machine identity ID. + + + Path to your GCP service account key file. + + + + + + + To create a GCP machine identity, follow the step by step guide outlined [here](/documentation/platform/identities/gcp-auth). + + + Update the agent configuration file with the specified auth method, identity ID, and service account key. In the snippet below you can see a sample configuration of the `auth` field when using the GCP IAM method. + + ```yaml example-auth-config.yaml + auth: + type: "gcp-iam" + config: + identity-id: "./identity-id" # Path to the file containing the machine identity ID + service-account-key: "./service-account-key.json" # Path to your GCP service account key file + ``` + + + + + + The AWS IAM method is used to authenticate with Infisical with an AWS IAM role while running in an AWS environment like EC2, Lambda, etc. + + + + + Path to the file containing the machine identity ID. + + + + + + + To create an AWS machine identity, follow the step by step guide outlined [here](/documentation/platform/identities/aws-auth). + + + Update the agent configuration file with the specified auth method and identity ID. In the snippet below you can see a sample configuration of the `auth` field when using the AWS IAM method. + + ```yaml example-auth-config.yaml + auth: + type: "aws-iam" + config: + identity-id: "./identity-id" # Path to the file containing the machine identity ID + ``` + + + + + ## Quick start Infisical Agent -To install the Infisical agent, you must first install the [Infisical CLI](../cli/overview) in the desired environment where you'd like the agent to run. This is because the Infisical agent is a sub-command of the Infisical CLI. 
+
+To install the Infisical agent, you must first install the [Infisical CLI](/cli/overview) in the desired environment where you'd like the agent to run. This is because the Infisical agent is a sub-command of the Infisical CLI.
 Once you have the CLI installed, you will need to provision programmatic access for the agent via [Universal Auth](/documentation/platform/identities/universal-auth). To obtain a **Client ID** and a **Client Secret**, follow the step-by-step guide outlined [here](/documentation/platform/identities/universal-auth).

-Next, create agent config file as shown below.
+Next, create an agent config file as shown below. The example agent configuration file defines the token authentication method, one sink location, and a secret template.

 ```yaml example-agent-config-file.yaml
 infisical:
@@ -87,8 +291,8 @@ templates:
       command: ./reload-app.sh
 ```

-Above is an example agent configuration file that defines the token authentication method, one sink location (where to deposit access tokens after renewal) and a secret template.
-
+The secret template below will be used to render the secrets with the key and the value separated by an `=` sign. You'll notice that a custom function named `secret` is used to fetch the secrets.
+This function takes the following arguments: `secret "<project-id>" "<environment-slug>" "<secret-path>"`.

 ```text my-dot-ev-secret-template
 {{- with secret "6553ccb2b7da580d7f6e7260" "dev" "/" }}
@@ -98,11 +302,67 @@ Above is an example agent configuration file that defines the token authenticati
 {{- end }}
 ```

-The secret template above will be used to render the secrets where the key and the value are separated by `=` sign. You'll notice that a custom function named `secret` is used to fetch the secrets.
-This function takes the following arguments: `secret "" "" ""`.
+After defining the agent configuration file, run the command below, pointing to the path where the agent configuration file is located.

-```bash
+```bash
 infisical agent --config example-agent-config-file.yaml
 ```

-After defining the agent configuration file, run the command above pointing to the path where the agent configuration is located.
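+For illustration, if the project contained two hypothetical secrets named `DB_HOST` and `DB_PASSWORD`, the secret template shown earlier would render a file at the configured destination path along these lines (values made up):
+
+```text example-rendered-output
+DB_HOST=localhost
+DB_PASSWORD=example-password
+```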
+### Available secret template functions
+
+
+  ```bash
+  listSecrets "<project-id>" "<environment-slug>" "<secret-path>" "<json-options>"
+  ```
+  ```bash example-template-usage-1
+  {{- with listSecrets "6553ccb2b7da580d7f6e7260" "dev" "/" `{"recursive": false, "expandSecretReferences": true}` }}
+  {{- range . }}
+  {{ .Key }}={{ .Value }}
+  {{- end }}
+  {{- end }}
+  ```
+  ```bash example-template-usage-2
+{{- with listSecrets "da8056c8-01e2-4d24-b39f-cb4e004b8d44" "staging" "/" `{"recursive": true, "expandSecretReferences": true}` }}
+{{- range . }}
+{{- if eq .SecretPath "/"}}
+{{ .Key }}={{ .Value }}
+{{- else}}
+{{ .SecretPath }}/{{ .Key }}={{ .Value }}
+{{- end}}
+{{- end }}
+{{- end }}
+  ```
+
+
+**Function name**: listSecrets
+
+**Description**: This function can be used to render the full list of secrets within a given project, environment, and secret path.
+
+An optional JSON argument is also available. It includes the properties `recursive`, which defaults to false, and `expandSecretReferences`, which defaults to true and expands the returned secrets.
+
+
+**Returns**: A list of secret objects with the following keys `Key, WorkspaceId, Value, SecretPath, Type, ID, and Comment`
+
+
+
+  ```bash
+  getSecretByName "<project-id>" "<environment-slug>" "<secret-path>" "<secret-name>"
+  ```
+
+```bash example-template-usage
+{{ with getSecretByName "d821f21d-aa90-453b-8448-8c78c1160a0e" "dev" "/" "POSTHOG_HOST"}}
+{{ if .Value }}
+password = "{{ .Value }}"
+{{ end }}
+{{ end }}
+```
+
+**Function name**: getSecretByName
+
+**Description**: This function can be used to render a single secret by its name.
+
+**Returns**: A single secret object with the following keys `Key, WorkspaceId, Value, Type, ID, and Comment`
+
+
diff --git a/docs/integrations/platforms/kubernetes.mdx b/docs/integrations/platforms/kubernetes.mdx
index 41a41726c1..c39041375f 100644
--- a/docs/integrations/platforms/kubernetes.mdx
+++ b/docs/integrations/platforms/kubernetes.mdx
@@ -1,5 +1,5 @@
 ---
-title: "Kubernetes"
+title: "Kubernetes Operator"
 description: "How to use Infisical to inject secrets into Kubernetes clusters."
 ---

@@ -9,6 +9,12 @@ The Infisical Secrets Operator is a Kubernetes controller that retrieves secrets
 It uses an `InfisicalSecret` resource to specify authentication and storage methods.
 The operator continuously updates secrets and can also reload dependent deployments automatically.

+
+  If you are already using the External Secrets operator, you can view the
+  integration documentation for it
+  [here](https://external-secrets.io/latest/provider/infisical/).
+
+
 ## Install Operator

 The operator can be installed via [Helm](https://helm.sh) or [kubectl](https://github.com/kubernetes/kubectl)

@@ -24,14 +30,13 @@ The operator can be install via [Helm](https://helm.sh) or [kubectl](https://git
   **Install the Helm chart**

-  For production deployments, it is highly recommended to set the chart version and the application version during installs and upgrades.
-  This will prevent the operator from being accidentally updated to the latest version and introduce unintended breaking changes.
-
-  View application versions [here](https://hub.docker.com/r/infisical/kubernetes-operator/tags) and chart versions [here](https://cloudsmith.io/~infisical/repos/helm-charts/packages/detail/helm/secrets-operator/#versions)
+  To select a specific version, view the application versions [here](https://hub.docker.com/r/infisical/kubernetes-operator/tags) and chart versions [here](https://cloudsmith.io/~infisical/repos/helm-charts/packages/detail/helm/secrets-operator/#versions)

   ```bash
-  helm install --generate-name infisical-helm-charts/secrets-operator --version= --set controllerManager.manager.image.tag=
+  helm install --generate-name infisical-helm-charts/secrets-operator
+  ```
+
+  ```bash
   # Example installing app version v0.2.0 and chart version 0.1.4
   helm install --generate-name infisical-helm-charts/secrets-operator --version=0.1.4 --set controllerManager.manager.image.tag=v0.2.0
   ```

@@ -72,38 +77,98 @@ spec:
   authentication:
     # Make sure to only have 1 authentication method defined, serviceToken/universalAuth.
     # If you have multiple authentication methods defined, it may cause issues.
-    universalAuth:
-      secretsScope:
-        projectSlug: 
-        envSlug: # "dev", "staging", "prod", etc..
-        secretsPath: "" # Root is "/"
-        recursive: true # Fetch all secrets from the specified path and all sub-directories. Default is false.
-
-      credentialsRef:
-        secretName: universal-auth-credentials
-        secretNamespace: default
-    # Service tokens are deprecated and will be removed in the near future. Please use Machine Identities for authenticating with Infisical.
+    # (Deprecated) Service Token Auth
     serviceToken:
       serviceTokenSecretReference:
         secretName: service-token
         secretNamespace: default
       secretsScope:
         envSlug: 
-        secretsPath: 
-        recursive: true # Fetch all secrets from the specified path and all sub-directories. Default is false.
+        secretsPath: 
+        recursive: true
+
+    # Universal Auth
+    universalAuth:
+      secretsScope:
+        projectSlug: new-ob-em
+        envSlug: dev # "dev", "staging", "prod", etc..
+        secretsPath: "/" # Root is "/"
+        recursive: true # Whether or not to use recursive mode (fetches all secrets in an environment from a given secret path, and all folders inside the path) / defaults to false
+      credentialsRef:
+        secretName: universal-auth-credentials
+        secretNamespace: default
+
+    # Native Kubernetes Auth
+    kubernetesAuth:
+      identityId: 
+      serviceAccountRef:
+        name: 
+        namespace: 
+
+      # secretsScope is identical to the secrets scope in the universalAuth field in this sample.
+      secretsScope:
+        projectSlug: your-project-slug
+        envSlug: prod
+        secretsPath: "/path"
+        recursive: true
+
+    # AWS IAM Auth
+    awsIamAuth:
+      identityId: 
+
+      # secretsScope is identical to the secrets scope in the universalAuth field in this sample.
+      secretsScope:
+        projectSlug: your-project-slug
+        envSlug: prod
+        secretsPath: "/path"
+        recursive: true
+
+    # Azure Auth
+    azureAuth:
+      identityId: 
+      resource: https://management.azure.com/&client_id=CLIENT_ID # (Optional) This is the Azure resource that you want to access. For example, "https://management.azure.com/". If no value is provided, it will default to "https://management.azure.com/"
+
+      # secretsScope is identical to the secrets scope in the universalAuth field in this sample.
+      secretsScope:
+        projectSlug: your-project-slug
+        envSlug: prod
+        secretsPath: "/path"
+        recursive: true
+
+    # GCP ID Token Auth
+    gcpIdTokenAuth:
+      identityId: 
+
+      # secretsScope is identical to the secrets scope in the universalAuth field in this sample.
+      secretsScope:
+        projectSlug: your-project-slug
+        envSlug: prod
+        secretsPath: "/path"
+        recursive: true
+
+    # GCP IAM Auth
+    gcpIamAuth:
+      identityId: 
+
+      # secretsScope is identical to the secrets scope in the universalAuth field in this sample.
+      secretsScope:
+        projectSlug: your-project-slug
+        envSlug: prod
+        secretsPath: "/path"
+        recursive: true

   managedSecretReference:
     secretName: managed-secret
     secretNamespace: default
-    creationPolicy: "Orphan" ## Owner | Orphan (default)
+    creationPolicy: "Orphan" ## Owner | Orphan
     # secretType: kubernetes.io/dockerconfigjson
 ```

 ### InfisicalSecret CRD properties

-  If you are fetching secrets from a self hosted instance of Infisical set the value of `hostAPI` to
+  If you are fetching secrets from a self-hosted instance of Infisical, set the value of `hostAPI` to
  ` https://your-self-hosted-instance.com/api`

 When `hostAPI` is not defined, the operator fetches secrets from Infisical Cloud.

@@ -127,6 +192,31 @@ When `hostAPI` is not defined the operator fetches secrets from Infisical Cloud.
   available on paid plans. Default re-sync interval is every 1 minute.

+
+  This block defines the TLS settings to use for connecting to the Infisical
+  instance.
+
+
+
+  This block defines the reference to the CA certificate to use for connecting
+  to the Infisical instance with SSL/TLS.
+
+
+
+  The name of the Kubernetes secret containing the CA certificate to use for
+  connecting to the Infisical instance with SSL/TLS.
+ + + + The namespace of the Kubernetes secret containing the CA certificate to use + for connecting to the Infisical instance with SSL/TLS. + + + + The name of the key in the Kubernetes secret which contains the value of the + CA certificate to use for connecting to the Infisical instance with SSL/TLS. + + This block defines the method that will be used to authenticate with Infisical so that secrets can be fetched @@ -156,8 +246,6 @@ When `hostAPI` is not defined the operator fetches secrets from Infisical Cloud. -{" "} - Make sure to also populate the `secretsScope` field with the project slug _`projectSlug`_, environment slug _`envSlug`_, and secrets path @@ -187,13 +275,338 @@ spec: + + The Kubernetes machine identity authentication method is used to authenticate with Infisical. The identity ID is stored in a field in the InfisicalSecret resource. This authentication method can only be used within a Kubernetes environment. + + + + 1.1. Start by creating a service account in your Kubernetes cluster that will be used by Infisical to authenticate with the Kubernetes API Server. + + ```yaml infisical-service-account.yaml + apiVersion: v1 + kind: ServiceAccount + metadata: + name: infisical-auth + namespace: default + + ``` + + ``` + kubectl apply -f infisical-service-account.yaml + ``` + + 1.2. Bind the service account to the `system:auth-delegator` cluster role. As described [here](https://kubernetes.io/docs/reference/access-authn-authz/rbac/#other-component-roles), this role allows delegated authentication and authorization checks, specifically for Infisical to access the [TokenReview API](https://kubernetes.io/docs/reference/kubernetes-api/authentication-resources/token-review-v1/). You can apply the following configuration file: + + ```yaml cluster-role-binding.yaml + apiVersion: rbac.authorization.k8s.io/v1 + kind: ClusterRoleBinding + metadata: + name: role-tokenreview-binding + namespace: default + roleRef: + apiGroup: rbac.authorization.k8s.io + kind: ClusterRole + name: system:auth-delegator + subjects: + - kind: ServiceAccount + name: infisical-auth + namespace: default + ``` + + ``` + kubectl apply -f cluster-role-binding.yaml + ``` + + 1.3. Next, create a long-lived service account JWT token (i.e. the token reviewer JWT token) for the service account using this configuration file for a new `Secret` resource: + + ```yaml service-account-token.yaml + apiVersion: v1 + kind: Secret + type: kubernetes.io/service-account-token + metadata: + name: infisical-auth-token + annotations: + kubernetes.io/service-account.name: "infisical-auth" + ``` + + + ``` + kubectl apply -f service-account-token.yaml + ``` + + 1.4. Link the secret in step 1.3 to the service account in step 1.1: + + ```bash + kubectl patch serviceaccount infisical-auth -p '{"secrets": [{"name": "infisical-auth-token"}]}' -n default + ``` + + 1.5. Finally, retrieve the token reviewer JWT token from the secret. + + ```bash + kubectl get secret infisical-auth-token -n default -o=jsonpath='{.data.token}' | base64 --decode + ``` + + Keep this JWT token handy as you will need it for the **Token Reviewer JWT** field when configuring the Kubernetes Auth authentication method for the identity in step 2. + + + + + To create an identity, head to your Organization Settings > Access Control > Machine Identities and press **Create identity**. 
+ + ![identities organization](/images/platform/identities/identities-org.png) + + When creating an identity, you specify an organization level [role](/documentation/platform/role-based-access-controls) for it to assume; you can configure roles in Organization Settings > Access Control > Organization Roles. + + ![identities organization create](/images/platform/identities/identities-org-create.png) + + Now input a few details for your new identity. Here's some guidance for each field: + + - Name (required): A friendly name for the identity. + - Role (required): A role from the **Organization Roles** tab for the identity to assume. The organization role assigned will determine what organization level resources this identity can have access to. + + Once you've created an identity, you'll be prompted to configure the authentication method for it. Here, select **Kubernetes Auth**. + + + To learn more about each field of the Kubernetes native authentication method, see step 2 of [guide](/documentation/platform/identities/kubernetes-auth#guide). + + + ![identities organization create auth method](/images/platform/identities/identities-org-create-kubernetes-auth-method.png) + + + + + To allow the operator to use the given identity to access secrets, you will need to add the identity to project(s) that you would like to grant it access to. + + To do this, head over to the project you want to add the identity to and go to Project Settings > Access Control > Machine Identities and press **Add identity**. + + Next, select the identity you want to add to the project and the project level role you want to allow it to assume. The project role assigned will determine what project level resources this identity can have access to. + + ![identities project](/images/platform/identities/identities-project.png) + + ![identities project create](/images/platform/identities/identities-project-create.png) + + + + Once you have created your machine identity and added it to your project(s), you will need to add the identity ID to your InfisicalSecret resource. + In the `authentication.kubernetesAuth.identityId` field, add the identity ID of the machine identity you created. + See the example below for more details. + + + Add the service account details from the previous steps under `authentication.kubernetesAuth.serviceAccountRef`. + Here you will need to enter the name and namespace of the service account. + The example below shows a complete InfisicalSecret resource with all required fields defined. + + + + + + Make sure to also populate the `secretsScope` field with the project slug + _`projectSlug`_, environment slug _`envSlug`_, and secrets path + _`secretsPath`_ that you want to fetch secrets from. Please see the example + below. + + +## Example + +```yaml example-kubernetes-auth.yaml +apiVersion: secrets.infisical.com/v1alpha1 +kind: InfisicalSecret +metadata: + name: infisicalsecret-sample-crd +spec: + authentication: + kubernetesAuth: + identityId: + serviceAccountRef: + name: + namespace: + + # secretsScope is identical to the secrets scope in the universalAuth field in this sample. + secretsScope: + projectSlug: your-project-slug + envSlug: prod + secretsPath: "/path" + recursive: true + ... +``` + + + + + The AWS IAM machine identity authentication method is used to authenticate with Infisical. The identity ID is stored in a field in the InfisicalSecret resource. This authentication method can only be used within an AWS environment like an EC2 or a Lambda function. 
+ + + + You need to create a machine identity, and give it access to the project(s) you want to interact with. You can [read more about AWS machine identities here](/documentation/platform/identities/aws-auth). + + + Once you have created your machine identity and added it to your project(s), you will need to add the identity ID to your InfisicalSecret resource. In the `authentication.awsIamAuth.identityId` field, add the identity ID of the machine identity you created. See the example below for more details. + + + + + + Make sure to also populate the `secretsScope` field with the project slug + _`projectSlug`_, environment slug _`envSlug`_, and secrets path + _`secretsPath`_ that you want to fetch secrets from. Please see the example + below. + + +## Example + +```yaml example-aws-iam-auth.yaml +apiVersion: secrets.infisical.com/v1alpha1 +kind: InfisicalSecret +metadata: + name: infisicalsecret-sample-crd +spec: + authentication: + awsIamAuth: + identityId: + + # secretsScope is identical to the secrets scope in the universalAuth field in this sample. + secretsScope: + projectSlug: your-project-slug + envSlug: prod + secretsPath: "/path" + recursive: true + ... +``` + + + + + The Azure machine identity authentication method is used to authenticate with Infisical. The identity ID is stored in a field in the InfisicalSecret resource. This authentication method can only be used within an Azure environment. + + + + You need to create a machine identity, and give it access to the project(s) you want to interact with. You can [read more about Azure machine identities here](/documentation/platform/identities/azure-auth). + + + Once you have created your machine identity and added it to your project(s), you will need to add the identity ID to your InfisicalSecret resource. In the `authentication.azureAuth.identityId` field, add the identity ID of the machine identity you created. See the example below for more details. + + + + + + Make sure to also populate the `secretsScope` field with the project slug + _`projectSlug`_, environment slug _`envSlug`_, and secrets path + _`secretsPath`_ that you want to fetch secrets from. Please see the example + below. + + +## Example + +```yaml example-azure-auth.yaml +apiVersion: secrets.infisical.com/v1alpha1 +kind: InfisicalSecret +metadata: + name: infisicalsecret-sample-crd +spec: + authentication: + azureAuth: + identityId: + + # secretsScope is identical to the secrets scope in the universalAuth field in this sample. + secretsScope: + projectSlug: your-project-slug + envSlug: prod + secretsPath: "/path" + recursive: true + ... +``` + + + + + The GCP ID Token machine identity authentication method is used to authenticate with Infisical. The identity ID is stored in a field in the InfisicalSecret resource. This authentication method can only be used within GCP environments. + + + + You need to create a machine identity, and give it access to the project(s) you want to interact with. You can [read more about GCP machine identities here](/documentation/platform/identities/gcp-auth). + + + Once you have created your machine identity and added it to your project(s), you will need to add the identity ID to your InfisicalSecret resource. In the `authentication.gcpIdTokenAuth.identityId` field, add the identity ID of the machine identity you created. See the example below for more details. 
+
+
+
+
+  Make sure to also populate the `secretsScope` field with the project slug
+  _`projectSlug`_, environment slug _`envSlug`_, and secrets path
+  _`secretsPath`_ that you want to fetch secrets from. Please see the example
+  below.
+
+
+## Example
+
+```yaml example-gcp-id-token-auth.yaml
+apiVersion: secrets.infisical.com/v1alpha1
+kind: InfisicalSecret
+metadata:
+  name: infisicalsecret-sample-crd
+spec:
+  authentication:
+    gcpIdTokenAuth:
+      identityId: 
+
+      # secretsScope is identical to the secrets scope in the universalAuth field in this sample.
+      secretsScope:
+        projectSlug: your-project-slug
+        envSlug: prod
+        secretsPath: "/path"
+        recursive: true
+  ...
+```
+
+
+
+
+  The GCP IAM machine identity authentication method is used to authenticate with Infisical. The identity ID is stored in a field in the InfisicalSecret resource. This authentication method can be used both within and outside GCP environments.
+
+
+
+    You need to create a machine identity, and give it access to the project(s) you want to interact with. You can [read more about GCP machine identities here](/documentation/platform/identities/gcp-auth).
+
+
+    Once you have created your machine identity and added it to your project(s), you will need to add the identity ID to your InfisicalSecret resource. In the `authentication.gcpIamAuth.identityId` field, add the identity ID of the machine identity you created.
+    You'll also need to add the service account key file path to your InfisicalSecret resource. In the `authentication.gcpIamAuth.serviceAccountKeyFilePath` field, add the path to your service account key file. Please see the example below for more details.
+
+
+
+
+
+  Make sure to also populate the `secretsScope` field with the project slug
+  _`projectSlug`_, environment slug _`envSlug`_, and secrets path
+  _`secretsPath`_ that you want to fetch secrets from. Please see the example
+  below.
+
+
+## Example
+
+```yaml example-gcp-iam-auth.yaml
+apiVersion: secrets.infisical.com/v1alpha1
+kind: InfisicalSecret
+metadata:
+  name: infisicalsecret-sample-crd
+spec:
+  authentication:
+    gcpIamAuth:
+      identityId: 
+      serviceAccountKeyFilePath: "/path/to-service-account-key-file-path.json"
+
+      # secretsScope is identical to the secrets scope in the universalAuth field in this sample.
+      secretsScope:
+        projectSlug: your-project-slug
+        envSlug: prod
+        secretsPath: "/path"
+        recursive: true
+  ...
+```
+
+
-
-  Service tokens are being deprecated in favor of [machine identities](/documentation/platform/identities/machine-identities).
-
-They will be removed in the future in accordance with the deprecation notice and timeline stated [here](https://infisical.com/blog/deprecating-api-keys).
-
-
 The service token required to authenticate with Infisical needs to be stored in a Kubernetes secret. This block defines the reference to the name and namespace of the secret that stores this service token.
 Follow the instructions below to create and store the service token in a Kubernetes secret and reference it in your CRD.

@@ -484,6 +897,42 @@ spec:

+
+### Connecting to instances with private/self-signed certificate
+
+To connect to Infisical instances behind a private/self-signed certificate, you can configure the TLS settings in the `InfisicalSecret` CRD
+to point to a CA certificate stored in a Kubernetes secret resource.
+
+```yaml
+---
+spec:
+  hostAPI: https://app.infisical.com/api
+  resyncInterval: 10
+  tls:
+    caRef:
+      secretName: custom-ca-certificate
+      secretNamespace: default
+      key: ca.crt
+  authentication:
+---
+```
+
+The definition file of the Kubernetes secret for the CA certificate can be structured like the following:
+
+```yaml
+apiVersion: v1
+kind: Secret
+metadata:
+  name: custom-ca-certificate
+type: Opaque
+stringData:
+  ca.crt: |
+    -----BEGIN CERTIFICATE-----
+    MIIEZzCCA0+gAwIBAgIUDk9+HZcMHppiNy0TvoBg8/aMEqIwDQYJKoZIhvcNAQEL
+    ...
+    BQAwDTELMAkGA1UEChMCUEgwHhcNMjQxMDI1MTU0MjAzWhcNMjUxMDI1MjE0MjAz
+    -----END CERTIFICATE-----
+```
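+Alternatively, the same secret could be created straight from a certificate file with `kubectl` (a sketch assuming the CA certificate is stored at `./ca.crt` and that the `caRef` name and namespace above are kept):
+
+```bash
+# Creates a secret named custom-ca-certificate with a ca.crt key from a local file
+kubectl create secret generic custom-ca-certificate \
+  --from-file=ca.crt=./ca.crt \
+  --namespace default
+```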
 ## Auto redeployment

 Deployments using managed secrets don't reload automatically on updates, so they may use outdated secrets unless manually redeployed.

@@ -496,6 +945,7 @@ To enable auto redeployment you simply have to add the following annotation to t
 ```yaml
 secrets.infisical.com/auto-reload: "true"
 ```
+

 ```yaml
 apiVersion: apps/v1
diff --git a/docs/internals/permissions.mdx b/docs/internals/permissions.mdx
new file mode 100644
index 0000000000..1fce09defa
--- /dev/null
+++ b/docs/internals/permissions.mdx
@@ -0,0 +1,211 @@
+---
+title: "Permissions"
+description: "Infisical's permissions system provides granular access control."
+---
+
+## Overview
+
+The Infisical permissions system is based on a role-based access control (RBAC) model. The system allows you to define roles and assign them to users and machines. Each role has a set of permissions that define what actions a user can perform.
+
+Permissions are built on a subject-action-object model. The subject is the resource the permission applies to, and the action is what the permission allows.
+An example of a subject/action combination would be `secrets/read`, which allows reading secrets.
+
+Refer to the table below for a list of subjects and the actions they support.
+
+## Subjects and Actions
+
+
+
+
+
+  Not all actions are applicable to all subjects. For example, the
+  `secret-rollback` subject only supports `read` and `create` as actions,
+  while `secrets` supports `read`, `create`, `edit`, and `delete`.
+
+
+| Subject                   | Actions                                                                                                      |
+| ------------------------- | ------------------------------------------------------------------------------------------------------------ |
+| `role`                    | `read`, `create`, `edit`, `delete`                                                                           |
+| `member`                  | `read`, `create`, `edit`, `delete`                                                                           |
+| `groups`                  | `read`, `create`, `edit`, `delete`                                                                           |
+| `settings`                | `read`, `create`, `edit`, `delete`                                                                           |
+| `integrations`            | `read`, `create`, `edit`, `delete`                                                                           |
+| `webhooks`                | `read`, `create`, `edit`, `delete`                                                                           |
+| `service-tokens`          | `read`, `create`, `edit`, `delete`                                                                           |
+| `environments`            | `read`, `create`, `edit`, `delete`                                                                           |
+| `tags`                    | `read`, `create`, `edit`, `delete`                                                                           |
+| `audit-logs`              | `read`, `create`, `edit`, `delete`                                                                           |
+| `ip-allowlist`            | `read`, `create`, `edit`, `delete`                                                                           |
+| `workspace`               | `edit`, `delete`                                                                                             |
+| `secrets`                 | `read`, `create`, `edit`, `delete`                                                                           |
+| `secret-folders`          | `read`, `create`, `edit`, `delete`                                                                           |
+| `secret-imports`          | `read`, `create`, `edit`, `delete`                                                                           |
+| `dynamic-secrets`         | `read-root-credential`, `create-root-credential`, `edit-root-credential`, `delete-root-credential`, `lease`  |
+| `secret-rollback`         | `read`, `create`                                                                                             |
+| `secret-approval`         | `read`, `create`, `edit`, `delete`                                                                           |
+| `secret-rotation`         | `read`, `create`, `edit`, `delete`                                                                           |
+| `identity`                | `read`, `create`, `edit`, `delete`                                                                           |
+| `certificate-authorities` | `read`, `create`, `edit`, `delete`                                                                           |
+| `certificates`            | `read`, `create`, `edit`, `delete`                                                                           |
+| `certificate-templates`   | `read`, `create`, `edit`, `delete`                                                                           |
+| `pki-alerts`              | `read`, `create`, `edit`, `delete`                                                                           |
+| `pki-collections`         | `read`, `create`, `edit`, `delete`                                                                           |
+| `kms`                     | `edit`                                                                                                       |
+| `cmek`                    | `read`, `create`, `edit`, `delete`, `encrypt`, `decrypt`                                                     |
+
+
+
+
+
+  Not all actions are applicable to all subjects. For example, the `workspace`
+  subject only supports `read` and `create` as actions, while `member` supports
+  `read`, `create`, `edit`, and `delete`.
+
+
+| Subject            | Actions                            |
+| ------------------ | ---------------------------------- |
+| `workspace`        | `read`, `create`                   |
+| `role`             | `read`, `create`, `edit`, `delete` |
+| `member`           | `read`, `create`, `edit`, `delete` |
+| `secret-scanning`  | `read`, `create`, `edit`, `delete` |
+| `settings`         | `read`, `create`, `edit`, `delete` |
+| `incident-account` | `read`, `create`, `edit`, `delete` |
+| `sso`              | `read`, `create`, `edit`, `delete` |
+| `scim`             | `read`, `create`, `edit`, `delete` |
+| `ldap`             | `read`, `create`, `edit`, `delete` |
+| `groups`           | `read`, `create`, `edit`, `delete` |
+| `billing`          | `read`, `create`, `edit`, `delete` |
+| `identity`         | `read`, `create`, `edit`, `delete` |
+| `kms`              | `read`                             |
+
+
+
+
+## Inversion
+
+Permission inversion allows you to explicitly deny actions instead of allowing them. This is supported for the following subjects:
+
+- secrets
+- secret-folders
+- secret-imports
+- dynamic-secrets
+- cmek
+
+When a permission is inverted, it changes from an "allow" rule to a "deny" rule. For example:
+
+```typescript
+// Regular permission - allows reading secrets
+{
+  subject: "secrets",
+  action: ["read"]
+}
+
+// Inverted permission - denies reading secrets
+{
+  subject: "secrets",
+  action: ["read"],
+  inverted: true
+}
+```
+
+## Conditions
+
+Conditions allow you to create more granular permissions by specifying criteria that must be met for the permission to apply. This is supported for the following subjects:
+
+- secrets
+- secret-folders
+- secret-imports
+- dynamic-secrets
+
+### Properties
+
+Conditions can be applied to the following properties:
+
+- `environment`: Control access based on environment slugs
+- `secretPath`: Control access based on secret paths
+- `secretName`: Control access based on secret names
+- `secretTags`: Control access based on tags (only supports the `$in` operator)
+
+### Operators
+
+The following operators are available for conditions:
+
+| Operator | Description                        | Example                                               |
+| -------- | ---------------------------------- | ----------------------------------------------------- |
+| `$eq`    | Equal                              | `{ environment: { $eq: "production" } }`              |
+| `$ne`    | Not equal                          | `{ environment: { $ne: "development" } }`             |
+| `$in`    | Matches any value in array         | `{ environment: { $in: ["staging", "production"] } }` |
+| `$glob`  | Pattern matching using glob syntax | `{ secretPath: { $glob: "/app/*" } }`                 |
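+Putting these operators together, a single rule can combine several conditions. The sketch below is hypothetical and uses the same notation as the inversion examples above; the `conditions` field name is an assumption, and the environment and path values are made up:
+
+```typescript
+// Allow reading secrets only in "production" and only under paths matching /app/*
+{
+  subject: "secrets",
+  action: ["read"],
+  conditions: {                          // assumed field name for condition criteria
+    environment: { $eq: "production" },  // restrict by environment slug
+    secretPath: { $glob: "/app/*" }      // restrict by glob on the secret path
+  }
+}
+```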
+
+These details are especially useful if you're using the API to [create new project roles](../api-reference/endpoints/project-roles/create).
+The rules outlined on this page also apply when using our Terraform Provider to manage your Infisical project roles, or any of our other clients that manage project roles.
+
+## Migrating from permission V1 to permission V2
+
+When upgrading to V2 permissions (i.e. when moving from using the `permissions` to `permissions_v2` field in your Terraform configurations, or upgrading to the V2 permission API), you'll need to update your permission structure as follows:
+
+Any permissions for `secrets` should be expanded to include equivalent permissions for:
+
+- `secret-imports`
+- `secret-folders` (except for read permissions)
+- `dynamic-secrets`
+
+For dynamic secrets, the actions need to be mapped differently:
+
+- `read` → `read-root-credential`
+- `create` → `create-root-credential`
+- `edit` → `edit-root-credential` (also adds `lease` permission)
+- `delete` → `delete-root-credential`
+
+Example:
+
+```hcl
+# Old V1 configuration
+resource "infisical_project_role" "example" {
+  name = "example"
+  permissions = [
+    {
+      subject = "secrets"
+      action  = "read"
+    },
+    {
+      subject = "secrets"
+      action  = "edit"
+    }
+  ]
+}
+
+# New V2 configuration
+resource "infisical_project_role" "example" {
+  name = "example"
+  permissions_v2 = [
+    # Original secrets permission
+    {
+      subject  = "secrets"
+      action   = ["read", "edit"]
+      inverted = false
+    },
+    # Add equivalent secret-imports permission
+    {
+      subject  = "secret-imports"
+      action   = ["read", "edit"]
+      inverted = false
+    },
+    # Add secret-folders permission (without read)
+    {
+      subject  = "secret-folders"
+      action   = ["edit"]
+      inverted = false
+    },
+    # Add dynamic-secrets permission with mapped actions
+    {
+      subject  = "dynamic-secrets"
+      action   = ["read-root-credential", "edit-root-credential", "lease"]
+      inverted = false
+    }
+  ]
+}
+```
+
+Note: When moving to V2 permissions, make sure to include all the necessary expanded permissions based on your original `secrets` permissions.
diff --git a/docs/internals/security.mdx b/docs/internals/security.mdx
index 1b0fb9f325..02b6fd9fb1 100644
--- a/docs/internals/security.mdx
+++ b/docs/internals/security.mdx
@@ -79,10 +79,9 @@ Infisical uses AES-256-GCM for symmetric encryption and x25519-xsalsa20-poly1305
 By default, Infisical employs a zero-knowledge-first approach to securely storing and sharing secrets.

-  Each secret belongs to a project and is symmetrically encrypted by that project's unique key.
Each member of a project is shared a copy of the project key, encrypted under their public key, when they are first invited to join the project. -Since these encryption operations occur on the client-side, the Infisical API is not able to view the value of any secret and the default zero-knowledge property of Infisical is retained; as you'd expect, it follows that decryption operations also occur on the client-side. + Since these encryption operations occur on the client-side, the Infisical API is not able to view the value of any secret and the default zero-knowledge property of Infisical is retained; as you'd expect, it follows that decryption operations also occur on the client-side. - An exception to the zero-knowledge property occurs when a member of a project explicitly shares that project's unique key with Infisical. It is often necessary to share the project key with Infisical in order to use features like native integrations and secret rotation that wouldn't be possible to offer otherwise. - ## Infrastructure ### High availability @@ -90,19 +89,22 @@ Since these encryption operations occur on the client-side, the Infisical API is Infisical Cloud utilizes several strategies to ensure high availability, leveraging AWS services to maintain continuous operation and data integrity. #### Multi-AZ AWS RDS -Infisical Cloud uses AWS Relational Database Service (RDS) with Multi-AZ deployments. -This configuration ensures that the database service is highly available and durable. -AWS RDS automatically provisions and maintains a synchronous standby replica of the database in a different Availability Zone (AZ). -This setup facilitates immediate failover to the standby in the event of an AZ failure, thereby ensuring that database operations can continue with minimal interruption. + +Infisical Cloud uses AWS Relational Database Service (RDS) with Multi-AZ deployments. +This configuration ensures that the database service is highly available and durable. +AWS RDS automatically provisions and maintains a synchronous standby replica of the database in a different Availability Zone (AZ). +This setup facilitates immediate failover to the standby in the event of an AZ failure, thereby ensuring that database operations can continue with minimal interruption. The continuous backup and replication to the standby instance safeguard data against loss and ensure its availability even during system failures. #### Multi-AZ ECS for Container Orchestration -Infisical Cloud leverages Amazon Elastic Container Service (ECS) in a Multi-AZ configuration for container orchestration. -This arrangement enables the management and operation of containers across multiple availability zones, increasing the application's fault tolerance. -Should there be an AZ failure, load is seamlessly sent to an operational AZ, thus minimizing downtime and preserving service availability. + +Infisical Cloud leverages Amazon Elastic Container Service (ECS) in a Multi-AZ configuration for container orchestration. +This arrangement enables the management and operation of containers across multiple availability zones, increasing the application's fault tolerance. +Should there be an AZ failure, load is seamlessly sent to an operational AZ, thus minimizing downtime and preserving service availability. #### Standby Regions for Regional Failover -To fight regional outages, secondary regions are always in standby mode and maintained with up-to-date configurations and data, ready to take over in case the primary region fails. 
+
+To withstand regional outages, secondary regions are always kept in standby mode and maintained with up-to-date configurations and data, ready to take over in case the primary region fails.
 The standby regions enable a rapid transition and service continuity with minimal disruption in the event of a complete regional failure, ensuring that Infisical Cloud services remain accessible.

 ### Snapshots

@@ -127,7 +129,7 @@ JWT tokens are stored in browser memory and appended to outbound requests requir

 ### User authentication

-Infisical supports several authentication methods including email/password, Google SSO, GitHub SSO, and SAML 2.0 (Okta, Azure, JumpCloud); Infisical also currently offers email-based 2FA with authenticator app methods coming in Q1 2024.
+Infisical supports several authentication methods, including email/password, Google SSO, GitHub SSO, SAML 2.0 (Okta, Azure, JumpCloud), and OpenID Connect; Infisical also currently offers email-based 2FA, with authenticator app methods coming in Q1 2024.

 Infisical uses the [secure remote password protocol](https://en.wikipedia.org/wiki/Secure_Remote_Password_protocol#:~:text=The%20SRP%20protocol%20has%20a,the%20user%20to%20the%20server), commonly found in other zero-knowledge platform architectures, for authentication.
 Put simply, the protocol enables Infisical to validate a user's knowledge of their password without ever seeing it by constructing a mutual secret; we use this protocol because each user's password is used to seed the generation of a master encryption/decryption key via KDF for that user which the platform

@@ -141,6 +143,7 @@ Lastly, Infisical enforces strong password requirements according to the guidanc
 to access the platform. We strongly encourage users to generate and store their passwords / master decryption key in a password manager, such as 1Password, Bitwarden, or Dashlane.
+

 ## Role-based access control (RBAC)

@@ -172,7 +175,7 @@ Please email security@infisical.com to request any reports including a letter of
 Whether or not Infisical or your employees can access data in the Infisical instance and/or storage backend depends on many factors, including how you use Infisical:

 - Infisical Self-Hosted: Self-hosting Infisical is common amongst organizations that prefer to keep data on their own infrastructure, usually to adhere to strict regulatory and compliance requirements. In this option, organizations retain full control over their data and therefore govern the data access policy of their Infisical instance and storage backend.
-- Infisical Cloud: Using Infisical's managed service, [Infisical Cloud](https://app.infisical.com) means delegating data oversight and management to Infisical. Under our policy controls, employees are only granted access to parts of infrastructure according to principle of least privilege; this is especially relevant to customer data can only be accessed currently by executive management of Infisical. Moreover, any changes to sensitive customer data is prohibited without explicit customer approval.
+- Infisical Cloud: Using Infisical's managed service, [Infisical Cloud](https://app.infisical.com), means delegating data oversight and management to Infisical. Under our policy controls, employees are only granted access to parts of the infrastructure according to the principle of least privilege; this is especially relevant to customer data, which can currently only be accessed by the executive management of Infisical. Moreover, any changes to sensitive customer data are prohibited without explicit customer approval.
It should be noted that, even on Infisical Cloud, it is physically impossible for employees of Infisical to view the values of secrets if users have not explicitly granted Infisical access to their project (i.e. opted out of zero-knowledge). diff --git a/docs/internals/service-tokens.mdx b/docs/internals/service-tokens.mdx index 39569326ad..c48afd6230 100644 --- a/docs/internals/service-tokens.mdx +++ b/docs/internals/service-tokens.mdx @@ -2,13 +2,6 @@ title: "Service tokens" description: "Understanding service tokens and their best practices." --- - - - Service tokens are being deprecated in favor of [machine identities](/documentation/platform/identities/machine-identities). - -They will be removed in the future in accordance with the deprecation notice and timeline stated [here](https://infisical.com/blog/deprecating-api-keys). - - ​ Many clients use service tokens to authenticate and read/write secrets from/to Infisical; they can be created in your project settings. diff --git a/docs/mint.json b/docs/mint.json index 3103bb59a3..f070ae88a2 100644 --- a/docs/mint.json +++ b/docs/mint.json @@ -82,7 +82,8 @@ "documentation/guides/node", "documentation/guides/python", "documentation/guides/nextjs-vercel", - "documentation/guides/microsoft-power-apps" + "documentation/guides/microsoft-power-apps", + "documentation/guides/organization-structure" ] } ] @@ -102,6 +103,33 @@ "documentation/platform/webhooks" ] }, + { + "group": "Internal PKI", + "pages": [ + "documentation/platform/pki/overview", + "documentation/platform/pki/private-ca", + "documentation/platform/pki/certificates", + "documentation/platform/pki/pki-issuer", + "documentation/platform/pki/est", + "documentation/platform/pki/alerting" + ] + }, + { + "group": "Key Management (KMS)", + "pages": [ + "documentation/platform/kms/overview", + "documentation/platform/kms/hsm-integration", + "documentation/platform/kms/kubernetes-encryption" + ] + }, + { + "group": "KMS Configuration", + "pages": [ + "documentation/platform/kms-configuration/overview", + "documentation/platform/kms-configuration/aws-kms", + "documentation/platform/kms-configuration/aws-hsm" + ] + }, { "group": "Identities", "pages": [ @@ -115,15 +143,22 @@ "pages": [ "documentation/platform/access-controls/overview", "documentation/platform/access-controls/role-based-access-controls", + "documentation/platform/access-controls/attribute-based-access-controls", "documentation/platform/access-controls/additional-privileges", "documentation/platform/access-controls/temporary-access", "documentation/platform/access-controls/access-requests", "documentation/platform/pr-workflows", - "documentation/platform/audit-logs", - "documentation/platform/audit-log-streams", "documentation/platform/groups" ] }, + { + "group": "Audit Logs", + "pages": [ + "documentation/platform/audit-logs", + "documentation/platform/audit-log-streams/audit-log-streams", + "documentation/platform/audit-log-streams/audit-log-streams-with-fluentbit" + ] + }, { "group": "Secret Rotation", "pages": [ @@ -131,6 +166,7 @@ "documentation/platform/secret-rotation/sendgrid", "documentation/platform/secret-rotation/postgres", "documentation/platform/secret-rotation/mysql", + "documentation/platform/secret-rotation/mssql", "documentation/platform/secret-rotation/aws-iam" ] }, @@ -140,9 +176,35 @@ "documentation/platform/dynamic-secrets/overview", "documentation/platform/dynamic-secrets/postgresql", "documentation/platform/dynamic-secrets/mysql", + "documentation/platform/dynamic-secrets/mssql", 
"documentation/platform/dynamic-secrets/oracle", "documentation/platform/dynamic-secrets/cassandra", - "documentation/platform/dynamic-secrets/aws-iam" + "documentation/platform/dynamic-secrets/redis", + "documentation/platform/dynamic-secrets/aws-elasticache", + "documentation/platform/dynamic-secrets/elastic-search", + "documentation/platform/dynamic-secrets/rabbit-mq", + "documentation/platform/dynamic-secrets/aws-iam", + "documentation/platform/dynamic-secrets/mongo-atlas", + "documentation/platform/dynamic-secrets/mongo-db", + "documentation/platform/dynamic-secrets/azure-entra-id", + "documentation/platform/dynamic-secrets/ldap", + "documentation/platform/dynamic-secrets/sap-hana", + "documentation/platform/dynamic-secrets/snowflake" + ] + }, + "documentation/platform/project-templates", + { + "group": "Workflow Integrations", + "pages": [ + "documentation/platform/workflow-integrations/slack-integration" + ] + }, + { + "group": "Admin Consoles", + "pages": [ + "documentation/platform/admin-panel/overview", + "documentation/platform/admin-panel/server-admin", + "documentation/platform/admin-panel/org-admin-console" ] }, "documentation/platform/secret-sharing" @@ -153,11 +215,21 @@ "pages": [ "documentation/platform/auth-methods/email-password", "documentation/platform/token", + "documentation/platform/identities/token-auth", "documentation/platform/identities/universal-auth", "documentation/platform/identities/kubernetes-auth", "documentation/platform/identities/gcp-auth", "documentation/platform/identities/azure-auth", "documentation/platform/identities/aws-auth", + { + "group": "OIDC Auth", + "pages": [ + "documentation/platform/identities/oidc-auth/general", + "documentation/platform/identities/oidc-auth/github", + "documentation/platform/identities/oidc-auth/circleci", + "documentation/platform/identities/oidc-auth/gitlab" + ] + }, "documentation/platform/mfa", { "group": "SSO", @@ -170,7 +242,10 @@ "documentation/platform/sso/azure", "documentation/platform/sso/jumpcloud", "documentation/platform/sso/keycloak-saml", - "documentation/platform/sso/google-saml" + "documentation/platform/sso/google-saml", + "documentation/platform/sso/keycloak-oidc", + "documentation/platform/sso/auth0-oidc", + "documentation/platform/sso/general-oidc" ] }, { @@ -187,7 +262,8 @@ "documentation/platform/scim/overview", "documentation/platform/scim/okta", "documentation/platform/scim/azure", - "documentation/platform/scim/jumpcloud" + "documentation/platform/scim/jumpcloud", + "documentation/platform/scim/group-mappings" ] } ] @@ -197,7 +273,7 @@ "pages": [ "self-hosting/overview", { - "group": "Installation methods", + "group": "Containerized installation methods", "pages": [ "self-hosting/deployment-options/standalone-infisical", "self-hosting/deployment-options/docker-swarm", @@ -211,12 +287,16 @@ "group": "Guides", "pages": [ "self-hosting/configuration/schema-migrations", - "self-hosting/guides/mongo-to-postgres" + "self-hosting/guides/mongo-to-postgres", + "self-hosting/guides/custom-certificates" ] }, { "group": "Reference architectures", - "pages": ["self-hosting/reference-architectures/aws-ecs"] + "pages": [ + "self-hosting/reference-architectures/aws-ecs", + "self-hosting/reference-architectures/linux-deployment-ha" + ] }, "self-hosting/ee", "self-hosting/faq" @@ -293,6 +373,8 @@ }, "integrations/cloud/vercel", "integrations/cloud/azure-key-vault", + "integrations/cloud/azure-app-configuration", + "integrations/cloud/azure-devops", "integrations/cloud/gcp-secret-manager", { "group": 
"Cloudflare", @@ -301,20 +383,21 @@ "integrations/cloud/cloudflare-workers" ] }, - "integrations/cloud/heroku", - "integrations/cloud/render", + "integrations/cloud/terraform-cloud", + "integrations/cloud/databricks", { "group": "View more", "pages": [ "integrations/cloud/digital-ocean-app-platform", + "integrations/cloud/heroku", "integrations/cloud/netlify", "integrations/cloud/railway", "integrations/cloud/flyio", + "integrations/cloud/render", "integrations/cloud/laravel-forge", "integrations/cloud/supabase", "integrations/cloud/northflank", "integrations/cloud/hasura-cloud", - "integrations/cloud/terraform-cloud", "integrations/cloud/qovery", "integrations/cloud/hashicorp-vault", "integrations/cloud/cloud-66", @@ -385,6 +468,7 @@ "sdks/languages/node", "sdks/languages/python", "sdks/languages/go", + "sdks/languages/ruby", "sdks/languages/java", "sdks/languages/csharp" ] @@ -408,7 +492,22 @@ "pages": [ "api-reference/endpoints/identities/create", "api-reference/endpoints/identities/update", - "api-reference/endpoints/identities/delete" + "api-reference/endpoints/identities/delete", + "api-reference/endpoints/identities/get-by-id", + "api-reference/endpoints/identities/list" + ] + }, + { + "group": "Token Auth", + "pages": [ + "api-reference/endpoints/token-auth/attach", + "api-reference/endpoints/token-auth/retrieve", + "api-reference/endpoints/token-auth/update", + "api-reference/endpoints/token-auth/revoke", + "api-reference/endpoints/token-auth/get-tokens", + "api-reference/endpoints/token-auth/create-token", + "api-reference/endpoints/token-auth/update-token", + "api-reference/endpoints/token-auth/revoke-token" ] }, { @@ -418,13 +517,78 @@ "api-reference/endpoints/universal-auth/attach", "api-reference/endpoints/universal-auth/retrieve", "api-reference/endpoints/universal-auth/update", + "api-reference/endpoints/universal-auth/revoke", "api-reference/endpoints/universal-auth/create-client-secret", "api-reference/endpoints/universal-auth/list-client-secrets", "api-reference/endpoints/universal-auth/revoke-client-secret", + "api-reference/endpoints/universal-auth/get-client-secret-by-id", "api-reference/endpoints/universal-auth/renew-access-token", "api-reference/endpoints/universal-auth/revoke-access-token" ] }, + { + "group": "GCP Auth", + "pages": [ + "api-reference/endpoints/gcp-auth/login", + "api-reference/endpoints/gcp-auth/attach", + "api-reference/endpoints/gcp-auth/retrieve", + "api-reference/endpoints/gcp-auth/update", + "api-reference/endpoints/gcp-auth/revoke" + ] + }, + { + "group": "AWS Auth", + "pages": [ + "api-reference/endpoints/aws-auth/login", + "api-reference/endpoints/aws-auth/attach", + "api-reference/endpoints/aws-auth/retrieve", + "api-reference/endpoints/aws-auth/update", + "api-reference/endpoints/aws-auth/revoke" + ] + }, + { + "group": "Azure Auth", + "pages": [ + "api-reference/endpoints/azure-auth/login", + "api-reference/endpoints/azure-auth/attach", + "api-reference/endpoints/azure-auth/retrieve", + "api-reference/endpoints/azure-auth/update", + "api-reference/endpoints/azure-auth/revoke" + ] + }, + { + "group": "Kubernetes Auth", + "pages": [ + "api-reference/endpoints/kubernetes-auth/login", + "api-reference/endpoints/kubernetes-auth/attach", + "api-reference/endpoints/kubernetes-auth/retrieve", + "api-reference/endpoints/kubernetes-auth/update", + "api-reference/endpoints/kubernetes-auth/revoke" + ] + }, + { + "group": "OIDC Auth", + "pages": [ + "api-reference/endpoints/oidc-auth/login", + "api-reference/endpoints/oidc-auth/attach", + 
"api-reference/endpoints/oidc-auth/retrieve", + "api-reference/endpoints/oidc-auth/update", + "api-reference/endpoints/oidc-auth/revoke" + ] + }, + { + "group": "Groups", + "pages": [ + "api-reference/endpoints/groups/create", + "api-reference/endpoints/groups/update", + "api-reference/endpoints/groups/delete", + "api-reference/endpoints/groups/get", + "api-reference/endpoints/groups/get-by-id", + "api-reference/endpoints/groups/add-group-user", + "api-reference/endpoints/groups/remove-group-user", + "api-reference/endpoints/groups/list-group-users" + ] + }, { "group": "Organizations", "pages": [ @@ -456,6 +620,16 @@ "api-reference/endpoints/project-users/update-membership" ] }, + { + "group": "Project Groups", + "pages": [ + "api-reference/endpoints/project-groups/create", + "api-reference/endpoints/project-groups/delete", + "api-reference/endpoints/project-groups/get-by-id", + "api-reference/endpoints/project-groups/list", + "api-reference/endpoints/project-groups/update" + ] + }, { "group": "Project Identities", "pages": [ @@ -476,6 +650,16 @@ "api-reference/endpoints/project-roles/list" ] }, + { + "group": "Project Templates", + "pages": [ + "api-reference/endpoints/project-templates/create", + "api-reference/endpoints/project-templates/update", + "api-reference/endpoints/project-templates/delete", + "api-reference/endpoints/project-templates/get-by-id", + "api-reference/endpoints/project-templates/list" + ] + }, { "group": "Environments", "pages": [ @@ -488,6 +672,7 @@ "group": "Folders", "pages": [ "api-reference/endpoints/folders/list", + "api-reference/endpoints/folders/get-by-id", "api-reference/endpoints/folders/create", "api-reference/endpoints/folders/update", "api-reference/endpoints/folders/delete" @@ -497,7 +682,10 @@ "group": "Secret Tags", "pages": [ "api-reference/endpoints/secret-tags/list", + "api-reference/endpoints/secret-tags/get-by-id", + "api-reference/endpoints/secret-tags/get-by-slug", "api-reference/endpoints/secret-tags/create", + "api-reference/endpoints/secret-tags/update", "api-reference/endpoints/secret-tags/delete" ] }, @@ -516,6 +704,21 @@ "api-reference/endpoints/secrets/detach-tags" ] }, + { + "group": "Dynamic Secrets", + "pages": [ + "api-reference/endpoints/dynamic-secrets/create", + "api-reference/endpoints/dynamic-secrets/update", + "api-reference/endpoints/dynamic-secrets/delete", + "api-reference/endpoints/dynamic-secrets/get", + "api-reference/endpoints/dynamic-secrets/list", + "api-reference/endpoints/dynamic-secrets/list-leases", + "api-reference/endpoints/dynamic-secrets/create-lease", + "api-reference/endpoints/dynamic-secrets/delete-lease", + "api-reference/endpoints/dynamic-secrets/renew-lease", + "api-reference/endpoints/dynamic-secrets/get-lease" + ] + }, { "group": "Secret Imports", "pages": [ @@ -560,10 +763,93 @@ } ] }, + { + "group": "Infisical PKI", + "pages": [ + { + "group": "Certificate Authorities", + "pages": [ + "api-reference/endpoints/certificate-authorities/list", + "api-reference/endpoints/certificate-authorities/create", + "api-reference/endpoints/certificate-authorities/read", + "api-reference/endpoints/certificate-authorities/update", + "api-reference/endpoints/certificate-authorities/delete", + "api-reference/endpoints/certificate-authorities/renew", + "api-reference/endpoints/certificate-authorities/list-ca-certs", + "api-reference/endpoints/certificate-authorities/csr", + "api-reference/endpoints/certificate-authorities/cert", + "api-reference/endpoints/certificate-authorities/sign-intermediate", + 
"api-reference/endpoints/certificate-authorities/import-cert", + "api-reference/endpoints/certificate-authorities/issue-cert", + "api-reference/endpoints/certificate-authorities/sign-cert", + "api-reference/endpoints/certificate-authorities/crl" + ] + }, + { + "group": "Certificates", + "pages": [ + "api-reference/endpoints/certificates/list", + "api-reference/endpoints/certificates/read", + "api-reference/endpoints/certificates/revoke", + "api-reference/endpoints/certificates/delete", + "api-reference/endpoints/certificates/cert-body", + "api-reference/endpoints/certificates/issue-certificate", + "api-reference/endpoints/certificates/sign-certificate" + ] + }, + { + "group": "Certificate Templates", + "pages": [ + "api-reference/endpoints/certificate-templates/create", + "api-reference/endpoints/certificate-templates/update", + "api-reference/endpoints/certificate-templates/get-by-id", + "api-reference/endpoints/certificate-templates/delete" + ] + }, + { + "group": "Certificate Collections", + "pages": [ + "api-reference/endpoints/pki-collections/create", + "api-reference/endpoints/pki-collections/read", + "api-reference/endpoints/pki-collections/update", + "api-reference/endpoints/pki-collections/delete", + "api-reference/endpoints/pki-collections/add-item", + "api-reference/endpoints/pki-collections/list-items", + "api-reference/endpoints/pki-collections/delete-item" + ] + }, + { + "group": "PKI Alerting", + "pages": [ + "api-reference/endpoints/pki-alerts/create", + "api-reference/endpoints/pki-alerts/read", + "api-reference/endpoints/pki-alerts/update", + "api-reference/endpoints/pki-alerts/delete" + ] + } + ] + }, + { + "group": "Infisical KMS", + "pages": [ + { + "group": "Keys", + "pages": [ + "api-reference/endpoints/kms/keys/list", + "api-reference/endpoints/kms/keys/create", + "api-reference/endpoints/kms/keys/update", + "api-reference/endpoints/kms/keys/delete", + "api-reference/endpoints/kms/keys/encrypt", + "api-reference/endpoints/kms/keys/decrypt" + ] + } + ] + }, { "group": "Internals", "pages": [ "internals/overview", + "internals/permissions", "internals/components", "internals/flows", "internals/security", @@ -603,5 +889,171 @@ ], "integrations": { "intercom": "hsg644ru" + }, + "analytics": { + "koala": { + "publicApiKey": "pk_b50d7184e0e39ddd5cdb43cf6abeadd9b97d" + } + }, + "footer": { + "socials": { + "x": "https://www.twitter.com/infisical/", + "linkedin": "https://www.linkedin.com/company/infisical/", + "github": "https://github.com/Infisical/infisical-cli", + "slack": "https://infisical.com/slack" + }, + "links": [ + { + "title": "PRODUCT", + "links": [ + { "label": "Secret Management", "url": "https://infisical.com/" }, + { "label": "Secret Scanning", "url": "https://infisical.com/radar" }, + { + "label": "Share Secrets", + "url": "https://app.infisical.com/share-secret" + }, + { "label": "Pricing", "url": "https://infisical.com/pricing" }, + { + "label": "Security", + "url": "https://infisical.com/docs/internals/security" + }, + { + "label": "Blog", + "url": "https://infisical.com/blog" + }, + { + "label": "Infisical vs Vault", + "url": "https://infisical.com/infisical-vs-hashicorp-vault" + }, + { + "label": "Forum", + "url": "https://questions.infisical.com/" + } + ] + }, + { + "title": "USE CASES", + "links": [ + { + "label": "Infisical Agent", + "url": "https://infisical.com/docs/documentation/getting-started/introduction" + }, + { + "label": "Kubernetes", + "url": "https://infisical.com/docs/integrations/platforms/kubernetes" + }, + { + "label": "Dynamic 
Secrets", + "url": "https://infisical.com/docs/documentation/platform/dynamic-secrets/overview" + }, + { + "label": "Terraform", + "url": "https://infisical.com/docs/integrations/frameworks/terraform" + }, + { + "label": "Ansible", + "url": "https://infisical.com/docs/integrations/platforms/ansible" + }, + { + "label": "Jenkins", + "url": "https://infisical.com/docs/integrations/cicd/jenkins" + }, + { + "label": "Docker", + "url": "https://infisical.com/docs/integrations/platforms/docker-intro" + }, + { + "label": "AWS ECS", + "url": "https://infisical.com/docs/integrations/platforms/ecs-with-agent" + }, + { + "label": "GitLab", + "url": "https://infisical.com/docs/integrations/cicd/gitlab" + }, + { + "label": "GitHub", + "url": "https://infisical.com/docs/integrations/cicd/githubactions" + }, + { + "label": "SDK", + "url": "https://infisical.com/docs/sdks/overview" + } + ] + }, + { + "title": "DEVELOPERS", + "links": [ + { + "label": "Changelog", + "url": "https://www.infisical.com/docs/changelog" + }, + { + "label": "Status", + "url": "https://status.infisical.com/" + }, + { + "label": "Feedback & Requests", + "url": "https://github.com/Infisical/infisical/issues" + }, + { + "label": "Trust of Center", + "url": "https://app.vanta.com/infisical.com/trust/hoop8cr78cuarxo9sztvs" + }, + { + "label": "Open Source Friends", + "url": "https://infisical.com/infisical-friends" + }, + { + "label": "How to contribute", + "url": "https://www.infisical.com/infisical-heroes" + } + ] + }, + { + "title": "OTHERS", + "links": [ + { + "label": "Customers", + "url": "https://infisical.com/customers/traba" + }, + { + "label": "Company Handbook", + "url": "https://infisical.com/wiki/handbook/overview" + }, + { + "label": "Careers", + "url": "https://infisical.com/careers" + }, + { + "label": "Terms of Service", + "url": "https://infisical.com/terms" + }, + { + "label": "Privacy Policy", + "url": "https://infisical.com/privacy" + }, + { + "label": "Subprocessors", + "url": "https://infisical.com/subprocessors" + }, + { + "label": "SLA", + "url": "https://infisical.com/sla" + }, + { + "label": "Team Email", + "url": "mailto:team@infisical.com" + }, + { + "label": "Sales", + "url": "mailto:sales@infisical.com" + }, + { + "label": "Support", + "url": "https://infisical.com/slack" + } + ] + } + ] } } diff --git a/docs/sdks/languages/csharp.mdx b/docs/sdks/languages/csharp.mdx index 90351a986c..e6cfd7f19d 100644 --- a/docs/sdks/languages/csharp.mdx +++ b/docs/sdks/languages/csharp.mdx @@ -118,6 +118,10 @@ namespace Example Your self-hosted absolute site URL including the protocol (e.g. `https://app.infisical.com`) + + Optionally provide a path to a custom SSL certificate file. This can be substituted by setting the `INFISICAL_SSL_CERTIFICATE` environment variable to the contents of the certificate. + + The authentication object to use for the client. This is required unless you're using environment variables. @@ -375,6 +379,13 @@ By default, `GetSecret()` fetches and returns a shared secret. The type of the secret. Valid options are "shared" or "personal". If not specified, the default value is "shared". + + Whether or not to include imported secrets from the current path. Read about [secret import](/documentation/platform/secret-reference) + + + Whether or not to expand secret references in the fetched secrets. 
Read about [secret reference](/documentation/platform/secret-reference) + + diff --git a/docs/sdks/languages/go.mdx b/docs/sdks/languages/go.mdx index e60a578bcb..18fabf64ef 100644 --- a/docs/sdks/languages/go.mdx +++ b/docs/sdks/languages/go.mdx @@ -9,31 +9,27 @@ icon: "golang" If you're working with Go Lang, the official [Infisical Go SDK](https://github.com/infisical/go-sdk) package is the easiest way to fetch and work with secrets for your application. - [Package](https://pkg.go.dev/github.com/infisical/go-sdk) -- [Github Repository](https://github.com/infiscial/go-sdk) +- [Github Repository](https://github.com/infisical/go-sdk) -## Basic Usage +# Basic Usage ```go package main import ( - "fmt" - "os" - - infisical "github.com/infisical/go-sdk" + "fmt" + "os" + "context" + infisical "github.com/infisical/go-sdk" ) func main() { - client, err := infisical.NewInfisicalClient(infisical.Config{ + client := infisical.NewInfisicalClient(context.Background(), infisical.Config{ SiteUrl: "https://app.infisical.com", // Optional, default is https://app.infisical.com + AutoTokenRefresh: true, // Whether or not to let the SDK handle the access token lifecycle. Defaults to true if not specified. }) - if err != nil { - fmt.Printf("Error: %v", err) - os.Exit(1) - } - _, err = client.Auth().UniversalAuthLogin("YOUR_CLIENT_ID", "YOUR_CLIENT_SECRET") if err != nil { @@ -69,37 +65,68 @@ This example demonstrates how to use the Infisical Go SDK in a simple Go applica ```console $ go get github.com/infisical/go-sdk ``` + # Configuration Import the SDK and create a client instance. ```go -client, err := infisical.NewInfisicalClient(infisical.Config{ +client := infisical.NewInfisicalClient(context.Background(), infisical.Config{ SiteUrl: "https://app.infisical.com", // Optional, default is https://api.infisical.com }) - -if err != nil { - fmt.Printf("Error: %v", err) - os.Exit(1) -} ``` -### ClientSettings methods + ### Configuration Options - - The URL of the Infisical API. Default is `https://api.infisical.com`. + + The URL of the Infisical API. - + Optionally set the user agent that will be used for HTTP requests. _(Not recommended)_ + + + Whether or not to let the SDK handle the access token lifecycle. Defaults to true if not specified. + + + + Whether or not to suppress logs such as warnings from the token refreshing process. Defaults to false if not specified. + -### Authentication +# Automatic token refreshing + +The Infisical Go SDK supports automatic token refreshing. After using one of the auth methods such as Universal Auth, the SDK will automatically renew and re-authenticate when needed. +This behavior is enabled by default, but you can opt out by setting `AutoTokenRefresh` to `false` in the client settings. + +```go + client := infisical.NewInfisicalClient(context.Background(), infisical.Config{ + AutoTokenRefresh: false, // <- Disable automatic token refreshing + }) +``` + +When using automatic token refreshing, it's important to understand how your application uses the Infisical client. If you are instantiating new instances of the client often, it's important to cancel the context when the client is no longer needed so that the token refreshing process does not run indefinitely.
+ +```go + ctx, cancel := context.WithCancel(context.Background()) + defer cancel() // Cancel the context when the client is no longer needed + + client := infisical.NewInfisicalClient(ctx, infisical.Config{ + AutoTokenRefresh: true, + }) + + // Use the client +``` + +This is only necessary if you are creating multiple instances of the client, and those instances are deleted or otherwise removed throughout the application lifecycle. +If you are only creating one instance of the client, and it will be used throughout the lifetime of your application, you don't need to worry about this. + +# Authentication The SDK supports a variety of authentication methods. The most common authentication method is Universal Auth, which uses a client ID and client secret to authenticate. @@ -232,9 +259,12 @@ if err != nil { } ``` -## Working with Secrets +## Working With Secrets -### client.Secrets().List(options) +### List Secrets +`client.Secrets().List(options)` + +Retrieve all secrets within the Infisical project and environment that the client is connected to. ```go secrets, err := client.Secrets().List(infisical.ListSecretsOptions{ @@ -245,9 +275,7 @@ secrets, err := client.Secrets().List(infisical.ListSecretsOptions{ }) ``` -Retrieve all secrets within the Infisical project and environment that client is connected to - -#### Parameters +### Parameters @@ -282,7 +310,11 @@ Retrieve all secrets within the Infisical project and environment that client is -### client.Secrets().Get(options) +### Retrieve Secret +`client.Secrets().Retrieve(options)` + +Retrieve a secret from Infisical. By default, `Secrets().Retrieve()` fetches and returns a shared secret. ```go secret, err := client.Secrets().Retrieve(infisical.RetrieveSecretOptions{ @@ -292,11 +324,7 @@ secret, err := client.Secrets().Retrieve(infisical.RetrieveSecretOptions{ }) ``` -Retrieve a secret from Infisical. - -By default, `Secrets().Get()` fetches and returns a shared secret. - -#### Parameters +### Parameters @@ -318,7 +346,11 @@ By default, `Secrets().Get()` fetches and returns a shared secret. -### client.Secrets().Create(options) +### Create Secret +`client.Secrets().Create(options)` + +Create a new secret in Infisical. ```go secret, err := client.Secrets().Create(infisical.CreateSecretOptions{ @@ -331,9 +363,8 @@ secret, err := client.Secrets().Create(infisical.CreateSecretOptions{ }) ``` -Create a new secret in Infisical. -#### Parameters +### Parameters @@ -361,7 +392,12 @@ Create a new secret in Infisical. -### client.Secrets().Update(options) +### Update Secret + +`client.Secrets().Update(options)` + +Update an existing secret in Infisical. ```go secret, err := client.Secrets().Update(infisical.UpdateSecretOptions{ @@ -373,9 +409,7 @@ secret, err := client.Secrets().Update(infisical.UpdateSecretOptions{ }) ``` -Update an existing secret in Infisical. - -#### Parameters +### Parameters @@ -403,7 +437,11 @@ Update an existing secret in Infisical. -### client.Secrets().Delete(options) +### Delete Secret +`client.Secrets().Delete(options)` + +Delete a secret in Infisical. ```go secret, err := client.Secrets().Delete(infisical.DeleteSecretOptions{ @@ -413,9 +451,7 @@ secret, err := client.Secrets().Delete(infisical.DeleteSecretOptions{ }) ``` -Delete a secret in Infisical. - -#### Parameters +### Parameters @@ -435,4 +471,155 @@ Delete a secret in Infisical. The type of the secret. Valid options are "shared" or "personal". If not specified, the default value is "shared".
- \ No newline at end of file + + +## Working With Folders + + +### List Folders +`client.Folders().List(options)` + +Retrieve all folders within the Infisical project and environment that the client is connected to. + +```go +folders, err := client.Folders().List(infisical.ListFoldersOptions{ + ProjectID: "PROJECT_ID", + Environment: "dev", + Path: "/", +}) +``` + +### Parameters + + + + + The slug name (dev, prod, etc) of the environment from where folders should be fetched from. + + + + The project ID where the folder lives in. + + + + The path from where folders should be fetched from. + + + + + +### Create Folder +`client.Folders().Create(options)` + +Create a new folder in Infisical. + +```go +folder, err := client.Folders().Create(infisical.CreateFolderOptions{ + ProjectID: "PROJECT_ID", + Name: "new-folder-name", + Environment: "dev", + Path: "/", +}) +``` + +### Parameters + + + + + The ID of the project where the folder will be created. + + + The slug name (dev, prod, etc) of the environment where the folder will be created. + + + The path to create the folder in. The root path is `/`. + + + The name of the folder to create. + + + + + +### Update Folder +`client.Folders().Update(options)` + +Update an existing folder in Infisical. + +```go +folder, err := client.Folders().Update(infisical.UpdateFolderOptions{ + ProjectID: "PROJECT_ID", + Environment: "dev", + Path: "/", + FolderID: "FOLDER_ID_TO_UPDATE", + NewName: "new-folder-name", +}) +``` + +### Parameters + + + + + The ID of the project where the folder will be updated. + + + The slug name (dev, prod, etc) of the environment from where the folder lives in. + + + The path from where the folder should be updated. + + + The ID of the folder to update. + + + The new name of the folder. + + + + +### Delete Folder +`client.Folders().Delete(options)` + +Delete a folder in Infisical. + +```go +deletedFolder, err := client.Folders().Delete(infisical.DeleteFolderOptions{ + // Either folder ID or folder name is required. + FolderName: "name-of-folder-to-delete", + FolderID: "folder-id-to-delete", + ProjectID: "PROJECT_ID", + Environment: "dev", + Path: "/", +}) +``` + +### Parameters + + + + + The name of the folder to delete. Note that either `FolderName` or `FolderID` is required. + + + The ID of the folder to delete. Note that either `FolderName` or `FolderID` is required. + + + + The ID of the project where the folder lives in. + + + The slug name (dev, prod, etc) of the environment from where the folder lives in. + + + The path from where the folder should be deleted. + + + + + diff --git a/docs/sdks/languages/java.mdx b/docs/sdks/languages/java.mdx index 879bfa6241..3a712322e3 100644 --- a/docs/sdks/languages/java.mdx +++ b/docs/sdks/languages/java.mdx @@ -122,6 +122,10 @@ public class App { Your self-hosted absolute site URL including the protocol (e.g. `https://app.infisical.com`) + + Optionally provide a path to a custom SSL certificate file. This can be substituted by setting the `INFISICAL_SSL_CERTIFICATE` environment variable to the contents of the certificate. + + The authentication object to use for the client. This is required unless you're using environment variables. @@ -363,6 +367,12 @@ By default, `getSecret()` fetches and returns a shared secret. The type of the secret. Valid options are "shared" or "personal". If not specified, the default value is "shared". + + Whether or not to include imported secrets from the current path.
Read about [secret import](/documentation/platform/secret-reference) + + + Whether or not to expand secret references in the fetched secrets. Read about [secret reference](/documentation/platform/secret-reference) + diff --git a/docs/sdks/languages/node.mdx b/docs/sdks/languages/node.mdx index 1546451b8b..04f210db00 100644 --- a/docs/sdks/languages/node.mdx +++ b/docs/sdks/languages/node.mdx @@ -1,9 +1,11 @@ --- title: "Infisical Node.js SDK" sidebarTitle: "Node.js" +url: "https://github.com/Infisical/node-sdk-v2" icon: "node" --- +{/* If you're working with Node.js, the official [Infisical Node SDK](https://github.com/Infisical/sdk/tree/main/languages/node) package is the easiest way to fetch and work with secrets for your application. - [NPM Package](https://www.npmjs.com/package/@infisical/sdk) @@ -137,6 +139,10 @@ Import the SDK and create a client instance with your [Machine Identity](/docume The level of logs you wish to log The logs are derived from Rust, as we have written our base SDK in Rust. + + Optionally provide a path to a custom SSL certificate file. This can be substituted by setting the `INFISICAL_SSL_CERTIFICATE` environment variable to the contents of the certificate. + + The authentication object to use for the client. This is required unless you're using environment variables. @@ -356,6 +362,12 @@ By default, `getSecret()` fetches and returns a shared secret. The type of the secret. Valid options are "shared" or "personal". If not specified, the default value is "shared". + + Whether or not to include imported secrets from the current path. Read about [secret import](/documentation/platform/secret-reference) + + + Whether or not to expand secret references in the fetched secrets. Read about [secret reference](/documentation/platform/secret-reference) + @@ -542,3 +554,5 @@ const decryptedString = await client.decryptSymmetric({ #### Returns (string) `plaintext` (string): The decrypted plaintext. + +*/} \ No newline at end of file diff --git a/docs/sdks/languages/python.mdx b/docs/sdks/languages/python.mdx index d9ab496883..3e066fc798 100644 --- a/docs/sdks/languages/python.mdx +++ b/docs/sdks/languages/python.mdx @@ -1,10 +1,11 @@ --- title: "Infisical Python SDK" sidebarTitle: "Python" +url: "https://github.com/Infisical/python-sdk-official?tab=readme-ov-file#infisical-python-sdk" icon: "python" --- -If you're working with Python, the official [infisical-python](https://github.com/Infisical/sdk/edit/main/crates/infisical-py) package is the easiest way to fetch and work with secrets for your application. +{/* If you're working with Python, the official [infisical-python](https://github.com/Infisical/sdk/edit/main/crates/infisical-py) package is the easiest way to fetch and work with secrets for your application. - [PyPi Package](https://pypi.org/project/infisical-python/) - [Github Repository](https://github.com/Infisical/sdk/edit/main/crates/infisical-py) @@ -97,16 +98,14 @@ client = InfisicalClient(ClientSettings( If manually set to 0, caching will be disabled, this is not recommended. - - + Your self-hosted absolute site URL including the protocol (e.g. `https://app.infisical.com`) + + Optionally provide a path to a custom SSL certificate file. This can be substituted by setting the `INFISICAL_SSL_CERTIFICATE` environment variable to the contents of the certificate. + + The authentication object to use for the client. This is required unless you're using environment variables. @@ -319,6 +318,9 @@ By default, `getSecret()` fetches and returns a shared secret. 
If not found, it The key of the secret to retrieve + + Whether or not to include imported secrets from the current path. Read about [secret import](/documentation/platform/secret-reference) + The slug name (dev, prod, etc) of the environment from where secrets should be fetched from. @@ -334,6 +336,9 @@ By default, `getSecret()` fetches and returns a shared secret. If not found, it Whether or not to include imported secrets from the current path. Read about [secret import](/documentation/platform/secret-reference) + + Whether or not to expand secret references in the fetched secrets. Read about [secret reference](/documentation/platform/secret-reference) + @@ -525,4 +530,4 @@ decryptedString = client.decryptSymmetric(decryptOptions) #### Returns (string) -`plaintext` (string): The decrypted plaintext. +`plaintext` (string): The decrypted plaintext. */} diff --git a/docs/sdks/languages/ruby.mdx b/docs/sdks/languages/ruby.mdx new file mode 100644 index 0000000000..6175949572 --- /dev/null +++ b/docs/sdks/languages/ruby.mdx @@ -0,0 +1,436 @@ +--- +title: "Infisical Ruby SDK" +sidebarTitle: "Ruby" +icon: "diamond" +--- + + + +If you're working with Ruby, the official [Infisical Ruby SDK](https://github.com/infisical/sdk) package is the easiest way to fetch and work with secrets for your application. + +- [Ruby Package](https://rubygems.org/gems/infisical-sdk) +- [Github Repository](https://github.com/infisical/sdk) + +## Basic Usage + +```ruby +require 'infisical-sdk' + +# 1. Create the Infisical client +infisical = InfisicalSDK::InfisicalClient.new('https://app.infisical.com') + +infisical.auth.universal_auth(client_id: 'YOUR_CLIENT_ID', client_secret: 'YOUR_CLIENT_SECRET') + +test_secret = infisical.secrets.get( + secret_name: 'API_KEY', + project_id: 'project-id', + environment: 'dev' +) +puts "Secret: #{test_secret}" +``` + +This example demonstrates how to use the Infisical Ruby SDK in a simple Ruby application. The application retrieves a secret named `API_KEY` from the `dev` environment of the `project-id` project. + + + We do not recommend hardcoding your [Machine Identity Tokens](/platform/identities/overview). Setting them as environment variables would be best. + + +# Installation + +```console +$ gem install infisical-sdk +``` +# Configuration + +Import the SDK and create a client instance. + +```ruby +infisical = InfisicalSDK::InfisicalClient.new('https://app.infisical.com') # Optional parameter, default is https://api.infisical.com +``` + +### Client parameters + + + + + The URL of the Infisical API. Default is `https://api.infisical.com`. + + + + How long the client should cache secrets for. Default is 5 minutes. Disable by setting to 0. + + + + +### Authentication + +The SDK supports a variety of authentication methods. The most common authentication method is Universal Auth, which uses a client ID and client secret to authenticate. + +#### Universal Auth + +**Using environment variables** + +Call `auth.universal_auth()` with empty arguments to use the following environment variables: + +- `INFISICAL_UNIVERSAL_AUTH_CLIENT_ID` - Your machine identity client ID. +- `INFISICAL_UNIVERSAL_AUTH_CLIENT_SECRET` - Your machine identity client secret. + +**Using the SDK directly** +```ruby +infisical.auth.universal_auth(client_id: 'your-client-id', client_secret: 'your-client-secret') +``` + +#### GCP ID Token Auth + + Please note that this authentication method will only work if you're running your application on Google Cloud Platform.
+ Please [read more](/documentation/platform/identities/gcp-auth) about this authentication method. + + +**Using environment variables** + +Call `.auth.gcp_id_token_auth()` with empty arguments to use the following environment variables: + +- `INFISICAL_GCP_AUTH_IDENTITY_ID` - Your Infisical Machine Identity ID. + +**Using the SDK directly** +```ruby +infisical.auth.gcp_id_token_auth(identity_id: 'MACHINE_IDENTITY_ID') +``` + +#### GCP IAM Auth + +**Using environment variables** + +Call `.auth.gcp_iam_auth()` with empty arguments to use the following environment variables: + +- `INFISICAL_GCP_IAM_AUTH_IDENTITY_ID` - Your Infisical Machine Identity ID. +- `INFISICAL_GCP_IAM_SERVICE_ACCOUNT_KEY_FILE_PATH` - The path to your GCP service account key file. + +**Using the SDK directly** +```ruby +infisical.auth.gcp_iam_auth(identity_id: 'MACHINE_IDENTITY_ID', service_account_key_file_path: 'SERVICE_ACCOUNT_KEY_FILE_PATH') +``` + +#### AWS IAM Auth + + Please note that this authentication method will only work if you're running your application on AWS. + Please [read more](/documentation/platform/identities/aws-auth) about this authentication method. + + +**Using environment variables** + +Call `.auth.aws_iam_auth()` with empty arguments to use the following environment variables: + +- `INFISICAL_AWS_IAM_AUTH_IDENTITY_ID` - Your Infisical Machine Identity ID. + +**Using the SDK directly** +```ruby +infisical.auth.aws_iam_auth(identity_id: 'MACHINE_IDENTITY_ID') +``` + + +#### Azure Auth + + Please note that this authentication method will only work if you're running your application on Azure. + Please [read more](/documentation/platform/identities/azure-auth) about this authentication method. + + +**Using environment variables** + +Call `.auth.azure_auth()` with empty arguments to use the following environment variables: + +- `INFISICAL_AZURE_AUTH_IDENTITY_ID` - Your Infisical Machine Identity ID. + +**Using the SDK directly** +```ruby +infisical.auth.azure_auth(identity_id: 'MACHINE_IDENTITY_ID') +``` + +#### Kubernetes Auth + + Please note that this authentication method will only work if you're running your application on Kubernetes. + Please [read more](/documentation/platform/identities/kubernetes-auth) about this authentication method. + + +**Using environment variables** + +Call `.auth.kubernetes_auth()` with empty arguments to use the following environment variables: + +- `INFISICAL_KUBERNETES_IDENTITY_ID` - Your Infisical Machine Identity ID. +- `INFISICAL_KUBERNETES_SERVICE_ACCOUNT_TOKEN_PATH_ENV_NAME` - The environment variable name that contains the path to the service account token. This is optional and will default to `/var/run/secrets/kubernetes.io/serviceaccount/token`. + +**Using the SDK directly** +```ruby +# Service account token path will default to /var/run/secrets/kubernetes.io/serviceaccount/token if an empty value is passed +infisical.auth.kubernetes_auth(identity_id: 'MACHINE_IDENTITY_ID', service_account_token_path: nil) +``` + +## Working with Secrets + +### client.secrets.list(options) + +```ruby +secrets = infisical.secrets.list( + project_id: 'PROJECT_ID', + environment: 'dev', + path: '/foo/bar', +) +``` + +Retrieve all secrets within the Infisical project and environment that the client is connected to. + +#### Parameters + + + + + The slug name (dev, prod, etc) of the environment from where secrets should be fetched from. + + + + The project ID where the secret lives in. + + + + The path from where secrets should be fetched from.
+ + + Whether or not to set the fetched secrets to the process environment. If true, you can access the secrets like so `ENV['SECRET_NAME']`. + + + + Whether or not to include imported secrets from the current path. Read about [secret import](/documentation/platform/secret-reference) + + + + Whether or not to fetch secrets recursively from the specified path. Please note that there's a 20-depth limit for recursive fetching. + + + + Whether or not to expand secret references in the fetched secrets. Read about [secret reference](/documentation/platform/secret-reference) + + + + + +### client.secrets.get(options) + +```ruby +secret = infisical.secrets.get( + secret_name: 'API_KEY', + project_id: project_id, + environment: env_slug +) +``` + +Retrieve a secret from Infisical. + +By default, `secrets.get()` fetches and returns a shared secret. + +#### Parameters + + + + + The key of the secret to retrieve. + + + The project ID where the secret lives in. + + + The slug name (dev, prod, etc) of the environment from where secrets should be fetched from. + + + The path from where secret should be fetched from. + + + The type of the secret. Valid options are "shared" or "personal". If not specified, the default value is "shared". + + + + +### client.secrets.create(options) + +```ruby +new_secret = infisical.secrets.create( + secret_name: 'NEW_SECRET', + secret_value: 'SECRET_VALUE', + project_id: 'PROJECT_ID', + environment: 'dev', +) +``` + +Create a new secret in Infisical. + +#### Parameters + + + + + The key of the secret to create. + + + The value of the secret. + + + A comment for the secret. + + + The project ID where the secret lives in. + + + The slug name (dev, prod, etc) of the environment from where secrets should be fetched from. + + + The path from where secret should be created. + + + The type of the secret. Valid options are "shared" or "personal". If not specified, the default value is "shared". + + + + +### client.secrets.update(options) + +```ruby +updated_secret = infisical.secrets.update( + secret_name: 'SECRET_KEY_TO_UPDATE', + secret_value: 'NEW_SECRET_VALUE', + project_id: 'PROJECT_ID', + environment: 'dev', +) +``` + +Update an existing secret in Infisical. + +#### Parameters + + + + + The key of the secret to update. + + + The new value of the secret. + + + Whether or not to skip multiline encoding for the new secret value. + + + The project ID where the secret lives in. + + + The slug name (dev, prod, etc) of the environment from where secrets should be fetched from. + + + The path from where secret should be updated. + + + The type of the secret. Valid options are "shared" or "personal". If not specified, the default value is "shared". + + + + +### client.secrets.delete(options) + +```ruby +deleted_secret = infisical.secrets.delete( + secret_name: 'SECRET_TO_DELETE', + project_id: 'PROJECT_ID', + environment: 'dev', +) +``` + +Delete a secret in Infisical. + +#### Parameters + + + + + The key of the secret to delete. + + + The project ID where the secret lives in. + + + The slug name (dev, prod, etc) of the environment from where secrets should be fetched from. + + + The path from where secret should be deleted. + + + The type of the secret. Valid options are "shared" or "personal". If not specified, the default value is "shared". + + + + + +## Cryptography + +### Create a symmetric key + +Create a base64-encoded, 256-bit symmetric key to be used for encryption/decryption.
+ +```ruby +key = infisical.cryptography.create_symmetric_key +``` + +#### Returns (string) +`key` (string): A base64-encoded, 256-bit symmetric key that can be used for encryption/decryption purposes. + +### Encrypt symmetric +```ruby +encrypted_data = infisical.cryptography.encrypt_symmetric(data: "Hello World!", key: key) +``` + +#### Parameters + + + + + The plaintext you want to encrypt. + + + The symmetric key to use for encryption. + + + + +#### Returns (object) +`tag` (string): A base64-encoded, 128-bit authentication tag. +`iv` (string): A base64-encoded, 96-bit initialization vector. +`ciphertext` (string): A base64-encoded, encrypted ciphertext. + + +### Decrypt symmetric +```ruby +decrypted_data = infisical.cryptography.decrypt_symmetric( + ciphertext: encrypted_data['ciphertext'], + iv: encrypted_data['iv'], + tag: encrypted_data['tag'], + key: key +) +``` + +#### Parameters + + + + The ciphertext you want to decrypt. + + + The symmetric key to use for decryption. + + + The initialization vector to use for decryption. + + + The authentication tag to use for decryption. + + + + +#### Returns (string) +`plaintext` (string): The decrypted plaintext. \ No newline at end of file diff --git a/docs/sdks/overview.mdx b/docs/sdks/overview.mdx index 578e8ad0f3..11d34bb382 100644 --- a/docs/sdks/overview.mdx +++ b/docs/sdks/overview.mdx @@ -10,18 +10,25 @@ From local development to production, Infisical SDKs provide the easiest way for - Fetch secrets on demand - + Manage secrets for your Node application on demand - + Manage secrets for your Python application on demand Manage secrets for your Java application on demand + + Manage secrets for your Go application on demand + Manage secrets for your C#/.NET application on demand + + + Manage secrets for your Ruby application on demand + ## FAQ @@ -43,7 +50,4 @@ From local development to production, Infisical SDKs provide the easiest way for Note: The exact parameter name may differ depending on the language. - - The SDK caches every secret and falls back to the cached value if a request fails. If no cached value is found, and the request fails, then the SDK throws an error. - diff --git a/docs/self-hosting/configuration/envars.mdx b/docs/self-hosting/configuration/envars.mdx index b225a3ace6..3890547a1f 100644 --- a/docs/self-hosting/configuration/envars.mdx +++ b/docs/self-hosting/configuration/envars.mdx @@ -20,11 +20,19 @@ Used to configure platform-specific security and operational settings -base64 32` - + Must be an absolute URL including the protocol (e.g. https://app.infisical.com). + + Specifies the internal port on which the application listens. + + + + Telemetry helps us improve Infisical, but if you want to disable it you may set this to `false`. + + ## Data Layer The platform utilizes Postgres to persist all of its data and Redis for caching and backgroud tasks @@ -43,6 +51,25 @@ The platform utilizes Postgres to persist all of its data and Redis for caching Redis connection string. + + Postgres database read replica connection strings. It accepts a JSON string. +``` +DB_READ_REPLICAS=[{"DB_CONNECTION_URI":""}] +``` + + Postgres read replica connection string. + + + Configure the SSL certificate for securing a Postgres replica connection by first encoding it in base64. + Use the command below to encode your certificate: + `echo "" | base64` + + If not provided, it will use the master SSL certificate.
+ + + ## Email service Without email configuration, Infisical's core functions like sign-up/login and secret operations work, but this disables multi-factor authentication, email invites for projects, alerts for suspicious logins, and all other email-dependent features. @@ -305,6 +332,27 @@ SMTP_FROM_NAME=Infisical + +1. Create an account and configure [SMTP2Go](https://www.smtp2go.com/) to send emails. +2. Turn on SMTP authentication +``` +SMTP_HOST=mail.smtp2go.com +SMTP_PORT=2525 # Or use one of the following ports: 80, 25, 8025, or 587 +SMTP_USERNAME=username # Your SMTP2GO account's SMTP username +SMTP_PASSWORD=password # Your SMTP2GO account's SMTP password +SMTP_FROM_ADDRESS=hey@example.com # your email address being used to send out emails +SMTP_FROM_NAME=Infisical +``` + +Optional (for TLS/SSL): + +TLS: Available on the same ports (2525, 80, 25, 8025, or 587) +SSL: Available on ports 465, 8465, and 443 + + + ## Authentication By default, users can only login via email/password based login method. @@ -441,6 +489,16 @@ To help you sync secrets from Infisical to services such as Github and Gitlab, I + + + The AWS IAM User access key for assuming roles. + + + + The AWS IAM User secret key for assuming roles. + + + OAuth2 client id for Azure integration diff --git a/docs/self-hosting/configuration/requirements.mdx b/docs/self-hosting/configuration/requirements.mdx index c0e9cab019..e0e992b7fb 100644 --- a/docs/self-hosting/configuration/requirements.mdx +++ b/docs/self-hosting/configuration/requirements.mdx @@ -59,6 +59,7 @@ Redis requirements: - Use Redis versions 6.x or 7.x. We advise upgrading to at least Redis 6.2. - Redis Cluster mode is currently not supported; use Redis Standalone, with or without High Availability (HA). - Redis storage needs are minimal: a setup with 2 vCPU, 4 GB RAM, and 30GB SSD will be sufficient for small deployments. +- Set the cache eviction policy to `noeviction`. ## Supported Web Browsers diff --git a/docs/self-hosting/deployment-options/docker-compose.mdx b/docs/self-hosting/deployment-options/docker-compose.mdx index d618792551..440811e650 100644 --- a/docs/self-hosting/deployment-options/docker-compose.mdx +++ b/docs/self-hosting/deployment-options/docker-compose.mdx @@ -2,7 +2,7 @@ title: "Docker Compose" description: "Read how to run Infisical with Docker Compose template." --- -This self hosting guide will walk you though the steps to self host Infisical using Docker compose. +This self-hosting guide will walk you through the steps to self-host Infisical using Docker Compose. ## Prerequisites - [Docker](https://docs.docker.com/engine/install/) @@ -79,4 +79,4 @@ docker-compose -f docker-compose.prod.yml up Your Infisical instance should now be running on port `80`. To access your instance, visit `http://localhost:80`. -![self host sign up](/images/self-hosting/applicable-to-all/selfhost-signup.png) +![self-hosted sign up](/images/self-hosting/applicable-to-all/selfhost-signup.png) diff --git a/docs/self-hosting/deployment-options/docker-swarm.mdx b/docs/self-hosting/deployment-options/docker-swarm.mdx index c63aff23b1..5fce38b291 100644 --- a/docs/self-hosting/deployment-options/docker-swarm.mdx +++ b/docs/self-hosting/deployment-options/docker-swarm.mdx @@ -1,11 +1,11 @@ --- title: "Docker Swarm" -description: "How to self Infisical with Docker Swarm (HA)." +description: "How to self-host Infisical with Docker Swarm (HA)."
--- # Self-Hosting Infisical with Docker Swarm -This guide will provide step-by-step instructions on how to self-host Infisical using Docker Swarm. This is particularly helpful for those wanting to self host Infisical on premise while still maintaining high availability (HA) for the core Infisical components. +This guide will provide step-by-step instructions on how to self-host Infisical using Docker Swarm. This is particularly helpful for those wanting to self-host Infisical on premise while still maintaining high availability (HA) for the core Infisical components. The guide will demonstrate a setup with three nodes, ensuring that the cluster can tolerate the failure of one node while remaining fully operational. ## Docker Swarm @@ -82,6 +82,13 @@ The [Docker stack file](https://github.com/Infisical/infisical/tree/main/docker- ## Deployment instructions + + Run the following on each node to install the Docker engine. + + ``` + curl -fsSL https://get.docker.com -o get-docker.sh && sh get-docker.sh + ``` + ``` docker swarm init @@ -161,7 +168,12 @@ The [Docker stack file](https://github.com/Infisical/infisical/tree/main/docker- Run the schema migration to initialize the database. Follow the [guide here](/self-hosting/configuration/schema-migrations) to learn how. - To connect to the Postgres database, use the following default credentials defined in the Docker swarm: username: `postgres`, password: `postgres` and database: `postgres`. + To run the migrations, you'll need to connect to the Postgres instance deployed on your Docker swarm. The default Postgres user credentials are defined in the Docker swarm: username: `postgres`, password: `postgres` and database: `postgres`. + We recommend you change these credentials when deploying to production and creating a separate DB for Infisical. + + + After running the schema migrations, be sure to update the `.env` file to have the correct `DB_CONNECTION_URI`. + @@ -186,7 +198,7 @@ The [Docker stack file](https://github.com/Infisical/infisical/tree/main/docker- - ![self host sign up](/images/self-hosting/applicable-to-all/selfhost-signup.png) + ![self-hosting sign up](/images/self-hosting/applicable-to-all/selfhost-signup.png) Once all expected services are up and running, visit `:8080` of any node in the swarm. This will take you to the Infisical configuration page. diff --git a/docs/self-hosting/deployment-options/kubernetes-helm.mdx b/docs/self-hosting/deployment-options/kubernetes-helm.mdx index 8ba940fee5..35b003c2de 100644 --- a/docs/self-hosting/deployment-options/kubernetes-helm.mdx +++ b/docs/self-hosting/deployment-options/kubernetes-helm.mdx @@ -41,7 +41,7 @@ description: "Learn how to use Helm chart to install Infisical on your Kubernete To deploy this Helm chart, a Kubernetes secret named `infisical-secrets` must be present in the same namespace where the chart is being deployed. - For a minimal installation of Infisical, you need to configure `ENCRYPTION_KEY`, `AUTH_SECRET`, `DB_CONNECTION_URI`, and `REDIS_URL`. [Learn more about configuration settings](/self-hosting/configuration/envars). + For a minimal installation of Infisical, you need to configure `ENCRYPTION_KEY`, `AUTH_SECRET`, `DB_CONNECTION_URI`, `SITE_URL`, and `REDIS_URL`. [Learn more about configuration settings](/self-hosting/configuration/envars). @@ -56,6 +56,7 @@ description: "Learn how to use Helm chart to install Infisical on your Kubernete stringData: AUTH_SECRET: <> ENCRYPTION_KEY: <> + SITE_URL: <> ```
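+Before installing the chart, the secret manifest above can be applied and verified with `kubectl` (a minimal sketch; the `infisical-secrets.yaml` file name and `<namespace>` are placeholders for your own setup):
+
+```bash
+# Apply the secret manifest in the namespace the chart will be deployed to
+kubectl apply -f infisical-secrets.yaml --namespace <namespace>
+
+# Confirm the secret is present before installing the chart
+kubectl get secret infisical-secrets --namespace <namespace>
+```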
@@ -71,13 +72,14 @@ description: "Learn how to use Helm chart to install Infisical on your Kubernete ENCRYPTION_KEY: <> REDIS_URL: <> DB_CONNECTION_URI: <> + SITE_URL: <> ``` - Infisical relies a relational database, which means that database schemas need to be migrated before the instance can become operational. + Infisical relies on a relational database, which means that database schemas need to be migrated before the instance can become operational. To automate this process, the chart includes a option named `infisical.autoDatabaseSchemaMigration`. When this option is enabled, a deployment/upgrade will only occur _after_ a successful schema migration. diff --git a/docs/self-hosting/deployment-options/native/standalone-binary.mdx b/docs/self-hosting/deployment-options/native/standalone-binary.mdx new file mode 100644 index 0000000000..5767ca948e --- /dev/null +++ b/docs/self-hosting/deployment-options/native/standalone-binary.mdx @@ -0,0 +1,202 @@ +--- +title: "Standalone" +description: "Learn how to deploy Infisical in a standalone environment." +--- + +# Self-Hosting Infisical with Standalone Infisical + +Deploying Infisical in a standalone environment is a great way to get started with Infisical without having to use containers. This guide will walk you through the process of deploying Infisical in a standalone environment. +This is one of the easiest ways to deploy Infisical. It is a single executable, currently only supported on Debian-based systems. + +The standalone deployment implements the "bring your own database" (BYOD) approach. This means that you will need to provide your own databases (specifically Postgres and Redis) for the Infisical services to use. The standalone deployment does not include any databases. + +If you wish to streamline the deployment process, we recommend using the Ansible role for Infisical. The Ansible role automates the end-to-end deployment process and takes care of everything, including databases, Redis deployment, web serving, and availability. +- [Automated Deployment with high availability (HA)](/self-hosting/deployment-options/native/high-availability) + + +## Prerequisites +- A server running a Debian-based operating system (e.g., Ubuntu, Debian) +- A Postgres database +- A Redis database + +## Installing Infisical +Installing Infisical is as simple as running a single command. You can install Infisical by running the following command: + +```bash + $ curl -1sLf 'https://dl.cloudsmith.io/public/infisical/infisical-core/cfg/setup/bash.deb.sh' | sudo bash && sudo apt-get install -y infisical-core +``` + +## Running Infisical +Running Infisical and serving it to the web has a few steps. Below are the steps to get you started with running Infisical in a standalone environment. + * Set up environment variables + * Run Postgres migrations + * Create a system daemon + * Expose Infisical to the internet + + + + + To use Infisical you'll need to configure the environment variables beforehand. You can achieve this by creating an environment file to be used by Infisical. + + + #### Create environment file + ```bash + $ mkdir -p /etc/infisical && touch /etc/infisical/environment + ``` + + After creating the environment file, you'll need to fill it out with your environment variables.
+ + #### Edit environment file + ```bash + $ nano /etc/infisical/environment + ``` + + ```bash + DB_CONNECTION_URI=postgres://user:password@localhost:5432/infisical # Replace with your Postgres database connection URI + REDIS_URL=redis://localhost:6379 # Replace with your Redis connection URI + ENCRYPTION_KEY=your_encryption_key # Replace with your encryption key (can be generated with: openssl rand -hex 16) + AUTH_SECRET=your_auth_secret # Replace with your auth secret (can be generated with: openssl rand -base64 32) + ``` + + + The minimum required environment variables are `DB_CONNECTION_URI`, `REDIS_URL`, `ENCRYPTION_KEY`, and `AUTH_SECRET`. We recommend you take a look at our [list of all available environment variables](/docs/self-hosting/configuration/envars#general-platform), and configure the ones you need. + + + + + Assuming you're starting with a fresh Postgres database, you'll need to run the Postgres migrations to synchronize the database schema. + The migration command will use the environment variables you configured in the previous step. + + + ```bash + $ eval $(cat /etc/infisical/environment) infisical-core migration:latest + ``` + + + This step will need to be repeated if you update Infisical in the future. + + + + + Create a systemd service file for Infisical. Creating a systemd service file will allow Infisical to start automatically when the system boots or in case of a crash. + + ```bash + $ nano /etc/systemd/system/infisical.service + ``` + + ```ini + [Unit] + Description=Infisical Service + After=network.target + + [Service] + # The path to the environment file we created in the previous step + EnvironmentFile=/etc/infisical/environment + Type=simple + # Change the user to the user you want to run Infisical as + User=root + ExecStart=/usr/local/bin/infisical-core + Restart=always + RestartSec=30 + + [Install] + WantedBy=multi-user.target + ``` + + Now we need to reload the systemd daemon and start the Infisical service. + + ```bash + $ systemctl daemon-reload + $ systemctl start infisical + $ systemctl enable infisical + ``` + + + You can check the status of the Infisical service by running `systemctl status infisical`. + It is also a good idea to check the logs for any errors by running `journalctl --no-pager -u infisical`. + + + + Exposing Infisical to the internet requires setting up a reverse proxy. You can use any reverse proxy of your choice, but we recommend using HAProxy or Nginx. Below is an example of how to set up a reverse proxy using HAProxy. + + #### Install HAProxy + ```bash + $ apt-get install -y haproxy + ``` + + #### Edit HAProxy configuration + ```bash + $ nano /etc/haproxy/haproxy.cfg + ``` + + ```ini + global + log /dev/log local0 + log /dev/log local1 notice + chroot /var/lib/haproxy + stats socket /run/haproxy/admin.sock mode 660 level admin expose-fd listeners + stats timeout 30s + user haproxy + group haproxy + daemon + + defaults + log global + mode http + option httplog + option dontlognull + timeout connect 5000 + timeout client 50000 + timeout server 50000 + + frontend http-in + bind *:80 + default_backend infisical + + backend infisical + server infisicalapp 127.0.0.1:8080 check + ``` + + + If you decide to use Nginx, then please be aware that the configuration will be different. **Infisical listens on port 8080**. + + + #### Restart HAProxy + ```bash + $ systemctl restart haproxy + ``` + + + + +And that's it!
You have successfully deployed Infisical in a standalone environment. You can now access Infisical by visiting `http://your-server-ip`. + + + Please take note that the Infisical team cannot provide infrastructure support for **free self-hosted** deployments.
If you need help with infrastructure, we recommend upgrading to a [paid plan](https://infisical.com/pricing) which includes infrastructure support. + + You can also join our community [Slack](https://infisical.com/slack) for help and support from the community. +
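+Before digging into troubleshooting, a quick sanity check can confirm everything is wired together (a minimal sketch; the service names and ports assume the defaults used in this guide):
+
+```bash
+# Both services should report "active"
+systemctl is-active infisical haproxy
+
+# Infisical itself listens on port 8080...
+curl -I http://127.0.0.1:8080
+
+# ...and HAProxy should forward port 80 to it
+curl -I http://127.0.0.1
+```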
+ +## Troubleshooting + + + This is a common issue related to the HAProxy configuration file. The error is caused by a missing newline character at the end of the file. You can fix this by adding a newline character at the end of the file. + + ```bash + $ echo "" >> /etc/haproxy/haproxy.cfg + ``` + + + This issue can be caused by a number of reasons, mostly related to the network configuration. Here are a few things you can check: + 1. Ensure that the firewall is not blocking the connection. You can check this by running `ufw status`. Ensure that port 80 is open. + 2. If you're using a cloud provider like AWS or GCP, ensure that the security group allows traffic on port 80. + 3. Ensure that the HAProxy service is running. You can check this by running `systemctl status haproxy`. + 4. Ensure that the Infisical service is running. You can check this by running `systemctl status infisical`. + \ No newline at end of file diff --git a/docs/self-hosting/deployment-options/standalone-infisical.mdx b/docs/self-hosting/deployment-options/standalone-infisical.mdx index ab15126127..6e41e4b5fc 100644 --- a/docs/self-hosting/deployment-options/standalone-infisical.mdx +++ b/docs/self-hosting/deployment-options/standalone-infisical.mdx @@ -28,7 +28,7 @@ The following guide provides a detailed step-by-step walkthrough on how you can
- For a minimal installation of Infisical, you must configure `ENCRYPTION_KEY`, `AUTH_SECRET`, `DB_CONNECTION_URI`, and `REDIS_URL`. [View all available configurations](/self-hosting/configuration/envars). + For a minimal installation of Infisical, you must configure `ENCRYPTION_KEY`, `AUTH_SECRET`, `DB_CONNECTION_URI`, `SITE_URL`, and `REDIS_URL`. [View all available configurations](/self-hosting/configuration/envars). We recommend using Cloud-based Platform as a Service (PaaS) solutions for PostgreSQL and Redis to ensure high availability. @@ -43,6 +43,7 @@ The following guide provides a detailed step-by-step walkthrough on how you can -e AUTH_SECRET="q6LRi7c717a3DQ8JUxlWYkZpMhG4+RHLoFUVt3Bvo2U=" \ -e DB_CONNECTION_URI="<>" \ -e REDIS_URL="<>" \ + -e SITE_URL="<>" \ infisical/infisical: ``` @@ -52,11 +53,11 @@ The following guide provides a detailed step-by-step walkthrough on how you can Once the container is running, verify the installation by opening your web browser and navigating to `http://localhost:80`. - ![self host sign up](/images/self-hosting/applicable-to-all/selfhost-signup.png) + ![self-hosted sign up](/images/self-hosting/applicable-to-all/selfhost-signup.png) ### Additional discussion It's important to note that the above is a basic example of deploying Infisical using Docker. In practice, for production deployments, you may want to use container orchestration platforms such as AWS ECS, Google Cloud Run, or Kubernetes. -These platforms offer additional features like scalability, load balancing, and automated deployment, making them suitable for handling production-level traffic and providing high availability. \ No newline at end of file +These platforms offer additional features like scalability, load balancing, and automated deployment, making them suitable for handling production-level traffic and providing high availability. diff --git a/docs/self-hosting/ee.mdx b/docs/self-hosting/ee.mdx index a72bad908f..83b2772e49 100644 --- a/docs/self-hosting/ee.mdx +++ b/docs/self-hosting/ee.mdx @@ -5,7 +5,7 @@ description: "Find out how to activate Infisical Enterprise edition (EE) feature While most features in Infisical are free to use, others are paid and require purchasing an enterprise license to use them. -This guide walks through how you can use these paid features on a self hosted instance of Infisical. +This guide walks through how you can use these paid features on a self-hosted instance of Infisical. @@ -15,15 +15,30 @@ This guide walks through how you can use these paid features on a self hosted in Depending on whether or not the environment where Infisical is deployed has internet access, you may be issued a regular license or an offline license. - - - If using a regular license, you should set the value of the environment variable `LICENSE_KEY` in Infisical to the issued license key. - - If using an offline license, you should set the value of the environment variable `LICENSE_KEY_OFFLINE` in Infisical to the issued license key. - - How you set the environment variable will depend on the deployment method you used. Please refer to the documentation of your deployment method for specific instructions. - + + + + - Assign the issued license key to the `LICENSE_KEY` environment variable in your Infisical instance. + + - Your Infisical instance will need to communicate with the Infisical license server to validate the license key. 
+ If you want to limit outgoing connections only to the Infisical license server, you can use the following IP addresses: `13.248.249.247` and `35.71.190.59`.
+
+
+ Ensure that your firewall or network settings allow outbound connections to these IP addresses to avoid any issues with license validation.
+
+
+
+ - Assign the issued license key to the `LICENSE_KEY_OFFLINE` environment variable in your Infisical instance.
+
+
+ How you set the environment variable will depend on the deployment method you used. Please refer to the documentation of your deployment method for specific instructions.
+
+
+ Once your instance starts up, the license key will be validated and you’ll be able to use the paid features. However, when the license expires, Infisical will continue to run, but EE features will be disabled until the license is renewed or a new one is purchased.
+
diff --git a/docs/self-hosting/faq.mdx b/docs/self-hosting/faq.mdx
index db98a23dc6..48ea0723ca 100644
--- a/docs/self-hosting/faq.mdx
+++ b/docs/self-hosting/faq.mdx
@@ -3,11 +3,11 @@ title: "FAQ"
 description: "Frequently Asked Questions about self-hosting Infisical."
 ---
 
-Frequently asked questions about self hosted instance of Infisical can be found on this page.
+Frequently asked questions about self-hosted instances of Infisical can be found on this page.
 
 If you can't find the answer you are looking for, please create an issue on our [GitHub repository](https://github.com/Infisical/infisical) or join our [Slack community](https://infisical.com/slack) for additional support.
 
-This issue is typically seen when you haven't set up SSL for your self hosted instance of Infisical. When SSL is not enabled, you can't receive secure cookies, preventing the session data to not be saved.
+This issue is typically seen when you haven't set up SSL for your self-hosted instance of Infisical. When SSL is not enabled, you can't receive secure cookies, preventing session data from being saved.
 
 To fix this, we highly recommend that you set up SSL for your instance. However, in the event you choose to use Infisical without SSL, you can do so by setting the `HTTPS_ENABLED` environment variable to `"false"` for the backend application.
diff --git a/docs/self-hosting/guides/custom-certificates.mdx b/docs/self-hosting/guides/custom-certificates.mdx
new file mode 100644
index 0000000000..67b258d08a
--- /dev/null
+++ b/docs/self-hosting/guides/custom-certificates.mdx
@@ -0,0 +1,26 @@
+---
+title: "Adding Custom Certificates"
+description: "Learn how to configure Infisical with custom certificates"
+---
+
+By default, the Infisical Docker image includes certificates from well-known public certificate authorities.
+However, some integrations with Infisical may need to communicate with your internal services that use private certificate authorities.
+To configure trust for custom certificates, follow these steps. This is particularly useful for connecting Infisical with self-hosted services like GitLab.
+
+## Prerequisites
+
+- Docker
+- Standalone [Infisical image](https://hub.docker.com/r/infisical/infisical)
+- Certificate public key `.pem` files
+
+## Setup
+
+1. Place all your public key `.pem` files into a single directory.
+2. Mount the directory containing the `.pem` files to the `/usr/local/share/ca-certificates/` path in the Infisical container.
+3. Set the following environment variable on your Infisical container:
+   ```
+   NODE_EXTRA_CA_CERTS=/etc/ssl/certs/ca-certificates.crt
+   ```
+4. Start the Infisical container.
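+
+For example, a minimal sketch of steps 2–4 using `docker run` (the `./certs` directory and image tag are placeholders, and the environment variables required for a minimal installation are omitted for brevity):
+
+```bash
+docker run -p 80:8080 \
+  -v "$(pwd)/certs:/usr/local/share/ca-certificates/" \
+  -e NODE_EXTRA_CA_CERTS=/etc/ssl/certs/ca-certificates.crt \
+  infisical/infisical:<version>
+```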
+ +By following these steps, your Infisical container will trust the specified certificates, allowing you to securely connect Infisical to your internal services. diff --git a/docs/self-hosting/overview.mdx b/docs/self-hosting/overview.mdx index ccc4ae912f..a7ea50e39f 100644 --- a/docs/self-hosting/overview.mdx +++ b/docs/self-hosting/overview.mdx @@ -33,3 +33,21 @@ Choose from a number of deployment options listed below to get started. Use our Helm chart to Install Infisical on your Kubernetes cluster. +{/* + + Install Infisical on your Debian-based system without containers using our standalone binary. + + + Install Infisical on your Debian-based instances without containers using our standalone binary with high availability out of the box. + + */} diff --git a/docs/self-hosting/reference-architectures/aws-ecs.mdx b/docs/self-hosting/reference-architectures/aws-ecs.mdx index a4ce4a2b66..5a71c95725 100644 --- a/docs/self-hosting/reference-architectures/aws-ecs.mdx +++ b/docs/self-hosting/reference-architectures/aws-ecs.mdx @@ -1,5 +1,5 @@ --- -title: "AWS ECS" +title: "AWS ECS (HA)" description: "Reference architecture for self-hosting Infisical on AWS ECS" --- diff --git a/docs/self-hosting/reference-architectures/linux-deployment-ha.mdx b/docs/self-hosting/reference-architectures/linux-deployment-ha.mdx new file mode 100644 index 0000000000..7e42400162 --- /dev/null +++ b/docs/self-hosting/reference-architectures/linux-deployment-ha.mdx @@ -0,0 +1,383 @@ +--- +title: "Linux (HA)" +description: "Infisical High Availability Deployment architecture for Linux" +--- + +This guide describes how to achieve a highly available deployment of Infisical on Linux machines without containerization. The architecture provided serves as a foundation for minimum high availability, which you can scale based on your specific requirements. + +## Architecture Overview + +![High availability stack](/images/self-hosting/deployment-options/native/ha-stack.png) + +The deployment consists of the following key components: + +| Service | Nodes | Recommended Specs | GCP Instance | AWS Instance | +|---------------------------|-------|---------------------------|-----------------|--------------| +| External Load Balancer | 1 | 4 vCPU, 4 GB memory | n1-highcpu-4 | c5n.xlarge | +| Internal Load Balancer | 1 | 4 vCPU, 4 GB memory | n1-highcpu-4 | c5n.xlarge | +| Etcd Cluster | 3 | 4 vCPU, 4 GB memory | n1-highcpu-4 | c5n.xlarge | +| PostgreSQL Cluster | 3 | 2 vCPU, 8 GB memory | n1-standard-2 | m5.large | +| Redis + Sentinel | 3+3 | 2 vCPU, 8 GB memory | n1-standard-2 | m5.large | +| Infisical Core | 3 | 2 vCPU, 4 GB memory | n1-highcpu-2 | c5.large | + +### Network Architecture + +All servers operate within the 52.1.0.0/24 private network range with the following IP assignments: + +| Service | IP Address | +|----------------------|------------| +| External Load Balancer| 52.1.0.1 | +| Internal Load Balancer| 52.1.0.2 | +| Etcd Node 1 | 52.1.0.3 | +| Etcd Node 2 | 52.1.0.4 | +| Etcd Node 3 | 52.1.0.5 | +| PostgreSQL Node 1 | 52.1.0.6 | +| PostgreSQL Node 2 | 52.1.0.7 | +| PostgreSQL Node 3 | 52.1.0.8 | +| Redis Node 1 | 52.1.0.9 | +| Redis Node 2 | 52.1.0.10 | +| Redis Node 3 | 52.1.0.11 | +| Sentinel Node 1 | 52.1.0.12 | +| Sentinel Node 2 | 52.1.0.13 | +| Sentinel Node 3 | 52.1.0.14 | +| Infisical Core 1 | 52.1.0.15 | +| Infisical Core 2 | 52.1.0.16 | +| Infisical Core 3 | 52.1.0.17 | + +## Component Setup Guide + +### 1. 
Configure Etcd Cluster
+
+The Etcd cluster is needed for leader election in the PostgreSQL HA setup. Skip this step if you are using managed PostgreSQL.
+
+1. Install Etcd on each node:
+```bash
+sudo apt update
+sudo apt install etcd
+```
+
+2. Configure each node with unique identifiers and cluster membership. Example configuration for Node 1 (`/etc/etcd/etcd.conf`):
+```yaml
+name: etcd1
+data-dir: /var/lib/etcd
+initial-cluster-state: new
+initial-cluster-token: etcd-cluster-1
+initial-cluster: etcd1=http://52.1.0.3:2380,etcd2=http://52.1.0.4:2380,etcd3=http://52.1.0.5:2380
+initial-advertise-peer-urls: http://52.1.0.3:2380
+listen-peer-urls: http://52.1.0.3:2380
+listen-client-urls: http://52.1.0.3:2379,http://127.0.0.1:2379
+advertise-client-urls: http://52.1.0.3:2379
+```
+
+### 2. Configure PostgreSQL
+
+For production deployments, you have two options for highly available PostgreSQL:
+
+#### Option A: Managed PostgreSQL Service (Recommended for Most Users)
+
+Use cloud provider managed services:
+- AWS: Amazon RDS for PostgreSQL with Multi-AZ
+- GCP: Cloud SQL for PostgreSQL with HA configuration
+- Azure: Azure Database for PostgreSQL with zone redundant HA
+
+These services handle replication, failover, and maintenance automatically.
+
+#### Option B: Self-Managed PostgreSQL Cluster
+
+A full HA installation guide for PostgreSQL is beyond the scope of this document. However, we have provided an overview of resources and code snippets below to guide your deployment.
+
+1. Required Components:
+   - PostgreSQL 14+ on each node
+   - Patroni for cluster management
+   - Etcd for distributed consensus
+
+2. Documentation we recommend you read:
+   - [Complete Patroni Setup Guide](https://patroni.readthedocs.io/en/latest/README.html)
+   - [PostgreSQL Replication Documentation](https://www.postgresql.org/docs/current/high-availability.html)
+
+3. Key Steps Overview:
+```bash
+# 1. Install requirements on each PostgreSQL node
+sudo apt update
+sudo apt install -y postgresql-14 postgresql-contrib-14 python3-pip
+pip3 install 'patroni[etcd]' psycopg2-binary
+
+# 2. Create Patroni config directory
+sudo mkdir /etc/patroni
+sudo chown postgres:postgres /etc/patroni
+
+# 3. Create Patroni configuration (example for first node)
+# /etc/patroni/config.yml - REQUIRES CAREFUL CUSTOMIZATION
+```
+
+```yaml
+scope: infisical-cluster
+namespace: /db/
+name: postgresql1
+
+restapi:
+  listen: 52.1.0.6:8008
+  connect_address: 52.1.0.6:8008
+
+etcd:
+  hosts: 52.1.0.3:2379,52.1.0.4:2379,52.1.0.5:2379
+
+bootstrap:
+  dcs:
+    ttl: 30
+    loop_wait: 10
+    retry_timeout: 10
+    maximum_lag_on_failover: 1048576
+    postgresql:
+      use_pg_rewind: true
+      parameters:
+        max_connections: 1000
+        shared_buffers: 2GB
+        work_mem: 8MB
+        max_worker_processes: 8
+        max_parallel_workers_per_gather: 4
+        max_parallel_workers: 8
+        wal_level: replica
+        hot_standby: "on"
+        max_wal_senders: 10
+        max_replication_slots: 10
+        hot_standby_feedback: "on"
+```
+
+4. Important considerations:
+   - Proper disk configuration for WAL and data directories
+   - Network latency between nodes
+   - Backup strategy and point-in-time recovery
+   - Monitoring and alerting setup
+   - Connection pooling configuration
+   - Security and network access controls
+
+5. Recommended readings:
+   - [PostgreSQL Backup and Recovery](https://www.postgresql.org/docs/current/backup.html)
+   - [PostgreSQL Monitoring](https://www.postgresql.org/docs/current/monitoring.html)
+
Configure Redis and Sentinel
+
+As with PostgreSQL, a full HA Redis setup guide is beyond the scope of this document. Below are the key resources and considerations for your deployment.
+
+#### Option A: Managed Redis Service (Recommended for Most Users)
+
+Use cloud provider managed Redis services:
+- AWS: ElastiCache for Redis with Multi-AZ
+- GCP: Memorystore for Redis with HA
+- Azure: Azure Cache for Redis with zone redundancy
+
+Follow your cloud provider's documentation:
+- [AWS ElastiCache Documentation](https://docs.aws.amazon.com/AmazonElastiCache/latest/red-ug/WhatIs.html)
+- [GCP Memorystore Documentation](https://cloud.google.com/memorystore/docs/redis)
+- [Azure Redis Cache Documentation](https://learn.microsoft.com/en-us/azure/azure-cache-for-redis/)
+
+#### Option B: Self-Managed Redis Cluster
+
+Setting up a production Redis HA cluster requires understanding several components. Refer to these linked resources:
+
+1. Required Reading:
+   - [Redis Sentinel Documentation](https://redis.io/docs/management/sentinel/)
+   - [Redis Replication Guide](https://redis.io/topics/replication)
+   - [Redis Security Guide](https://redis.io/topics/security)
+
+2. Key Steps Overview:
+```bash
+# 1. Install Redis on all nodes
+sudo apt update
+sudo apt install redis-server
+
+# 2. Configure master node (52.1.0.9)
+# /etc/redis/redis.conf
+```
+
+```conf
+bind 52.1.0.9
+port 6379
+dir /var/lib/redis
+maxmemory 3gb
+maxmemory-policy noeviction
+requirepass "your_redis_password"
+masterauth "your_redis_password"
+```
+
+3. Configure replica nodes (`52.1.0.10`, `52.1.0.11`):
+```conf
+# Change the bind address on each replica
+bind 52.1.0.10
+port 6379
+dir /var/lib/redis
+replicaof 52.1.0.9 6379
+masterauth "your_redis_password"
+requirepass "your_redis_password"
+```
+
+4. Configure Sentinel nodes (`52.1.0.12`, `52.1.0.13`, `52.1.0.14`):
+```conf
+port 26379
+sentinel monitor mymaster 52.1.0.9 6379 2
+sentinel auth-pass mymaster "your_redis_password"
+sentinel down-after-milliseconds mymaster 5000
+sentinel failover-timeout mymaster 60000
+sentinel parallel-syncs mymaster 1
+```
+
+5. Recommended Additional Reading:
+   - [Redis High Availability Tools](https://redis.io/topics/high-availability)
+   - [Redis Sentinel Client Implementation](https://redis.io/topics/sentinel-clients)
+
Configure HAProxy Load Balancer
+
+Install and configure HAProxy for internal load balancing:
+
+```conf ha-proxy-config
+global
+  maxconn 10000
+  log stdout format raw local0
+
+defaults
+  log global
+  mode tcp
+  retries 3
+  timeout client 30m
+  timeout connect 10s
+  timeout server 30m
+  timeout check 5s
+
+listen stats
+  mode http
+  bind *:7000
+  stats enable
+  stats uri /
+
+resolvers hostdns
+  nameserver dns 127.0.0.11:53
+  resolve_retries 3
+  timeout resolve 1s
+  timeout retry 1s
+  hold valid 5s
+
+frontend postgres_master
+  bind *:5000
+  default_backend postgres_master_backend
+
+frontend postgres_replicas
+  bind *:5001
+  default_backend postgres_replica_backend
+
+backend postgres_master_backend
+  option httpchk GET /master
+  http-check expect status 200
+  default-server inter 3s fall 3 rise 2 on-marked-down shutdown-sessions
+  server postgres-1 52.1.0.6:5432 check port 8008
+  server postgres-2 52.1.0.7:5432 check port 8008
+  server postgres-3 52.1.0.8:5432 check port 8008
+
+backend postgres_replica_backend
+  option httpchk GET /replica
+  http-check expect status 200
+  default-server inter 3s fall 3 rise 2 on-marked-down shutdown-sessions
+  server postgres-1 52.1.0.6:5432 check port 8008
+  server postgres-2 52.1.0.7:5432 check port 8008
+  server postgres-3 52.1.0.8:5432 check port 8008
+
+frontend redis_master_frontend
+  bind *:6379
+  default_backend redis_master_backend
+
+backend redis_master_backend
+  option tcp-check
+  tcp-check send AUTH\ your_redis_password\r\n
+  tcp-check expect string +OK
+  tcp-check send PING\r\n
+  tcp-check expect string +PONG
+  tcp-check send info\ replication\r\n
+  tcp-check expect string role:master
+  tcp-check send QUIT\r\n
+  tcp-check expect string +OK
+  server redis-1 52.1.0.9:6379 check inter 1s
+  server redis-2 52.1.0.10:6379 check inter 1s
+  server redis-3 52.1.0.11:6379 check inter 1s
+
+frontend infisical_frontend
+  bind *:80
+  default_backend infisical_backend
+
+backend infisical_backend
+  option httpchk GET /api/status
+  http-check expect status 200
+  server infisical-1 52.1.0.15:8080 check inter 1s
+  server infisical-2 52.1.0.16:8080 check inter 1s
+  server infisical-3 52.1.0.17:8080 check inter 1s
+```
+
+### 5. Deploy Infisical Core
+
+
+  First, add the Infisical repository:
+  ```bash
+  curl -1sLf \
+    'https://dl.cloudsmith.io/public/infisical/infisical-core/setup.deb.sh' \
+    | sudo -E bash
+  ```
+
+  Then install Infisical:
+  ```bash
+  sudo apt-get update && sudo apt-get install -y infisical-core
+  ```
+
+
+  For production environments, we strongly recommend installing a specific version of the package to maintain consistency across reinstalls. View available versions at [Infisical Package Versions](https://cloudsmith.io/~infisical/repos/infisical-core/packages/).
+
+
+
+
+  First, add the Infisical repository:
+  ```bash
+  curl -1sLf \
+    'https://dl.cloudsmith.io/public/infisical/infisical-core/setup.rpm.sh' \
+    | sudo -E bash
+  ```
+
+  Then install Infisical:
+  ```bash
+  sudo yum install infisical-core
+  ```
+
+
+  For production environments, we strongly recommend installing a specific version of the package to maintain consistency across reinstalls. View available versions at [Infisical Package Versions](https://cloudsmith.io/~infisical/repos/infisical-core/packages/).
+ + + + + +Next, create configuration file `/etc/infisical/infisical.rb` with the following: + +```ruby +infisical_core['ENCRYPTION_KEY'] = 'your-secure-encryption-key' +infisical_core['AUTH_SECRET'] = 'your-secure-auth-secret' + +infisical_core['DB_CONNECTION_URI'] = 'postgres://user:pass@52.1.0.2:5000/infisical' +infisical_core['REDIS_URL'] = 'redis://52.1.0.2:6379' + +infisical_core['PORT'] = 8080 +``` + +To generate `ENCRYPTION_KEY` and `AUTH_SECRET` view the [following configurations documentation here](/self-hosting/configuration/envars). + +If you are using managed services for either Postgres or Redis, please replace the values of the secrets accordingly. + + +Lastly, start and verify each node running infisical-core: +```bash +sudo infisical-ctl reconfigure +sudo infisical-ctl status +``` + +## Monitoring and Maintenance + +1. Monitor HAProxy stats: `http://52.1.0.2:7000/haproxy?stats` +2. Monitor Infisical logs: `sudo infisical-ctl tail` +3. Check cluster health: + - Etcd: `etcdctl cluster-health` + - PostgreSQL: `patronictl list` + - Redis: `redis-cli info replication` diff --git a/docs/style.css b/docs/style.css index 3359151e46..4d9877c6cc 100644 --- a/docs/style.css +++ b/docs/style.css @@ -10,7 +10,6 @@ #sidebar { left: 0; - padding-left: 48px; padding-right: 30px; border-right: 1px; border-color: #cdd64b; @@ -18,6 +17,10 @@ border-right: 1px solid #ebebeb; } +#sidebar-content { + padding-left: 2rem; +} + #sidebar .relative .sticky { opacity: 0; } @@ -154,4 +157,4 @@ .flex-1 .flex .items-center { /* background-color: #f5f5f5; */ -} \ No newline at end of file +} diff --git a/frontend/next.config.js b/frontend/next.config.js index 5e48e70da7..e07695ed6e 100644 --- a/frontend/next.config.js +++ b/frontend/next.config.js @@ -2,7 +2,8 @@ const path = require("path"); const ContentSecurityPolicy = ` default-src 'self'; - script-src 'self' https://app.posthog.com https://js.stripe.com https://api.stripe.com https://widget.intercom.io https://js.intercomcdn.com https://hcaptcha.com https://*.hcaptcha.com 'unsafe-inline' 'unsafe-eval'; + connect-src 'self' https://*.posthog.com; + script-src 'self' https://*.posthog.com https://js.stripe.com https://api.stripe.com https://widget.intercom.io https://js.intercomcdn.com https://hcaptcha.com https://*.hcaptcha.com 'unsafe-inline' 'unsafe-eval'; style-src 'self' https://rsms.me 'unsafe-inline' https://hcaptcha.com https://*.hcaptcha.com; child-src https://api.stripe.com; frame-src https://js.stripe.com/ https://api.stripe.com https://www.youtube.com/ https://hcaptcha.com https://*.hcaptcha.com; diff --git a/frontend/package-lock.json b/frontend/package-lock.json index 489df0ea18..4c76c1dfb7 100644 --- a/frontend/package-lock.json +++ b/frontend/package-lock.json @@ -22,9 +22,11 @@ "@headlessui/react": "^1.7.7", "@hookform/resolvers": "^2.9.10", "@octokit/rest": "^19.0.7", + "@peculiar/x509": "^1.11.0", "@radix-ui/react-accordion": "^1.1.2", "@radix-ui/react-alert-dialog": "^1.0.5", "@radix-ui/react-checkbox": "^1.0.4", + "@radix-ui/react-collapsible": "^1.0.3", "@radix-ui/react-dialog": "^1.0.5", "@radix-ui/react-dropdown-menu": "^2.0.6", "@radix-ui/react-hover-card": "^1.0.7", @@ -39,7 +41,7 @@ "@radix-ui/react-toast": "^1.1.5", "@radix-ui/react-tooltip": "^1.0.7", "@reduxjs/toolkit": "^1.8.3", - "@sindresorhus/slugify": "^2.2.1", + "@sindresorhus/slugify": "1.1.0", "@stripe/react-stripe-js": "^1.16.3", "@stripe/stripe-js": "^1.46.0", "@tanstack/react-query": "^4.23.0", @@ -47,7 +49,7 @@ "@ucast/mongo2js": "^1.3.4", "add": "^2.0.6", 
"argon2-browser": "^1.18.0", - "axios": "^0.27.2", + "axios": "^0.28.0", "axios-auth-refresh": "^3.3.6", "base64-loader": "^1.0.0", "classnames": "^2.3.1", @@ -63,7 +65,7 @@ "i18next-browser-languagedetector": "^7.0.1", "i18next-http-backend": "^2.2.0", "infisical-node": "^1.0.37", - "jspdf": "^2.5.1", + "jspdf": "^2.5.2", "jsrp": "^0.2.4", "jwt-decode": "^3.1.2", "lottie-react": "^2.4.0", @@ -73,6 +75,7 @@ "nprogress": "^0.2.0", "picomatch": "^2.3.1", "posthog-js": "^1.105.6", + "qrcode": "^1.5.4", "query-string": "^7.1.3", "react": "^17.0.2", "react-beautiful-dnd": "^13.1.1", @@ -82,9 +85,11 @@ "react-grid-layout": "^1.3.4", "react-hook-form": "^7.43.0", "react-i18next": "^12.2.2", + "react-icons": "^5.3.0", "react-mailchimp-subscribe": "^2.1.3", "react-markdown": "^8.0.3", "react-redux": "^8.0.2", + "react-select": "^5.8.1", "react-table": "^7.8.0", "react-toastify": "^9.1.3", "sanitize-html": "^2.12.1", @@ -116,6 +121,7 @@ "@types/jsrp": "^0.2.4", "@types/node": "^18.11.9", "@types/picomatch": "^2.3.0", + "@types/qrcode": "^1.5.5", "@types/react": "^18.0.26", "@types/sanitize-html": "^2.9.0", "@typescript-eslint/eslint-plugin": "^5.48.1", @@ -135,10 +141,10 @@ "eslint-plugin-react-hooks": "^4.6.0", "eslint-plugin-simple-import-sort": "^8.0.0", "eslint-plugin-storybook": "^0.6.12", - "postcss": "^8.4.14", + "postcss": "^8.4.39", "prettier": "^2.8.3", "prettier-plugin-tailwindcss": "^0.2.2", - "storybook": "^7.5.2", + "storybook": "^7.6.20", "storybook-dark-mode": "^3.0.0", "tailwindcss": "3.2", "typescript": "^4.9.3" @@ -2109,9 +2115,9 @@ } }, "node_modules/@babel/register": { - "version": "7.23.7", - "resolved": "https://registry.npmjs.org/@babel/register/-/register-7.23.7.tgz", - "integrity": "sha512-EjJeB6+kvpk+Y5DAkEAmbOBEFkh9OASx0huoEkqYTFxAZHzOAX2Oh5uwAUuL2rUddqfM0SA+KPXV2TbzoZ2kvQ==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/register/-/register-7.24.6.tgz", + "integrity": "sha512-WSuFCc2wCqMeXkz/i3yfAAsxwWflEgbVkZzivgAmXl/MxrXeoYFZOOPllbC8R8WTF7u61wSRQtDVZ1879cdu6w==", "dev": true, "dependencies": { "clone-deep": "^4.0.1", @@ -2502,15 +2508,16 @@ } }, "node_modules/@emotion/babel-plugin": { - "version": "11.11.0", - "resolved": "https://registry.npmjs.org/@emotion/babel-plugin/-/babel-plugin-11.11.0.tgz", - "integrity": "sha512-m4HEDZleaaCH+XgDDsPF15Ht6wTLsgDTeR3WYj9Q/k76JtWhrJjcP4+/XlG8LGT/Rol9qUfOIztXeA84ATpqPQ==", + "version": "11.12.0", + "resolved": "https://registry.npmjs.org/@emotion/babel-plugin/-/babel-plugin-11.12.0.tgz", + "integrity": "sha512-y2WQb+oP8Jqvvclh8Q55gLUyb7UFvgv7eJfsj7td5TToBrIUtPay2kMrZi4xjq9qw2vD0ZR5fSho0yqoFgX7Rw==", + "license": "MIT", "dependencies": { "@babel/helper-module-imports": "^7.16.7", "@babel/runtime": "^7.18.3", - "@emotion/hash": "^0.9.1", - "@emotion/memoize": "^0.8.1", - "@emotion/serialize": "^1.1.2", + "@emotion/hash": "^0.9.2", + "@emotion/memoize": "^0.9.0", + "@emotion/serialize": "^1.2.0", "babel-plugin-macros": "^3.1.0", "convert-source-map": "^1.5.0", "escape-string-regexp": "^4.0.0", @@ -2519,18 +2526,31 @@ "stylis": "4.2.0" } }, + "node_modules/@emotion/babel-plugin/node_modules/@emotion/memoize": { + "version": "0.9.0", + "resolved": "https://registry.npmjs.org/@emotion/memoize/-/memoize-0.9.0.tgz", + "integrity": "sha512-30FAj7/EoJ5mwVPOWhAyCX+FPfMDrVecJAM+Iw9NRoSl4BBAQeqj4cApHHUXOVvIPgLVDsCFoz/hGD+5QQD1GQ==", + "license": "MIT" + }, "node_modules/@emotion/cache": { - "version": "11.11.0", - "resolved": "https://registry.npmjs.org/@emotion/cache/-/cache-11.11.0.tgz", - "integrity": 
"sha512-P34z9ssTCBi3e9EI1ZsWpNHcfY1r09ZO0rZbRO2ob3ZQMnFI35jB536qoXbkdesr5EUhYi22anuEJuyxifaqAQ==", + "version": "11.13.1", + "resolved": "https://registry.npmjs.org/@emotion/cache/-/cache-11.13.1.tgz", + "integrity": "sha512-iqouYkuEblRcXmylXIwwOodiEK5Ifl7JcX7o6V4jI3iW4mLXX3dmt5xwBtIkJiQEXFAI+pC8X0i67yiPkH9Ucw==", + "license": "MIT", "dependencies": { - "@emotion/memoize": "^0.8.1", - "@emotion/sheet": "^1.2.2", - "@emotion/utils": "^1.2.1", - "@emotion/weak-memoize": "^0.3.1", + "@emotion/memoize": "^0.9.0", + "@emotion/sheet": "^1.4.0", + "@emotion/utils": "^1.4.0", + "@emotion/weak-memoize": "^0.4.0", "stylis": "4.2.0" } }, + "node_modules/@emotion/cache/node_modules/@emotion/memoize": { + "version": "0.9.0", + "resolved": "https://registry.npmjs.org/@emotion/memoize/-/memoize-0.9.0.tgz", + "integrity": "sha512-30FAj7/EoJ5mwVPOWhAyCX+FPfMDrVecJAM+Iw9NRoSl4BBAQeqj4cApHHUXOVvIPgLVDsCFoz/hGD+5QQD1GQ==", + "license": "MIT" + }, "node_modules/@emotion/css": { "version": "11.11.2", "resolved": "https://registry.npmjs.org/@emotion/css/-/css-11.11.2.tgz", @@ -2544,9 +2564,10 @@ } }, "node_modules/@emotion/hash": { - "version": "0.9.1", - "resolved": "https://registry.npmjs.org/@emotion/hash/-/hash-0.9.1.tgz", - "integrity": "sha512-gJB6HLm5rYwSLI6PQa+X1t5CFGrv1J1TWG+sOyMCeKz2ojaj6Fnl/rZEspogG+cvqbt4AE/2eIyD2QfLKTBNlQ==" + "version": "0.9.2", + "resolved": "https://registry.npmjs.org/@emotion/hash/-/hash-0.9.2.tgz", + "integrity": "sha512-MyqliTZGuOm3+5ZRSaaBGP3USLw6+EGykkwZns2EPC5g8jJ4z9OrdZY9apkl3+UP9+sdz76YYkwCKP5gh8iY3g==", + "license": "MIT" }, "node_modules/@emotion/is-prop-valid": { "version": "0.8.8", @@ -2568,18 +2589,49 @@ "resolved": "https://registry.npmjs.org/@emotion/memoize/-/memoize-0.8.1.tgz", "integrity": "sha512-W2P2c/VRW1/1tLox0mVUalvnWXxavmv/Oum2aPsRcoDJuob75FC3Y8FbpfLwUegRcxINtGUMPq0tFCvYNTBXNA==" }, - "node_modules/@emotion/serialize": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/@emotion/serialize/-/serialize-1.1.3.tgz", - "integrity": "sha512-iD4D6QVZFDhcbH0RAG1uVu1CwVLMWUkCvAqqlewO/rxf8+87yIBAlt4+AxMiiKPLs5hFc0owNk/sLLAOROw3cA==", + "node_modules/@emotion/react": { + "version": "11.13.3", + "resolved": "https://registry.npmjs.org/@emotion/react/-/react-11.13.3.tgz", + "integrity": "sha512-lIsdU6JNrmYfJ5EbUCf4xW1ovy5wKQ2CkPRM4xogziOxH1nXxBSjpC9YqbFAP7circxMfYp+6x676BqWcEiixg==", + "license": "MIT", "dependencies": { - "@emotion/hash": "^0.9.1", - "@emotion/memoize": "^0.8.1", - "@emotion/unitless": "^0.8.1", - "@emotion/utils": "^1.2.1", + "@babel/runtime": "^7.18.3", + "@emotion/babel-plugin": "^11.12.0", + "@emotion/cache": "^11.13.0", + "@emotion/serialize": "^1.3.1", + "@emotion/use-insertion-effect-with-fallbacks": "^1.1.0", + "@emotion/utils": "^1.4.0", + "@emotion/weak-memoize": "^0.4.0", + "hoist-non-react-statics": "^3.3.1" + }, + "peerDependencies": { + "react": ">=16.8.0" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@emotion/serialize": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/@emotion/serialize/-/serialize-1.3.2.tgz", + "integrity": "sha512-grVnMvVPK9yUVE6rkKfAJlYZgo0cu3l9iMC77V7DW6E1DUIrU68pSEXRmFZFOFB1QFo57TncmOcvcbMDWsL4yA==", + "license": "MIT", + "dependencies": { + "@emotion/hash": "^0.9.2", + "@emotion/memoize": "^0.9.0", + "@emotion/unitless": "^0.10.0", + "@emotion/utils": "^1.4.1", "csstype": "^3.0.2" } }, + "node_modules/@emotion/serialize/node_modules/@emotion/memoize": { + "version": "0.9.0", + "resolved": 
"https://registry.npmjs.org/@emotion/memoize/-/memoize-0.9.0.tgz", + "integrity": "sha512-30FAj7/EoJ5mwVPOWhAyCX+FPfMDrVecJAM+Iw9NRoSl4BBAQeqj4cApHHUXOVvIPgLVDsCFoz/hGD+5QQD1GQ==", + "license": "MIT" + }, "node_modules/@emotion/server": { "version": "11.11.0", "resolved": "https://registry.npmjs.org/@emotion/server/-/server-11.11.0.tgz", @@ -2600,9 +2652,10 @@ } }, "node_modules/@emotion/sheet": { - "version": "1.2.2", - "resolved": "https://registry.npmjs.org/@emotion/sheet/-/sheet-1.2.2.tgz", - "integrity": "sha512-0QBtGvaqtWi+nx6doRwDdBIzhNdZrXUppvTM4dtZZWEGTXL/XE/yJxLMGlDT1Gt+UHH5IX1n+jkXyytE/av7OA==" + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/@emotion/sheet/-/sheet-1.4.0.tgz", + "integrity": "sha512-fTBW9/8r2w3dXWYM4HCB1Rdp8NLibOw2+XELH5m5+AkWiL/KqYX6dc0kKYlaYyKjrQ6ds33MCdMPEwgs2z1rqg==", + "license": "MIT" }, "node_modules/@emotion/stylis": { "version": "0.8.5", @@ -2610,28 +2663,31 @@ "integrity": "sha512-h6KtPihKFn3T9fuIrwvXXUOwlx3rfUvfZIcP5a6rh8Y7zjE3O06hT5Ss4S/YI1AYhuZ1kjaE/5EaOOI2NqSylQ==" }, "node_modules/@emotion/unitless": { - "version": "0.8.1", - "resolved": "https://registry.npmjs.org/@emotion/unitless/-/unitless-0.8.1.tgz", - "integrity": "sha512-KOEGMu6dmJZtpadb476IsZBclKvILjopjUii3V+7MnXIQCYh8W3NgNcgwo21n9LXZX6EDIKvqfjYxXebDwxKmQ==" + "version": "0.10.0", + "resolved": "https://registry.npmjs.org/@emotion/unitless/-/unitless-0.10.0.tgz", + "integrity": "sha512-dFoMUuQA20zvtVTuxZww6OHoJYgrzfKM1t52mVySDJnMSEa08ruEvdYQbhvyu6soU+NeLVd3yKfTfT0NeV6qGg==", + "license": "MIT" }, "node_modules/@emotion/use-insertion-effect-with-fallbacks": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/@emotion/use-insertion-effect-with-fallbacks/-/use-insertion-effect-with-fallbacks-1.0.1.tgz", - "integrity": "sha512-jT/qyKZ9rzLErtrjGgdkMBn2OP8wl0G3sQlBb3YPryvKHsjvINUhVaPFfP+fpBcOkmrVOVEEHQFJ7nbj2TH2gw==", - "dev": true, + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@emotion/use-insertion-effect-with-fallbacks/-/use-insertion-effect-with-fallbacks-1.1.0.tgz", + "integrity": "sha512-+wBOcIV5snwGgI2ya3u99D7/FJquOIniQT1IKyDsBmEgwvpxMNeS65Oib7OnE2d2aY+3BU4OiH+0Wchf8yk3Hw==", + "license": "MIT", "peerDependencies": { "react": ">=16.8.0" } }, "node_modules/@emotion/utils": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/@emotion/utils/-/utils-1.2.1.tgz", - "integrity": "sha512-Y2tGf3I+XVnajdItskUCn6LX+VUDmP6lTL4fcqsXAv43dnlbZiuW4MWQW38rW/BVWSE7Q/7+XQocmpnRYILUmg==" + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/@emotion/utils/-/utils-1.4.1.tgz", + "integrity": "sha512-BymCXzCG3r72VKJxaYVwOXATqXIZ85cuvg0YOUDxMGNrKc1DJRZk8MgV5wyXRyEayIMd4FuXJIUgTBXvDNW5cA==", + "license": "MIT" }, "node_modules/@emotion/weak-memoize": { - "version": "0.3.1", - "resolved": "https://registry.npmjs.org/@emotion/weak-memoize/-/weak-memoize-0.3.1.tgz", - "integrity": "sha512-EsBwpc7hBUJWAsNPBmJy4hxWx12v6bshQsldrVmjxJoc3isbxhOrF2IcCpaXxfvq03NwkI7sbsOLXbYuqF/8Ww==" + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/@emotion/weak-memoize/-/weak-memoize-0.4.0.tgz", + "integrity": "sha512-snKqtPW01tN0ui7yu9rGv69aJXr/a/Ywvl11sUjNtEcRc+ng/mQriFL0wLXMef74iHa/EkftbDzU9F8iFbH+zg==", + "license": "MIT" }, "node_modules/@esbuild/android-arm": { "version": "0.18.20", @@ -4519,6 +4575,149 @@ "@octokit/openapi-types": "^18.0.0" } }, + "node_modules/@peculiar/asn1-cms": { + "version": "2.3.13", + "resolved": "https://registry.npmjs.org/@peculiar/asn1-cms/-/asn1-cms-2.3.13.tgz", + "integrity": 
"sha512-joqu8A7KR2G85oLPq+vB+NFr2ro7Ls4ol13Zcse/giPSzUNN0n2k3v8kMpf6QdGUhI13e5SzQYN8AKP8sJ8v4w==", + "dependencies": { + "@peculiar/asn1-schema": "^2.3.13", + "@peculiar/asn1-x509": "^2.3.13", + "@peculiar/asn1-x509-attr": "^2.3.13", + "asn1js": "^3.0.5", + "tslib": "^2.6.2" + } + }, + "node_modules/@peculiar/asn1-csr": { + "version": "2.3.13", + "resolved": "https://registry.npmjs.org/@peculiar/asn1-csr/-/asn1-csr-2.3.13.tgz", + "integrity": "sha512-+JtFsOUWCw4zDpxp1LbeTYBnZLlGVOWmHHEhoFdjM5yn4wCn+JiYQ8mghOi36M2f6TPQ17PmhNL6/JfNh7/jCA==", + "dependencies": { + "@peculiar/asn1-schema": "^2.3.13", + "@peculiar/asn1-x509": "^2.3.13", + "asn1js": "^3.0.5", + "tslib": "^2.6.2" + } + }, + "node_modules/@peculiar/asn1-ecc": { + "version": "2.3.13", + "resolved": "https://registry.npmjs.org/@peculiar/asn1-ecc/-/asn1-ecc-2.3.13.tgz", + "integrity": "sha512-3dF2pQcrN/WJEMq+9qWLQ0gqtn1G81J4rYqFl6El6QV367b4IuhcRv+yMA84tNNyHOJn9anLXV5radnpPiG3iA==", + "dependencies": { + "@peculiar/asn1-schema": "^2.3.13", + "@peculiar/asn1-x509": "^2.3.13", + "asn1js": "^3.0.5", + "tslib": "^2.6.2" + } + }, + "node_modules/@peculiar/asn1-pfx": { + "version": "2.3.13", + "resolved": "https://registry.npmjs.org/@peculiar/asn1-pfx/-/asn1-pfx-2.3.13.tgz", + "integrity": "sha512-fypYxjn16BW+5XbFoY11Rm8LhZf6euqX/C7BTYpqVvLem1GvRl7A+Ro1bO/UPwJL0z+1mbvXEnkG0YOwbwz2LA==", + "dependencies": { + "@peculiar/asn1-cms": "^2.3.13", + "@peculiar/asn1-pkcs8": "^2.3.13", + "@peculiar/asn1-rsa": "^2.3.13", + "@peculiar/asn1-schema": "^2.3.13", + "asn1js": "^3.0.5", + "tslib": "^2.6.2" + } + }, + "node_modules/@peculiar/asn1-pkcs8": { + "version": "2.3.13", + "resolved": "https://registry.npmjs.org/@peculiar/asn1-pkcs8/-/asn1-pkcs8-2.3.13.tgz", + "integrity": "sha512-VP3PQzbeSSjPjKET5K37pxyf2qCdM0dz3DJ56ZCsol3FqAXGekb4sDcpoL9uTLGxAh975WcdvUms9UcdZTuGyQ==", + "dependencies": { + "@peculiar/asn1-schema": "^2.3.13", + "@peculiar/asn1-x509": "^2.3.13", + "asn1js": "^3.0.5", + "tslib": "^2.6.2" + } + }, + "node_modules/@peculiar/asn1-pkcs9": { + "version": "2.3.13", + "resolved": "https://registry.npmjs.org/@peculiar/asn1-pkcs9/-/asn1-pkcs9-2.3.13.tgz", + "integrity": "sha512-rIwQXmHpTo/dgPiWqUgby8Fnq6p1xTJbRMxCiMCk833kQCeZrC5lbSKg6NDnJTnX2kC6IbXBB9yCS2C73U2gJg==", + "dependencies": { + "@peculiar/asn1-cms": "^2.3.13", + "@peculiar/asn1-pfx": "^2.3.13", + "@peculiar/asn1-pkcs8": "^2.3.13", + "@peculiar/asn1-schema": "^2.3.13", + "@peculiar/asn1-x509": "^2.3.13", + "@peculiar/asn1-x509-attr": "^2.3.13", + "asn1js": "^3.0.5", + "tslib": "^2.6.2" + } + }, + "node_modules/@peculiar/asn1-rsa": { + "version": "2.3.13", + "resolved": "https://registry.npmjs.org/@peculiar/asn1-rsa/-/asn1-rsa-2.3.13.tgz", + "integrity": "sha512-wBNQqCyRtmqvXkGkL4DR3WxZhHy8fDiYtOjTeCd7SFE5F6GBeafw3EJ94PX/V0OJJrjQ40SkRY2IZu3ZSyBqcg==", + "dependencies": { + "@peculiar/asn1-schema": "^2.3.13", + "@peculiar/asn1-x509": "^2.3.13", + "asn1js": "^3.0.5", + "tslib": "^2.6.2" + } + }, + "node_modules/@peculiar/asn1-schema": { + "version": "2.3.13", + "resolved": "https://registry.npmjs.org/@peculiar/asn1-schema/-/asn1-schema-2.3.13.tgz", + "integrity": "sha512-3Xq3a01WkHRZL8X04Zsfg//mGaA21xlL4tlVn4v2xGT0JStiztATRkMwa5b+f/HXmY2smsiLXYK46Gwgzvfg3g==", + "dependencies": { + "asn1js": "^3.0.5", + "pvtsutils": "^1.3.5", + "tslib": "^2.6.2" + } + }, + "node_modules/@peculiar/asn1-x509": { + "version": "2.3.13", + "resolved": "https://registry.npmjs.org/@peculiar/asn1-x509/-/asn1-x509-2.3.13.tgz", + "integrity": 
"sha512-PfeLQl2skXmxX2/AFFCVaWU8U6FKW1Db43mgBhShCOFS1bVxqtvusq1hVjfuEcuSQGedrLdCSvTgabluwN/M9A==", + "dependencies": { + "@peculiar/asn1-schema": "^2.3.13", + "asn1js": "^3.0.5", + "ipaddr.js": "^2.1.0", + "pvtsutils": "^1.3.5", + "tslib": "^2.6.2" + } + }, + "node_modules/@peculiar/asn1-x509-attr": { + "version": "2.3.13", + "resolved": "https://registry.npmjs.org/@peculiar/asn1-x509-attr/-/asn1-x509-attr-2.3.13.tgz", + "integrity": "sha512-WpEos6CcnUzJ6o2Qb68Z7Dz5rSjRGv/DtXITCNBtjZIRWRV12yFVci76SVfOX8sisL61QWMhpLKQibrG8pi2Pw==", + "dependencies": { + "@peculiar/asn1-schema": "^2.3.13", + "@peculiar/asn1-x509": "^2.3.13", + "asn1js": "^3.0.5", + "tslib": "^2.6.2" + } + }, + "node_modules/@peculiar/asn1-x509/node_modules/ipaddr.js": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-2.2.0.tgz", + "integrity": "sha512-Ag3wB2o37wslZS19hZqorUnrnzSkpOVy+IiiDEiTqNubEYpYuHWIf6K4psgN2ZWKExS4xhVCrRVfb/wfW8fWJA==", + "engines": { + "node": ">= 10" + } + }, + "node_modules/@peculiar/x509": { + "version": "1.11.0", + "resolved": "https://registry.npmjs.org/@peculiar/x509/-/x509-1.11.0.tgz", + "integrity": "sha512-8rdxE//tsWLb2Yo2TYO2P8gieStbrHK/huFMV5PPfwX8I5HmtOus+Ox6nTKrPA9o+WOPaa5xKenee+QdmHBd5g==", + "dependencies": { + "@peculiar/asn1-cms": "^2.3.8", + "@peculiar/asn1-csr": "^2.3.8", + "@peculiar/asn1-ecc": "^2.3.8", + "@peculiar/asn1-pkcs9": "^2.3.8", + "@peculiar/asn1-rsa": "^2.3.8", + "@peculiar/asn1-schema": "^2.3.8", + "@peculiar/asn1-x509": "^2.3.8", + "pvtsutils": "^1.3.5", + "reflect-metadata": "^0.2.2", + "tslib": "^2.6.2", + "tsyringe": "^4.8.0" + } + }, "node_modules/@pkgjs/parseargs": { "version": "0.11.0", "resolved": "https://registry.npmjs.org/@pkgjs/parseargs/-/parseargs-0.11.0.tgz", @@ -4734,6 +4933,7 @@ "version": "1.0.3", "resolved": "https://registry.npmjs.org/@radix-ui/react-collapsible/-/react-collapsible-1.0.3.tgz", "integrity": "sha512-UBmVDkmR6IvDsloHVN+3rtx4Mi5TFvylYXpluuv0f37dtaz3H99bp8No0LGXRigVpl3UAT4l9j6bIchh42S/Gg==", + "license": "MIT", "dependencies": { "@babel/runtime": "^7.13.10", "@radix-ui/primitive": "1.0.1", @@ -5797,54 +5997,44 @@ "dev": true }, "node_modules/@sindresorhus/slugify": { - "version": "2.2.1", - "resolved": "https://registry.npmjs.org/@sindresorhus/slugify/-/slugify-2.2.1.tgz", - "integrity": "sha512-MkngSCRZ8JdSOCHRaYd+D01XhvU3Hjy6MGl06zhOk614hp9EOAp5gIkBeQg7wtmxpitU6eAL4kdiRMcJa2dlrw==", + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@sindresorhus/slugify/-/slugify-1.1.0.tgz", + "integrity": "sha512-ujZRbmmizX26yS/HnB3P9QNlNa4+UvHh+rIse3RbOXLp8yl6n1TxB4t7NHggtVgS8QmmOtzXo48kCxZGACpkPw==", + "license": "MIT", "dependencies": { - "@sindresorhus/transliterate": "^1.0.0", - "escape-string-regexp": "^5.0.0" + "@sindresorhus/transliterate": "^0.1.1", + "escape-string-regexp": "^4.0.0" }, "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/@sindresorhus/slugify/node_modules/escape-string-regexp": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-5.0.0.tgz", - "integrity": "sha512-/veY75JbMK4j1yjvuUxuVsiS/hr/4iHs9FTT6cgTexxdE0Ly/glccBAkloH/DofkjRbZU3bnoj38mOmhkZ0lHw==", - "engines": { - "node": ">=12" + "node": ">=10" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/@sindresorhus/transliterate": { - "version": "1.6.0", - "resolved": "https://registry.npmjs.org/@sindresorhus/transliterate/-/transliterate-1.6.0.tgz", - 
"integrity": "sha512-doH1gimEu3A46VX6aVxpHTeHrytJAG6HgdxntYnCFiIFHEM/ZGpG8KiZGBChchjQmG0XFIBL552kBTjVcMZXwQ==", + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/@sindresorhus/transliterate/-/transliterate-0.1.2.tgz", + "integrity": "sha512-5/kmIOY9FF32nicXH+5yLNTX4NJ4atl7jRgqAJuIn/iyDFXBktOKDxCvyGE/EzmF4ngSUvjXxQUQlQiZ5lfw+w==", + "license": "MIT", "dependencies": { - "escape-string-regexp": "^5.0.0" + "escape-string-regexp": "^2.0.0", + "lodash.deburr": "^4.1.0" }, "engines": { - "node": ">=12" + "node": ">=10" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/@sindresorhus/transliterate/node_modules/escape-string-regexp": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-5.0.0.tgz", - "integrity": "sha512-/veY75JbMK4j1yjvuUxuVsiS/hr/4iHs9FTT6cgTexxdE0Ly/glccBAkloH/DofkjRbZU3bnoj38mOmhkZ0lHw==", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-2.0.0.tgz", + "integrity": "sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w==", + "license": "MIT", "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" + "node": ">=8" } }, "node_modules/@storybook/addon-actions": { @@ -6199,15 +6389,15 @@ } }, "node_modules/@storybook/builder-manager": { - "version": "7.6.8", - "resolved": "https://registry.npmjs.org/@storybook/builder-manager/-/builder-manager-7.6.8.tgz", - "integrity": "sha512-4CZo1RHPlDJA7G+lJoVdi+/3/L1ERxVxtvwuGgk8CxVDt6vFNpoc7fEGryNv3GRzKN1/luNYNU1MTnCUSn0B2g==", + "version": "7.6.20", + "resolved": "https://registry.npmjs.org/@storybook/builder-manager/-/builder-manager-7.6.20.tgz", + "integrity": "sha512-e2GzpjLaw6CM/XSmc4qJRzBF8GOoOyotyu3JrSPTYOt4RD8kjUsK4QlismQM1DQRu8i39aIexxmRbiJyD74xzQ==", "dev": true, "dependencies": { "@fal-works/esbuild-plugin-global-externals": "^2.1.2", - "@storybook/core-common": "7.6.8", - "@storybook/manager": "7.6.8", - "@storybook/node-logger": "7.6.8", + "@storybook/core-common": "7.6.20", + "@storybook/manager": "7.6.20", + "@storybook/node-logger": "7.6.20", "@types/ejs": "^3.1.1", "@types/find-cache-dir": "^3.2.1", "@yarnpkg/esbuild-plugin-pnp": "^3.0.0-rc.10", @@ -6226,6 +6416,111 @@ "url": "https://opencollective.com/storybook" } }, + "node_modules/@storybook/builder-manager/node_modules/@storybook/channels": { + "version": "7.6.20", + "resolved": "https://registry.npmjs.org/@storybook/channels/-/channels-7.6.20.tgz", + "integrity": "sha512-4hkgPSH6bJclB2OvLnkZOGZW1WptJs09mhQ6j6qLjgBZzL/ZdD6priWSd7iXrmPiN5TzUobkG4P4Dp7FjkiO7A==", + "dev": true, + "dependencies": { + "@storybook/client-logger": "7.6.20", + "@storybook/core-events": "7.6.20", + "@storybook/global": "^5.0.0", + "qs": "^6.10.0", + "telejson": "^7.2.0", + "tiny-invariant": "^1.3.1" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/storybook" + } + }, + "node_modules/@storybook/builder-manager/node_modules/@storybook/client-logger": { + "version": "7.6.20", + "resolved": "https://registry.npmjs.org/@storybook/client-logger/-/client-logger-7.6.20.tgz", + "integrity": "sha512-NwG0VIJQCmKrSaN5GBDFyQgTAHLNishUPLW1NrzqTDNAhfZUoef64rPQlinbopa0H4OXmlB+QxbQIb3ubeXmSQ==", + "dev": true, + "dependencies": { + "@storybook/global": "^5.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/storybook" + } + }, + 
"node_modules/@storybook/builder-manager/node_modules/@storybook/core-common": { + "version": "7.6.20", + "resolved": "https://registry.npmjs.org/@storybook/core-common/-/core-common-7.6.20.tgz", + "integrity": "sha512-8H1zPWPjcmeD4HbDm4FDD0WLsfAKGVr566IZ4hG+h3iWVW57II9JW9MLBtiR2LPSd8u7o0kw64lwRGmtCO1qAw==", + "dev": true, + "dependencies": { + "@storybook/core-events": "7.6.20", + "@storybook/node-logger": "7.6.20", + "@storybook/types": "7.6.20", + "@types/find-cache-dir": "^3.2.1", + "@types/node": "^18.0.0", + "@types/node-fetch": "^2.6.4", + "@types/pretty-hrtime": "^1.0.0", + "chalk": "^4.1.0", + "esbuild": "^0.18.0", + "esbuild-register": "^3.5.0", + "file-system-cache": "2.3.0", + "find-cache-dir": "^3.0.0", + "find-up": "^5.0.0", + "fs-extra": "^11.1.0", + "glob": "^10.0.0", + "handlebars": "^4.7.7", + "lazy-universal-dotenv": "^4.0.0", + "node-fetch": "^2.0.0", + "picomatch": "^2.3.0", + "pkg-dir": "^5.0.0", + "pretty-hrtime": "^1.0.3", + "resolve-from": "^5.0.0", + "ts-dedent": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/storybook" + } + }, + "node_modules/@storybook/builder-manager/node_modules/@storybook/core-events": { + "version": "7.6.20", + "resolved": "https://registry.npmjs.org/@storybook/core-events/-/core-events-7.6.20.tgz", + "integrity": "sha512-tlVDuVbDiNkvPDFAu+0ou3xBBYbx9zUURQz4G9fAq0ScgBOs/bpzcRrFb4mLpemUViBAd47tfZKdH4MAX45KVQ==", + "dev": true, + "dependencies": { + "ts-dedent": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/storybook" + } + }, + "node_modules/@storybook/builder-manager/node_modules/@storybook/node-logger": { + "version": "7.6.20", + "resolved": "https://registry.npmjs.org/@storybook/node-logger/-/node-logger-7.6.20.tgz", + "integrity": "sha512-l2i4qF1bscJkOplNffcRTsgQWYR7J51ewmizj5YrTM8BK6rslWT1RntgVJWB1RgPqvx6VsCz1gyP3yW1oKxvYw==", + "dev": true, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/storybook" + } + }, + "node_modules/@storybook/builder-manager/node_modules/@storybook/types": { + "version": "7.6.20", + "resolved": "https://registry.npmjs.org/@storybook/types/-/types-7.6.20.tgz", + "integrity": "sha512-GncdY3x0LpbhmUAAJwXYtJDUQEwfF175gsjH0/fxPkxPoV7Sef9TM41jQLJW/5+6TnZoCZP/+aJZTJtq3ni23Q==", + "dev": true, + "dependencies": { + "@storybook/channels": "7.6.20", + "@types/babel__core": "^7.0.0", + "@types/express": "^4.7.0", + "file-system-cache": "2.3.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/storybook" + } + }, "node_modules/@storybook/builder-webpack5": { "version": "7.6.8", "resolved": "https://registry.npmjs.org/@storybook/builder-webpack5/-/builder-webpack5-7.6.8.tgz", @@ -6332,23 +6627,23 @@ } }, "node_modules/@storybook/cli": { - "version": "7.6.8", - "resolved": "https://registry.npmjs.org/@storybook/cli/-/cli-7.6.8.tgz", - "integrity": "sha512-Is8nkgsbIOu+Jk9Z7x5sgMPgGs9RTVDum3cz9eA4UspPiIBJsf7nGHAWOtc+mCIm6Z3eeNbT1YMOWxz9EuqboA==", + "version": "7.6.20", + "resolved": "https://registry.npmjs.org/@storybook/cli/-/cli-7.6.20.tgz", + "integrity": "sha512-ZlP+BJyqg7HlnXf7ypjG2CKMI/KVOn03jFIiClItE/jQfgR6kRFgtjRU7uajh427HHfjv9DRiur8nBzuO7vapA==", "dev": true, "dependencies": { "@babel/core": "^7.23.2", "@babel/preset-env": "^7.23.2", "@babel/types": "^7.23.0", "@ndelangen/get-tarball": "^3.0.7", - "@storybook/codemod": "7.6.8", - "@storybook/core-common": "7.6.8", - "@storybook/core-events": "7.6.8", - "@storybook/core-server": "7.6.8", - 
"@storybook/csf-tools": "7.6.8", - "@storybook/node-logger": "7.6.8", - "@storybook/telemetry": "7.6.8", - "@storybook/types": "7.6.8", + "@storybook/codemod": "7.6.20", + "@storybook/core-common": "7.6.20", + "@storybook/core-events": "7.6.20", + "@storybook/core-server": "7.6.20", + "@storybook/csf-tools": "7.6.20", + "@storybook/node-logger": "7.6.20", + "@storybook/telemetry": "7.6.20", + "@storybook/types": "7.6.20", "@types/semver": "^7.3.4", "@yarnpkg/fslib": "2.10.3", "@yarnpkg/libzip": "2.3.0", @@ -6373,7 +6668,6 @@ "puppeteer-core": "^2.1.1", "read-pkg-up": "^7.0.1", "semver": "^7.3.7", - "simple-update-notifier": "^2.0.0", "strip-json-comments": "^3.0.1", "tempy": "^1.0.1", "ts-dedent": "^2.0.0", @@ -6388,6 +6682,132 @@ "url": "https://opencollective.com/storybook" } }, + "node_modules/@storybook/cli/node_modules/@storybook/channels": { + "version": "7.6.20", + "resolved": "https://registry.npmjs.org/@storybook/channels/-/channels-7.6.20.tgz", + "integrity": "sha512-4hkgPSH6bJclB2OvLnkZOGZW1WptJs09mhQ6j6qLjgBZzL/ZdD6priWSd7iXrmPiN5TzUobkG4P4Dp7FjkiO7A==", + "dev": true, + "dependencies": { + "@storybook/client-logger": "7.6.20", + "@storybook/core-events": "7.6.20", + "@storybook/global": "^5.0.0", + "qs": "^6.10.0", + "telejson": "^7.2.0", + "tiny-invariant": "^1.3.1" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/storybook" + } + }, + "node_modules/@storybook/cli/node_modules/@storybook/client-logger": { + "version": "7.6.20", + "resolved": "https://registry.npmjs.org/@storybook/client-logger/-/client-logger-7.6.20.tgz", + "integrity": "sha512-NwG0VIJQCmKrSaN5GBDFyQgTAHLNishUPLW1NrzqTDNAhfZUoef64rPQlinbopa0H4OXmlB+QxbQIb3ubeXmSQ==", + "dev": true, + "dependencies": { + "@storybook/global": "^5.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/storybook" + } + }, + "node_modules/@storybook/cli/node_modules/@storybook/core-common": { + "version": "7.6.20", + "resolved": "https://registry.npmjs.org/@storybook/core-common/-/core-common-7.6.20.tgz", + "integrity": "sha512-8H1zPWPjcmeD4HbDm4FDD0WLsfAKGVr566IZ4hG+h3iWVW57II9JW9MLBtiR2LPSd8u7o0kw64lwRGmtCO1qAw==", + "dev": true, + "dependencies": { + "@storybook/core-events": "7.6.20", + "@storybook/node-logger": "7.6.20", + "@storybook/types": "7.6.20", + "@types/find-cache-dir": "^3.2.1", + "@types/node": "^18.0.0", + "@types/node-fetch": "^2.6.4", + "@types/pretty-hrtime": "^1.0.0", + "chalk": "^4.1.0", + "esbuild": "^0.18.0", + "esbuild-register": "^3.5.0", + "file-system-cache": "2.3.0", + "find-cache-dir": "^3.0.0", + "find-up": "^5.0.0", + "fs-extra": "^11.1.0", + "glob": "^10.0.0", + "handlebars": "^4.7.7", + "lazy-universal-dotenv": "^4.0.0", + "node-fetch": "^2.0.0", + "picomatch": "^2.3.0", + "pkg-dir": "^5.0.0", + "pretty-hrtime": "^1.0.3", + "resolve-from": "^5.0.0", + "ts-dedent": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/storybook" + } + }, + "node_modules/@storybook/cli/node_modules/@storybook/core-events": { + "version": "7.6.20", + "resolved": "https://registry.npmjs.org/@storybook/core-events/-/core-events-7.6.20.tgz", + "integrity": "sha512-tlVDuVbDiNkvPDFAu+0ou3xBBYbx9zUURQz4G9fAq0ScgBOs/bpzcRrFb4mLpemUViBAd47tfZKdH4MAX45KVQ==", + "dev": true, + "dependencies": { + "ts-dedent": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/storybook" + } + }, + "node_modules/@storybook/cli/node_modules/@storybook/csf-tools": { + "version": "7.6.20", + 
"resolved": "https://registry.npmjs.org/@storybook/csf-tools/-/csf-tools-7.6.20.tgz", + "integrity": "sha512-rwcwzCsAYh/m/WYcxBiEtLpIW5OH1ingxNdF/rK9mtGWhJxXRDV8acPkFrF8rtFWIVKoOCXu5USJYmc3f2gdYQ==", + "dev": true, + "dependencies": { + "@babel/generator": "^7.23.0", + "@babel/parser": "^7.23.0", + "@babel/traverse": "^7.23.2", + "@babel/types": "^7.23.0", + "@storybook/csf": "^0.1.2", + "@storybook/types": "7.6.20", + "fs-extra": "^11.1.0", + "recast": "^0.23.1", + "ts-dedent": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/storybook" + } + }, + "node_modules/@storybook/cli/node_modules/@storybook/node-logger": { + "version": "7.6.20", + "resolved": "https://registry.npmjs.org/@storybook/node-logger/-/node-logger-7.6.20.tgz", + "integrity": "sha512-l2i4qF1bscJkOplNffcRTsgQWYR7J51ewmizj5YrTM8BK6rslWT1RntgVJWB1RgPqvx6VsCz1gyP3yW1oKxvYw==", + "dev": true, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/storybook" + } + }, + "node_modules/@storybook/cli/node_modules/@storybook/types": { + "version": "7.6.20", + "resolved": "https://registry.npmjs.org/@storybook/types/-/types-7.6.20.tgz", + "integrity": "sha512-GncdY3x0LpbhmUAAJwXYtJDUQEwfF175gsjH0/fxPkxPoV7Sef9TM41jQLJW/5+6TnZoCZP/+aJZTJtq3ni23Q==", + "dev": true, + "dependencies": { + "@storybook/channels": "7.6.20", + "@types/babel__core": "^7.0.0", + "@types/express": "^4.7.0", + "file-system-cache": "2.3.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/storybook" + } + }, "node_modules/@storybook/cli/node_modules/execa": { "version": "5.1.1", "resolved": "https://registry.npmjs.org/execa/-/execa-5.1.1.tgz", @@ -6432,26 +6852,11 @@ "node": ">=10.17.0" } }, - "node_modules/@storybook/cli/node_modules/lru-cache": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", - "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", - "dev": true, - "dependencies": { - "yallist": "^4.0.0" - }, - "engines": { - "node": ">=10" - } - }, "node_modules/@storybook/cli/node_modules/semver": { - "version": "7.5.4", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.5.4.tgz", - "integrity": "sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==", + "version": "7.6.2", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.2.tgz", + "integrity": "sha512-FNAIBWCx9qcRhoHcgcJ0gvU7SN1lYU2ZXuSfl04bSC5OpvDHFyJCjdNHomPXxjQlCBU67YW64PzY7/VIEH7F2w==", "dev": true, - "dependencies": { - "lru-cache": "^6.0.0" - }, "bin": { "semver": "bin/semver.js" }, @@ -6459,12 +6864,6 @@ "node": ">=10" } }, - "node_modules/@storybook/cli/node_modules/yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", - "dev": true - }, "node_modules/@storybook/client-api": { "version": "7.6.8", "resolved": "https://registry.npmjs.org/@storybook/client-api/-/client-api-7.6.8.tgz", @@ -6493,18 +6892,18 @@ } }, "node_modules/@storybook/codemod": { - "version": "7.6.8", - "resolved": "https://registry.npmjs.org/@storybook/codemod/-/codemod-7.6.8.tgz", - "integrity": "sha512-3Gk+ZsD35DUgqbbRNdX547kzZK/ajIbgwynmR0FuPhZhhZuYI4+2eMNzdmI/Oe9Nov4R16senQuAZjw/Dc5LrA==", + "version": "7.6.20", + "resolved": "https://registry.npmjs.org/@storybook/codemod/-/codemod-7.6.20.tgz", 
+ "integrity": "sha512-8vmSsksO4XukNw0TmqylPmk7PxnfNfE21YsxFa7mnEBmEKQcZCQsNil4ZgWfG0IzdhTfhglAN4r++Ew0WE+PYA==", "dev": true, "dependencies": { "@babel/core": "^7.23.2", "@babel/preset-env": "^7.23.2", "@babel/types": "^7.23.0", "@storybook/csf": "^0.1.2", - "@storybook/csf-tools": "7.6.8", - "@storybook/node-logger": "7.6.8", - "@storybook/types": "7.6.8", + "@storybook/csf-tools": "7.6.20", + "@storybook/node-logger": "7.6.20", + "@storybook/types": "7.6.20", "@types/cross-spawn": "^6.0.2", "cross-spawn": "^7.0.3", "globby": "^11.0.2", @@ -6518,6 +6917,97 @@ "url": "https://opencollective.com/storybook" } }, + "node_modules/@storybook/codemod/node_modules/@storybook/channels": { + "version": "7.6.20", + "resolved": "https://registry.npmjs.org/@storybook/channels/-/channels-7.6.20.tgz", + "integrity": "sha512-4hkgPSH6bJclB2OvLnkZOGZW1WptJs09mhQ6j6qLjgBZzL/ZdD6priWSd7iXrmPiN5TzUobkG4P4Dp7FjkiO7A==", + "dev": true, + "dependencies": { + "@storybook/client-logger": "7.6.20", + "@storybook/core-events": "7.6.20", + "@storybook/global": "^5.0.0", + "qs": "^6.10.0", + "telejson": "^7.2.0", + "tiny-invariant": "^1.3.1" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/storybook" + } + }, + "node_modules/@storybook/codemod/node_modules/@storybook/client-logger": { + "version": "7.6.20", + "resolved": "https://registry.npmjs.org/@storybook/client-logger/-/client-logger-7.6.20.tgz", + "integrity": "sha512-NwG0VIJQCmKrSaN5GBDFyQgTAHLNishUPLW1NrzqTDNAhfZUoef64rPQlinbopa0H4OXmlB+QxbQIb3ubeXmSQ==", + "dev": true, + "dependencies": { + "@storybook/global": "^5.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/storybook" + } + }, + "node_modules/@storybook/codemod/node_modules/@storybook/core-events": { + "version": "7.6.20", + "resolved": "https://registry.npmjs.org/@storybook/core-events/-/core-events-7.6.20.tgz", + "integrity": "sha512-tlVDuVbDiNkvPDFAu+0ou3xBBYbx9zUURQz4G9fAq0ScgBOs/bpzcRrFb4mLpemUViBAd47tfZKdH4MAX45KVQ==", + "dev": true, + "dependencies": { + "ts-dedent": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/storybook" + } + }, + "node_modules/@storybook/codemod/node_modules/@storybook/csf-tools": { + "version": "7.6.20", + "resolved": "https://registry.npmjs.org/@storybook/csf-tools/-/csf-tools-7.6.20.tgz", + "integrity": "sha512-rwcwzCsAYh/m/WYcxBiEtLpIW5OH1ingxNdF/rK9mtGWhJxXRDV8acPkFrF8rtFWIVKoOCXu5USJYmc3f2gdYQ==", + "dev": true, + "dependencies": { + "@babel/generator": "^7.23.0", + "@babel/parser": "^7.23.0", + "@babel/traverse": "^7.23.2", + "@babel/types": "^7.23.0", + "@storybook/csf": "^0.1.2", + "@storybook/types": "7.6.20", + "fs-extra": "^11.1.0", + "recast": "^0.23.1", + "ts-dedent": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/storybook" + } + }, + "node_modules/@storybook/codemod/node_modules/@storybook/node-logger": { + "version": "7.6.20", + "resolved": "https://registry.npmjs.org/@storybook/node-logger/-/node-logger-7.6.20.tgz", + "integrity": "sha512-l2i4qF1bscJkOplNffcRTsgQWYR7J51ewmizj5YrTM8BK6rslWT1RntgVJWB1RgPqvx6VsCz1gyP3yW1oKxvYw==", + "dev": true, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/storybook" + } + }, + "node_modules/@storybook/codemod/node_modules/@storybook/types": { + "version": "7.6.20", + "resolved": "https://registry.npmjs.org/@storybook/types/-/types-7.6.20.tgz", + "integrity": 
"sha512-GncdY3x0LpbhmUAAJwXYtJDUQEwfF175gsjH0/fxPkxPoV7Sef9TM41jQLJW/5+6TnZoCZP/+aJZTJtq3ni23Q==", + "dev": true, + "dependencies": { + "@storybook/channels": "7.6.20", + "@types/babel__core": "^7.0.0", + "@types/express": "^4.7.0", + "file-system-cache": "2.3.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/storybook" + } + }, "node_modules/@storybook/components": { "version": "7.6.8", "resolved": "https://registry.npmjs.org/@storybook/components/-/components-7.6.8.tgz", @@ -6762,26 +7252,26 @@ } }, "node_modules/@storybook/core-server": { - "version": "7.6.8", - "resolved": "https://registry.npmjs.org/@storybook/core-server/-/core-server-7.6.8.tgz", - "integrity": "sha512-/csAFNuAhF11f6D9neYNavmKPFK/ZxTskaktc4iDwBRgBM95kZ6DBFjg9ErRi5Q8Z/i92wk6qORkq4bkN/lI9w==", + "version": "7.6.20", + "resolved": "https://registry.npmjs.org/@storybook/core-server/-/core-server-7.6.20.tgz", + "integrity": "sha512-qC5BdbqqwMLTdCwMKZ1Hbc3+3AaxHYWLiJaXL9e8s8nJw89xV8c8l30QpbJOGvcDmsgY6UTtXYaJ96OsTr7MrA==", "dev": true, "dependencies": { "@aw-web-design/x-default-browser": "1.4.126", "@discoveryjs/json-ext": "^0.5.3", - "@storybook/builder-manager": "7.6.8", - "@storybook/channels": "7.6.8", - "@storybook/core-common": "7.6.8", - "@storybook/core-events": "7.6.8", + "@storybook/builder-manager": "7.6.20", + "@storybook/channels": "7.6.20", + "@storybook/core-common": "7.6.20", + "@storybook/core-events": "7.6.20", "@storybook/csf": "^0.1.2", - "@storybook/csf-tools": "7.6.8", + "@storybook/csf-tools": "7.6.20", "@storybook/docs-mdx": "^0.1.0", "@storybook/global": "^5.0.0", - "@storybook/manager": "7.6.8", - "@storybook/node-logger": "7.6.8", - "@storybook/preview-api": "7.6.8", - "@storybook/telemetry": "7.6.8", - "@storybook/types": "7.6.8", + "@storybook/manager": "7.6.20", + "@storybook/node-logger": "7.6.20", + "@storybook/preview-api": "7.6.20", + "@storybook/telemetry": "7.6.20", + "@storybook/types": "7.6.20", "@types/detect-port": "^1.3.0", "@types/node": "^18.0.0", "@types/pretty-hrtime": "^1.0.0", @@ -6794,7 +7284,6 @@ "express": "^4.17.3", "fs-extra": "^11.1.0", "globby": "^11.0.2", - "ip": "^2.0.0", "lodash": "^4.17.21", "open": "^8.4.0", "pretty-hrtime": "^1.0.3", @@ -6814,26 +7303,163 @@ "url": "https://opencollective.com/storybook" } }, - "node_modules/@storybook/core-server/node_modules/lru-cache": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", - "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "node_modules/@storybook/core-server/node_modules/@storybook/channels": { + "version": "7.6.20", + "resolved": "https://registry.npmjs.org/@storybook/channels/-/channels-7.6.20.tgz", + "integrity": "sha512-4hkgPSH6bJclB2OvLnkZOGZW1WptJs09mhQ6j6qLjgBZzL/ZdD6priWSd7iXrmPiN5TzUobkG4P4Dp7FjkiO7A==", "dev": true, "dependencies": { - "yallist": "^4.0.0" + "@storybook/client-logger": "7.6.20", + "@storybook/core-events": "7.6.20", + "@storybook/global": "^5.0.0", + "qs": "^6.10.0", + "telejson": "^7.2.0", + "tiny-invariant": "^1.3.1" }, - "engines": { - "node": ">=10" + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/storybook" + } + }, + "node_modules/@storybook/core-server/node_modules/@storybook/client-logger": { + "version": "7.6.20", + "resolved": "https://registry.npmjs.org/@storybook/client-logger/-/client-logger-7.6.20.tgz", + "integrity": 
"sha512-NwG0VIJQCmKrSaN5GBDFyQgTAHLNishUPLW1NrzqTDNAhfZUoef64rPQlinbopa0H4OXmlB+QxbQIb3ubeXmSQ==", + "dev": true, + "dependencies": { + "@storybook/global": "^5.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/storybook" + } + }, + "node_modules/@storybook/core-server/node_modules/@storybook/core-common": { + "version": "7.6.20", + "resolved": "https://registry.npmjs.org/@storybook/core-common/-/core-common-7.6.20.tgz", + "integrity": "sha512-8H1zPWPjcmeD4HbDm4FDD0WLsfAKGVr566IZ4hG+h3iWVW57II9JW9MLBtiR2LPSd8u7o0kw64lwRGmtCO1qAw==", + "dev": true, + "dependencies": { + "@storybook/core-events": "7.6.20", + "@storybook/node-logger": "7.6.20", + "@storybook/types": "7.6.20", + "@types/find-cache-dir": "^3.2.1", + "@types/node": "^18.0.0", + "@types/node-fetch": "^2.6.4", + "@types/pretty-hrtime": "^1.0.0", + "chalk": "^4.1.0", + "esbuild": "^0.18.0", + "esbuild-register": "^3.5.0", + "file-system-cache": "2.3.0", + "find-cache-dir": "^3.0.0", + "find-up": "^5.0.0", + "fs-extra": "^11.1.0", + "glob": "^10.0.0", + "handlebars": "^4.7.7", + "lazy-universal-dotenv": "^4.0.0", + "node-fetch": "^2.0.0", + "picomatch": "^2.3.0", + "pkg-dir": "^5.0.0", + "pretty-hrtime": "^1.0.3", + "resolve-from": "^5.0.0", + "ts-dedent": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/storybook" + } + }, + "node_modules/@storybook/core-server/node_modules/@storybook/core-events": { + "version": "7.6.20", + "resolved": "https://registry.npmjs.org/@storybook/core-events/-/core-events-7.6.20.tgz", + "integrity": "sha512-tlVDuVbDiNkvPDFAu+0ou3xBBYbx9zUURQz4G9fAq0ScgBOs/bpzcRrFb4mLpemUViBAd47tfZKdH4MAX45KVQ==", + "dev": true, + "dependencies": { + "ts-dedent": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/storybook" + } + }, + "node_modules/@storybook/core-server/node_modules/@storybook/csf-tools": { + "version": "7.6.20", + "resolved": "https://registry.npmjs.org/@storybook/csf-tools/-/csf-tools-7.6.20.tgz", + "integrity": "sha512-rwcwzCsAYh/m/WYcxBiEtLpIW5OH1ingxNdF/rK9mtGWhJxXRDV8acPkFrF8rtFWIVKoOCXu5USJYmc3f2gdYQ==", + "dev": true, + "dependencies": { + "@babel/generator": "^7.23.0", + "@babel/parser": "^7.23.0", + "@babel/traverse": "^7.23.2", + "@babel/types": "^7.23.0", + "@storybook/csf": "^0.1.2", + "@storybook/types": "7.6.20", + "fs-extra": "^11.1.0", + "recast": "^0.23.1", + "ts-dedent": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/storybook" + } + }, + "node_modules/@storybook/core-server/node_modules/@storybook/node-logger": { + "version": "7.6.20", + "resolved": "https://registry.npmjs.org/@storybook/node-logger/-/node-logger-7.6.20.tgz", + "integrity": "sha512-l2i4qF1bscJkOplNffcRTsgQWYR7J51ewmizj5YrTM8BK6rslWT1RntgVJWB1RgPqvx6VsCz1gyP3yW1oKxvYw==", + "dev": true, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/storybook" + } + }, + "node_modules/@storybook/core-server/node_modules/@storybook/preview-api": { + "version": "7.6.20", + "resolved": "https://registry.npmjs.org/@storybook/preview-api/-/preview-api-7.6.20.tgz", + "integrity": "sha512-3ic2m9LDZEPwZk02wIhNc3n3rNvbi7VDKn52hDXfAxnL5EYm7yDICAkaWcVaTfblru2zn0EDJt7ROpthscTW5w==", + "dev": true, + "dependencies": { + "@storybook/channels": "7.6.20", + "@storybook/client-logger": "7.6.20", + "@storybook/core-events": "7.6.20", + "@storybook/csf": "^0.1.2", + "@storybook/global": "^5.0.0", + "@storybook/types": "7.6.20", + "@types/qs": 
"^6.9.5", + "dequal": "^2.0.2", + "lodash": "^4.17.21", + "memoizerific": "^1.11.3", + "qs": "^6.10.0", + "synchronous-promise": "^2.0.15", + "ts-dedent": "^2.0.0", + "util-deprecate": "^1.0.2" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/storybook" + } + }, + "node_modules/@storybook/core-server/node_modules/@storybook/types": { + "version": "7.6.20", + "resolved": "https://registry.npmjs.org/@storybook/types/-/types-7.6.20.tgz", + "integrity": "sha512-GncdY3x0LpbhmUAAJwXYtJDUQEwfF175gsjH0/fxPkxPoV7Sef9TM41jQLJW/5+6TnZoCZP/+aJZTJtq3ni23Q==", + "dev": true, + "dependencies": { + "@storybook/channels": "7.6.20", + "@types/babel__core": "^7.0.0", + "@types/express": "^4.7.0", + "file-system-cache": "2.3.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/storybook" } }, "node_modules/@storybook/core-server/node_modules/semver": { - "version": "7.5.4", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.5.4.tgz", - "integrity": "sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==", + "version": "7.6.2", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.2.tgz", + "integrity": "sha512-FNAIBWCx9qcRhoHcgcJ0gvU7SN1lYU2ZXuSfl04bSC5OpvDHFyJCjdNHomPXxjQlCBU67YW64PzY7/VIEH7F2w==", "dev": true, - "dependencies": { - "lru-cache": "^6.0.0" - }, "bin": { "semver": "bin/semver.js" }, @@ -6841,12 +7467,6 @@ "node": ">=10" } }, - "node_modules/@storybook/core-server/node_modules/yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", - "dev": true - }, "node_modules/@storybook/core-webpack": { "version": "7.6.8", "resolved": "https://registry.npmjs.org/@storybook/core-webpack/-/core-webpack-7.6.8.tgz", @@ -6940,9 +7560,9 @@ "dev": true }, "node_modules/@storybook/manager": { - "version": "7.6.8", - "resolved": "https://registry.npmjs.org/@storybook/manager/-/manager-7.6.8.tgz", - "integrity": "sha512-INoXXoHXyw9PPMJAOAhwf9u2GNDDNdv1JAI1fhrbCAECzDabHT9lRVUo6v8I5XMc+YdMHLM1Vz38DbB+w18hFw==", + "version": "7.6.20", + "resolved": "https://registry.npmjs.org/@storybook/manager/-/manager-7.6.20.tgz", + "integrity": "sha512-0Cf6WN0t7yEG2DR29tN5j+i7H/TH5EfPppg9h9/KiQSoFHk+6KLoy2p5do94acFU+Ro4+zzxvdCGbcYGKuArpg==", "dev": true, "funding": { "type": "opencollective", @@ -7378,14 +7998,14 @@ } }, "node_modules/@storybook/telemetry": { - "version": "7.6.8", - "resolved": "https://registry.npmjs.org/@storybook/telemetry/-/telemetry-7.6.8.tgz", - "integrity": "sha512-hHUS3fyHjKR3ZdbG+/OVI+pwXXKOmS8L8GMuWKlpUovvCYBLm0/Q0MUQ9XaLuByOCzvAurqB3Owp3ZV7GiY30Q==", + "version": "7.6.20", + "resolved": "https://registry.npmjs.org/@storybook/telemetry/-/telemetry-7.6.20.tgz", + "integrity": "sha512-dmAOCWmOscYN6aMbhCMmszQjoycg7tUPRVy2kTaWg6qX10wtMrvEtBV29W4eMvqdsoRj5kcvoNbzRdYcWBUOHQ==", "dev": true, "dependencies": { - "@storybook/client-logger": "7.6.8", - "@storybook/core-common": "7.6.8", - "@storybook/csf-tools": "7.6.8", + "@storybook/client-logger": "7.6.20", + "@storybook/core-common": "7.6.20", + "@storybook/csf-tools": "7.6.20", "chalk": "^4.1.0", "detect-package-manager": "^2.0.1", "fetch-retry": "^5.0.2", @@ -7397,6 +8017,132 @@ "url": "https://opencollective.com/storybook" } }, + "node_modules/@storybook/telemetry/node_modules/@storybook/channels": { + "version": "7.6.20", + "resolved": 
"https://registry.npmjs.org/@storybook/channels/-/channels-7.6.20.tgz", + "integrity": "sha512-4hkgPSH6bJclB2OvLnkZOGZW1WptJs09mhQ6j6qLjgBZzL/ZdD6priWSd7iXrmPiN5TzUobkG4P4Dp7FjkiO7A==", + "dev": true, + "dependencies": { + "@storybook/client-logger": "7.6.20", + "@storybook/core-events": "7.6.20", + "@storybook/global": "^5.0.0", + "qs": "^6.10.0", + "telejson": "^7.2.0", + "tiny-invariant": "^1.3.1" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/storybook" + } + }, + "node_modules/@storybook/telemetry/node_modules/@storybook/client-logger": { + "version": "7.6.20", + "resolved": "https://registry.npmjs.org/@storybook/client-logger/-/client-logger-7.6.20.tgz", + "integrity": "sha512-NwG0VIJQCmKrSaN5GBDFyQgTAHLNishUPLW1NrzqTDNAhfZUoef64rPQlinbopa0H4OXmlB+QxbQIb3ubeXmSQ==", + "dev": true, + "dependencies": { + "@storybook/global": "^5.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/storybook" + } + }, + "node_modules/@storybook/telemetry/node_modules/@storybook/core-common": { + "version": "7.6.20", + "resolved": "https://registry.npmjs.org/@storybook/core-common/-/core-common-7.6.20.tgz", + "integrity": "sha512-8H1zPWPjcmeD4HbDm4FDD0WLsfAKGVr566IZ4hG+h3iWVW57II9JW9MLBtiR2LPSd8u7o0kw64lwRGmtCO1qAw==", + "dev": true, + "dependencies": { + "@storybook/core-events": "7.6.20", + "@storybook/node-logger": "7.6.20", + "@storybook/types": "7.6.20", + "@types/find-cache-dir": "^3.2.1", + "@types/node": "^18.0.0", + "@types/node-fetch": "^2.6.4", + "@types/pretty-hrtime": "^1.0.0", + "chalk": "^4.1.0", + "esbuild": "^0.18.0", + "esbuild-register": "^3.5.0", + "file-system-cache": "2.3.0", + "find-cache-dir": "^3.0.0", + "find-up": "^5.0.0", + "fs-extra": "^11.1.0", + "glob": "^10.0.0", + "handlebars": "^4.7.7", + "lazy-universal-dotenv": "^4.0.0", + "node-fetch": "^2.0.0", + "picomatch": "^2.3.0", + "pkg-dir": "^5.0.0", + "pretty-hrtime": "^1.0.3", + "resolve-from": "^5.0.0", + "ts-dedent": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/storybook" + } + }, + "node_modules/@storybook/telemetry/node_modules/@storybook/core-events": { + "version": "7.6.20", + "resolved": "https://registry.npmjs.org/@storybook/core-events/-/core-events-7.6.20.tgz", + "integrity": "sha512-tlVDuVbDiNkvPDFAu+0ou3xBBYbx9zUURQz4G9fAq0ScgBOs/bpzcRrFb4mLpemUViBAd47tfZKdH4MAX45KVQ==", + "dev": true, + "dependencies": { + "ts-dedent": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/storybook" + } + }, + "node_modules/@storybook/telemetry/node_modules/@storybook/csf-tools": { + "version": "7.6.20", + "resolved": "https://registry.npmjs.org/@storybook/csf-tools/-/csf-tools-7.6.20.tgz", + "integrity": "sha512-rwcwzCsAYh/m/WYcxBiEtLpIW5OH1ingxNdF/rK9mtGWhJxXRDV8acPkFrF8rtFWIVKoOCXu5USJYmc3f2gdYQ==", + "dev": true, + "dependencies": { + "@babel/generator": "^7.23.0", + "@babel/parser": "^7.23.0", + "@babel/traverse": "^7.23.2", + "@babel/types": "^7.23.0", + "@storybook/csf": "^0.1.2", + "@storybook/types": "7.6.20", + "fs-extra": "^11.1.0", + "recast": "^0.23.1", + "ts-dedent": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/storybook" + } + }, + "node_modules/@storybook/telemetry/node_modules/@storybook/node-logger": { + "version": "7.6.20", + "resolved": "https://registry.npmjs.org/@storybook/node-logger/-/node-logger-7.6.20.tgz", + "integrity": 
"sha512-l2i4qF1bscJkOplNffcRTsgQWYR7J51ewmizj5YrTM8BK6rslWT1RntgVJWB1RgPqvx6VsCz1gyP3yW1oKxvYw==", + "dev": true, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/storybook" + } + }, + "node_modules/@storybook/telemetry/node_modules/@storybook/types": { + "version": "7.6.20", + "resolved": "https://registry.npmjs.org/@storybook/types/-/types-7.6.20.tgz", + "integrity": "sha512-GncdY3x0LpbhmUAAJwXYtJDUQEwfF175gsjH0/fxPkxPoV7Sef9TM41jQLJW/5+6TnZoCZP/+aJZTJtq3ni23Q==", + "dev": true, + "dependencies": { + "@storybook/channels": "7.6.20", + "@types/babel__core": "^7.0.0", + "@types/express": "^4.7.0", + "file-system-cache": "2.3.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/storybook" + } + }, "node_modules/@storybook/testing-library": { "version": "0.2.2", "resolved": "https://registry.npmjs.org/@storybook/testing-library/-/testing-library-0.2.2.tgz", @@ -7897,9 +8643,9 @@ "dev": true }, "node_modules/@types/emscripten": { - "version": "1.39.10", - "resolved": "https://registry.npmjs.org/@types/emscripten/-/emscripten-1.39.10.tgz", - "integrity": "sha512-TB/6hBkYQJxsZHSqyeuO1Jt0AB/bW6G7rHt9g7lML7SOF6lbgcHvw/Lr+69iqN0qxgXLhWKScAon73JNnptuDw==", + "version": "1.39.13", + "resolved": "https://registry.npmjs.org/@types/emscripten/-/emscripten-1.39.13.tgz", + "integrity": "sha512-cFq+fO/isvhvmuP/+Sl4K4jtU6E23DoivtbO4r50e3odaxAiVdbfSYRDdJ4gCdxx+3aRjhphS5ZMwIH4hFy/Cw==", "dev": true }, "node_modules/@types/escodegen": { @@ -7908,26 +8654,6 @@ "integrity": "sha512-AjwI4MvWx3HAOaZqYsjKWyEObT9lcVV0Y0V8nXo6cXzN8ZiMxVhf6F3d/UNvXVGKrEzL/Dluc5p+y9GkzlTWig==", "dev": true }, - "node_modules/@types/eslint": { - "version": "8.56.2", - "resolved": "https://registry.npmjs.org/@types/eslint/-/eslint-8.56.2.tgz", - "integrity": "sha512-uQDwm1wFHmbBbCZCqAlq6Do9LYwByNZHWzXppSnay9SuwJ+VRbjkbLABer54kcPnMSlG6Fdiy2yaFXm/z9Z5gw==", - "dev": true, - "dependencies": { - "@types/estree": "*", - "@types/json-schema": "*" - } - }, - "node_modules/@types/eslint-scope": { - "version": "3.7.7", - "resolved": "https://registry.npmjs.org/@types/eslint-scope/-/eslint-scope-3.7.7.tgz", - "integrity": "sha512-MzMFlSLBqNF2gcHWO0G1vP/YQyfvrxZ0bF+u7mzUdZ1/xK4A4sru+nraZz5i3iEIk1l1uyicaDVTB4QbbEkAYg==", - "dev": true, - "dependencies": { - "@types/eslint": "*", - "@types/estree": "*" - } - }, "node_modules/@types/estree": { "version": "0.0.51", "resolved": "https://registry.npmjs.org/@types/estree/-/estree-0.0.51.tgz", @@ -8133,6 +8859,15 @@ "resolved": "https://registry.npmjs.org/@types/prop-types/-/prop-types-15.7.11.tgz", "integrity": "sha512-ga8y9v9uyeiLdpKddhxYQkxNDrfvuPrlFb0N1qnZZByvcElJaXthF1UhvCh9TLWJBEHeNtdnbysW7Y6Uq8CVng==" }, + "node_modules/@types/qrcode": { + "version": "1.5.5", + "resolved": "https://registry.npmjs.org/@types/qrcode/-/qrcode-1.5.5.tgz", + "integrity": "sha512-CdfBi/e3Qk+3Z/fXYShipBT13OJ2fDO2Q2w5CIP5anLTLIndQG9z6P1cnm+8zCWSpm5dnxMFd/uREtb0EXuQzg==", + "dev": true, + "dependencies": { + "@types/node": "*" + } + }, "node_modules/@types/qs": { "version": "6.9.11", "resolved": "https://registry.npmjs.org/@types/qs/-/qs-6.9.11.tgz", @@ -8172,6 +8907,15 @@ "redux": "^4.0.0" } }, + "node_modules/@types/react-transition-group": { + "version": "4.4.11", + "resolved": "https://registry.npmjs.org/@types/react-transition-group/-/react-transition-group-4.4.11.tgz", + "integrity": "sha512-RM05tAniPZ5DZPzzNFP+DmrcOdD0efDUxMy3145oljWSl3x9ZV5vhme98gTxFrj2lhXvmGNnUiuDyJgY9IKkNA==", + "license": "MIT", + "dependencies": { + "@types/react": "*" + } + }, 
"node_modules/@types/resolve": { "version": "1.20.6", "resolved": "https://registry.npmjs.org/@types/resolve/-/resolve-1.20.6.tgz", @@ -8597,9 +9341,9 @@ "dev": true }, "node_modules/@webassemblyjs/ast": { - "version": "1.11.6", - "resolved": "https://registry.npmjs.org/@webassemblyjs/ast/-/ast-1.11.6.tgz", - "integrity": "sha512-IN1xI7PwOvLPgjcf180gC1bqn3q/QaOCwYUahIOhbYUu8KA/3tw2RT/T0Gidi1l7Hhj5D/INhJxiICObqpMu4Q==", + "version": "1.12.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/ast/-/ast-1.12.1.tgz", + "integrity": "sha512-EKfMUOPRRUTy5UII4qJDGPpqfwjOmZ5jeGFwid9mnoqIFK+e0vqoi1qH56JpmZSzEL53jKnNzScdmftJyG5xWg==", "dev": true, "dependencies": { "@webassemblyjs/helper-numbers": "1.11.6", @@ -8619,9 +9363,9 @@ "dev": true }, "node_modules/@webassemblyjs/helper-buffer": { - "version": "1.11.6", - "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-buffer/-/helper-buffer-1.11.6.tgz", - "integrity": "sha512-z3nFzdcp1mb8nEOFFk8DrYLpHvhKC3grJD2ardfKOzmbmJvEf/tPIqCY+sNcwZIY8ZD7IkB2l7/pqhUhqm7hLA==", + "version": "1.12.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-buffer/-/helper-buffer-1.12.1.tgz", + "integrity": "sha512-nzJwQw99DNDKr9BVCOZcLuJJUlqkJh+kVzVl6Fmq/tI5ZtEyWT1KZMyOXltXLZJmDtvLCDgwsyrkohEtopTXCw==", "dev": true }, "node_modules/@webassemblyjs/helper-numbers": { @@ -8642,15 +9386,15 @@ "dev": true }, "node_modules/@webassemblyjs/helper-wasm-section": { - "version": "1.11.6", - "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-wasm-section/-/helper-wasm-section-1.11.6.tgz", - "integrity": "sha512-LPpZbSOwTpEC2cgn4hTydySy1Ke+XEu+ETXuoyvuyezHO3Kjdu90KK95Sh9xTbmjrCsUwvWwCOQQNta37VrS9g==", + "version": "1.12.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-wasm-section/-/helper-wasm-section-1.12.1.tgz", + "integrity": "sha512-Jif4vfB6FJlUlSbgEMHUyk1j234GTNG9dBJ4XJdOySoj518Xj0oGsNi59cUQF4RRMS9ouBUxDDdyBVfPTypa5g==", "dev": true, "dependencies": { - "@webassemblyjs/ast": "1.11.6", - "@webassemblyjs/helper-buffer": "1.11.6", + "@webassemblyjs/ast": "1.12.1", + "@webassemblyjs/helper-buffer": "1.12.1", "@webassemblyjs/helper-wasm-bytecode": "1.11.6", - "@webassemblyjs/wasm-gen": "1.11.6" + "@webassemblyjs/wasm-gen": "1.12.1" } }, "node_modules/@webassemblyjs/ieee754": { @@ -8678,28 +9422,28 @@ "dev": true }, "node_modules/@webassemblyjs/wasm-edit": { - "version": "1.11.6", - "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-edit/-/wasm-edit-1.11.6.tgz", - "integrity": "sha512-Ybn2I6fnfIGuCR+Faaz7YcvtBKxvoLV3Lebn1tM4o/IAJzmi9AWYIPWpyBfU8cC+JxAO57bk4+zdsTjJR+VTOw==", + "version": "1.12.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-edit/-/wasm-edit-1.12.1.tgz", + "integrity": "sha512-1DuwbVvADvS5mGnXbE+c9NfA8QRcZ6iKquqjjmR10k6o+zzsRVesil54DKexiowcFCPdr/Q0qaMgB01+SQ1u6g==", "dev": true, "dependencies": { - "@webassemblyjs/ast": "1.11.6", - "@webassemblyjs/helper-buffer": "1.11.6", + "@webassemblyjs/ast": "1.12.1", + "@webassemblyjs/helper-buffer": "1.12.1", "@webassemblyjs/helper-wasm-bytecode": "1.11.6", - "@webassemblyjs/helper-wasm-section": "1.11.6", - "@webassemblyjs/wasm-gen": "1.11.6", - "@webassemblyjs/wasm-opt": "1.11.6", - "@webassemblyjs/wasm-parser": "1.11.6", - "@webassemblyjs/wast-printer": "1.11.6" + "@webassemblyjs/helper-wasm-section": "1.12.1", + "@webassemblyjs/wasm-gen": "1.12.1", + "@webassemblyjs/wasm-opt": "1.12.1", + "@webassemblyjs/wasm-parser": "1.12.1", + "@webassemblyjs/wast-printer": "1.12.1" } }, "node_modules/@webassemblyjs/wasm-gen": { - "version": "1.11.6", - 
"resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-gen/-/wasm-gen-1.11.6.tgz", - "integrity": "sha512-3XOqkZP/y6B4F0PBAXvI1/bky7GryoogUtfwExeP/v7Nzwo1QLcq5oQmpKlftZLbT+ERUOAZVQjuNVak6UXjPA==", + "version": "1.12.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-gen/-/wasm-gen-1.12.1.tgz", + "integrity": "sha512-TDq4Ojh9fcohAw6OIMXqiIcTq5KUXTGRkVxbSo1hQnSy6lAM5GSdfwWeSxpAo0YzgsgF182E/U0mDNhuA0tW7w==", "dev": true, "dependencies": { - "@webassemblyjs/ast": "1.11.6", + "@webassemblyjs/ast": "1.12.1", "@webassemblyjs/helper-wasm-bytecode": "1.11.6", "@webassemblyjs/ieee754": "1.11.6", "@webassemblyjs/leb128": "1.11.6", @@ -8707,24 +9451,24 @@ } }, "node_modules/@webassemblyjs/wasm-opt": { - "version": "1.11.6", - "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-opt/-/wasm-opt-1.11.6.tgz", - "integrity": "sha512-cOrKuLRE7PCe6AsOVl7WasYf3wbSo4CeOk6PkrjS7g57MFfVUF9u6ysQBBODX0LdgSvQqRiGz3CXvIDKcPNy4g==", + "version": "1.12.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-opt/-/wasm-opt-1.12.1.tgz", + "integrity": "sha512-Jg99j/2gG2iaz3hijw857AVYekZe2SAskcqlWIZXjji5WStnOpVoat3gQfT/Q5tb2djnCjBtMocY/Su1GfxPBg==", "dev": true, "dependencies": { - "@webassemblyjs/ast": "1.11.6", - "@webassemblyjs/helper-buffer": "1.11.6", - "@webassemblyjs/wasm-gen": "1.11.6", - "@webassemblyjs/wasm-parser": "1.11.6" + "@webassemblyjs/ast": "1.12.1", + "@webassemblyjs/helper-buffer": "1.12.1", + "@webassemblyjs/wasm-gen": "1.12.1", + "@webassemblyjs/wasm-parser": "1.12.1" } }, "node_modules/@webassemblyjs/wasm-parser": { - "version": "1.11.6", - "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-parser/-/wasm-parser-1.11.6.tgz", - "integrity": "sha512-6ZwPeGzMJM3Dqp3hCsLgESxBGtT/OeCvCZ4TA1JUPYgmhAx38tTPR9JaKy0S5H3evQpO/h2uWs2j6Yc/fjkpTQ==", + "version": "1.12.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-parser/-/wasm-parser-1.12.1.tgz", + "integrity": "sha512-xikIi7c2FHXysxXe3COrVUPSheuBtpcfhbpFj4gmu7KRLYOzANztwUU0IbsqvMqzuNK2+glRGWCEqZo1WCLyAQ==", "dev": true, "dependencies": { - "@webassemblyjs/ast": "1.11.6", + "@webassemblyjs/ast": "1.12.1", "@webassemblyjs/helper-api-error": "1.11.6", "@webassemblyjs/helper-wasm-bytecode": "1.11.6", "@webassemblyjs/ieee754": "1.11.6", @@ -8733,12 +9477,12 @@ } }, "node_modules/@webassemblyjs/wast-printer": { - "version": "1.11.6", - "resolved": "https://registry.npmjs.org/@webassemblyjs/wast-printer/-/wast-printer-1.11.6.tgz", - "integrity": "sha512-JM7AhRcE+yW2GWYaKeHL5vt4xqee5N2WcezptmgyhNS+ScggqcT1OtXykhAb13Sn5Yas0j2uv9tHgrjwvzAP4A==", + "version": "1.12.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/wast-printer/-/wast-printer-1.12.1.tgz", + "integrity": "sha512-+X4WAlOisVWQMikjbcvY2e0rwPsKQ9F688lksZhBcPycBBuii3O7m8FACbDMWDojpAqvjIncrG8J0XHKyQfVeA==", "dev": true, "dependencies": { - "@webassemblyjs/ast": "1.11.6", + "@webassemblyjs/ast": "1.12.1", "@xtuc/long": "4.2.2" } }, @@ -9052,7 +9796,6 @@ "version": "5.0.1", "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "dev": true, "engines": { "node": ">=8" } @@ -9061,7 +9804,6 @@ "version": "4.3.0", "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", - "dev": true, "dependencies": { "color-convert": "^2.0.1" }, @@ -9313,6 +10055,19 @@ "integrity": 
"sha512-c98Bf3tPniI+scsdk237ku1Dc3ujXQTSgyiPUDEOe7tRkhrqridvh8klBv0HCEso1OLOYcHuCv/cS6DNxKH+ZA==", "dev": true }, + "node_modules/asn1js": { + "version": "3.0.5", + "resolved": "https://registry.npmjs.org/asn1js/-/asn1js-3.0.5.tgz", + "integrity": "sha512-FVnvrKJwpt9LP2lAMl8qZswRNm3T4q9CON+bxldk2iwk3FFpuwhx2FfinyitizWHsVYyaY+y5JzDR0rCMV5yTQ==", + "dependencies": { + "pvtsutils": "^1.3.2", + "pvutils": "^1.1.3", + "tslib": "^2.4.0" + }, + "engines": { + "node": ">=12.0.0" + } + }, "node_modules/assert": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/assert/-/assert-2.1.0.tgz", @@ -9482,12 +10237,13 @@ } }, "node_modules/axios": { - "version": "0.27.2", - "resolved": "https://registry.npmjs.org/axios/-/axios-0.27.2.tgz", - "integrity": "sha512-t+yRIyySRTp/wua5xEr+z1q60QmLq8ABsS5O9Me1AsE5dfKqgnCFzwiCZZ/cGNd1lq4/7akDWMxdhVlucjmnOQ==", + "version": "0.28.0", + "resolved": "https://registry.npmjs.org/axios/-/axios-0.28.0.tgz", + "integrity": "sha512-Tu7NYoGY4Yoc7I+Npf9HhUMtEEpV7ZiLH9yndTCoNhcpBH0kwcvFbzYN9/u5QKI5A6uefjsNNWaz5olJVYS62Q==", "dependencies": { - "follow-redirects": "^1.14.9", - "form-data": "^4.0.0" + "follow-redirects": "^1.15.0", + "form-data": "^4.0.0", + "proxy-from-env": "^1.1.0" } }, "node_modules/axios-auth-refresh": { @@ -9498,6 +10254,11 @@ "axios": ">= 0.18 < 0.19.0 || >= 0.19.1" } }, + "node_modules/axios/node_modules/proxy-from-env": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz", + "integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==" + }, "node_modules/axobject-query": { "version": "3.2.1", "resolved": "https://registry.npmjs.org/axobject-query/-/axobject-query-3.2.1.tgz", @@ -9984,9 +10745,9 @@ "dev": true }, "node_modules/body-parser": { - "version": "1.20.2", - "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.2.tgz", - "integrity": "sha512-ml9pReCu3M61kGlqoTm2umSXTlRTuGTx0bfYj+uIUKKYycG5NtSbeetV3faSU6R7ajOPw0g/J1PvK4qNy7s5bA==", + "version": "1.20.3", + "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.3.tgz", + "integrity": "sha512-7rAxByjUMqQ3/bHJy7D6OGXvx/MMc4IqBn/X0fcM1QUcAItpZrBEYhWGem+tzXH90c+G01ypMcYJBO9Y30203g==", "dev": true, "dependencies": { "bytes": "3.1.2", @@ -9997,7 +10758,7 @@ "http-errors": "2.0.0", "iconv-lite": "0.4.24", "on-finished": "2.4.1", - "qs": "6.11.0", + "qs": "6.13.0", "raw-body": "2.5.2", "type-is": "~1.6.18", "unpipe": "1.0.0" @@ -10022,21 +10783,6 @@ "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==", "dev": true }, - "node_modules/body-parser/node_modules/qs": { - "version": "6.11.0", - "resolved": "https://registry.npmjs.org/qs/-/qs-6.11.0.tgz", - "integrity": "sha512-MvjoMCJwEarSbUYk5O+nmoSzSutSsTwF85zcHPQ9OrlFoZOYIjaqBAJIqIXjptyD5vThxGq52Xu/MaJzRkIk4Q==", - "dev": true, - "dependencies": { - "side-channel": "^1.0.4" - }, - "engines": { - "node": ">=0.6" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, "node_modules/boolbase": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/boolbase/-/boolbase-1.0.0.tgz", @@ -10066,12 +10812,12 @@ } }, "node_modules/braces": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", - "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz", + 
"integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==", "dev": true, "dependencies": { - "fill-range": "^7.0.1" + "fill-range": "^7.1.1" }, "engines": { "node": ">=8" @@ -10299,14 +11045,19 @@ } }, "node_modules/call-bind": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.5.tgz", - "integrity": "sha512-C3nQxfFZxFRVoJoGKKI8y3MOEo129NQ+FgQ08iye+Mk4zNZZGdjfs06bVTr+DBSlA66Q2VEcMki/cUCP4SercQ==", + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.7.tgz", + "integrity": "sha512-GHTSNSYICQ7scH7sZ+M2rFopRoLh8t2bLSW6BbgrtLsahOIB5iyAVJf9GjWK3cYTDaMj4XdBpM1cA6pIS0Kv2w==", "dev": true, "dependencies": { + "es-define-property": "^1.0.0", + "es-errors": "^1.3.0", "function-bind": "^1.1.2", - "get-intrinsic": "^1.2.1", - "set-function-length": "^1.1.1" + "get-intrinsic": "^1.2.4", + "set-function-length": "^1.2.1" + }, + "engines": { + "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" @@ -10334,7 +11085,6 @@ "version": "5.3.1", "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-5.3.1.tgz", "integrity": "sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==", - "dev": true, "engines": { "node": ">=6" } @@ -10531,9 +11281,9 @@ } }, "node_modules/citty": { - "version": "0.1.5", - "resolved": "https://registry.npmjs.org/citty/-/citty-0.1.5.tgz", - "integrity": "sha512-AS7n5NSc0OQVMV9v6wt3ByujNIrne0/cTjiC2MYqhvao57VNfiuVksTSr2p17nVOhEr2KtqiAkGwHcgMC/qUuQ==", + "version": "0.1.6", + "resolved": "https://registry.npmjs.org/citty/-/citty-0.1.6.tgz", + "integrity": "sha512-tskPPKEs8D2KPafUypv2gxwJP8h/OaJmC82QQGGDQcHvXX43xF2VDACcJVmZ0EuSxkpO9Kc4MlrA3q0+FG58AQ==", "dev": true, "dependencies": { "consola": "^3.2.3" @@ -10634,6 +11384,29 @@ "resolved": "https://registry.npmjs.org/client-only/-/client-only-0.0.1.tgz", "integrity": "sha512-IV3Ou0jSMzZrd3pZ48nLkT9DA7Ag1pnPzaiQhpW7c3RbcqqzvzzVu+L8gfqMp/8IM2MQtSiqaCxrrcfu8I8rMA==" }, + "node_modules/cliui": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-6.0.0.tgz", + "integrity": "sha512-t6wbgtoCXvAzst7QgXxJYqPt0usEfbgQdftEPbLL/cvv6HPE5VgvqCuAIDR0NgU52ds6rFwqrgakNLrHEjCbrQ==", + "dependencies": { + "string-width": "^4.2.0", + "strip-ansi": "^6.0.0", + "wrap-ansi": "^6.2.0" + } + }, + "node_modules/cliui/node_modules/wrap-ansi": { + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-6.2.0.tgz", + "integrity": "sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA==", + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=8" + } + }, "node_modules/clone": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/clone/-/clone-1.0.4.tgz", @@ -10893,6 +11666,12 @@ "safe-buffer": "~5.1.0" } }, + "node_modules/confbox": { + "version": "0.1.7", + "resolved": "https://registry.npmjs.org/confbox/-/confbox-0.1.7.tgz", + "integrity": "sha512-uJcB/FKZtBMCJpK8MQji6bJHgu1tixKPxRLeGkNzBoOZzpnZUJm0jm2/sBDWcuBx1dYgxV4JU+g5hmNxCyAmdA==", + "dev": true + }, "node_modules/confusing-browser-globals": { "version": "1.0.11", "resolved": "https://registry.npmjs.org/confusing-browser-globals/-/confusing-browser-globals-1.0.11.tgz", @@ -10947,9 +11726,9 @@ "integrity": "sha512-ASFBup0Mz1uyiIjANan1jzLQami9z1PoYSZCiiYW2FczPbenXc45FZdBZLzOT+r6+iciuEModtmCti+hjaAk0A==" }, "node_modules/cookie": { - 
"version": "0.6.0", - "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.6.0.tgz", - "integrity": "sha512-U71cyTamuh1CRNCfpGY6to28lxvNwPG4Guz/EVjgf3Jmzv0vlDp1atT9eS5dDjMYHucpHbWns6Lwf3BKz6svdw==", + "version": "0.7.1", + "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.7.1.tgz", + "integrity": "sha512-6DnInpx7SJ2AK3+CTUE/ZM0vWTUboZCegxhC2xiIydHR9jNuTAASBrfEpHhiGOZw/nX51bHt6YQl8jsGo4y/0w==", "dev": true, "engines": { "node": ">= 0.6" @@ -11533,6 +12312,14 @@ "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" }, + "node_modules/decamelize": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-1.2.0.tgz", + "integrity": "sha512-z2S+W9X73hAUUki+N+9Za2lBlun89zigOyGrsax+KUQ6wKW4ZoWpEYBkGhQjwAjjDCkWxhY0VKEhk8wzY7F5cA==", + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/decode-named-character-reference": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/decode-named-character-reference/-/decode-named-character-reference-1.0.2.tgz", @@ -11658,17 +12445,20 @@ } }, "node_modules/define-data-property": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/define-data-property/-/define-data-property-1.1.1.tgz", - "integrity": "sha512-E7uGkTzkk1d0ByLeSc6ZsFS79Axg+m1P/VsgYsxHgiuc3tFSj+MjMIwe90FC4lOAZzNBdY7kkO2P2wKdsQ1vgQ==", + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/define-data-property/-/define-data-property-1.1.4.tgz", + "integrity": "sha512-rBMvIzlpA8v6E+SJZoo++HAYqsLrkg7MSfIinMPFhmkorw7X+dOXVJQs+QT69zGkzMyfDnIMN2Wid1+NbL3T+A==", "dev": true, "dependencies": { - "get-intrinsic": "^1.2.1", - "gopd": "^1.0.1", - "has-property-descriptors": "^1.0.0" + "es-define-property": "^1.0.0", + "es-errors": "^1.3.0", + "gopd": "^1.0.1" }, "engines": { "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, "node_modules/define-lazy-prop": { @@ -11862,9 +12652,9 @@ } }, "node_modules/detect-port": { - "version": "1.5.1", - "resolved": "https://registry.npmjs.org/detect-port/-/detect-port-1.5.1.tgz", - "integrity": "sha512-aBzdj76lueB6uUst5iAs7+0H/oOjqI5D16XUWxlWMIMROhcM0rfsNVk93zTngq1dDNpoXRr++Sus7ETAExppAQ==", + "version": "1.6.1", + "resolved": "https://registry.npmjs.org/detect-port/-/detect-port-1.6.1.tgz", + "integrity": "sha512-CmnVc+Hek2egPx1PeTFVta2W78xy2K/9Rkf6cC4T59S50tVnzKj+tnx5mmx5lwvCkujZ4uRrpRSuV+IVs3f90Q==", "dev": true, "dependencies": { "address": "^1.0.1", @@ -11873,6 +12663,9 @@ "bin": { "detect": "bin/detect-port.js", "detect-port": "bin/detect-port.js" + }, + "engines": { + "node": ">= 4.0.0" } }, "node_modules/detective": { @@ -11923,6 +12716,11 @@ "integrity": "sha512-c98Bf3tPniI+scsdk237ku1Dc3ujXQTSgyiPUDEOe7tRkhrqridvh8klBv0HCEso1OLOYcHuCv/cS6DNxKH+ZA==", "dev": true }, + "node_modules/dijkstrajs": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/dijkstrajs/-/dijkstrajs-1.0.3.tgz", + "integrity": "sha512-qiSlmBq9+BCdCA/L46dw8Uy93mloxsPSbwnm5yrKn2vMPiy8KyAskTF6zuV/j5BMsmOGZDPs7KjU+mjb670kfA==" + }, "node_modules/dir-glob": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/dir-glob/-/dir-glob-3.0.1.tgz", @@ -11968,6 +12766,16 @@ "utila": "~0.4" } }, + "node_modules/dom-helpers": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/dom-helpers/-/dom-helpers-5.2.1.tgz", + "integrity": "sha512-nRCa7CK3VTrM2NmGkIy4cbK7IZlgBE/PYMn55rrXefr5xXDP0LdtfPnblFDoVdcAfslJ7or6iqAUnx0CCGIWQA==", + "license": 
"MIT", + "dependencies": { + "@babel/runtime": "^7.8.7", + "csstype": "^3.0.2" + } + }, "node_modules/dom-serializer": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/dom-serializer/-/dom-serializer-2.0.0.tgz", @@ -12019,9 +12827,10 @@ } }, "node_modules/dompurify": { - "version": "2.4.7", - "resolved": "https://registry.npmjs.org/dompurify/-/dompurify-2.4.7.tgz", - "integrity": "sha512-kxxKlPEDa6Nc5WJi+qRgPbOAbgTpSULL+vI3NUXsZMlkJxTqYI9wg5ZTay2sFrdZRWHPWNi+EdAhcJf81WtoMQ==", + "version": "2.5.6", + "resolved": "https://registry.npmjs.org/dompurify/-/dompurify-2.5.6.tgz", + "integrity": "sha512-zUTaUBO8pY4+iJMPE1B9XlO2tXVYIcEA4SNGtvDELzTSCQO7RzH+j7S180BmhmJId78lqGU2z19vgVx2Sxs/PQ==", + "license": "(MPL-2.0 OR Apache-2.0)", "optional": true }, "node_modules/domutils": { @@ -12204,9 +13013,9 @@ "integrity": "sha512-JGmudTwg7yxMYvR/gWbalqqQiyu7WTFv2Xu3vw4cJHXPFxNgAk0oy8UHaer8nLF4lZJa+rNoj6GsrKIVJTV6Tw==" }, "node_modules/elliptic": { - "version": "6.5.4", - "resolved": "https://registry.npmjs.org/elliptic/-/elliptic-6.5.4.tgz", - "integrity": "sha512-iLhC6ULemrljPZb+QutR5TQGB+pdW6KGD5RSegS+8sorOZT+rdQFbsQFJgvN3eRqNALqJer4oQ16YvJHlU8hzQ==", + "version": "6.5.7", + "resolved": "https://registry.npmjs.org/elliptic/-/elliptic-6.5.7.tgz", + "integrity": "sha512-ESVCtTwiA+XhY3wyh24QqRGBoP3rEdDUl3EDUUo9tft074fi19IrdpH7hLCMMP3CIj7jb3W96rn8lt/BqIlt5Q==", "dev": true, "dependencies": { "bn.js": "^4.11.9", @@ -12240,9 +13049,9 @@ } }, "node_modules/encodeurl": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.2.tgz", - "integrity": "sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w==", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-2.0.0.tgz", + "integrity": "sha512-Q0n9HRi4m6JuGIV1eFlmvJB7ZEVxu93IrMyiMsGC0lrMJMWzRgx6WGquyfQgZVb31vhGgXnfmPNNXmxnOkRBrg==", "dev": true, "engines": { "node": ">= 0.8" @@ -12269,9 +13078,9 @@ } }, "node_modules/enhanced-resolve": { - "version": "5.15.0", - "resolved": "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.15.0.tgz", - "integrity": "sha512-LXYT42KJ7lpIKECr2mAXIaMldcNCh/7E0KBKOu4KSfkHmP+mZmSs+8V5gBAqisWBy0OO4W5Oyys0GO1Y8KtdKg==", + "version": "5.17.1", + "resolved": "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.17.1.tgz", + "integrity": "sha512-LMHl3dXhTcfv8gM4kEzIUeTQ+7fpdA0l2tUf34BddXPkz2A5xJ5L/Pchd5BL6rdccM9QGvu0sWZzK1Z1t4wwyg==", "dev": true, "dependencies": { "graceful-fs": "^4.2.4", @@ -12306,9 +13115,9 @@ } }, "node_modules/envinfo": { - "version": "7.11.0", - "resolved": "https://registry.npmjs.org/envinfo/-/envinfo-7.11.0.tgz", - "integrity": "sha512-G9/6xF1FPbIw0TtalAMaVPpiq2aDEuKLXM314jPVAO9r2fo2a4BLqMNkmRS7O/xPPZ+COAhGIz3ETvHEV3eUcg==", + "version": "7.13.0", + "resolved": "https://registry.npmjs.org/envinfo/-/envinfo-7.13.0.tgz", + "integrity": "sha512-cvcaMr7KqXVh4nyzGTVqTum+gAiL265x5jUWQIDLq//zOGbW+gSW/C+OWLleY/rs9Qole6AZLMXPbtIFQbqu+Q==", "dev": true, "bin": { "envinfo": "dist/cli.js" @@ -12401,6 +13210,27 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/es-define-property": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.0.tgz", + "integrity": "sha512-jxayLKShrEqqzJ0eumQbVhTYQM27CfT1T35+gCgDFoL82JLsXqTJ76zv6A0YLOgEnLUMvLzsDsGIrl8NFpT2gQ==", + "dev": true, + "dependencies": { + "get-intrinsic": "^1.2.4" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-errors": { + "version": 
"1.3.0", + "resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz", + "integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==", + "dev": true, + "engines": { + "node": ">= 0.4" + } + }, "node_modules/es-get-iterator": { "version": "1.1.3", "resolved": "https://registry.npmjs.org/es-get-iterator/-/es-get-iterator-1.1.3.tgz", @@ -13333,37 +14163,37 @@ } }, "node_modules/express": { - "version": "4.19.2", - "resolved": "https://registry.npmjs.org/express/-/express-4.19.2.tgz", - "integrity": "sha512-5T6nhjsT+EOMzuck8JjBHARTHfMht0POzlA60WV2pMD3gyXw2LZnZ+ueGdNxG+0calOJcWKbpFcuzLZ91YWq9Q==", + "version": "4.21.1", + "resolved": "https://registry.npmjs.org/express/-/express-4.21.1.tgz", + "integrity": "sha512-YSFlK1Ee0/GC8QaO91tHcDxJiE/X4FbpAyQWkxAvG6AXCuR65YzK8ua6D9hvi/TzUfZMpc+BwuM1IPw8fmQBiQ==", "dev": true, "dependencies": { "accepts": "~1.3.8", "array-flatten": "1.1.1", - "body-parser": "1.20.2", + "body-parser": "1.20.3", "content-disposition": "0.5.4", "content-type": "~1.0.4", - "cookie": "0.6.0", + "cookie": "0.7.1", "cookie-signature": "1.0.6", "debug": "2.6.9", "depd": "2.0.0", - "encodeurl": "~1.0.2", + "encodeurl": "~2.0.0", "escape-html": "~1.0.3", "etag": "~1.8.1", - "finalhandler": "1.2.0", + "finalhandler": "1.3.1", "fresh": "0.5.2", "http-errors": "2.0.0", - "merge-descriptors": "1.0.1", + "merge-descriptors": "1.0.3", "methods": "~1.1.2", "on-finished": "2.4.1", "parseurl": "~1.3.3", - "path-to-regexp": "0.1.7", + "path-to-regexp": "0.1.10", "proxy-addr": "~2.0.7", - "qs": "6.11.0", + "qs": "6.13.0", "range-parser": "~1.2.1", "safe-buffer": "5.2.1", - "send": "0.18.0", - "serve-static": "1.15.0", + "send": "0.19.0", + "serve-static": "1.16.2", "setprototypeof": "1.2.0", "statuses": "2.0.1", "type-is": "~1.6.18", @@ -13389,21 +14219,6 @@ "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==", "dev": true }, - "node_modules/express/node_modules/qs": { - "version": "6.11.0", - "resolved": "https://registry.npmjs.org/qs/-/qs-6.11.0.tgz", - "integrity": "sha512-MvjoMCJwEarSbUYk5O+nmoSzSutSsTwF85zcHPQ9OrlFoZOYIjaqBAJIqIXjptyD5vThxGq52Xu/MaJzRkIk4Q==", - "dev": true, - "dependencies": { - "side-channel": "^1.0.4" - }, - "engines": { - "node": ">=0.6" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, "node_modules/extend": { "version": "3.0.2", "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz", @@ -13652,9 +14467,9 @@ } }, "node_modules/fill-range": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", - "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz", + "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==", "dev": true, "dependencies": { "to-regex-range": "^5.0.1" @@ -13673,13 +14488,13 @@ } }, "node_modules/finalhandler": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.2.0.tgz", - "integrity": "sha512-5uXcUVftlQMFnWC9qu/svkWv3GTd2PfUhK/3PLkYNAe7FbqJMt3515HaxE6eRL74GdsriiwujiawdaB1BpEISg==", + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.3.1.tgz", + "integrity": "sha512-6BN9trH7bp3qvnrRyzsBz+g3lZxTNZTbVO2EV1CS0WIcDbawYVdYvGflME/9QP0h0pYlCDBCTjYa9nZzMDpyxQ==", 
"dev": true, "dependencies": { "debug": "2.6.9", - "encodeurl": "~1.0.2", + "encodeurl": "~2.0.0", "escape-html": "~1.0.3", "on-finished": "2.4.1", "parseurl": "~1.3.3", @@ -13828,9 +14643,9 @@ "dev": true }, "node_modules/flow-parser": { - "version": "0.226.0", - "resolved": "https://registry.npmjs.org/flow-parser/-/flow-parser-0.226.0.tgz", - "integrity": "sha512-YlH+Y/P/5s0S7Vg14RwXlJMF/JsGfkG7gcKB/zljyoqaPNX9YVsGzx+g6MLTbhZaWbPhs4347aTpmSb9GgiPtw==", + "version": "0.239.1", + "resolved": "https://registry.npmjs.org/flow-parser/-/flow-parser-0.239.1.tgz", + "integrity": "sha512-topOrETNxJ6T2gAnQiWqAlzGPj8uI2wtmNOlDIMNB+qyvGJZ6R++STbUOTAYmvPhOMz2gXnXPH0hOvURYmrBow==", "dev": true, "engines": { "node": ">=0.4.0" @@ -14172,17 +14987,29 @@ "node": ">=6.9.0" } }, + "node_modules/get-caller-file": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", + "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", + "engines": { + "node": "6.* || 8.* || >= 10.*" + } + }, "node_modules/get-intrinsic": { - "version": "1.2.2", - "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.2.tgz", - "integrity": "sha512-0gSo4ml/0j98Y3lngkFEot/zhiCeWsbYIlZ+uZOVgzLyLaUw7wxUL+nCTP0XJvJg1AXulJRI3UJi8GsbDuxdGA==", + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.4.tgz", + "integrity": "sha512-5uYhsJH8VJBTv7oslg4BznJYhDoRI6waYCxMmCdnTrcCrHA/fCFKoTFz2JKKE0HdDFUF7/oQuhzumXJK7paBRQ==", "dev": true, "dependencies": { + "es-errors": "^1.3.0", "function-bind": "^1.1.2", "has-proto": "^1.0.1", "has-symbols": "^1.0.3", "hasown": "^2.0.0" }, + "engines": { + "node": ">= 0.4" + }, "funding": { "url": "https://github.com/sponsors/ljharb" } @@ -14287,18 +15114,18 @@ } }, "node_modules/giget": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/giget/-/giget-1.2.1.tgz", - "integrity": "sha512-4VG22mopWtIeHwogGSy1FViXVo0YT+m6BrqZfz0JJFwbSsePsCdOzdLIIli5BtMp7Xe8f/o2OmBpQX2NBOC24g==", + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/giget/-/giget-1.2.3.tgz", + "integrity": "sha512-8EHPljDvs7qKykr6uw8b+lqLiUc/vUg+KVTI0uND4s63TdsZM2Xus3mflvF0DDG9SiM4RlCkFGL+7aAjRmV7KA==", "dev": true, "dependencies": { - "citty": "^0.1.5", + "citty": "^0.1.6", "consola": "^3.2.3", - "defu": "^6.1.3", - "node-fetch-native": "^1.6.1", - "nypm": "^0.3.3", + "defu": "^6.1.4", + "node-fetch-native": "^1.6.3", + "nypm": "^0.3.8", "ohash": "^1.1.3", - "pathe": "^1.1.1", + "pathe": "^1.1.2", "tar": "^6.2.0" }, "bin": { @@ -14604,12 +15431,12 @@ } }, "node_modules/has-property-descriptors": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.1.tgz", - "integrity": "sha512-VsX8eaIewvas0xnvinAe9bw4WfIeODpGYikiWYLH+dma0Jw6KHYqWiWfhQlgOVK8D6PvjubK5Uc4P0iIhIcNVg==", + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.2.tgz", + "integrity": "sha512-55JNKuIW+vq4Ke1BjOTjM2YctQIvCT7GFzHwmfZPGo5wnrgkid0YQtnAleFSqumZm4az3n2BS+erby5ipJdgrg==", "dev": true, "dependencies": { - "get-intrinsic": "^1.2.2" + "es-define-property": "^1.0.0" }, "funding": { "url": "https://github.com/sponsors/ljharb" @@ -15138,11 +15965,11 @@ } }, "node_modules/infisical-node/node_modules/axios": { - "version": "1.6.5", - "resolved": "https://registry.npmjs.org/axios/-/axios-1.6.5.tgz", - "integrity": 
"sha512-Ii012v05KEVuUoFWmMW/UQv9aRIc3ZwkWDcM+h5Il8izZCtRVpDUfwpoFf7eOtajT3QiGR4yDUx7lPqHJULgbg==", + "version": "1.7.5", + "resolved": "https://registry.npmjs.org/axios/-/axios-1.7.5.tgz", + "integrity": "sha512-fZu86yCo+svH3uqJ/yTdQ0QHpQu5oL+/QE+QPSv6BZSkDAoky9vytxp7u5qk83OJFS3kEBcesWni9WTZAv3tSw==", "dependencies": { - "follow-redirects": "^1.15.4", + "follow-redirects": "^1.15.6", "form-data": "^4.0.0", "proxy-from-env": "^1.1.0" } @@ -15203,12 +16030,6 @@ "loose-envify": "^1.0.0" } }, - "node_modules/ip": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/ip/-/ip-2.0.1.tgz", - "integrity": "sha512-lJUL9imLTNi1ZfXT+DU6rBBdbiKGBuay9B6xGSPVjUeQwaH1RIGqef8RZkUtHioLmSNpPR5M4HVKJGm1j8FWVQ==", - "dev": true - }, "node_modules/ipaddr.js": { "version": "1.9.1", "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.9.1.tgz", @@ -15443,7 +16264,6 @@ "version": "3.0.0", "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", - "dev": true, "engines": { "node": ">=8" } @@ -15856,9 +16676,9 @@ } }, "node_modules/jake": { - "version": "10.8.7", - "resolved": "https://registry.npmjs.org/jake/-/jake-10.8.7.tgz", - "integrity": "sha512-ZDi3aP+fG/LchyBzUM804VjddnwfSfsdeYkwt8NcbKRvo4rFkjhs456iLFn3k2ZUWvNe4i48WACDbza8fhq2+w==", + "version": "10.9.1", + "resolved": "https://registry.npmjs.org/jake/-/jake-10.9.1.tgz", + "integrity": "sha512-61btcOHNnLnsOdtLgA5efqQWjnSi/vow5HbI7HMdKKWqvrKR1bLK3BPlJn9gcSaP2ewuamUSMB5XEy76KUIS2w==", "dev": true, "dependencies": { "async": "^3.2.3", @@ -16024,9 +16844,9 @@ "integrity": "sha512-4bYVV3aAMtDTTu4+xsDYa6sy9GyJ69/amsu9sYF2zqjiEoZA5xJi3BrfX3uY+/IekIu7MwdObdbDWpoZdBv3/A==" }, "node_modules/jscodeshift": { - "version": "0.15.1", - "resolved": "https://registry.npmjs.org/jscodeshift/-/jscodeshift-0.15.1.tgz", - "integrity": "sha512-hIJfxUy8Rt4HkJn/zZPU9ChKfKZM1342waJ1QC2e2YsPcWhM+3BJ4dcfQCzArTrk1jJeNLB341H+qOcEHRxJZg==", + "version": "0.15.2", + "resolved": "https://registry.npmjs.org/jscodeshift/-/jscodeshift-0.15.2.tgz", + "integrity": "sha512-FquR7Okgmc4Sd0aEDwqho3rEiKR3BdvuG9jfdHjLJ6JQoWSMpavug3AoIfnfWhxFlf+5pzQh8qjqz0DWFrNQzA==", "dev": true, "dependencies": { "@babel/core": "^7.23.0", @@ -16164,22 +16984,29 @@ "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==" }, "node_modules/jspdf": { - "version": "2.5.1", - "resolved": "https://registry.npmjs.org/jspdf/-/jspdf-2.5.1.tgz", - "integrity": "sha512-hXObxz7ZqoyhxET78+XR34Xu2qFGrJJ2I2bE5w4SM8eFaFEkW2xcGRVUss360fYelwRSid/jT078kbNvmoW0QA==", + "version": "2.5.2", + "resolved": "https://registry.npmjs.org/jspdf/-/jspdf-2.5.2.tgz", + "integrity": "sha512-myeX9c+p7znDWPk0eTrujCzNjT+CXdXyk7YmJq5nD5V7uLLKmSXnlQ/Jn/kuo3X09Op70Apm0rQSnFWyGK8uEQ==", + "license": "MIT", "dependencies": { - "@babel/runtime": "^7.14.0", + "@babel/runtime": "^7.23.2", "atob": "^2.1.2", "btoa": "^1.2.1", - "fflate": "^0.4.8" + "fflate": "^0.8.1" }, "optionalDependencies": { "canvg": "^3.0.6", "core-js": "^3.6.0", - "dompurify": "^2.2.0", + "dompurify": "^2.5.4", "html2canvas": "^1.0.0-rc.5" } }, + "node_modules/jspdf/node_modules/fflate": { + "version": "0.8.2", + "resolved": "https://registry.npmjs.org/fflate/-/fflate-0.8.2.tgz", + "integrity": "sha512-cPJU47OaAoCbg0pBvzsgpTPhmhqI5eJjh/JIu8tPj5q+T7iLvW/JAYUqmE7KOB4R1ZyEhzBaIQpQpardBF5z8A==", + "license": "MIT" + }, "node_modules/jsprim": { "version": "2.0.2", 
"resolved": "https://registry.npmjs.org/jsprim/-/jsprim-2.0.2.tgz", @@ -16561,6 +17388,12 @@ "integrity": "sha512-FT1yDzDYEoYWhnSGnpE/4Kj1fLZkDFyqRb7fNt6FdYOSxlUWAtp42Eh6Wb0rGIv/m9Bgo7x4GhQbm5Ys4SG5ow==", "dev": true }, + "node_modules/lodash.deburr": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/lodash.deburr/-/lodash.deburr-4.1.0.tgz", + "integrity": "sha512-m/M1U1f3ddMCs6Hq2tAsYThTBDaAKFDX3dwDo97GEYzamXi9SqUpjWi/Rrj/gf3X2n8ktwgZrlP1z6E3v/IExQ==", + "license": "MIT" + }, "node_modules/lodash.isplainobject": { "version": "4.0.6", "resolved": "https://registry.npmjs.org/lodash.isplainobject/-/lodash.isplainobject-4.0.6.tgz", @@ -16954,10 +17787,13 @@ } }, "node_modules/merge-descriptors": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.1.tgz", - "integrity": "sha512-cCi6g3/Zr1iqQi6ySbseM1Xvooa98N0w31jzUYrXPX2xqObmFGHJ0tQ5u74H3mVh7wLouTseZyYIq39g8cNp1w==", - "dev": true + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.3.tgz", + "integrity": "sha512-gaNvAS7TZ897/rVaZ0nMtAyxNyi/pdbjbAwUpFQpN70GqnVfOiXpeUUMKRBmzXaSQ8DdTX4/0ms62r2K+hE6mQ==", + "dev": true, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } }, "node_modules/merge-stream": { "version": "2.0.0", @@ -17405,12 +18241,12 @@ ] }, "node_modules/micromatch": { - "version": "4.0.5", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.5.tgz", - "integrity": "sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA==", + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz", + "integrity": "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==", "dev": true, "dependencies": { - "braces": "^3.0.2", + "braces": "^3.0.3", "picomatch": "^2.3.1" }, "engines": { @@ -17587,6 +18423,30 @@ "integrity": "sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A==", "dev": true }, + "node_modules/mlly": { + "version": "1.7.1", + "resolved": "https://registry.npmjs.org/mlly/-/mlly-1.7.1.tgz", + "integrity": "sha512-rrVRZRELyQzrIUAVMHxP97kv+G786pHmOKzuFII8zDYahFBS7qnHh2AlYSl1GAHhaMPCz6/oHjVMcfFYgFYHgA==", + "dev": true, + "dependencies": { + "acorn": "^8.11.3", + "pathe": "^1.1.2", + "pkg-types": "^1.1.1", + "ufo": "^1.5.3" + } + }, + "node_modules/mlly/node_modules/acorn": { + "version": "8.12.1", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.12.1.tgz", + "integrity": "sha512-tcpGyI9zbizT9JbV6oYE477V6mTlXvvi0T0G3SNIYE2apm/G5huBa1+K89VGeovbg+jycCrfhl3ADxErOuO6Jg==", + "dev": true, + "bin": { + "acorn": "bin/acorn" + }, + "engines": { + "node": ">=0.4.0" + } + }, "node_modules/mri": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/mri/-/mri-1.2.0.tgz", @@ -17897,9 +18757,9 @@ } }, "node_modules/node-fetch-native": { - "version": "1.6.1", - "resolved": "https://registry.npmjs.org/node-fetch-native/-/node-fetch-native-1.6.1.tgz", - "integrity": "sha512-bW9T/uJDPAJB2YNYEpWzE54U5O3MQidXsOyTfnbKYtTtFexRvGzb1waphBN4ZwP6EcIvYYEOwW0b72BpAqydTw==", + "version": "1.6.4", + "resolved": "https://registry.npmjs.org/node-fetch-native/-/node-fetch-native-1.6.4.tgz", + "integrity": "sha512-IhOigYzAKHd244OC0JIMIUrjzctirCmPkaIfhDeGcEETWof5zKYUW7e7MYvChGWh/4CJeXEgsRyGzuF334rOOQ==", "dev": true }, "node_modules/node-int64": { @@ -18061,15 +18921,17 @@ } }, "node_modules/nypm": { - "version": "0.3.4", - "resolved": 
"https://registry.npmjs.org/nypm/-/nypm-0.3.4.tgz", - "integrity": "sha512-1JLkp/zHBrkS3pZ692IqOaIKSYHmQXgqfELk6YTOfVBnwealAmPA1q2kKK7PHJAHSMBozerThEFZXP3G6o7Ukg==", + "version": "0.3.9", + "resolved": "https://registry.npmjs.org/nypm/-/nypm-0.3.9.tgz", + "integrity": "sha512-BI2SdqqTHg2d4wJh8P9A1W+bslg33vOE9IZDY6eR2QC+Pu1iNBVZUqczrd43rJb+fMzHU7ltAYKsEFY/kHMFcw==", "dev": true, "dependencies": { - "citty": "^0.1.5", + "citty": "^0.1.6", + "consola": "^3.2.3", "execa": "^8.0.1", - "pathe": "^1.1.1", - "ufo": "^1.3.2" + "pathe": "^1.1.2", + "pkg-types": "^1.1.1", + "ufo": "^1.5.3" }, "bin": { "nypm": "dist/cli.mjs" @@ -18147,9 +19009,9 @@ } }, "node_modules/nypm/node_modules/npm-run-path": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-5.2.0.tgz", - "integrity": "sha512-W4/tgAXFqFA0iL7fk0+uQ3g7wkL8xJmx3XdK0VGb4cHW//eZTtKGvFBBoRKVTpY7n6ze4NL9ly7rgXcHufqXKg==", + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-5.3.0.tgz", + "integrity": "sha512-ppwTtiJZq0O/ai0z7yfudtBpWIoxM8yE6nHi1X47eFR2EWORqfbu6CnPlNsjeN683eT0qG6H/Pyf9fCcvjnnnQ==", "dev": true, "dependencies": { "path-key": "^4.0.0" @@ -18528,7 +19390,6 @@ "version": "2.2.0", "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==", - "dev": true, "engines": { "node": ">=6" } @@ -18634,7 +19495,6 @@ "version": "4.0.0", "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", - "dev": true, "engines": { "node": ">=8" } @@ -18688,9 +19548,9 @@ } }, "node_modules/path-to-regexp": { - "version": "0.1.7", - "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.7.tgz", - "integrity": "sha512-5DFkuoqlv1uYQKxy8omFBeJPQcdoE07Kv2sferDCrAq1ohOU+MSDswDIbnx3YAM60qIOnYa53wBhXW0EbMonrQ==", + "version": "0.1.10", + "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.10.tgz", + "integrity": "sha512-7lf7qcQidTku0Gu3YDPc8DJ1q7OOucfa/BSsIwjuh56VU7katFvuM8hULfkwB3Fns/rsVF7PwPKVw1sl5KQS9w==", "dev": true }, "node_modules/path-type": { @@ -18799,9 +19659,9 @@ "devOptional": true }, "node_modules/picocolors": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.0.0.tgz", - "integrity": "sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ==" + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.0.1.tgz", + "integrity": "sha512-anP1Z8qwhkbmu7MFP5iTt+wQKXgwzf7zTyGlcdzabySa9vd0Xt392U0rVmz9poOaBj0uHJKyyo9/upk0HrEQew==" }, "node_modules/picomatch": { "version": "2.3.1", @@ -18844,6 +19704,25 @@ "node": ">=10" } }, + "node_modules/pkg-types": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/pkg-types/-/pkg-types-1.1.3.tgz", + "integrity": "sha512-+JrgthZG6m3ckicaOB74TwQ+tBWsFl3qVQg7mN8ulwSOElJ7gBhKzj2VkCPnZ4NlF6kEquYU+RIYNVAvzd54UA==", + "dev": true, + "dependencies": { + "confbox": "^0.1.7", + "mlly": "^1.7.1", + "pathe": "^1.1.2" + } + }, + "node_modules/pngjs": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/pngjs/-/pngjs-5.0.0.tgz", + "integrity": "sha512-40QW5YalBNfQo5yRYmiw7Yz6TKKVr3h6970B2YE+3fQpsWcrbj1PzJgxeJ19DRQjhMbKPIuMY8rFaXc8moolVw==", + "engines": { + "node": ">=10.13.0" + } + }, "node_modules/pnp-webpack-plugin": { "version": 
"1.7.0", "resolved": "https://registry.npmjs.org/pnp-webpack-plugin/-/pnp-webpack-plugin-1.7.0.tgz", @@ -18880,9 +19759,9 @@ } }, "node_modules/postcss": { - "version": "8.4.33", - "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.33.tgz", - "integrity": "sha512-Kkpbhhdjw2qQs2O2DGX+8m5OVqEcbB9HRBvuYM9pgrjEFUg30A9LmXNlTAUj4S9kgtGyrMbTzVjH7E+s5Re2yg==", + "version": "8.4.39", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.39.tgz", + "integrity": "sha512-0vzE+lAiG7hZl1/9I8yzKLx3aR9Xbof3fBHKunvMfOCYAtMhrsnccJY2iTURb9EZd5+pLuiNV9/c/GZJOHsgIw==", "funding": [ { "type": "opencollective", @@ -18899,8 +19778,8 @@ ], "dependencies": { "nanoid": "^3.3.7", - "picocolors": "^1.0.0", - "source-map-js": "^1.0.2" + "picocolors": "^1.0.1", + "source-map-js": "^1.2.0" }, "engines": { "node": "^10 || ^12 || >=14" @@ -19643,6 +20522,7 @@ "version": "7.2.3", "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", + "deprecated": "Glob versions prior to v9 are no longer supported", "dev": true, "dependencies": { "fs.realpath": "^1.0.0", @@ -19693,6 +20573,7 @@ "version": "2.7.1", "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.7.1.tgz", "integrity": "sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w==", + "deprecated": "Rimraf versions prior to v4 are no longer supported", "dev": true, "dependencies": { "glob": "^7.1.3" @@ -19702,21 +20583,53 @@ } }, "node_modules/puppeteer-core/node_modules/ws": { - "version": "6.2.2", - "resolved": "https://registry.npmjs.org/ws/-/ws-6.2.2.tgz", - "integrity": "sha512-zmhltoSR8u1cnDsD43TX59mzoMZsLKqUweyYBAIvTngR3shc0W6aOZylZmq/7hqyVxPdi+5Ud2QInblgyE72fw==", + "version": "6.2.3", + "resolved": "https://registry.npmjs.org/ws/-/ws-6.2.3.tgz", + "integrity": "sha512-jmTjYU0j60B+vHey6TfR3Z7RD61z/hmxBS3VMSGIrroOWXQEneK1zNuotOUrGyBHQj0yrpsLHPWtigEFd13ndA==", "dev": true, "dependencies": { "async-limiter": "~1.0.0" } }, + "node_modules/pvtsutils": { + "version": "1.3.5", + "resolved": "https://registry.npmjs.org/pvtsutils/-/pvtsutils-1.3.5.tgz", + "integrity": "sha512-ARvb14YB9Nm2Xi6nBq1ZX6dAM0FsJnuk+31aUp4TrcZEdKUlSqOqsxJHUPJDNE3qiIp+iUPEIeR6Je/tgV7zsA==", + "dependencies": { + "tslib": "^2.6.1" + } + }, + "node_modules/pvutils": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/pvutils/-/pvutils-1.1.3.tgz", + "integrity": "sha512-pMpnA0qRdFp32b1sJl1wOJNxZLQ2cbQx+k6tjNtZ8CpvVhNqEPRgivZ2WOUev2YMajecdH7ctUPDvEe87nariQ==", + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/qrcode": { + "version": "1.5.4", + "resolved": "https://registry.npmjs.org/qrcode/-/qrcode-1.5.4.tgz", + "integrity": "sha512-1ca71Zgiu6ORjHqFBDpnSMTR2ReToX4l1Au1VFLyVeBTFavzQnv5JxMFr3ukHVKpSrSA2MCk0lNJSykjUfz7Zg==", + "dependencies": { + "dijkstrajs": "^1.0.1", + "pngjs": "^5.0.0", + "yargs": "^15.3.1" + }, + "bin": { + "qrcode": "bin/qrcode" + }, + "engines": { + "node": ">=10.13.0" + } + }, "node_modules/qs": { - "version": "6.11.2", - "resolved": "https://registry.npmjs.org/qs/-/qs-6.11.2.tgz", - "integrity": "sha512-tDNIz22aBzCDxLtVH++VnTfzxlfeK5CbqohpSqpJgj1Wg/cQbStNAz3NuqCs5vV+pjBsK4x4pN9HlVh7rcYRiA==", + "version": "6.13.0", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.13.0.tgz", + "integrity": "sha512-+38qI9SOr8tfZ4QmJNplMUxqjbe7LKvvZgWdExBOmd+egZTtjLB67Gu0HRX3u/XOq7UU2Nx6nsjvS16Z9uwfpg==", "dev": true, "dependencies": { - "side-channel": "^1.0.4" + "side-channel": "^1.0.6" }, 
"engines": { "node": ">=0.6" @@ -20185,6 +21098,14 @@ } } }, + "node_modules/react-icons": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/react-icons/-/react-icons-5.3.0.tgz", + "integrity": "sha512-DnUk8aFbTyQPSkCfF8dbX6kQjXA9DktMeJqfjrg6cK9vwQVMxmcA3BfP4QoiztVmEHtwlTgLFsPuH2NskKT6eg==", + "peerDependencies": { + "react": "*" + } + }, "node_modules/react-is": { "version": "18.2.0", "resolved": "https://registry.npmjs.org/react-is/-/react-is-18.2.0.tgz", @@ -20337,6 +21258,33 @@ "react": ">= 16.3" } }, + "node_modules/react-select": { + "version": "5.8.1", + "resolved": "https://registry.npmjs.org/react-select/-/react-select-5.8.1.tgz", + "integrity": "sha512-RT1CJmuc+ejqm5MPgzyZujqDskdvB9a9ZqrdnVLsvAHjJ3Tj0hELnLeVPQlmYdVKCdCpxanepl6z7R5KhXhWzg==", + "license": "MIT", + "dependencies": { + "@babel/runtime": "^7.12.0", + "@emotion/cache": "^11.4.0", + "@emotion/react": "^11.8.1", + "@floating-ui/dom": "^1.0.1", + "@types/react-transition-group": "^4.4.0", + "memoize-one": "^6.0.0", + "prop-types": "^15.6.0", + "react-transition-group": "^4.3.0", + "use-isomorphic-layout-effect": "^1.1.2" + }, + "peerDependencies": { + "react": "^16.8.0 || ^17.0.0 || ^18.0.0", + "react-dom": "^16.8.0 || ^17.0.0 || ^18.0.0" + } + }, + "node_modules/react-select/node_modules/memoize-one": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/memoize-one/-/memoize-one-6.0.0.tgz", + "integrity": "sha512-rkpe71W0N0c0Xz6QD0eJETuWAJGnJ9afsl1srmwPrI+yBCkge5EycXXbYRyvL29zZVUWQCY7InPRCv3GDXuZNw==", + "license": "MIT" + }, "node_modules/react-style-singleton": { "version": "2.2.1", "resolved": "https://registry.npmjs.org/react-style-singleton/-/react-style-singleton-2.2.1.tgz", @@ -20391,6 +21339,22 @@ "node": ">=6" } }, + "node_modules/react-transition-group": { + "version": "4.4.5", + "resolved": "https://registry.npmjs.org/react-transition-group/-/react-transition-group-4.4.5.tgz", + "integrity": "sha512-pZcd1MCJoiKiBR2NRxeCRg13uCXbydPnmB4EOeRrY7480qNWO8IIgQG6zlDkm6uRMsURXPuKq0GWtiM59a5Q6g==", + "license": "BSD-3-Clause", + "dependencies": { + "@babel/runtime": "^7.5.5", + "dom-helpers": "^5.0.1", + "loose-envify": "^1.4.0", + "prop-types": "^15.6.2" + }, + "peerDependencies": { + "react": ">=16.6.0", + "react-dom": ">=16.6.0" + } + }, "node_modules/read-cache": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/read-cache/-/read-cache-1.0.0.tgz", @@ -20576,6 +21540,11 @@ "redux": "^4" } }, + "node_modules/reflect-metadata": { + "version": "0.2.2", + "resolved": "https://registry.npmjs.org/reflect-metadata/-/reflect-metadata-0.2.2.tgz", + "integrity": "sha512-urBwgfrvVP/eAyXx4hluJivBKzuEbSQs9rKWCrCkbSxNv8mxPcUZKeuoF3Uy4mJl3Lwprp6yy5/39VWigZ4K6Q==" + }, "node_modules/reflect.getprototypeof": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/reflect.getprototypeof/-/reflect.getprototypeof-1.0.4.tgz", @@ -20950,6 +21919,14 @@ "throttleit": "^1.0.0" } }, + "node_modules/require-directory": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", + "integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==", + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/require-from-string": { "version": "2.0.2", "resolved": "https://registry.npmjs.org/require-from-string/-/require-from-string-2.0.2.tgz", @@ -20959,6 +21936,11 @@ "node": ">=0.10.0" } }, + "node_modules/require-main-filename": { + "version": "2.0.0", + "resolved": 
"https://registry.npmjs.org/require-main-filename/-/require-main-filename-2.0.0.tgz", + "integrity": "sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg==" + }, "node_modules/requireindex": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/requireindex/-/requireindex-1.2.0.tgz", @@ -21347,9 +22329,9 @@ } }, "node_modules/send": { - "version": "0.18.0", - "resolved": "https://registry.npmjs.org/send/-/send-0.18.0.tgz", - "integrity": "sha512-qqWzuOjSFOuqPjFe4NOsMLafToQQwBSOEpS+FwEt3A2V3vKubTquT3vmLTQpFgMXp8AlFWFuP1qKaJZOtPpVXg==", + "version": "0.19.0", + "resolved": "https://registry.npmjs.org/send/-/send-0.19.0.tgz", + "integrity": "sha512-dW41u5VfLXu8SJh5bwRmyYUbAoSB3c9uQh6L8h/KtsFREPWpbX1lrljJo186Jc4nmci/sGUZ9a0a0J2zgfq2hw==", "dev": true, "dependencies": { "debug": "2.6.9", @@ -21385,6 +22367,15 @@ "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==", "dev": true }, + "node_modules/send/node_modules/encodeurl": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.2.tgz", + "integrity": "sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w==", + "dev": true, + "engines": { + "node": ">= 0.8" + } + }, "node_modules/serialize-javascript": { "version": "6.0.2", "resolved": "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-6.0.2.tgz", @@ -21395,36 +22386,42 @@ } }, "node_modules/serve-static": { - "version": "1.15.0", - "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-1.15.0.tgz", - "integrity": "sha512-XGuRDNjXUijsUL0vl6nSD7cwURuzEgglbOaFuZM9g3kwDXOWVTck0jLzjPzGD+TazWbboZYu52/9/XPdUgne9g==", + "version": "1.16.2", + "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-1.16.2.tgz", + "integrity": "sha512-VqpjJZKadQB/PEbEwvFdO43Ax5dFBZ2UECszz8bQ7pi7wt//PWe1P6MN7eCnjsatYtBT6EuiClbjSWP2WrIoTw==", "dev": true, "dependencies": { - "encodeurl": "~1.0.2", + "encodeurl": "~2.0.0", "escape-html": "~1.0.3", "parseurl": "~1.3.3", - "send": "0.18.0" + "send": "0.19.0" }, "engines": { "node": ">= 0.8.0" } }, + "node_modules/set-blocking": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz", + "integrity": "sha512-KiKBS8AnWGEyLzofFfmvKwpdPzqiy16LvQfK3yv/fVH7Bj13/wl3JSR1J+rfgRE9q7xUJK4qvgS8raSOeLUehw==" + }, "node_modules/set-cookie-parser": { "version": "2.6.0", "resolved": "https://registry.npmjs.org/set-cookie-parser/-/set-cookie-parser-2.6.0.tgz", "integrity": "sha512-RVnVQxTXuerk653XfuliOxBP81Sf0+qfQE73LIYKcyMYHG94AuH0kgrQpRDuTZnSmjpysHmzxJXKNfa6PjFhyQ==" }, "node_modules/set-function-length": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/set-function-length/-/set-function-length-1.2.0.tgz", - "integrity": "sha512-4DBHDoyHlM1IRPGYcoxexgh67y4ueR53FKV1yyxwFMY7aCqcN/38M1+SwZ/qJQ8iLv7+ck385ot4CcisOAPT9w==", + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/set-function-length/-/set-function-length-1.2.2.tgz", + "integrity": "sha512-pgRc4hJ4/sNjWCSS9AmnS40x3bNMDTknHgL5UaMBTMyJnU90EgWh1Rz+MC9eFu4BuN/UwZjKQuY/1v3rM7HMfg==", "dev": true, "dependencies": { - "define-data-property": "^1.1.1", + "define-data-property": "^1.1.4", + "es-errors": "^1.3.0", "function-bind": "^1.1.2", - "get-intrinsic": "^1.2.2", + "get-intrinsic": "^1.2.4", "gopd": "^1.0.1", - "has-property-descriptors": "^1.0.1" + "has-property-descriptors": "^1.0.2" }, "engines": { "node": ">= 0.4" @@ -21576,14 
+22573,18 @@ } }, "node_modules/side-channel": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.4.tgz", - "integrity": "sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw==", + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.6.tgz", + "integrity": "sha512-fDW/EZ6Q9RiO8eFG8Hj+7u/oW+XrPTIChwCOM2+th2A6OblDtYYIpve9m+KvI9Z4C9qSEXlaGR6bTEYHReuglA==", "dev": true, "dependencies": { - "call-bind": "^1.0.0", - "get-intrinsic": "^1.0.2", - "object-inspect": "^1.9.0" + "call-bind": "^1.0.7", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.4", + "object-inspect": "^1.13.1" + }, + "engines": { + "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" @@ -21653,51 +22654,6 @@ "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.3.2.tgz", "integrity": "sha512-eVRqCvVlZbuw3GrM63ovNSNAeA1K16kaR/LRY/92w0zxQ5/1YzwblUX652i4Xs9RwAGjW9d9y6X88t8OaAJfWQ==" }, - "node_modules/simple-update-notifier": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/simple-update-notifier/-/simple-update-notifier-2.0.0.tgz", - "integrity": "sha512-a2B9Y0KlNXl9u/vsW6sTIu9vGEpfKu2wRV6l1H3XEas/0gUIzGzBoP/IouTcUQbm9JWZLH3COxyn03TYlFax6w==", - "dev": true, - "dependencies": { - "semver": "^7.5.3" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/simple-update-notifier/node_modules/lru-cache": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", - "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", - "dev": true, - "dependencies": { - "yallist": "^4.0.0" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/simple-update-notifier/node_modules/semver": { - "version": "7.5.4", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.5.4.tgz", - "integrity": "sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==", - "dev": true, - "dependencies": { - "lru-cache": "^6.0.0" - }, - "bin": { - "semver": "bin/semver.js" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/simple-update-notifier/node_modules/yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", - "dev": true - }, "node_modules/sisteransi": { "version": "1.0.5", "resolved": "https://registry.npmjs.org/sisteransi/-/sisteransi-1.0.5.tgz", @@ -21736,9 +22692,9 @@ } }, "node_modules/source-map-js": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.0.2.tgz", - "integrity": "sha512-R0XvVJ9WusLiqTCEiGCmICCMplcCkIwwR11mOSD9CR5u+IXYdiseeEuXCVAjS54zqwkLcPNnmU4OeJ6tUrWhDw==", + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.0.tgz", + "integrity": "sha512-itJW8lvSA0TXEphiRoawsCksnlf8SyvmFzIhltqAHluXd88pkCd+cXJVHTDwdCr0IzwptSm035IHQktUu1QUMg==", "engines": { "node": ">=0.10.0" } @@ -21788,9 +22744,9 @@ } }, "node_modules/spdx-exceptions": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/spdx-exceptions/-/spdx-exceptions-2.3.0.tgz", - "integrity": "sha512-/tTrYOC7PPI1nUAgx34hUpqXuyJG+DTHJTnIULG4rDygi4xu/tfgmq1e1cIRwRzwZgo4NLySi+ricLkZkw4i5A==", + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/spdx-exceptions/-/spdx-exceptions-2.5.0.tgz", 
+ "integrity": "sha512-PiU42r+xO4UbUS1buo3LPJkjlO7430Xn5SVAhdpzzsPHsjbYVflnnFdATgabnLude+Cqu25p6N+g2lw/PFsa4w==", "dev": true }, "node_modules/spdx-expression-parse": { @@ -21804,9 +22760,9 @@ } }, "node_modules/spdx-license-ids": { - "version": "3.0.16", - "resolved": "https://registry.npmjs.org/spdx-license-ids/-/spdx-license-ids-3.0.16.tgz", - "integrity": "sha512-eWN+LnM3GR6gPu35WxNgbGl8rmY1AEmoMDvL/QD6zYmPWgywxWqJWNdLGT+ke8dKNWrcYgYjPpG5gbTfghP8rw==", + "version": "3.0.18", + "resolved": "https://registry.npmjs.org/spdx-license-ids/-/spdx-license-ids-3.0.18.tgz", + "integrity": "sha512-xxRs31BqRYHwiMzudOrpSiHtZ8i/GeionCBDSilhYRj+9gIcI8wCZTlXZKu9vZIVqViP3dcp9qE5G6AlIaD+TQ==", "dev": true }, "node_modules/split-on-first": { @@ -21902,12 +22858,12 @@ "dev": true }, "node_modules/storybook": { - "version": "7.6.8", - "resolved": "https://registry.npmjs.org/storybook/-/storybook-7.6.8.tgz", - "integrity": "sha512-ugRtDSs2eTgHMOZ3wKXbUEbPnlJ2XImPbnvxNssK14py2mHKwPnhSqLNrjlQMkmkO13GdjalLDyj4lZtoYdo0Q==", + "version": "7.6.20", + "resolved": "https://registry.npmjs.org/storybook/-/storybook-7.6.20.tgz", + "integrity": "sha512-Wt04pPTO71pwmRmsgkyZhNo4Bvdb/1pBAMsIFb9nQLykEdzzpXjvingxFFvdOG4nIowzwgxD+CLlyRqVJqnATw==", "dev": true, "dependencies": { - "@storybook/cli": "7.6.8" + "@storybook/cli": "7.6.20" }, "bin": { "sb": "index.js", @@ -21997,9 +22953,9 @@ } }, "node_modules/stream-shift": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/stream-shift/-/stream-shift-1.0.2.tgz", - "integrity": "sha512-rV4Bovi9xx0BFzOb/X0B2GqoIjvqPCttZdu0Wgtx2Dxkj7ETyWl9gmqJ4EutWRLvtZWm8dxE+InQZX1IryZn/w==", + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/stream-shift/-/stream-shift-1.0.3.tgz", + "integrity": "sha512-76ORR0DO1o1hlKwTbi/DM3EXWGf3ZJYO8cXX5RJwnul2DEg2oyoZyjLNoQM8WsvZiFKCRfC1O0J7iCvie3RZmQ==", "dev": true }, "node_modules/streamx": { @@ -22035,7 +22991,6 @@ "version": "4.2.3", "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", - "dev": true, "dependencies": { "emoji-regex": "^8.0.0", "is-fullwidth-code-point": "^3.0.0", @@ -22069,8 +23024,7 @@ "node_modules/string-width/node_modules/emoji-regex": { "version": "8.0.0", "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", - "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", - "dev": true + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==" }, "node_modules/string.prototype.matchall": { "version": "4.0.10", @@ -22141,7 +23095,6 @@ "version": "6.0.1", "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "dev": true, "dependencies": { "ansi-regex": "^5.0.1" }, @@ -22548,6 +23501,7 @@ "version": "7.2.3", "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", + "deprecated": "Glob versions prior to v9 are no longer supported", "dev": true, "dependencies": { "fs.realpath": "^1.0.0", @@ -22568,6 +23522,7 @@ "version": "2.6.3", "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.6.3.tgz", "integrity": 
"sha512-mwqeW5XsA2qAejG46gYdENaxXjx9onRNCfn7L0duuP4hCuTIi/QO7PDK07KJfp1d+izWPrzEJDcSqBa0OZQriA==", + "deprecated": "Rimraf versions prior to v4 are no longer supported", "dev": true, "dependencies": { "glob": "^7.1.3" @@ -23006,6 +23961,22 @@ "integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==", "dev": true }, + "node_modules/tsyringe": { + "version": "4.8.0", + "resolved": "https://registry.npmjs.org/tsyringe/-/tsyringe-4.8.0.tgz", + "integrity": "sha512-YB1FG+axdxADa3ncEtRnQCFq/M0lALGLxSZeVNbTU8NqhOVc51nnv2CISTcvc1kyv6EGPtXVr0v6lWeDxiijOA==", + "dependencies": { + "tslib": "^1.9.3" + }, + "engines": { + "node": ">= 6.0.0" + } + }, + "node_modules/tsyringe/node_modules/tslib": { + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz", + "integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==" + }, "node_modules/tty-browserify": { "version": "0.0.1", "resolved": "https://registry.npmjs.org/tty-browserify/-/tty-browserify-0.0.1.tgz", @@ -23161,9 +24132,9 @@ "integrity": "sha512-8Y75pvTYkLJW2hWQHXxoqRgV7qb9B+9vFEtidML+7koHUFapnVJAZ6cKs+Qjz5Aw3aZWHMC6u0wJE3At+nSGwA==" }, "node_modules/ufo": { - "version": "1.3.2", - "resolved": "https://registry.npmjs.org/ufo/-/ufo-1.3.2.tgz", - "integrity": "sha512-o+ORpgGwaYQXgqGDwd+hkS4PuZ3QnmqMMxRuajK/a38L6fTpcE5GPIfrf+L/KemFzfUpeUQc1rRS1iDBozvnFA==", + "version": "1.5.3", + "resolved": "https://registry.npmjs.org/ufo/-/ufo-1.5.3.tgz", + "integrity": "sha512-Y7HYmWaFwPUmkoQCUIAYpKqkOf+SbVj/2fJJZ4RJMCfZp0rTGwRbzQD+HghfnhKOjL9E01okqz+ncJskGYfBNw==", "dev": true }, "node_modules/uglify-js": { @@ -23488,6 +24459,20 @@ } } }, + "node_modules/use-isomorphic-layout-effect": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/use-isomorphic-layout-effect/-/use-isomorphic-layout-effect-1.1.2.tgz", + "integrity": "sha512-49L8yCO3iGT/ZF9QttjwLF/ZD9Iwto5LnH5LmEdk/6cFmXddqi2ulF0edxTwjj+7mqvpVVGQWvbXZdn32wRSHA==", + "license": "MIT", + "peerDependencies": { + "react": "^16.8.0 || ^17.0.0 || ^18.0.0" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, "node_modules/use-memo-one": { "version": "1.1.3", "resolved": "https://registry.npmjs.org/use-memo-one/-/use-memo-one-1.1.3.tgz", @@ -23718,9 +24703,9 @@ } }, "node_modules/watchpack": { - "version": "2.4.0", - "resolved": "https://registry.npmjs.org/watchpack/-/watchpack-2.4.0.tgz", - "integrity": "sha512-Lcvm7MGST/4fup+ifyKi2hjyIAwcdI4HRgtvTpIUxBRhB+RFtUh8XtDOxUfctVCnhVi+QQj49i91OyvzkJl6cg==", + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/watchpack/-/watchpack-2.4.2.tgz", + "integrity": "sha512-TnbFSbcOCcDgjZ4piURLCbJ3nJhznVh9kw6F6iokjiFPl8ONxe9A6nMDVXDiNbrSfLILs6vB07F7wLBrwPYzJw==", "dev": true, "dependencies": { "glob-to-regexp": "^0.4.1", @@ -23745,34 +24730,33 @@ "integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==" }, "node_modules/webpack": { - "version": "5.89.0", - "resolved": "https://registry.npmjs.org/webpack/-/webpack-5.89.0.tgz", - "integrity": "sha512-qyfIC10pOr70V+jkmud8tMfajraGCZMBWJtrmuBymQKCrLTRejBI8STDp1MCyZu/QTdZSeacCQYpYNQVOzX5kw==", + "version": "5.94.0", + "resolved": "https://registry.npmjs.org/webpack/-/webpack-5.94.0.tgz", + "integrity": "sha512-KcsGn50VT+06JH/iunZJedYGUJS5FGjow8wb9c0v5n1Om8O1g4L6LjtfxwlXIATopoQu+vOXXa7gYisWxCoPyg==", "dev": true, "dependencies": { - "@types/eslint-scope": "^3.7.3", - "@types/estree": "^1.0.0", - 
"@webassemblyjs/ast": "^1.11.5", - "@webassemblyjs/wasm-edit": "^1.11.5", - "@webassemblyjs/wasm-parser": "^1.11.5", + "@types/estree": "^1.0.5", + "@webassemblyjs/ast": "^1.12.1", + "@webassemblyjs/wasm-edit": "^1.12.1", + "@webassemblyjs/wasm-parser": "^1.12.1", "acorn": "^8.7.1", - "acorn-import-assertions": "^1.9.0", - "browserslist": "^4.14.5", + "acorn-import-attributes": "^1.9.5", + "browserslist": "^4.21.10", "chrome-trace-event": "^1.0.2", - "enhanced-resolve": "^5.15.0", + "enhanced-resolve": "^5.17.1", "es-module-lexer": "^1.2.1", "eslint-scope": "5.1.1", "events": "^3.2.0", "glob-to-regexp": "^0.4.1", - "graceful-fs": "^4.2.9", + "graceful-fs": "^4.2.11", "json-parse-even-better-errors": "^2.3.1", "loader-runner": "^4.2.0", "mime-types": "^2.1.27", "neo-async": "^2.6.2", "schema-utils": "^3.2.0", "tapable": "^2.1.1", - "terser-webpack-plugin": "^5.3.7", - "watchpack": "^2.4.0", + "terser-webpack-plugin": "^5.3.10", + "watchpack": "^2.4.1", "webpack-sources": "^3.2.3" }, "bin": { @@ -23905,9 +24889,9 @@ "dev": true }, "node_modules/webpack/node_modules/acorn": { - "version": "8.11.3", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.11.3.tgz", - "integrity": "sha512-Y9rRfJG5jcKOE0CLisYbojUjIrIEE7AGMzA/Sm4BslANhbS+cDMpgBdcPT91oJ7OuJ9hYJBx59RjbhxVnrF8Xg==", + "version": "8.12.1", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.12.1.tgz", + "integrity": "sha512-tcpGyI9zbizT9JbV6oYE477V6mTlXvvi0T0G3SNIYE2apm/G5huBa1+K89VGeovbg+jycCrfhl3ADxErOuO6Jg==", "dev": true, "bin": { "acorn": "bin/acorn" @@ -23916,10 +24900,10 @@ "node": ">=0.4.0" } }, - "node_modules/webpack/node_modules/acorn-import-assertions": { - "version": "1.9.0", - "resolved": "https://registry.npmjs.org/acorn-import-assertions/-/acorn-import-assertions-1.9.0.tgz", - "integrity": "sha512-cmMwop9x+8KFhxvKrKfPYmN6/pKTYYHBqLa0DfvVZcKMJWNyWLnaqND7dx/qn66R7ewM1UX5XMaDVP5wlVTaVA==", + "node_modules/webpack/node_modules/acorn-import-attributes": { + "version": "1.9.5", + "resolved": "https://registry.npmjs.org/acorn-import-attributes/-/acorn-import-attributes-1.9.5.tgz", + "integrity": "sha512-n02Vykv5uA3eHGM/Z2dQrcD56kL8TyDb2p1+0P83PClMnC/nc+anbQRhIOWnSq4Ke/KvDPrY3C9hDtC/A3eHnQ==", "dev": true, "peerDependencies": { "acorn": "^8" @@ -24006,6 +24990,11 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/which-module": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/which-module/-/which-module-2.0.1.tgz", + "integrity": "sha512-iBdZ57RDvnOR9AGBhML2vFZf7h8vmBjhoaZqODJBFWHVtKkDmKuHai3cx5PgVMrX5YDNp27AofYbAwctSS+vhQ==" + }, "node_modules/which-typed-array": { "version": "1.1.13", "resolved": "https://registry.npmjs.org/which-typed-array/-/which-typed-array-1.1.13.tgz", @@ -24141,9 +25130,9 @@ } }, "node_modules/ws": { - "version": "8.16.0", - "resolved": "https://registry.npmjs.org/ws/-/ws-8.16.0.tgz", - "integrity": "sha512-HS0c//TP7Ina87TfiPUz1rQzMhHrl/SG2guqRcTOIUYD2q8uhUdNHZYJUaQ8aTGPzCh+c6oawMKW35nFl1dxyQ==", + "version": "8.18.0", + "resolved": "https://registry.npmjs.org/ws/-/ws-8.18.0.tgz", + "integrity": "sha512-8VbfWfHLbbwu3+N6OKsOMpBdT4kXPDDB9cJk2bJ6mh9ucxdlnNvH1e+roYkKmN9Nxw2yjz7VzeO9oOz2zJ04Pw==", "dev": true, "engines": { "node": ">=10.0.0" @@ -24170,6 +25159,11 @@ "node": ">=0.4" } }, + "node_modules/y18n": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/y18n/-/y18n-4.0.3.tgz", + "integrity": "sha512-JKhqTOwSrqNA1NY5lSztJ1GrBiUodLMmIZuLiDaMRJ+itFd+ABVE8XBjOvIWL+rSqNDC74LCSFmlb/U4UZ4hJQ==" + }, "node_modules/yallist": { "version": "3.1.1", 
"resolved": "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz", @@ -24183,6 +25177,87 @@ "node": ">= 14" } }, + "node_modules/yargs": { + "version": "15.4.1", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-15.4.1.tgz", + "integrity": "sha512-aePbxDmcYW++PaqBsJ+HYUFwCdv4LVvdnhBy78E57PIor8/OVvhMrADFFEDh8DHDFRv/O9i3lPhsENjO7QX0+A==", + "dependencies": { + "cliui": "^6.0.0", + "decamelize": "^1.2.0", + "find-up": "^4.1.0", + "get-caller-file": "^2.0.1", + "require-directory": "^2.1.1", + "require-main-filename": "^2.0.0", + "set-blocking": "^2.0.0", + "string-width": "^4.2.0", + "which-module": "^2.0.0", + "y18n": "^4.0.0", + "yargs-parser": "^18.1.2" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/yargs-parser": { + "version": "18.1.3", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-18.1.3.tgz", + "integrity": "sha512-o50j0JeToy/4K6OZcaQmW6lyXXKhq7csREXcDwk2omFPJEwUNOVtJKvmDr9EI1fAJZUyZcRF7kxGBWmRXudrCQ==", + "dependencies": { + "camelcase": "^5.0.0", + "decamelize": "^1.2.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/yargs/node_modules/find-up": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz", + "integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==", + "dependencies": { + "locate-path": "^5.0.0", + "path-exists": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/yargs/node_modules/locate-path": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz", + "integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==", + "dependencies": { + "p-locate": "^4.1.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/yargs/node_modules/p-limit": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", + "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", + "dependencies": { + "p-try": "^2.0.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/yargs/node_modules/p-locate": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz", + "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==", + "dependencies": { + "p-limit": "^2.2.0" + }, + "engines": { + "node": ">=8" + } + }, "node_modules/yauzl": { "version": "2.10.0", "resolved": "https://registry.npmjs.org/yauzl/-/yauzl-2.10.0.tgz", diff --git a/frontend/package.json b/frontend/package.json index a4acb57382..b9e85254a5 100644 --- a/frontend/package.json +++ b/frontend/package.json @@ -12,6 +12,11 @@ "storybook": "storybook dev -p 6006 -s ./public", "build-storybook": "storybook build" }, + "overrides": { + "@storybook/nextjs": { + "sharp": "npm:dry-uninstall" + } + }, "dependencies": { "@casl/ability": "^6.5.0", "@casl/react": "^3.1.0", @@ -30,9 +35,11 @@ "@headlessui/react": "^1.7.7", "@hookform/resolvers": "^2.9.10", "@octokit/rest": "^19.0.7", + "@peculiar/x509": "^1.11.0", "@radix-ui/react-accordion": "^1.1.2", "@radix-ui/react-alert-dialog": "^1.0.5", "@radix-ui/react-checkbox": "^1.0.4", + "@radix-ui/react-collapsible": "^1.0.3", "@radix-ui/react-dialog": "^1.0.5", "@radix-ui/react-dropdown-menu": "^2.0.6", "@radix-ui/react-hover-card": "^1.0.7", @@ -47,7 +54,7 @@ 
"@radix-ui/react-toast": "^1.1.5", "@radix-ui/react-tooltip": "^1.0.7", "@reduxjs/toolkit": "^1.8.3", - "@sindresorhus/slugify": "^2.2.1", + "@sindresorhus/slugify": "1.1.0", "@stripe/react-stripe-js": "^1.16.3", "@stripe/stripe-js": "^1.46.0", "@tanstack/react-query": "^4.23.0", @@ -55,7 +62,7 @@ "@ucast/mongo2js": "^1.3.4", "add": "^2.0.6", "argon2-browser": "^1.18.0", - "axios": "^0.27.2", + "axios": "^0.28.0", "axios-auth-refresh": "^3.3.6", "base64-loader": "^1.0.0", "classnames": "^2.3.1", @@ -71,7 +78,7 @@ "i18next-browser-languagedetector": "^7.0.1", "i18next-http-backend": "^2.2.0", "infisical-node": "^1.0.37", - "jspdf": "^2.5.1", + "jspdf": "^2.5.2", "jsrp": "^0.2.4", "jwt-decode": "^3.1.2", "lottie-react": "^2.4.0", @@ -81,6 +88,7 @@ "nprogress": "^0.2.0", "picomatch": "^2.3.1", "posthog-js": "^1.105.6", + "qrcode": "^1.5.4", "query-string": "^7.1.3", "react": "^17.0.2", "react-beautiful-dnd": "^13.1.1", @@ -90,9 +98,11 @@ "react-grid-layout": "^1.3.4", "react-hook-form": "^7.43.0", "react-i18next": "^12.2.2", + "react-icons": "^5.3.0", "react-mailchimp-subscribe": "^2.1.3", "react-markdown": "^8.0.3", "react-redux": "^8.0.2", + "react-select": "^5.8.1", "react-table": "^7.8.0", "react-toastify": "^9.1.3", "sanitize-html": "^2.12.1", @@ -124,6 +134,7 @@ "@types/jsrp": "^0.2.4", "@types/node": "^18.11.9", "@types/picomatch": "^2.3.0", + "@types/qrcode": "^1.5.5", "@types/react": "^18.0.26", "@types/sanitize-html": "^2.9.0", "@typescript-eslint/eslint-plugin": "^5.48.1", @@ -143,10 +154,10 @@ "eslint-plugin-react-hooks": "^4.6.0", "eslint-plugin-simple-import-sort": "^8.0.0", "eslint-plugin-storybook": "^0.6.12", - "postcss": "^8.4.14", + "postcss": "^8.4.39", "prettier": "^2.8.3", "prettier-plugin-tailwindcss": "^0.2.2", - "storybook": "^7.5.2", + "storybook": "^7.6.20", "storybook-dark-mode": "^3.0.0", "tailwindcss": "3.2", "typescript": "^4.9.3" diff --git a/frontend/public/data/frequentConstants.ts b/frontend/public/data/frequentConstants.ts index cf90ef659f..1a536e5224 100644 --- a/frontend/public/data/frequentConstants.ts +++ b/frontend/public/data/frequentConstants.ts @@ -16,6 +16,7 @@ const integrationSlugNameMapping: Mapping = { railway: "Railway", flyio: "Fly.io", circleci: "CircleCI", + databricks: "Databricks", travisci: "TravisCI", supabase: "Supabase", checkly: "Checkly", @@ -27,13 +28,15 @@ const integrationSlugNameMapping: Mapping = { "cloudflare-workers": "Cloudflare Workers", codefresh: "Codefresh", "digital-ocean-app-platform": "Digital Ocean App Platform", - bitbucket: "BitBucket", + bitbucket: "Bitbucket", "cloud-66": "Cloud 66", northflank: "Northflank", windmill: "Windmill", "gcp-secret-manager": "GCP Secret Manager", "hasura-cloud": "Hasura Cloud", - rundeck: "Rundeck" + rundeck: "Rundeck", + "azure-devops": "Azure DevOps", + "azure-app-configuration": "Azure App Configuration" }; const envMapping: Mapping = { diff --git a/frontend/public/images/integrations/Databricks.png b/frontend/public/images/integrations/Databricks.png new file mode 100644 index 0000000000..ec0ddbc026 Binary files /dev/null and b/frontend/public/images/integrations/Databricks.png differ diff --git a/frontend/public/images/secretRotation/mssqlserver.png b/frontend/public/images/secretRotation/mssqlserver.png new file mode 100644 index 0000000000..108ed60f91 Binary files /dev/null and b/frontend/public/images/secretRotation/mssqlserver.png differ diff --git a/frontend/src/components/analytics/posthog.ts b/frontend/src/components/analytics/posthog.ts index cc26e55121..f9285012ea 100644 
--- a/frontend/src/components/analytics/posthog.ts +++ b/frontend/src/components/analytics/posthog.ts @@ -10,7 +10,7 @@ export const initPostHog = () => { try { if (typeof window !== "undefined") { // @ts-ignore - if (ENV === "production" && TELEMETRY_CAPTURING_ENABLED === "true") { + if (ENV === "production" && TELEMETRY_CAPTURING_ENABLED === true) { posthog.init(POSTHOG_API_KEY, { api_host: POSTHOG_HOST }); diff --git a/frontend/src/components/basic/dialog/AddUserDialog.tsx b/frontend/src/components/basic/dialog/AddUserDialog.tsx deleted file mode 100644 index dd2aede0a6..0000000000 --- a/frontend/src/components/basic/dialog/AddUserDialog.tsx +++ /dev/null @@ -1,116 +0,0 @@ -import { Fragment } from "react"; -import { Dialog, Transition } from "@headlessui/react"; - -import Button from "../buttons/Button"; -import InputField from "../InputField"; - -type Props = { - isOpen: boolean; - closeModal: () => void; - submitModal: (email: string) => void; - email: string; - setEmail: (email: string) => void; - orgName: string; -}; - -const AddUserDialog = ({ isOpen, closeModal, submitModal, email, setEmail, orgName }: Props) => { - const submit = () => { - submitModal(email); - }; - - return ( -
-  [deleted JSX body; markup stripped in extraction. Recoverable content: a Headless UI <Transition>/<Dialog> with an overlay, titled "Invite others to {orgName}"; the copy "An invite is specific to an email address and expires after 1 day. For security reasons, you will need to separately add members to projects."; an email <InputField> bound to email/setEmail with a submit <Button> wired to submit; and a commented-out upgrade prompt: "Unleash Infisical's Full Power", "You have exceeded the number of members in a free organization.", "Upgrade now and get access to adding more members, as well as to other powerful enhancements."]
- ); -}; - -export default AddUserDialog; diff --git a/frontend/src/components/dashboard/DropZone.tsx b/frontend/src/components/dashboard/DropZone.tsx index ba0b9efdba..ab246c377b 100644 --- a/frontend/src/components/dashboard/DropZone.tsx +++ b/frontend/src/components/dashboard/DropZone.tsx @@ -6,10 +6,12 @@ import { faUpload } from "@fortawesome/free-solid-svg-icons"; import { FontAwesomeIcon } from "@fortawesome/react-fontawesome"; import { parseDocument, Scalar, YAMLMap } from "yaml"; +import { SecretType } from "@app/hooks/api/types"; + import Button from "../basic/buttons/Button"; import Error from "../basic/Error"; import { createNotification } from "../notifications"; -import { parseDotEnv } from "../utilities/parseDotEnv"; +import { parseDotEnv } from "../utilities/parseSecrets"; import guidGenerator from "../utilities/randomId"; interface DropZoneProps { @@ -33,7 +35,6 @@ const DropZone = ({ numCurrentRows }: DropZoneProps) => { const { t } = useTranslation(); - const handleDragEnter = (e: DragEvent) => { e.preventDefault(); @@ -66,7 +67,7 @@ const DropZone = ({ key, value: keyPairs[key as keyof typeof keyPairs].value, comment: keyPairs[key as keyof typeof keyPairs].comments.join("\n"), - type: "shared", + type: SecretType.Shared, tags: [] })); break; @@ -79,7 +80,7 @@ const DropZone = ({ key, value: keyPairs[key as keyof typeof keyPairs], comment: "", - type: "shared", + type: SecretType.Shared, tags: [] })); break; @@ -102,7 +103,7 @@ const DropZone = ({ key, value: keyPairs[key as keyof typeof keyPairs]?.toString() ?? "", comment, - type: "shared", + type: SecretType.Shared, tags: [] }; }); @@ -132,7 +133,7 @@ const DropZone = ({ if (file === undefined) { createNotification({ text: "You can't inject files from VS Code. Click 'Reveal in finder', and drag your file directly from the directory where it's located.", - type: "error", + type: "error" }); setLoading(false); return; diff --git a/frontend/src/components/features/FormLabelToolTip.tsx b/frontend/src/components/features/FormLabelToolTip.tsx new file mode 100644 index 0000000000..584c47aae5 --- /dev/null +++ b/frontend/src/components/features/FormLabelToolTip.tsx @@ -0,0 +1,36 @@ +import { faQuestionCircle } from "@fortawesome/free-solid-svg-icons"; +import { FontAwesomeIcon } from "@fortawesome/react-fontawesome"; + +import { FormLabel, Tooltip } from "../v2"; + +// To give users example of possible values of TTL +export const FormLabelToolTip = ({ label, linkToMore, content }: { label: string, linkToMore: string, content: string }) => ( +
+  [JSX markup stripped in extraction: a FormLabel whose tooltip renders {content} followed by a "More" link pointing at {linkToMore}]
+); diff --git a/frontend/src/components/features/TtlFormLabel.tsx b/frontend/src/components/features/TtlFormLabel.tsx index 14382abb4d..5278feec79 100644 --- a/frontend/src/components/features/TtlFormLabel.tsx +++ b/frontend/src/components/features/TtlFormLabel.tsx @@ -1,36 +1,12 @@ -import { faQuestionCircle } from "@fortawesome/free-solid-svg-icons"; -import { FontAwesomeIcon } from "@fortawesome/react-fontawesome"; - -import { FormLabel, Tooltip } from "../v2"; +import { FormLabelToolTip } from "./FormLabelToolTip"; // To give users example of possible values of TTL export const TtlFormLabel = ({ label }: { label: string }) => (
-  [deleted inline Tooltip JSX; markup stripped in extraction. It rendered "1m, 2h, 3d." with a "More" link to the vercel/ms examples, now passed as props:]
+    content="1m, 2h, 3d. "
+    linkToMore="https://github.com/vercel/ms?tab=readme-ov-file#examples"
+  />
); diff --git a/frontend/src/components/mfa/TotpRegistration.tsx b/frontend/src/components/mfa/TotpRegistration.tsx new file mode 100644 index 0000000000..59d4914f5a --- /dev/null +++ b/frontend/src/components/mfa/TotpRegistration.tsx @@ -0,0 +1,76 @@ +import { useEffect, useState } from "react"; +import QRCode from "qrcode"; + +import { useGetUserTotpRegistration } from "@app/hooks/api"; +import { useVerifyUserTotpRegistration } from "@app/hooks/api/users/mutation"; + +import { createNotification } from "../notifications"; +import { Button, ContentLoader, Input } from "../v2"; + +type Props = { + onComplete?: () => Promise; +}; + +const TotpRegistration = ({ onComplete }: Props) => { + const { data: registration, isLoading } = useGetUserTotpRegistration(); + const { mutateAsync: verifyUserTotp, isLoading: isVerifyLoading } = + useVerifyUserTotpRegistration(); + const [qrCodeUrl, setQrCodeUrl] = useState(""); + const [totp, setTotp] = useState(""); + + const handleTotpVerify = async (event: React.FormEvent) => { + event.preventDefault(); + await verifyUserTotp({ + totp + }); + + createNotification({ + text: "Successfully configured mobile authenticator", + type: "success" + }); + + if (onComplete) { + onComplete(); + } + }; + + useEffect(() => { + const generateQRCode = async () => { + if (registration?.otpUrl) { + const url = await QRCode.toDataURL(registration.otpUrl); + setQrCodeUrl(url); + } + }; + + generateQRCode(); + }, [registration]); + + if (isLoading) { + return ; + } + return ( +
+    [JSX markup stripped in extraction. Recoverable content: the instruction "Download a two-step verification app (Duo, Google Authenticator, etc.) and scan the QR code."; an image (alt "registration-qr") showing qrCodeUrl; the prompt "Enter the resulting verification code"; a form submitting handleTotpVerify with an <Input> bound to totp via setTotp (placeholder "Verification code") and a <Button> that reflects isVerifyLoading.]
+ ); +}; + +export default TotpRegistration; diff --git a/frontend/src/components/navigation/NavHeader.tsx b/frontend/src/components/navigation/NavHeader.tsx index cecae52870..715d8c51d8 100644 --- a/frontend/src/components/navigation/NavHeader.tsx +++ b/frontend/src/components/navigation/NavHeader.tsx @@ -100,7 +100,7 @@ export default function NavHeader({ onValueChange={(value) => { if (value && onEnvChange) onEnvChange(value); }} - className="bg-transparent pl-0 text-sm font-medium text-primary/80 hover:text-primary" + className="border-none bg-transparent pl-0 text-sm font-medium text-primary/80 hover:text-primary" dropdownContainerClassName="text-bunker-200 bg-mineshaft-800 border border-mineshaft-600 drop-shadow-2xl" > {userAvailableEnvs?.map(({ name, slug }) => ( diff --git a/frontend/src/components/navigation/RegionSelect.tsx b/frontend/src/components/navigation/RegionSelect.tsx new file mode 100644 index 0000000000..44f2336cd8 --- /dev/null +++ b/frontend/src/components/navigation/RegionSelect.tsx @@ -0,0 +1,147 @@ +import { useRouter } from "next/router"; +import { faCheck } from "@fortawesome/free-solid-svg-icons"; +import { FontAwesomeIcon } from "@fortawesome/react-fontawesome"; + +import { Modal, ModalContent, ModalTrigger, Select, SelectItem } from "@app/components/v2"; + +enum Region { + US = "us", + EU = "eu" +} + +const regions = [ + { + value: Region.US, + label: "United States", + location: "Virginia, USA", + flag: ( + + + + + + + + + + ) + }, + { + value: Region.EU, + label: "Europe", + location: "Frankfurt, Germany", + flag: ( + + + + + + + + + + + + + + + + + + + + + + + + + + + + + ) + } +]; + +export const RegionSelect = () => { + const router = useRouter(); + + const handleRegionSelect = (value: Region) => { + router.push(`https://${value}.infisical.com/${router.pathname}`); + }; + + const shouldDisplay = + window.location.origin.includes("https://app.infisical.com") || + window.location.origin.includes("https://us.infisical.com") || + window.location.origin.includes("https://eu.infisical.com") || + window.location.origin.includes("http://localhost:8080"); + + // only display region select for cloud + if (!shouldDisplay) return null; + + const [subdomain] = window.location.host.split("."); + + // default to US if not eu + const currentRegion = subdomain === Region.EU ? regions[1] : regions[0]; + + return ( +
+      [JSX markup stripped in extraction. Recoverable content: for each entry of regions.map(({ value, label, location, flag }) => ...), a selectable item showing {flag} and "{value.toUpperCase()} Region" with three bullet points: "Fastest option if you are based in {value === Region.US ? "the" : ""} {label}", "Data storage compliance for this region", and "Hosted in {location}".]
+ ); +}; diff --git a/frontend/src/components/notifications/Notifications.tsx b/frontend/src/components/notifications/Notifications.tsx index 0d6b0d0615..befe79e4f3 100644 --- a/frontend/src/components/notifications/Notifications.tsx +++ b/frontend/src/components/notifications/Notifications.tsx @@ -26,4 +26,4 @@ export const createNotification = ( type: myProps?.type || "info", }); -export const NotificationContainer = () => ; +export const NotificationContainer = () => ; diff --git a/frontend/src/components/permissions/GlobPermissionInfo.tsx b/frontend/src/components/permissions/GlobPermissionInfo.tsx new file mode 100644 index 0000000000..b65b0c3074 --- /dev/null +++ b/frontend/src/components/permissions/GlobPermissionInfo.tsx @@ -0,0 +1,33 @@ +import { useState } from "react"; +import picomatch from "picomatch"; + +import { FormControl } from "../v2/FormControl"; +import { Input } from "../v2/Input"; + +export const GlobPermissionInfo = () => { + const [pattern, setPattern] = useState(""); + const [text, setText] = useState(""); + + return ( +
+    [JSX markup stripped in extraction. Recoverable content: the explainer "A glob pattern uses wildcards to match resources or paths." followed by two <FormControl>/<Input> pairs, one bound to pattern via setPattern and one bound to text via setText, presumably feeding the imported picomatch to show whether the test string matches.]
+ ); +}; diff --git a/frontend/src/components/permissions/ProjectPermissionCan.tsx b/frontend/src/components/permissions/ProjectPermissionCan.tsx index f1af141f2c..4fc5acde95 100644 --- a/frontend/src/components/permissions/ProjectPermissionCan.tsx +++ b/frontend/src/components/permissions/ProjectPermissionCan.tsx @@ -1,23 +1,25 @@ import { FunctionComponent, ReactNode } from "react"; -import { BoundCanProps, Can } from "@casl/react"; +import { AbilityTuple, MongoAbility } from "@casl/ability"; +import { Can } from "@casl/react"; -import { TProjectPermission, useProjectPermission } from "@app/context/ProjectPermissionContext"; +import { ProjectPermissionSet, useProjectPermission } from "@app/context/ProjectPermissionContext"; -import { Tooltip } from "../v2"; +import { Tooltip } from "../v2/Tooltip"; -type Props = { +type Props = { label?: ReactNode; // this prop is used when there exist already a tooltip as helper text for users // so when permission is allowed same tooltip will be reused to show helpertext renderTooltip?: boolean; allowedLabel?: string; - // BUG(akhilmhdh): As a workaround for now i put any but this should be TProjectPermission - // For some reason when i put TProjectPermission in a wrapper component it just wont work causes a weird ts error - // tried a lot combinations - // REF: https://github.com/stalniy/casl/blob/ac081a34f56366a7eaaed05d21689d27041ef005/packages/casl-react/src/factory.ts#L15 -} & BoundCanProps; + children: ReactNode | ((isAllowed: boolean, ability: T) => ReactNode); + passThrough?: boolean; + I: T[0]; + a: T[1]; + ability?: MongoAbility; +}; -export const ProjectPermissionCan: FunctionComponent = ({ +export const ProjectPermissionCan: FunctionComponent> = ({ label = "Access restricted", children, passThrough = true, @@ -31,9 +33,7 @@ export const ProjectPermissionCan: FunctionComponent = ({ {(isAllowed, ability) => { // akhilmhdh: This is set as type due to error in casl react type. const finalChild = - typeof children === "function" - ? children(isAllowed, ability as TProjectPermission) - : children; + typeof children === "function" ? 
children(isAllowed, ability as any) : children; if (!isAllowed && passThrough) { return {finalChild}; diff --git a/frontend/src/components/permissions/index.tsx b/frontend/src/components/permissions/index.tsx index 8d523c3111..5103b0f73f 100644 --- a/frontend/src/components/permissions/index.tsx +++ b/frontend/src/components/permissions/index.tsx @@ -1,3 +1,4 @@ +export { GlobPermissionInfo } from "./GlobPermissionInfo"; export { OrgPermissionCan } from "./OrgPermissionCan"; export { PermissionDeniedBanner } from "./PermissionDeniedBanner"; export { ProjectPermissionCan } from "./ProjectPermissionCan"; diff --git a/frontend/src/components/signup/EnterEmailStep.tsx b/frontend/src/components/signup/EnterEmailStep.tsx index 2b5b0d1389..1b1a5c8a3a 100644 --- a/frontend/src/components/signup/EnterEmailStep.tsx +++ b/frontend/src/components/signup/EnterEmailStep.tsx @@ -50,7 +50,8 @@ export default function EnterEmailStep({ // If everything is correct, go to the next step if (!emailCheckBool) { try { - await mutateAsync({ email }); + await mutateAsync({ email: email.toLowerCase() }); + setEmail(email.toLowerCase()) incrementStep(); } catch (e) { if (axios.isAxiosError(e)) { diff --git a/frontend/src/components/signup/InitialSignupStep.tsx b/frontend/src/components/signup/InitialSignupStep.tsx index e5c23f3334..fd63f33f0a 100644 --- a/frontend/src/components/signup/InitialSignupStep.tsx +++ b/frontend/src/components/signup/InitialSignupStep.tsx @@ -4,6 +4,10 @@ import { faGithub, faGitlab, faGoogle } from "@fortawesome/free-brands-svg-icons import { faEnvelope } from "@fortawesome/free-regular-svg-icons"; import { FontAwesomeIcon } from "@fortawesome/react-fontawesome"; +import { RegionSelect } from "@app/components/navigation/RegionSelect"; +import { useServerConfig } from "@app/context"; +import { LoginMethod } from "@app/hooks/api/admin/types"; + import { Button } from "../v2"; export default function InitialSignupStep({ @@ -12,67 +16,80 @@ export default function InitialSignupStep({ setIsSignupWithEmail: (value: boolean) => void; }) { const { t } = useTranslation(); + const { config } = useServerConfig(); + + const shouldDisplaySignupMethod = (method: LoginMethod) => + !config.enabledLoginMethods || config.enabledLoginMethods.includes(method); return (

{t("signup.initial-title")}

-
- -
-
- -
-
- -
-
- -
+ + {shouldDisplaySignupMethod(LoginMethod.GOOGLE) && ( +
+ +
+ )} + {shouldDisplaySignupMethod(LoginMethod.GITHUB) && ( +
+ +
+ )} + {shouldDisplaySignupMethod(LoginMethod.GITLAB) && ( +
+ +
+ )} + {shouldDisplaySignupMethod(LoginMethod.EMAIL) && ( +
+ +
+ )}
{t("signup.create-policy")}
diff --git a/frontend/src/components/signup/TeamInviteStep.tsx b/frontend/src/components/signup/TeamInviteStep.tsx index a06fc35684..60276d2173 100644 --- a/frontend/src/components/signup/TeamInviteStep.tsx +++ b/frontend/src/components/signup/TeamInviteStep.tsx @@ -2,7 +2,7 @@ import React, { useState } from "react"; import { useTranslation } from "react-i18next"; import { useRouter } from "next/router"; -import { useAddUserToOrg } from "@app/hooks/api"; +import { useAddUsersToOrg } from "@app/hooks/api"; import { useFetchServerStatus } from "@app/hooks/api/serverDetails"; import { usePopUp } from "@app/hooks/usePopUp"; @@ -17,7 +17,7 @@ export default function TeamInviteStep(): JSX.Element { const [emails, setEmails] = useState(""); const { data: serverDetails } = useFetchServerStatus(); - const { mutateAsync } = useAddUserToOrg(); + const { mutateAsync } = useAddUsersToOrg(); const { handlePopUpToggle, popUp, handlePopUpOpen } = usePopUp(["setUpEmail"] as const); // Redirect user to the getting started page @@ -31,8 +31,9 @@ export default function TeamInviteStep(): JSX.Element { .map((email) => email.trim()) .map(async (email) => { mutateAsync({ - inviteeEmail: email, - organizationId: String(localStorage.getItem("orgData.id")) + inviteeEmails: [email], + organizationId: String(localStorage.getItem("orgData.id")), + organizationRoleSlug: "member" }); }); @@ -59,7 +60,7 @@ export default function TeamInviteStep(): JSX.Element { placeholder="email@example.com, email2@example.com..." />
  [span garbled in extraction: the tail of the TeamInviteStep hunk and the header of the next diff (presumably frontend/src/components/v2/Alert/Alert.tsx) are lost. Recoverable content: className changes to the title wrapper around {hideTitle ? null : ( ... {defaultTitle} ... )} and, per the @@ -81,7 +81,7 @@ hunk, to the div rendered by AlertDescription.]
)); AlertDescription.displayName = "AlertDescription"; diff --git a/frontend/src/components/v2/Checkbox/Checkbox.tsx b/frontend/src/components/v2/Checkbox/Checkbox.tsx index 0e45813725..6a10869967 100644 --- a/frontend/src/components/v2/Checkbox/Checkbox.tsx +++ b/frontend/src/components/v2/Checkbox/Checkbox.tsx @@ -1,5 +1,5 @@ import { ReactNode } from "react"; -import { faCheck } from "@fortawesome/free-solid-svg-icons"; +import { faCheck, faMinus } from "@fortawesome/free-solid-svg-icons"; import { FontAwesomeIcon } from "@fortawesome/react-fontawesome"; import * as CheckboxPrimitive from "@radix-ui/react-checkbox"; import { twMerge } from "tailwind-merge"; @@ -14,6 +14,9 @@ export type CheckboxProps = Omit< isChecked?: boolean; isRequired?: boolean; checkIndicatorBg?: string | undefined; + isError?: boolean; + isIndeterminate?: boolean; + containerClassName?: string; }; export const Checkbox = ({ @@ -24,10 +27,13 @@ export const Checkbox = ({ isDisabled, isRequired, checkIndicatorBg, + isError, + isIndeterminate, + containerClassName, ...props }: CheckboxProps): JSX.Element => { return ( -
+  [JSX markup stripped in extraction: the checkbox root and its indicator, which now renders <FontAwesomeIcon icon={faMinus} /> when isIndeterminate and <FontAwesomeIcon icon={faCheck} /> otherwise. The header of the following DeleteActionModal diff was also lost.]
} className="mb-0" @@ -94,9 +96,10 @@ export const DeleteActionModal = ({ setInputData(e.target.value)} - placeholder="Type to delete..." + placeholder={`Type ${deleteKey} here`} /> + {children} diff --git a/frontend/src/components/v2/Dropdown/Dropdown.tsx b/frontend/src/components/v2/Dropdown/Dropdown.tsx index 99a148bbe5..a2d5f0322f 100644 --- a/frontend/src/components/v2/Dropdown/Dropdown.tsx +++ b/frontend/src/components/v2/Dropdown/Dropdown.tsx @@ -24,7 +24,7 @@ export const DropdownMenuContent = forwardRef @@ -86,13 +86,15 @@ export const DropdownMenuItem = ({ icon, as: Item = "button", iconPos = "left", + isDisabled = false, ...props -}: DropdownMenuItemProps & ComponentPropsWithRef) => ( +}: DropdownMenuItemProps & ComponentPropsWithRef & { isDisabled?: boolean }) => ( diff --git a/frontend/src/components/v2/EmptyState/EmptyState.tsx b/frontend/src/components/v2/EmptyState/EmptyState.tsx index e285500c1c..9816a3fe2c 100644 --- a/frontend/src/components/v2/EmptyState/EmptyState.tsx +++ b/frontend/src/components/v2/EmptyState/EmptyState.tsx @@ -21,7 +21,7 @@ export const EmptyState = ({ }: Props) => (
diff --git a/frontend/src/components/v2/FilterableSelect/FilterableSelect.tsx b/frontend/src/components/v2/FilterableSelect/FilterableSelect.tsx new file mode 100644 index 0000000000..5333d79d2a --- /dev/null +++ b/frontend/src/components/v2/FilterableSelect/FilterableSelect.tsx @@ -0,0 +1,104 @@ +import Select, { + ClearIndicatorProps, + components, + DropdownIndicatorProps, + MultiValueRemoveProps, + OptionProps, + Props +} from "react-select"; +import { faCheckCircle, faCircleXmark } from "@fortawesome/free-regular-svg-icons"; +import { faChevronDown, faXmark } from "@fortawesome/free-solid-svg-icons"; +import { FontAwesomeIcon } from "@fortawesome/react-fontawesome"; +import { twMerge } from "tailwind-merge"; + +const DropdownIndicator = (props: DropdownIndicatorProps) => { + return ( + + + + ); +}; + +const ClearIndicator = (props: ClearIndicatorProps) => { + return ( + + + + ); +}; + +const MultiValueRemove = (props: MultiValueRemoveProps) => { + return ( + + + + ); +}; + +const Option = ({ isSelected, children, ...props }: OptionProps) => { + return ( + + {children} + {isSelected && ( + + )} + + ); +}; + +export const FilterableSelect = ({ isMulti, closeMenuOnSelect, ...props }: Props) => ( + Promise; + onClose?: () => void; + onChange?: (isOpen: boolean) => void; + isOpen?: boolean; + subTitle?: string; + buttonText?: string; +}; + +export const LeaveProjectModal = ({ + isOpen, + onClose, + onChange, + deleteKey, + onLeaveApproved, + title, + subTitle, + buttonText = "Leave Project" +}: Props): JSX.Element => { + const [inputData, setInputData] = useState(""); + const [isLoading, setIsLoading] = useToggle(); + + useEffect(() => { + setInputData(""); + }, [isOpen]); + + const onDelete = async () => { + setIsLoading.on(); + try { + await onLeaveApproved(); + } catch { + setIsLoading.off(); + } finally { + setIsLoading.off(); + } + }; + + return ( + { + setInputData(""); + if (onChange) onChange(isOpenState); + }} + > + + + + + {" "} +
+  [JSX markup stripped in extraction: the <ModalContent> wrapper (title, subTitle, onClose) and a <form> whose onSubmit prevents the default and calls onDelete() once inputData matches deleteKey, with the label "Type {deleteKey} to leave the project".]
+ } + className="mb-0" + > + setInputData(e.target.value)} + placeholder="Type to confirm..." + /> + + + + + ); +}; diff --git a/frontend/src/components/v2/LeaveProjectModal/index.tsx b/frontend/src/components/v2/LeaveProjectModal/index.tsx new file mode 100644 index 0000000000..5d33aa1083 --- /dev/null +++ b/frontend/src/components/v2/LeaveProjectModal/index.tsx @@ -0,0 +1 @@ +export { LeaveProjectModal } from "./LeaveProjectModal"; diff --git a/frontend/src/components/v2/Modal/Modal.tsx b/frontend/src/components/v2/Modal/Modal.tsx index 90abfecbd0..a741d6d025 100644 --- a/frontend/src/components/v2/Modal/Modal.tsx +++ b/frontend/src/components/v2/Modal/Modal.tsx @@ -11,13 +11,24 @@ export type ModalContentProps = DialogPrimitive.DialogContentProps & { title?: ReactNode; subTitle?: ReactNode; footerContent?: ReactNode; + bodyClassName?: string; onClose?: () => void; overlayClassName?: string; }; export const ModalContent = forwardRef( ( - { children, title, subTitle, className, overlayClassName, footerContent, onClose, ...props }, + { + children, + title, + subTitle, + className, + overlayClassName, + footerContent, + bodyClassName, + onClose, + ...props + }, forwardedRef ) => ( @@ -35,7 +46,10 @@ export const ModalContent = forwardRef( style={{ maxHeight: "90%" }} > {title && {title}} - + {children} {footerContent && {footerContent}} diff --git a/frontend/src/components/v2/Pagination/Pagination.tsx b/frontend/src/components/v2/Pagination/Pagination.tsx index c8afb389b5..51eed6396d 100644 --- a/frontend/src/components/v2/Pagination/Pagination.tsx +++ b/frontend/src/components/v2/Pagination/Pagination.tsx @@ -1,3 +1,4 @@ +import { ReactElement } from "react"; import { faCaretDown, faCheck, @@ -23,6 +24,7 @@ export type PaginationProps = { onChangePerPage: (newRows: number) => void; className?: string; perPageList?: number[]; + startAdornment?: ReactElement; }; export const Pagination = ({ @@ -32,7 +34,8 @@ export const Pagination = ({ onChangePage, onChangePerPage, perPageList = [10, 20, 50, 100], - className + className, + startAdornment }: PaginationProps) => { const prevPageNumber = Math.max(1, page - 1); const canGoPrev = page > 1; @@ -40,17 +43,20 @@ export const Pagination = ({ const upperLimit = Math.ceil(count / perPage); const nextPageNumber = Math.min(upperLimit, page + 1); const canGoNext = page + 1 <= upperLimit; + const canGoFirst = page > 1; + const canGoLast = page < upperLimit; return (
  [markup stripped in extraction; the recoverable hunk content follows]
-        {(page - 1) * perPage} - {(page - 1) * perPage + perPage} of {count}
+        {startAdornment}
+        {(page - 1) * perPage + 1} - {Math.min((page - 1) * perPage + perPage, count)} of {count}
  [and, per the @@ -73,6 +79,16 @@ hunk, two new buttons: one calling onChangePage(1), disabled via !canGoFirst, and one calling onChangePage(upperLimit), disabled via !canGoLast]
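[Since the surrounding markup was lost, a standalone check of the corrected range arithmetic, using the exact expressions from the hunk above; displayRange is a hypothetical helper name:]

// New display range: 1-indexed start, end clamped to count.
const displayRange = (page: number, perPage: number, count: number): string =>
  `${(page - 1) * perPage + 1} - ${Math.min((page - 1) * perPage + perPage, count)} of ${count}`;

console.log(displayRange(1, 10, 25)); // "1 - 10 of 25"; the old code rendered "0 - 10 of 25"
console.log(displayRange(3, 10, 25)); // "21 - 25 of 25"; the old code rendered "20 - 30 of 25"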
); diff --git a/frontend/src/components/v2/SecretInput/SecretInput.tsx b/frontend/src/components/v2/SecretInput/SecretInput.tsx index 5e7bbef786..6818b0c37b 100644 --- a/frontend/src/components/v2/SecretInput/SecretInput.tsx +++ b/frontend/src/components/v2/SecretInput/SecretInput.tsx @@ -4,7 +4,7 @@ import { twMerge } from "tailwind-merge"; import { useToggle } from "@app/hooks"; -const REGEX = /(\${([^}]+)})/g; +const REGEX = /(\${([a-zA-Z0-9-_.]+)})/g; const replaceContentWithDot = (str: string) => { let finalStr = ""; for (let i = 0; i < str.length; i += 1) { @@ -15,7 +15,7 @@ const replaceContentWithDot = (str: string) => { }; const syntaxHighlight = (content?: string | null, isVisible?: boolean, isImport?: boolean) => { - if (isImport) return "IMPORTED"; + if (isImport && !content) return "IMPORTED"; if (content === "") return "EMPTY"; if (!content) return "EMPTY"; if (!isVisible) return replaceContentWithDot(content); diff --git a/frontend/src/components/v2/SecretPathInput/SecretPathInput.tsx b/frontend/src/components/v2/SecretPathInput/SecretPathInput.tsx index 9dfb5ff623..bbe63bc6e0 100644 --- a/frontend/src/components/v2/SecretPathInput/SecretPathInput.tsx +++ b/frontend/src/components/v2/SecretPathInput/SecretPathInput.tsx @@ -33,7 +33,7 @@ export const SecretPathInput = ({ const [suggestions, setSuggestions] = useState([]); const [isInputFocused, setIsInputFocus] = useState(false); const [highlightedIndex, setHighlightedIndex] = useState(-1); - const debouncedInputValue = useDebounce(inputValue, 200); + const [debouncedInputValue] = useDebounce(inputValue, 200); const { currentWorkspace } = useWorkspace(); const workspaceId = currentWorkspace?.id || ""; @@ -78,7 +78,8 @@ export const SecretPathInput = ({ const validPaths = inputValue.split("/"); validPaths.pop(); - const newValue = `${validPaths.join("/")}/${suggestions[selectedIndex]}/`; + // removed trailing slash + const newValue = `${validPaths.join("/")}/${suggestions[selectedIndex]}`; onChange?.(newValue); setInputValue(newValue); setSecretPath(newValue); diff --git a/frontend/src/components/v2/Select/Select.tsx b/frontend/src/components/v2/Select/Select.tsx index 29dba23c72..cfc70450b1 100644 --- a/frontend/src/components/v2/Select/Select.tsx +++ b/frontend/src/components/v2/Select/Select.tsx @@ -12,6 +12,7 @@ type Props = { placeholder?: string; className?: string; dropdownContainerClassName?: string; + containerClassName?: string; isLoading?: boolean; position?: "item-aligned" | "popper"; isDisabled?: boolean; @@ -31,66 +32,79 @@ export const Select = forwardRef( isDisabled, dropdownContainerClassName, position, + containerClassName, ...props }, ref ): JSX.Element => { return ( - - - - {props.icon ? : placeholder} - +
+  [JSX markup stripped in extraction. Recoverable content: the Select is now wrapped in a container div driven by the new containerClassName prop; onValueChange is guarded with "if (!props.onValueChange) return;"; the trigger icon renders only as {props.icon && ...}; the dropdown content takes position={position} and style={{ width: "var(--radix-select-trigger-width)" }}; the scroll buttons and the {isLoading ? "Loading..." : children} branch carry over from the deleted lines.]
); } ); @@ -110,11 +124,10 @@ export const SelectItem = forwardRef( {...props} className={twMerge( `relative mb-0.5 flex - cursor-pointer select-none items-center rounded-md py-2 pl-10 pr-4 text-sm - outline-none transition-all hover:bg-mineshaft-500 data-[highlighted]:bg-mineshaft-700/80`, + cursor-pointer select-none items-center overflow-hidden text-ellipsis whitespace-nowrap rounded-md py-2 + pl-10 pr-4 text-sm outline-none transition-all hover:bg-mineshaft-500 data-[highlighted]:bg-mineshaft-700/80`, isSelected && "bg-primary", - isDisabled && - "cursor-not-allowed text-gray-600 hover:bg-transparent hover:text-mineshaft-600", + isDisabled && "cursor-not-allowed text-gray-600 opacity-80 hover:!bg-transparent", className )} ref={forwardedRef} @@ -129,3 +142,45 @@ export const SelectItem = forwardRef( ); SelectItem.displayName = "SelectItem"; + +export type SelectClearProps = Omit & { + onClear: () => void; + selectValue: string; +}; + +export const SelectClear = forwardRef( + ( + { children, className, isSelected, isDisabled, onClear, selectValue, ...props }, + forwardedRef + ) => { + return ( + onClear()} + onClick={() => onClear()} + className={twMerge( + `relative mb-0.5 flex + cursor-pointer select-none items-center rounded-md py-2 pl-10 pr-4 text-sm + outline-none transition-all hover:bg-mineshaft-500 data-[highlighted]:bg-mineshaft-700/80`, + isSelected && "bg-primary", + isDisabled && + "cursor-not-allowed text-gray-600 hover:bg-transparent hover:text-mineshaft-600", + className + )} + ref={forwardedRef} + > +
+        [markup stripped in extraction: the clear item's indicator icon]
+        {children}
+ ); + } +); +SelectClear.displayName = "SelectClear"; diff --git a/frontend/src/components/v2/Select/index.tsx b/frontend/src/components/v2/Select/index.tsx index 6a783605ab..3765851d5c 100644 --- a/frontend/src/components/v2/Select/index.tsx +++ b/frontend/src/components/v2/Select/index.tsx @@ -1,2 +1,2 @@ export type { SelectItemProps, SelectProps } from "./Select"; -export { Select, SelectItem } from "./Select"; +export { Select, SelectClear, SelectItem } from "./Select"; diff --git a/frontend/src/components/v2/Switch/Switch.tsx b/frontend/src/components/v2/Switch/Switch.tsx index a54657fc61..ce955354ab 100644 --- a/frontend/src/components/v2/Switch/Switch.tsx +++ b/frontend/src/components/v2/Switch/Switch.tsx @@ -8,6 +8,7 @@ export type SwitchProps = Omit ( -
+  [markup stripped in extraction: the Switch thumb JSX for this hunk, plus the header and body of the deleted frontend/src/components/v2/UpgradeOverlay/UpgradeOverlay.tsx (named per its index.tsx re-export), of which only the closing lines survive below]
- ); -}; diff --git a/frontend/src/components/v2/UpgradeOverlay/index.tsx b/frontend/src/components/v2/UpgradeOverlay/index.tsx deleted file mode 100644 index 1a74fb6f12..0000000000 --- a/frontend/src/components/v2/UpgradeOverlay/index.tsx +++ /dev/null @@ -1 +0,0 @@ -export { UpgradeOverlay } from "./UpgradeOverlay"; diff --git a/frontend/src/components/v2/UpgradeProjectAlert/UpgradeProjectAlert.tsx b/frontend/src/components/v2/UpgradeProjectAlert/UpgradeProjectAlert.tsx deleted file mode 100644 index ebcba1d87c..0000000000 --- a/frontend/src/components/v2/UpgradeProjectAlert/UpgradeProjectAlert.tsx +++ /dev/null @@ -1,168 +0,0 @@ -import { useCallback, useState } from "react"; -import Link from "next/link"; -import { useRouter } from "next/router"; -import { faWarning } from "@fortawesome/free-solid-svg-icons"; -import { FontAwesomeIcon } from "@fortawesome/react-fontawesome"; -import { twMerge } from "tailwind-merge"; - -import { createNotification } from "@app/components/notifications"; -import { useProjectPermission } from "@app/context"; -import { useGetUpgradeProjectStatus, useUpgradeProject } from "@app/hooks/api"; -import { Workspace } from "@app/hooks/api/types"; -import { workspaceKeys } from "@app/hooks/api/workspace/queries"; -import { ProjectVersion } from "@app/hooks/api/workspace/types"; -import { queryClient } from "@app/reactQuery"; - -import { Button } from "../Button"; -import { Tooltip } from "../Tooltip"; - -export type UpgradeProjectAlertProps = { - project: Workspace; - transparent?: boolean; -}; - -export const UpgradeProjectAlert = ({ - project, - transparent -}: UpgradeProjectAlertProps): JSX.Element | null => { - const router = useRouter(); - const { hasProjectRole } = useProjectPermission(); - const upgradeProject = useUpgradeProject(); - const [currentStatus, setCurrentStatus] = useState(null); - const [isUpgrading, setIsUpgrading] = useState(false); - - const isProjectAdmin = hasProjectRole("admin"); - - const { - data: projectStatus, - isLoading: statusIsLoading, - refetch: manualProjectStatusRefetch - } = useGetUpgradeProjectStatus({ - projectId: project.id, - enabled: isProjectAdmin && project.version === ProjectVersion.V1, - refetchInterval: 5_000, - onSuccess: (data) => { - if (!isProjectAdmin) { - return; - } - - if (data && data?.status !== null) { - if (data.status === "IN_PROGRESS") { - setCurrentStatus("Your upgrade is being processed."); - } else if (data.status === "FAILED") { - setCurrentStatus("Upgrade failed, please try again."); - } - } - - if (currentStatus !== null && data?.status === null) { - queryClient.invalidateQueries(workspaceKeys.getAllUserWorkspace); - router.reload(); - } - } - }); - - const onUpgradeProject = useCallback(async () => { - if (upgradeProject.isLoading) { - return; - } - setIsUpgrading(true); - const PRIVATE_KEY = localStorage.getItem("PRIVATE_KEY"); - - if (!PRIVATE_KEY) { - createNotification({ - type: "error", - text: "Private key not found" - }); - return; - } - - await upgradeProject.mutateAsync({ - projectId: project.id, - privateKey: PRIVATE_KEY - }); - - manualProjectStatusRefetch(); - - setTimeout(() => setIsUpgrading(false), 5_000); - }, []); - - const isLoading = - isUpgrading || - ((upgradeProject.isLoading || - currentStatus !== null || - (currentStatus === null && statusIsLoading)) && - projectStatus?.status !== "FAILED"); - - if (project.version !== ProjectVersion.V1) return null; - - if (transparent) { - return ( - - ); - } - - return ( - - ); -}; diff --git 
a/frontend/src/components/v2/UpgradeProjectAlert/index.tsx b/frontend/src/components/v2/UpgradeProjectAlert/index.tsx deleted file mode 100644 index ab67f86cf2..0000000000 --- a/frontend/src/components/v2/UpgradeProjectAlert/index.tsx +++ /dev/null @@ -1 +0,0 @@ -export { UpgradeProjectAlert } from "./UpgradeProjectAlert"; diff --git a/frontend/src/components/v2/index.tsx b/frontend/src/components/v2/index.tsx index 3a5cef86b3..725a27ed33 100644 --- a/frontend/src/components/v2/index.tsx +++ b/frontend/src/components/v2/index.tsx @@ -1,5 +1,6 @@ export * from "./Accordion"; export * from "./Alert"; +export * from "./Badge"; export * from "./Button"; export * from "./Card"; export * from "./Checkbox"; @@ -10,6 +11,7 @@ export * from "./Drawer"; export * from "./Dropdown"; export * from "./EmailServiceSetupModal"; export * from "./EmptyState"; +export * from "./FilterableSelect"; export * from "./FontAwesomeSymbol"; export * from "./FormControl"; export * from "./HoverCardv2"; diff --git a/frontend/src/const.ts b/frontend/src/const.ts index 4d13b4602f..d2623431d3 100644 --- a/frontend/src/const.ts +++ b/frontend/src/const.ts @@ -24,7 +24,8 @@ export const publicPaths = [ "/login/provider/error", // TODO: change "/login/sso", "/admin/signup", - "/shared/secret/[id]" + "/shared/secret/[id]", + "/share-secret" ]; export const languageMap = { @@ -56,6 +57,10 @@ export const plans = plansProd || plansDev; export const leaveConfirmDefaultMessage = "Your changes will be lost if you leave the page. Are you sure you want to continue?"; +export enum SessionStorageKeys { + CLI_TERMINAL_TOKEN = "CLI_TERMINAL_TOKEN" +} + export const secretTagsColors = [ { id: 1, diff --git a/frontend/src/context/OrgPermissionContext/types.ts b/frontend/src/context/OrgPermissionContext/types.ts index 95a9d00acf..41a2e7e3c1 100644 --- a/frontend/src/context/OrgPermissionContext/types.ts +++ b/frontend/src/context/OrgPermissionContext/types.ts @@ -19,7 +19,15 @@ export enum OrgPermissionSubjects { Groups = "groups", Billing = "billing", SecretScanning = "secret-scanning", - Identity = "identity" + Identity = "identity", + Kms = "kms", + AdminConsole = "organization-admin-console", + AuditLogs = "audit-logs", + ProjectTemplates = "project-templates" +} + +export enum OrgPermissionAdminConsoleAction { + AccessAllProjects = "access-all-projects" } export type OrgPermissionSet = @@ -35,6 +43,10 @@ export type OrgPermissionSet = | [OrgPermissionActions, OrgPermissionSubjects.Groups] | [OrgPermissionActions, OrgPermissionSubjects.SecretScanning] | [OrgPermissionActions, OrgPermissionSubjects.Billing] - | [OrgPermissionActions, OrgPermissionSubjects.Identity]; + | [OrgPermissionActions, OrgPermissionSubjects.Identity] + | [OrgPermissionActions, OrgPermissionSubjects.Kms] + | [OrgPermissionAdminConsoleAction, OrgPermissionSubjects.AdminConsole] + | [OrgPermissionActions, OrgPermissionSubjects.AuditLogs] + | [OrgPermissionActions, OrgPermissionSubjects.ProjectTemplates]; export type TOrgPermission = MongoAbility; diff --git a/frontend/src/context/ProjectPermissionContext/index.tsx b/frontend/src/context/ProjectPermissionContext/index.tsx index dca8dc1682..4b04f5cb84 100644 --- a/frontend/src/context/ProjectPermissionContext/index.tsx +++ b/frontend/src/context/ProjectPermissionContext/index.tsx @@ -1,3 +1,8 @@ export { ProjectPermissionProvider, useProjectPermission } from "./ProjectPermissionContext"; export type { ProjectPermissionSet, TProjectPermission } from "./types"; -export { ProjectPermissionActions, ProjectPermissionSub } 
from "./types"; +export { + ProjectPermissionActions, + ProjectPermissionCmekActions, + ProjectPermissionDynamicSecretActions, + ProjectPermissionSub +} from "./types"; diff --git a/frontend/src/context/ProjectPermissionContext/types.ts b/frontend/src/context/ProjectPermissionContext/types.ts index 79c8f2d302..307ef74af3 100644 --- a/frontend/src/context/ProjectPermissionContext/types.ts +++ b/frontend/src/context/ProjectPermissionContext/types.ts @@ -7,6 +7,47 @@ export enum ProjectPermissionActions { Delete = "delete" } +export enum ProjectPermissionDynamicSecretActions { + ReadRootCredential = "read-root-credential", + CreateRootCredential = "create-root-credential", + EditRootCredential = "edit-root-credential", + DeleteRootCredential = "delete-root-credential", + Lease = "lease" +} + +export enum ProjectPermissionCmekActions { + Read = "read", + Create = "create", + Edit = "edit", + Delete = "delete", + Encrypt = "encrypt", + Decrypt = "decrypt" +} + +export enum PermissionConditionOperators { + $IN = "$in", + $ALL = "$all", + $REGEX = "$regex", + $EQ = "$eq", + $NEQ = "$ne", + $GLOB = "$glob" +} + +export type TPermissionConditionOperators = { + [PermissionConditionOperators.$IN]: string[]; + [PermissionConditionOperators.$ALL]: string[]; + [PermissionConditionOperators.$EQ]: string; + [PermissionConditionOperators.$NEQ]: string; + [PermissionConditionOperators.$REGEX]: string; + [PermissionConditionOperators.$GLOB]: string; +}; + +export type TPermissionCondition = Record< + string, + | string + | { $in: string[]; $all: string[]; $regex: string; $eq: string; $ne: string; $glob: string } +>; + export enum ProjectPermissionSub { Role = "role", Member = "member", @@ -19,15 +60,42 @@ export enum ProjectPermissionSub { Tags = "tags", AuditLogs = "audit-logs", IpAllowList = "ip-allowlist", - Workspace = "workspace", + Project = "workspace", Secrets = "secrets", + SecretFolders = "secret-folders", + SecretImports = "secret-imports", + DynamicSecrets = "dynamic-secrets", SecretRollback = "secret-rollback", SecretApproval = "secret-approval", SecretRotation = "secret-rotation", - Identity = "identity" + Identity = "identity", + CertificateAuthorities = "certificate-authorities", + Certificates = "certificates", + CertificateTemplates = "certificate-templates", + PkiAlerts = "pki-alerts", + PkiCollections = "pki-collections", + Kms = "kms", + Cmek = "cmek" } -type SubjectFields = { +export type SecretSubjectFields = { + environment: string; + secretPath: string; + secretName: string; + secretTags: string[]; +}; + +export type SecretFolderSubjectFields = { + environment: string; + secretPath: string; +}; + +export type DynamicSecretSubjectFields = { + environment: string; + secretPath: string; +}; + +export type SecretImportSubjectFields = { environment: string; secretPath: string; }; @@ -35,7 +103,31 @@ type SubjectFields = { export type ProjectPermissionSet = | [ ProjectPermissionActions, - ProjectPermissionSub.Secrets | (ForcedSubject & SubjectFields) + ( + | ProjectPermissionSub.Secrets + | (ForcedSubject & SecretSubjectFields) + ) + ] + | [ + ProjectPermissionActions, + ( + | ProjectPermissionSub.SecretFolders + | (ForcedSubject & SecretFolderSubjectFields) + ) + ] + | [ + ProjectPermissionDynamicSecretActions, + ( + | ProjectPermissionSub.DynamicSecrets + | (ForcedSubject & DynamicSecretSubjectFields) + ) + ] + | [ + ProjectPermissionActions, + ( + | ProjectPermissionSub.SecretImports + | (ForcedSubject & SecretImportSubjectFields) + ) ] | [ProjectPermissionActions, 
ProjectPermissionSub.Role] | [ProjectPermissionActions, ProjectPermissionSub.Tags] @@ -47,13 +139,19 @@ export type ProjectPermissionSet = | [ProjectPermissionActions, ProjectPermissionSub.Environments] | [ProjectPermissionActions, ProjectPermissionSub.IpAllowList] | [ProjectPermissionActions, ProjectPermissionSub.Settings] - | [ProjectPermissionActions, ProjectPermissionSub.Identity] | [ProjectPermissionActions, ProjectPermissionSub.ServiceTokens] | [ProjectPermissionActions, ProjectPermissionSub.SecretApproval] | [ProjectPermissionActions, ProjectPermissionSub.SecretRotation] - | [ProjectPermissionActions.Delete, ProjectPermissionSub.Workspace] - | [ProjectPermissionActions.Edit, ProjectPermissionSub.Workspace] + | [ProjectPermissionActions, ProjectPermissionSub.Identity] + | [ProjectPermissionActions, ProjectPermissionSub.CertificateAuthorities] + | [ProjectPermissionActions, ProjectPermissionSub.Certificates] + | [ProjectPermissionActions, ProjectPermissionSub.CertificateTemplates] + | [ProjectPermissionActions, ProjectPermissionSub.PkiAlerts] + | [ProjectPermissionActions, ProjectPermissionSub.PkiCollections] + | [ProjectPermissionActions.Delete, ProjectPermissionSub.Project] + | [ProjectPermissionActions.Edit, ProjectPermissionSub.Project] | [ProjectPermissionActions.Read, ProjectPermissionSub.SecretRollback] - | [ProjectPermissionActions.Create, ProjectPermissionSub.SecretRollback]; - + | [ProjectPermissionActions.Create, ProjectPermissionSub.SecretRollback] + | [ProjectPermissionCmekActions, ProjectPermissionSub.Cmek] + | [ProjectPermissionActions.Edit, ProjectPermissionSub.Kms]; export type TProjectPermission = MongoAbility; diff --git a/frontend/src/context/UserContext/UserContext.tsx b/frontend/src/context/UserContext/UserContext.tsx index 433fc39597..0e23ccb7cf 100644 --- a/frontend/src/context/UserContext/UserContext.tsx +++ b/frontend/src/context/UserContext/UserContext.tsx @@ -25,6 +25,21 @@ export const UserProvider = ({ children }: Props): JSX.Element => { }; }, [data, isLoading]); + if (isLoading) { + return ( +
+ infisical loading indicator +
+ ); + } + return {children}; }; diff --git a/frontend/src/context/WorkspaceContext/WorkspaceContext.tsx b/frontend/src/context/WorkspaceContext/WorkspaceContext.tsx index 4cf8c585f3..29ecacaa5d 100644 --- a/frontend/src/context/WorkspaceContext/WorkspaceContext.tsx +++ b/frontend/src/context/WorkspaceContext/WorkspaceContext.tsx @@ -1,6 +1,7 @@ -import { createContext, ReactNode, useContext, useMemo } from "react"; +import { createContext, ReactNode, useContext, useEffect, useMemo } from "react"; import { useRouter } from "next/router"; +import { createNotification } from "@app/components/notifications"; import { useGetUserWorkspaces } from "@app/hooks/api"; import { Workspace } from "@app/hooks/api/workspace/types"; @@ -31,6 +32,34 @@ export const WorkspaceProvider = ({ children }: Props): JSX.Element => { }; }, [ws, workspaceId, isLoading]); + const shouldTriggerNoProjectAccess = + !value.isLoading && + !value.currentWorkspace && + router.pathname.startsWith("/project") && + workspaceId; + + // handle redirects for project-specific routes + useEffect(() => { + if (shouldTriggerNoProjectAccess) { + createNotification({ + text: "You are not a member of this project.", + type: "info" + }); + + setTimeout(() => { + router.push("/"); + }, 5000); + } + }, [shouldTriggerNoProjectAccess, router]); + + if (shouldTriggerNoProjectAccess) { + return ( +
+ You do not have sufficient access to this project. +
+ ); + } + return {children}; }; diff --git a/frontend/src/context/index.tsx b/frontend/src/context/index.tsx index 2b3b6ddf5f..91dae5d2d2 100644 --- a/frontend/src/context/index.tsx +++ b/frontend/src/context/index.tsx @@ -10,6 +10,8 @@ export { export type { TProjectPermission } from "./ProjectPermissionContext"; export { ProjectPermissionActions, + ProjectPermissionCmekActions, + ProjectPermissionDynamicSecretActions, ProjectPermissionProvider, ProjectPermissionSub, useProjectPermission diff --git a/frontend/src/helpers/parseEnvVar.ts b/frontend/src/helpers/parseEnvVar.ts new file mode 100644 index 0000000000..27640b515c --- /dev/null +++ b/frontend/src/helpers/parseEnvVar.ts @@ -0,0 +1,14 @@ +/** Extracts the key and value from a passed in env string based on the provided delimiters. */ +export const getKeyValue = (pastedContent: string, delimiters: string[]) => { + const foundDelimiter = delimiters.find((delimiter) => pastedContent.includes(delimiter)); + + if (!foundDelimiter) { + return { key: pastedContent.trim(), value: "" }; + } + + const [key, value] = pastedContent.split(foundDelimiter); + return { + key: key.trim(), + value: (value ?? "").trim() + }; +}; diff --git a/frontend/src/helpers/policies.ts b/frontend/src/helpers/policies.ts new file mode 100644 index 0000000000..c6d7c935ae --- /dev/null +++ b/frontend/src/helpers/policies.ts @@ -0,0 +1,12 @@ +import { PolicyType } from "@app/hooks/api/policies/enums"; + +export const policyDetails: Record = { + [PolicyType.AccessPolicy]: { + className: "bg-lime-900 text-lime-100", + name: "Access Policy" + }, + [PolicyType.ChangePolicy]: { + className: "bg-indigo-900 text-indigo-100", + name: "Change Policy" + } +}; \ No newline at end of file diff --git a/frontend/src/helpers/project.ts b/frontend/src/helpers/project.ts index ff8e700a4e..b6338ce355 100644 --- a/frontend/src/helpers/project.ts +++ b/frontend/src/helpers/project.ts @@ -1,81 +1,40 @@ -import encryptSecrets from "@app/components/utilities/secrets/encryptSecrets"; -import { createSecret } from "@app/hooks/api/secrets/mutations"; +import { apiRequest } from "@app/config/request"; import { createWorkspace } from "@app/hooks/api/workspace/queries"; const secretsToBeAdded = [ { - pos: 0, - key: "DATABASE_URL", + secretKey: "DATABASE_URL", // eslint-disable-next-line no-template-curly-in-string - value: "mongodb+srv://${DB_USERNAME}:${DB_PASSWORD}@mongodb.net", - valueOverride: undefined, - comment: "Secret referencing example", - id: "", - tags: [] + secretValue: "mongodb+srv://${DB_USERNAME}:${DB_PASSWORD}@mongodb.net", + secretComment: "Secret referencing example" }, { - pos: 1, - key: "DB_USERNAME", - value: "OVERRIDE_THIS", - valueOverride: undefined, - comment: "Override secrets with personal value", - id: "", - tags: [] + secretKey: "DB_USERNAME", + secretValue: "OVERRIDE_THIS", + secretComment: "Override secrets with personal value" }, { - pos: 2, - key: "DB_PASSWORD", - value: "OVERRIDE_THIS", - valueOverride: undefined, - comment: "Another secret override", - id: "", - tags: [] + secretKey: "DB_PASSWORD", + secretValue: "OVERRIDE_THIS", + secretComment: "Another secret override" }, { - pos: 3, - key: "DB_USERNAME", - value: "user1234", - valueOverride: "user1234", - comment: "", - id: "", - tags: [] + secretKey: "DB_PASSWORD", + secretValue: "example_password" }, { - pos: 4, - key: "DB_PASSWORD", - value: "example_password", - valueOverride: "example_password", - comment: "", - id: "", - tags: [] + secretKey: "TWILIO_AUTH_TOKEN", + secretValue: 
"example_twillio_token" }, { - pos: 5, - key: "TWILIO_AUTH_TOKEN", - value: "example_twillio_token", - valueOverride: undefined, - comment: "", - id: "", - tags: [] - }, - { - pos: 6, - key: "WEBSITE_URL", - value: "http://localhost:3000", - valueOverride: undefined, - comment: "", - id: "", - tags: [] + secretKey: "WEBSITE_URL", + secretValue: "http://localhost:3000" } ]; /** * Create and initialize a new project in organization with id [organizationId] * Note: current user should be a member of the organization - * @param {Object} obj - * @param {String} obj.organizationId - id of organization - * @param {String} obj.projectName - name of new project - * @returns {Project} project - new project */ const initProjectHelper = async ({ projectName }: { projectName: string }) => { // create new project @@ -85,35 +44,14 @@ const initProjectHelper = async ({ projectName }: { projectName: string }) => { projectName }); - // encrypt and upload secrets to new project - const secrets = await encryptSecrets({ - secretsToEncrypt: secretsToBeAdded, - workspaceId: project.id, - env: "dev" - }); - try { - secrets?.forEach((secret) => { - createSecret({ - workspaceId: project.id, - environment: secret.environment, - type: secret.type, - secretKey: secret.secretName, - secretKeyCiphertext: secret.secretKeyCiphertext, - secretKeyIV: secret.secretKeyIV, - secretKeyTag: secret.secretKeyTag, - secretValueCiphertext: secret.secretValueCiphertext, - secretValueIV: secret.secretValueIV, - secretValueTag: secret.secretValueTag, - secretCommentCiphertext: secret.secretCommentCiphertext, - secretCommentIV: secret.secretCommentIV, - secretCommentTag: secret.secretCommentTag, - secretPath: "/", - metadata: { - source: "signup" - } - }); + const { data } = await apiRequest.post("/api/v3/secrets/batch/raw", { + workspaceId: project.id, + environment: "dev", + secretPath: "/", + secrets: secretsToBeAdded }); + return data; } catch (err) { console.error("Failed to upload secrets", err); } diff --git a/frontend/src/helpers/reverseTruncate.ts b/frontend/src/helpers/reverseTruncate.ts new file mode 100644 index 0000000000..eba5f21d31 --- /dev/null +++ b/frontend/src/helpers/reverseTruncate.ts @@ -0,0 +1,5 @@ +export const reverseTruncate = (text: string, maxLength = 42) => { + if (text.length < maxLength) return text; + + return `...${text.substring(text.length - maxLength + 3)}`; +}; diff --git a/frontend/src/helpers/roles.ts b/frontend/src/helpers/roles.ts new file mode 100644 index 0000000000..de6291a131 --- /dev/null +++ b/frontend/src/helpers/roles.ts @@ -0,0 +1,25 @@ +import { ProjectMembershipRole } from "@app/hooks/api/roles/types"; + +enum OrgMembershipRole { + Admin = "admin", + Member = "member", + NoAccess = "no-access" +} + +enum ProjectMemberRole { + Admin = "admin", + Member = "member", + Viewer = "viewer", + NoAccess = "no-access" +} + +export const isCustomOrgRole = (slug: string) => + !Object.values(OrgMembershipRole).includes(slug as OrgMembershipRole); + +export const formatProjectRoleName = (name: string) => { + if (name === ProjectMemberRole.Member) return "developer"; + return name; +}; + +export const isCustomProjectRole = (slug: string) => + !Object.values(ProjectMembershipRole).includes(slug as ProjectMembershipRole); diff --git a/frontend/src/helpers/secret.ts b/frontend/src/helpers/secret.ts deleted file mode 100644 index 607fa4268c..0000000000 --- a/frontend/src/helpers/secret.ts +++ /dev/null @@ -1,175 +0,0 @@ -import path from "path"; - -import { decryptSymmetric } from 
"@app/components/utilities/cryptography/crypto"; -import { fetchProjectEncryptedSecrets } from "@app/hooks/api/secrets/queries"; - -const INTERPOLATION_SYNTAX_REG = /\${([^}]+)}/g; -export const interpolateSecrets = ({ - projectId, - secretEncKey -}: { - projectId: string; - secretEncKey: string; -}) => { - const fetchSecretsCrossEnv = () => { - const fetchCache: Record> = {}; - - return async (secRefEnv: string, secRefPath: string[], secRefKey: string) => { - const secRefPathUrl = path.join("/", ...secRefPath); - const uniqKey = `${secRefEnv}-${secRefPathUrl}`; - - if (fetchCache?.[uniqKey]) { - return fetchCache[uniqKey][secRefKey]; - } - - // get secrets by projectId, env, path - const encryptedSecrets = await fetchProjectEncryptedSecrets({ - workspaceId: projectId, - environment: secRefEnv, - secretPath: secRefPathUrl - }); - - const decryptedSec = encryptedSecrets.reduce>((prev, secret) => { - const secretKey = decryptSymmetric({ - ciphertext: secret.secretKeyCiphertext, - iv: secret.secretKeyIV, - tag: secret.secretKeyTag, - key: secretEncKey - }); - const secretValue = decryptSymmetric({ - ciphertext: secret.secretValueCiphertext, - iv: secret.secretValueIV, - tag: secret.secretValueTag, - key: secretEncKey - }); - - // eslint-disable-next-line - prev[secretKey] = secretValue; - return prev; - }, {}); - - fetchCache[uniqKey] = decryptedSec; - - return fetchCache[uniqKey][secRefKey]; - }; - }; - - const recursivelyExpandSecret = async ( - expandedSec: Record, - interpolatedSec: Record, - fetchCrossEnv: (env: string, secPath: string[], secKey: string) => Promise, - recursionChainBreaker: Record, - key: string - ) => { - if (expandedSec?.[key] !== undefined) { - return expandedSec[key]; - } - if (recursionChainBreaker?.[key]) { - return ""; - } - // eslint-disable-next-line - recursionChainBreaker[key] = true; - - let interpolatedValue = interpolatedSec[key]; - if (!interpolatedValue) { - // eslint-disable-next-line no-console - console.error(`Couldn't find referenced value - ${key}`); - return ""; - } - - const refs = interpolatedValue.match(INTERPOLATION_SYNTAX_REG); - if (refs) { - await Promise.all( - refs.map(async (interpolationSyntax) => { - const interpolationKey = interpolationSyntax.slice(2, interpolationSyntax.length - 1); - const entities = interpolationKey.trim().split("."); - - if (entities.length === 1) { - const val = await recursivelyExpandSecret( - expandedSec, - interpolatedSec, - fetchCrossEnv, - recursionChainBreaker, - interpolationKey - ); - if (val) { - interpolatedValue = interpolatedValue.replaceAll(interpolationSyntax, val); - } - return; - } - - if (entities.length > 1) { - const secRefEnv = entities[0]; - const secRefPath = entities.slice(1, entities.length - 1); - const secRefKey = entities[entities.length - 1]; - - const val = await fetchCrossEnv(secRefEnv, secRefPath, secRefKey); - if (val) { - interpolatedValue = interpolatedValue.replaceAll(interpolationSyntax, val); - } - } - }) - ); - } - - // eslint-disable-next-line - expandedSec[key] = interpolatedValue; - return interpolatedValue; - }; - - // used to convert multi line ones to quotes ones with \n - const formatMultiValueEnv = (val?: string) => { - if (!val) return ""; - if (!val.match("\n")) return val; - return `"${val.replace(/\n/g, "\\n")}"`; - }; - - const expandSecrets = async ( - secrets: Record - ) => { - const expandedSec: Record = {}; - const interpolatedSec: Record = {}; - - const crossSecEnvFetch = fetchSecretsCrossEnv(); - - Object.keys(secrets).forEach((key) => { - if 
(secrets[key].value.match(INTERPOLATION_SYNTAX_REG)) { - interpolatedSec[key] = secrets[key].value; - } else { - expandedSec[key] = secrets[key].value; - } - }); - - await Promise.all( - Object.keys(secrets).map(async (key) => { - if (expandedSec?.[key]) { - // should not do multi line encoding if user has set it to skip - // eslint-disable-next-line - secrets[key].value = secrets[key].skipMultilineEncoding - ? expandedSec[key] - : formatMultiValueEnv(expandedSec[key]); - return; - } - - // this is to avoid recursion loop. So the graph should be direct graph rather than cyclic - // so for any recursion building if there is an entity two times same key meaning it will be looped - const recursionChainBreaker: Record = {}; - const expandedVal = await recursivelyExpandSecret( - expandedSec, - interpolatedSec, - crossSecEnvFetch, - recursionChainBreaker, - key - ); - - // eslint-disable-next-line - secrets[key].value = secrets[key].skipMultilineEncoding - ? expandedVal - : formatMultiValueEnv(expandedVal); - }) - ); - - return secrets; - }; - return expandSecrets; -}; diff --git a/frontend/src/helpers/string.ts b/frontend/src/helpers/string.ts index 8f581a2802..bd97d8a3c5 100644 --- a/frontend/src/helpers/string.ts +++ b/frontend/src/helpers/string.ts @@ -3,3 +3,12 @@ export const removeTrailingSlash = (str: string) => { return str.endsWith("/") ? str.slice(0, -1) : str; }; + +export const isValidPath = (val: string): boolean => { + if (val.length === 0) return false; + if (val === "/") return true; + + // Check for valid characters and no consecutive slashes + const validPathRegex = /^[a-zA-Z0-9-_.:]+(?:\/[a-zA-Z0-9-_.:]+)*$/; + return validPathRegex.test(val); +} \ No newline at end of file diff --git a/frontend/src/hoc/withProjectPermission/withProjectPermission.tsx b/frontend/src/hoc/withProjectPermission/withProjectPermission.tsx index 103ff61b7b..22c91ab52e 100644 --- a/frontend/src/hoc/withProjectPermission/withProjectPermission.tsx +++ b/frontend/src/hoc/withProjectPermission/withProjectPermission.tsx @@ -1,31 +1,29 @@ import { ComponentType } from "react"; -import { Abilities, AbilityTuple, Generics, SubjectType } from "@casl/ability"; +import { AbilityTuple } from "@casl/ability"; import { faLock } from "@fortawesome/free-solid-svg-icons"; import { FontAwesomeIcon } from "@fortawesome/react-fontawesome"; import { twMerge } from "tailwind-merge"; -import { TProjectPermission, useProjectPermission } from "@app/context"; +import { useProjectPermission } from "@app/context"; +import { ProjectPermissionSet } from "@app/context/ProjectPermissionContext"; -type Props = (T extends AbilityTuple - ? 
{ - action: T[0]; - subject: Extract; - } - : { - action: string; - subject: string; - }) & { className?: string; containerClassName?: string }; +type Props = { + className?: string; + containerClassName?: string; + action: T[0]; + subject: T[1]; +}; -export const withProjectPermission = ( - Component: ComponentType, - { action, subject, className, containerClassName }: Props["abilities"]> +export const withProjectPermission = ( + Component: ComponentType, "action" | "subject"> & T>, + { action, subject, className, containerClassName }: Props ) => { - const HOC = (hocProps: T) => { + const HOC = (hocProps: Omit, "action" | "subject"> & T) => { const { permission } = useProjectPermission(); // akhilmhdh: Set as any due to casl/react ts type bug // REASON: casl due to its type checking can't seem to union even if union intersection is applied - if (permission.cannot(action as any, subject)) { + if (permission.cannot(action as any, subject as any)) { return (
{ const queryClient = useQueryClient(); return useMutation<{}, {}, TCreateAccessPolicyDTO>({ - mutationFn: async ({ environment, projectSlug, approvals, approvers, name, secretPath }) => { + mutationFn: async ({ + environment, + projectSlug, + approvals, + approvers, + name, + secretPath, + enforcementLevel + }) => { const { data } = await apiRequest.post("/api/v1/access-approvals/policies", { environment, projectSlug, approvals, approvers, secretPath, - name + name, + enforcementLevel }); return data; }, @@ -37,12 +46,13 @@ export const useUpdateAccessApprovalPolicy = () => { const queryClient = useQueryClient(); return useMutation<{}, {}, TUpdateAccessPolicyDTO>({ - mutationFn: async ({ id, approvers, approvals, name, secretPath }) => { + mutationFn: async ({ id, approvers, approvals, name, secretPath, enforcementLevel }) => { const { data } = await apiRequest.patch(`/api/v1/access-approvals/policies/${id}`, { approvals, approvers, secretPath, - name + name, + enforcementLevel }); return data; }, diff --git a/frontend/src/hooks/api/accessApproval/types.ts b/frontend/src/hooks/api/accessApproval/types.ts index 2176b8bc1b..6df2575903 100644 --- a/frontend/src/hooks/api/accessApproval/types.ts +++ b/frontend/src/hooks/api/accessApproval/types.ts @@ -1,3 +1,4 @@ +import { EnforcementLevel, PolicyType } from "../policies/enums"; import { TProjectPermission } from "../roles/types"; import { WorkspaceEnv } from "../workspace/types"; @@ -10,14 +11,35 @@ export type TAccessApprovalPolicy = { workspace: string; environment: WorkspaceEnv; projectId: string; - approvers: string[]; + policyType: PolicyType; + approversRequired: boolean; + enforcementLevel: EnforcementLevel; + updatedAt: Date; + approvers?: Approver[]; }; +export enum ApproverType{ + User = "user", + Group = "group" +} + +export type Approver ={ + id: string; + type: ApproverType; +} + export type TAccessApprovalRequest = { id: string; policyId: string; privilegeId: string | null; - requestedBy: string; + requestedByUserId: string; + requestedByUser: { + email: string; + firstName?: string; + lastName?: string; + userId: string; + username: string; + }; createdAt: Date; updatedAt: Date; isTemporary: boolean; @@ -47,6 +69,7 @@ export type TAccessApprovalRequest = { approvers: string[]; secretPath?: string | null; envId: string; + enforcementLevel: EnforcementLevel; }; reviewers: { @@ -116,18 +139,20 @@ export type TCreateAccessPolicyDTO = { projectSlug: string; name?: string; environment: string; - approvers?: string[]; + approvers?: Approver[]; approvals?: number; secretPath?: string; + enforcementLevel?: EnforcementLevel; }; export type TUpdateAccessPolicyDTO = { id: string; name?: string; - approvers?: string[]; + approvers?: Approver[]; secretPath?: string; environment?: string; approvals?: number; + enforcementLevel?: EnforcementLevel; // for invalidating list projectSlug: string; }; diff --git a/frontend/src/hooks/api/admin/index.ts b/frontend/src/hooks/api/admin/index.ts index e1c4301d46..5405878c7c 100644 --- a/frontend/src/hooks/api/admin/index.ts +++ b/frontend/src/hooks/api/admin/index.ts @@ -1,2 +1,13 @@ -export { useCreateAdminUser, useUpdateServerConfig } from "./mutation"; -export { useGetServerConfig } from "./queries"; +export { + useAdminDeleteUser, + useCreateAdminUser, + useUpdateAdminSlackConfig, + useUpdateServerConfig, + useUpdateServerEncryptionStrategy +} from "./mutation"; +export { + useAdminGetUsers, + useGetAdminSlackConfig, + useGetServerConfig, + useGetServerRootKmsEncryptionDetails +} from "./queries"; 
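Editorial aside (not part of the patch): the access-approval changes above replace plain user-ID `approvers` with typed `{ id, type }` objects and thread a new optional `enforcementLevel` through the create/update policy mutations. A minimal usage sketch follows; the exact import paths and the `EnforcementLevel.Hard` member are assumptions, since that enum lives in `../policies/enums` and its body is not shown in this diff.

```tsx
// Sketch under assumed imports; ApproverType and the DTO shape are taken
// verbatim from the types added in this diff.
import { useCreateAccessApprovalPolicy } from "@app/hooks/api/accessApproval/mutation"; // assumed path
import { ApproverType } from "@app/hooks/api/accessApproval/types";
import { EnforcementLevel } from "@app/hooks/api/policies/enums"; // members assumed

export const AddPolicyButton = ({ projectSlug }: { projectSlug: string }) => {
  const createPolicy = useCreateAccessApprovalPolicy();

  const onAdd = () =>
    createPolicy.mutateAsync({
      projectSlug,
      environment: "prod",
      name: "Production access policy",
      secretPath: "/",
      approvals: 2,
      // approvers are typed objects now, so groups can approve alongside users
      approvers: [
        { id: "user-id", type: ApproverType.User },
        { id: "group-id", type: ApproverType.Group }
      ],
      enforcementLevel: EnforcementLevel.Hard // assumed member name
    });

  return (
    <button type="button" onClick={onAdd}>
      Add policy
    </button>
  );
};
```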
diff --git a/frontend/src/hooks/api/admin/mutation.ts b/frontend/src/hooks/api/admin/mutation.ts index 6d25944ef4..6cd13050ee 100644 --- a/frontend/src/hooks/api/admin/mutation.ts +++ b/frontend/src/hooks/api/admin/mutation.ts @@ -4,8 +4,14 @@ import { apiRequest } from "@app/config/request"; import { organizationKeys } from "../organization/queries"; import { User } from "../users/types"; -import { adminQueryKeys } from "./queries"; -import { TCreateAdminUserDTO, TServerConfig } from "./types"; +import { adminQueryKeys, adminStandaloneKeys } from "./queries"; +import { + AdminSlackConfig, + RootKeyEncryptionStrategy, + TCreateAdminUserDTO, + TServerConfig, + TUpdateAdminSlackConfigDTO +} from "./types"; export const useCreateAdminUser = () => { const queryClient = useQueryClient(); @@ -28,7 +34,11 @@ export const useCreateAdminUser = () => { export const useUpdateServerConfig = () => { const queryClient = useQueryClient(); - return useMutation>({ + return useMutation< + TServerConfig, + {}, + Partial + >({ mutationFn: async (opt) => { const { data } = await apiRequest.patch<{ config: TServerConfig }>( "/api/v1/admin/config", @@ -43,3 +53,48 @@ export const useUpdateServerConfig = () => { } }); }; + +export const useAdminDeleteUser = () => { + const queryClient = useQueryClient(); + return useMutation({ + mutationFn: async (userId: string) => { + await apiRequest.delete(`/api/v1/admin/user-management/users/${userId}`); + + return {}; + }, + onSuccess: () => { + queryClient.invalidateQueries({ + queryKey: [adminStandaloneKeys.getUsers] + }); + } + }); +}; + +export const useUpdateAdminSlackConfig = () => { + const queryClient = useQueryClient(); + return useMutation({ + mutationFn: async (dto) => { + const { data } = await apiRequest.put( + "/api/v1/admin/integrations/slack/config", + dto + ); + + return data; + }, + onSuccess: () => { + queryClient.invalidateQueries(adminQueryKeys.getAdminSlackConfig()); + } + }); +}; + +export const useUpdateServerEncryptionStrategy = () => { + const queryClient = useQueryClient(); + return useMutation({ + mutationFn: async (strategy: RootKeyEncryptionStrategy) => { + await apiRequest.patch("/api/v1/admin/encryption-strategies", { strategy }); + }, + onSuccess: () => { + queryClient.invalidateQueries(adminQueryKeys.getServerEncryptionStrategies()); + } + }); +}; diff --git a/frontend/src/hooks/api/admin/queries.ts b/frontend/src/hooks/api/admin/queries.ts index f64c8bfa9d..a1d32bce35 100644 --- a/frontend/src/hooks/api/admin/queries.ts +++ b/frontend/src/hooks/api/admin/queries.ts @@ -1,11 +1,24 @@ -import { useQuery, UseQueryOptions } from "@tanstack/react-query"; +import { useInfiniteQuery, useQuery, UseQueryOptions } from "@tanstack/react-query"; import { apiRequest } from "@app/config/request"; -import { TServerConfig } from "./types"; +import { User } from "../types"; +import { + AdminGetUsersFilters, + AdminSlackConfig, + TGetServerRootKmsEncryptionDetails, + TServerConfig +} from "./types"; + +export const adminStandaloneKeys = { + getUsers: "get-users" +}; export const adminQueryKeys = { - serverConfig: () => ["server-config"] as const + serverConfig: () => ["server-config"] as const, + getUsers: (filters: AdminGetUsersFilters) => [adminStandaloneKeys.getUsers, { filters }] as const, + getAdminSlackConfig: () => ["admin-slack-config"] as const, + getServerEncryptionStrategies: () => ["server-encryption-strategies"] as const }; const fetchServerConfig = async () => { @@ -32,3 +45,50 @@ export const useGetServerConfig = ({ ...options, enabled: 
options?.enabled ?? true }); + +export const useAdminGetUsers = (filters: AdminGetUsersFilters) => { + return useInfiniteQuery({ + queryKey: adminQueryKeys.getUsers(filters), + queryFn: async ({ pageParam }) => { + const { data } = await apiRequest.get<{ users: User[] }>( + "/api/v1/admin/user-management/users", + { + params: { + ...filters, + offset: pageParam + } + } + ); + + return data.users; + }, + getNextPageParam: (lastPage, pages) => + lastPage.length !== 0 ? pages.length * filters.limit : undefined + }); +}; + +export const useGetAdminSlackConfig = () => { + return useQuery({ + queryKey: adminQueryKeys.getAdminSlackConfig(), + queryFn: async () => { + const { data } = await apiRequest.get( + "/api/v1/admin/integrations/slack/config" + ); + + return data; + } + }); +}; + +export const useGetServerRootKmsEncryptionDetails = () => { + return useQuery({ + queryKey: adminQueryKeys.getServerEncryptionStrategies(), + queryFn: async () => { + const { data } = await apiRequest.get( + "/api/v1/admin/encryption-strategies" + ); + + return data; + } + }); +}; diff --git a/frontend/src/hooks/api/admin/types.ts b/frontend/src/hooks/api/admin/types.ts index 6a42e6ed0e..60fa3ab988 100644 --- a/frontend/src/hooks/api/admin/types.ts +++ b/frontend/src/hooks/api/admin/types.ts @@ -1,3 +1,13 @@ +export enum LoginMethod { + EMAIL = "email", + GOOGLE = "google", + GITHUB = "github", + GITLAB = "gitlab", + SAML = "saml", + LDAP = "ldap", + OIDC = "oidc" +} + export type TServerConfig = { initialized: boolean; allowSignUp: boolean; @@ -5,11 +15,18 @@ export type TServerConfig = { isMigrationModeOn?: boolean; trustSamlEmails: boolean; trustLdapEmails: boolean; + trustOidcEmails: boolean; isSecretScanningDisabled: boolean; + defaultAuthOrgSlug: string | null; + defaultAuthOrgId: string | null; + defaultAuthOrgAuthMethod?: string | null; + defaultAuthOrgAuthEnforced?: boolean | null; + enabledLoginMethods: LoginMethod[]; }; export type TCreateAdminUserDTO = { email: string; + password: string; firstName: string; lastName?: string; protectedKey: string; @@ -22,3 +39,30 @@ export type TCreateAdminUserDTO = { verifier: string; salt: string; }; + +export type TUpdateAdminSlackConfigDTO = { + clientId: string; + clientSecret: string; +}; + +export type AdminGetUsersFilters = { + limit: number; + searchTerm: string; +}; + +export type AdminSlackConfig = { + clientId: string; + clientSecret: string; +}; + +export type TGetServerRootKmsEncryptionDetails = { + strategies: { + strategy: RootKeyEncryptionStrategy; + enabled: boolean; + }[]; +}; + +export enum RootKeyEncryptionStrategy { + Software = "SOFTWARE", + HSM = "HSM" +} diff --git a/frontend/src/hooks/api/apiKeys/queries.tsx b/frontend/src/hooks/api/apiKeys/queries.tsx index d379174359..c1a2e43558 100644 --- a/frontend/src/hooks/api/apiKeys/queries.tsx +++ b/frontend/src/hooks/api/apiKeys/queries.tsx @@ -2,7 +2,7 @@ import { useMutation, useQueryClient } from "@tanstack/react-query"; import { apiRequest } from "@app/config/request"; -import { userKeys } from "../users/queries"; +import { userKeys } from "../users"; import { APIKeyDataV2, CreateAPIKeyDataV2DTO, diff --git a/frontend/src/hooks/api/auditLogs/constants.tsx b/frontend/src/hooks/api/auditLogs/constants.tsx index 3487003749..4045929080 100644 --- a/frontend/src/hooks/api/auditLogs/constants.tsx +++ b/frontend/src/hooks/api/auditLogs/constants.tsx @@ -26,7 +26,6 @@ export const eventToNameMap: { [K in EventType]: string } = { [EventType.CREATE_IDENTITY_UNIVERSAL_AUTH_CLIENT_SECRET]: "Create universal 
auth client secret", [EventType.REVOKE_IDENTITY_UNIVERSAL_AUTH_CLIENT_SECRET]: "Revoke universal auth client secret", [EventType.GET_IDENTITY_UNIVERSAL_AUTH_CLIENT_SECRETS]: "Get universal auth client secrets", - [EventType.GET_IDENTITY_UNIVERSAL_AUTH]: "Get universal auth", [EventType.CREATE_ENVIRONMENT]: "Create environment", [EventType.UPDATE_ENVIRONMENT]: "Update environment", [EventType.DELETE_ENVIRONMENT]: "Delete environment", @@ -43,7 +42,45 @@ export const eventToNameMap: { [K in EventType]: string } = { [EventType.UPDATE_SECRET_IMPORT]: "Update secret import", [EventType.DELETE_SECRET_IMPORT]: "Delete secret import", [EventType.UPDATE_USER_WORKSPACE_DENIED_PERMISSIONS]: "Update denied permissions", - [EventType.UPDATE_USER_WORKSPACE_ROLE]: "Update user role" + [EventType.UPDATE_USER_WORKSPACE_ROLE]: "Update user role", + [EventType.CREATE_CA]: "Create CA", + [EventType.GET_CA]: "Get CA", + [EventType.UPDATE_CA]: "Update CA", + [EventType.DELETE_CA]: "Delete CA", + [EventType.GET_CA_CSR]: "Get CA CSR", + [EventType.GET_CA_CERT]: "Get CA certificate", + [EventType.SIGN_INTERMEDIATE]: "Sign intermediate", + [EventType.IMPORT_CA_CERT]: "Import CA certificate", + [EventType.GET_CA_CRL]: "Get CA CRL", + [EventType.ISSUE_CERT]: "Issue certificate", + [EventType.GET_CERT]: "Get certificate", + [EventType.DELETE_CERT]: "Delete certificate", + [EventType.REVOKE_CERT]: "Revoke certificate", + [EventType.GET_CERT_BODY]: "Get certificate body", + [EventType.CREATE_PKI_ALERT]: "Create PKI alert", + [EventType.GET_PKI_ALERT]: "Get PKI alert", + [EventType.UPDATE_PKI_ALERT]: "Update PKI alert", + [EventType.DELETE_PKI_ALERT]: "Delete PKI alert", + [EventType.CREATE_PKI_COLLECTION]: "Create PKI collection", + [EventType.GET_PKI_COLLECTION]: "Get PKI collection", + [EventType.UPDATE_PKI_COLLECTION]: "Update PKI collection", + [EventType.DELETE_PKI_COLLECTION]: "Delete PKI collection", + [EventType.GET_PKI_COLLECTION_ITEMS]: "Get PKI collection items", + [EventType.ADD_PKI_COLLECTION_ITEM]: "Add PKI collection item", + [EventType.DELETE_PKI_COLLECTION_ITEM]: "Delete PKI collection item", + [EventType.ORG_ADMIN_ACCESS_PROJECT]: "Org admin accessed project", + [EventType.CREATE_CERTIFICATE_TEMPLATE]: "Create certificate template", + [EventType.UPDATE_CERTIFICATE_TEMPLATE]: "Update certificate template", + [EventType.DELETE_CERTIFICATE_TEMPLATE]: "Delete certificate template", + [EventType.GET_CERTIFICATE_TEMPLATE]: "Get certificate template", + [EventType.GET_CERTIFICATE_TEMPLATE_EST_CONFIG]: "Get certificate template EST configuration", + [EventType.CREATE_CERTIFICATE_TEMPLATE_EST_CONFIG]: + "Create certificate template EST configuration", + [EventType.UPDATE_CERTIFICATE_TEMPLATE_EST_CONFIG]: + "Update certificate template EST configuration", + [EventType.UPDATE_PROJECT_SLACK_CONFIG]: "Update project slack configuration", + [EventType.GET_PROJECT_SLACK_CONFIG]: "Get project slack configuration", + [EventType.INTEGRATION_SYNCED]: "Integration sync" }; export const userAgentTTypeoNameMap: { [K in UserAgentType]: string } = { diff --git a/frontend/src/hooks/api/auditLogs/enums.tsx b/frontend/src/hooks/api/auditLogs/enums.tsx index f6ccd8f81c..1db55d7396 100644 --- a/frontend/src/hooks/api/auditLogs/enums.tsx +++ b/frontend/src/hooks/api/auditLogs/enums.tsx @@ -1,4 +1,5 @@ export enum ActorType { + PLATFORM = "platform", USER = "user", SERVICE = "service", IDENTITY = "identity" @@ -56,5 +57,41 @@ export enum EventType { UPDATE_SECRET_IMPORT = "update-secret-import", DELETE_SECRET_IMPORT = 
"delete-secret-import", UPDATE_USER_WORKSPACE_ROLE = "update-user-workspace-role", - UPDATE_USER_WORKSPACE_DENIED_PERMISSIONS = "update-user-workspace-denied-permissions" + UPDATE_USER_WORKSPACE_DENIED_PERMISSIONS = "update-user-workspace-denied-permissions", + CREATE_CA = "create-certificate-authority", + GET_CA = "get-certificate-authority", + UPDATE_CA = "update-certificate-authority", + DELETE_CA = "delete-certificate-authority", + GET_CA_CSR = "get-certificate-authority-csr", + GET_CA_CERT = "get-certificate-authority-cert", + SIGN_INTERMEDIATE = "sign-intermediate", + IMPORT_CA_CERT = "import-certificate-authority-cert", + GET_CA_CRL = "get-certificate-authority-crl", + ISSUE_CERT = "issue-cert", + GET_CERT = "get-cert", + DELETE_CERT = "delete-cert", + REVOKE_CERT = "revoke-cert", + GET_CERT_BODY = "get-cert-body", + CREATE_PKI_ALERT = "create-pki-alert", + GET_PKI_ALERT = "get-pki-alert", + UPDATE_PKI_ALERT = "update-pki-alert", + DELETE_PKI_ALERT = "delete-pki-alert", + CREATE_PKI_COLLECTION = "create-pki-collection", + GET_PKI_COLLECTION = "get-pki-collection", + UPDATE_PKI_COLLECTION = "update-pki-collection", + DELETE_PKI_COLLECTION = "delete-pki-collection", + GET_PKI_COLLECTION_ITEMS = "get-pki-collection-items", + ADD_PKI_COLLECTION_ITEM = "add-pki-collection-item", + DELETE_PKI_COLLECTION_ITEM = "delete-pki-collection-item", + ORG_ADMIN_ACCESS_PROJECT = "org-admin-accessed-project", + CREATE_CERTIFICATE_TEMPLATE = "create-certificate-template", + UPDATE_CERTIFICATE_TEMPLATE = "update-certificate-template", + DELETE_CERTIFICATE_TEMPLATE = "delete-certificate-template", + GET_CERTIFICATE_TEMPLATE = "get-certificate-template", + CREATE_CERTIFICATE_TEMPLATE_EST_CONFIG = "create-certificate-template-est-config", + UPDATE_CERTIFICATE_TEMPLATE_EST_CONFIG = "update-certificate-template-est-config", + GET_CERTIFICATE_TEMPLATE_EST_CONFIG = "get-certificate-template-est-config", + UPDATE_PROJECT_SLACK_CONFIG = "update-project-slack-config", + GET_PROJECT_SLACK_CONFIG = "get-project-slack-config", + INTEGRATION_SYNCED = "integration-synced" } diff --git a/frontend/src/hooks/api/auditLogs/queries.tsx b/frontend/src/hooks/api/auditLogs/queries.tsx index 3517d44350..5ec69b0e72 100644 --- a/frontend/src/hooks/api/auditLogs/queries.tsx +++ b/frontend/src/hooks/api/auditLogs/queries.tsx @@ -1,41 +1,76 @@ -import { useInfiniteQuery, useQuery } from "@tanstack/react-query"; +import { useInfiniteQuery, UseInfiniteQueryOptions, useQuery } from "@tanstack/react-query"; +import { AxiosError } from "axios"; +import { createNotification } from "@app/components/notifications"; import { apiRequest } from "@app/config/request"; -import { Actor, AuditLog, AuditLogFilters } from "./types"; +import { Actor, AuditLog, TGetAuditLogsFilter } from "./types"; -export const workspaceKeys = { - getAuditLogs: (workspaceId: string, filters: AuditLogFilters) => +export const auditLogKeys = { + getAuditLogs: (workspaceId: string | null, filters: TGetAuditLogsFilter) => [{ workspaceId, filters }, "audit-logs"] as const, getAuditLogActorFilterOpts: (workspaceId: string) => [{ workspaceId }, "audit-log-actor-filters"] as const }; -export const useGetAuditLogs = (workspaceId: string, filters: AuditLogFilters) => { +export const useGetAuditLogs = ( + filters: TGetAuditLogsFilter, + projectId: string | null, + options: Omit< + UseInfiniteQueryOptions< + AuditLog[], + unknown, + AuditLog[], + AuditLog[], + ReturnType + >, + "queryFn" | "queryKey" | "getNextPageParam" + > = {} +) => { return useInfiniteQuery({ - queryKey: 
workspaceKeys.getAuditLogs(workspaceId, filters), + queryKey: auditLogKeys.getAuditLogs(projectId, filters), queryFn: async ({ pageParam }) => { - const { data } = await apiRequest.get<{ auditLogs: AuditLog[] }>( - `/api/v1/workspace/${workspaceId}/audit-logs`, - { - params: { - ...filters, - offset: pageParam, - startDate: filters?.startDate?.toISOString(), - endDate: filters?.endDate?.toISOString() + try { + const { data } = await apiRequest.get<{ auditLogs: AuditLog[] }>( + "/api/v1/organization/audit-logs", + { + params: { + ...filters, + offset: pageParam, + startDate: filters?.startDate?.toISOString(), + endDate: filters?.endDate?.toISOString(), + ...(filters.eventMetadata && Object.keys(filters.eventMetadata).length + ? { + eventMetadata: Object.entries(filters.eventMetadata) + .map(([key, value]) => `${key}=${value}`) + .join(",") + } + : {}), + ...(filters.eventType?.length ? { eventType: filters.eventType.join(",") } : {}), + ...(projectId ? { projectId } : {}) + } } + ); + return data.auditLogs; + } catch (error) { + if (error instanceof AxiosError) { + createNotification({ + type: "error", + text: error.response?.data.message + }); } - ); - return data.auditLogs; + return []; + } }, getNextPageParam: (lastPage, pages) => - lastPage.length !== 0 ? pages.length * filters.limit : undefined + lastPage.length !== 0 ? pages.length * filters.limit : undefined, + ...options }); }; export const useGetAuditLogActorFilterOpts = (workspaceId: string) => { return useQuery({ - queryKey: workspaceKeys.getAuditLogActorFilterOpts(workspaceId), + queryKey: auditLogKeys.getAuditLogActorFilterOpts(workspaceId), queryFn: async () => { const { data } = await apiRequest.get<{ actors: Actor[] }>( `/api/v1/workspace/${workspaceId}/audit-logs/filters/actors` diff --git a/frontend/src/hooks/api/auditLogs/types.tsx b/frontend/src/hooks/api/auditLogs/types.tsx index 92611c8143..567e1d0b20 100644 --- a/frontend/src/hooks/api/auditLogs/types.tsx +++ b/frontend/src/hooks/api/auditLogs/types.tsx @@ -1,6 +1,20 @@ +import { CaStatus } from "../ca"; import { IdentityTrustedIp } from "../identities/types"; +import { PkiItemType } from "../pkiCollections/constants"; import { ActorType, EventType, UserAgentType } from "./enums"; +export type TGetAuditLogsFilter = { + eventType?: EventType[]; + userAgentType?: UserAgentType; + eventMetadata?: Record; + actorType?: ActorType; + projectId?: string; + actorId?: string; // user ID format + startDate?: Date; + endDate?: Date; + limit: number; +}; + interface UserActorMetadata { userId: string; email: string; @@ -31,7 +45,13 @@ export interface IdentityActor { metadata: IdentityActorMetadata; } -export type Actor = UserActor | ServiceActor | IdentityActor; +export interface PlatformActorMetadata {} +export interface PlatformActor { + type: ActorType.PLATFORM; + metadata: PlatformActorMetadata; +} + +export type Actor = UserActor | ServiceActor | IdentityActor | PlatformActor; interface GetSecretsEvent { type: EventType.GET_SECRETS; @@ -361,7 +381,6 @@ interface CreateWebhookEvent { webhookId: string; environment: string; secretPath: string; - webhookUrl: string; isDisabled: boolean; }; } @@ -372,7 +391,6 @@ interface UpdateWebhookStatusEvent { webhookId: string; environment: string; secretPath: string; - webhookUrl: string; isDisabled: boolean; }; } @@ -383,7 +401,6 @@ interface DeleteWebhookEvent { webhookId: string; environment: string; secretPath: string; - webhookUrl: string; isDisabled: boolean; }; } @@ -462,6 +479,322 @@ interface UpdateUserDeniedPermissions { }; } 
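// (editorial sketch, not part of the patch) The CA/PKI event interfaces added
// below extend the audit-log `Event` discriminated union on `type`, so
// consumers can narrow `metadata` without casting:
//
//   if (log.event.type === EventType.ISSUE_CERT) {
//     // narrowed to { caId: string; dn: string; serialNumber: string }
//     console.log(log.event.metadata.serialNumber);
//   }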
+interface CreateCa { + type: EventType.CREATE_CA; + metadata: { + caId: string; + dn: string; + }; +} + +interface GetCa { + type: EventType.GET_CA; + metadata: { + caId: string; + dn: string; + }; +} + +interface UpdateCa { + type: EventType.UPDATE_CA; + metadata: { + caId: string; + dn: string; + status: CaStatus; + }; +} + +interface DeleteCa { + type: EventType.DELETE_CA; + metadata: { + caId: string; + dn: string; + }; +} + +interface GetCaCsr { + type: EventType.GET_CA_CSR; + metadata: { + caId: string; + dn: string; + }; +} + +interface GetCaCert { + type: EventType.GET_CA_CERT; + metadata: { + caId: string; + dn: string; + }; +} + +interface SignIntermediate { + type: EventType.SIGN_INTERMEDIATE; + metadata: { + caId: string; + dn: string; + serialNumber: string; + }; +} + +interface ImportCaCert { + type: EventType.IMPORT_CA_CERT; + metadata: { + caId: string; + dn: string; + }; +} + +interface GetCaCrl { + type: EventType.GET_CA_CRL; + metadata: { + caId: string; + dn: string; + }; +} + +interface IssueCert { + type: EventType.ISSUE_CERT; + metadata: { + caId: string; + dn: string; + serialNumber: string; + }; +} + +interface GetCert { + type: EventType.GET_CERT; + metadata: { + certId: string; + cn: string; + serialNumber: string; + }; +} + +interface DeleteCert { + type: EventType.DELETE_CERT; + metadata: { + certId: string; + cn: string; + serialNumber: string; + }; +} + +interface RevokeCert { + type: EventType.REVOKE_CERT; + metadata: { + certId: string; + cn: string; + serialNumber: string; + }; +} + +interface GetCertBody { + type: EventType.GET_CERT_BODY; + metadata: { + certId: string; + cn: string; + serialNumber: string; + }; +} + +interface CreatePkiAlert { + type: EventType.CREATE_PKI_ALERT; + metadata: { + pkiAlertId: string; + pkiCollectionId: string; + name: string; + alertBeforeDays: number; + recipientEmails: string; + }; +} +interface GetPkiAlert { + type: EventType.GET_PKI_ALERT; + metadata: { + pkiAlertId: string; + }; +} + +interface UpdatePkiAlert { + type: EventType.UPDATE_PKI_ALERT; + metadata: { + pkiAlertId: string; + pkiCollectionId?: string; + name?: string; + alertBeforeDays?: number; + recipientEmails?: string; + }; +} +interface DeletePkiAlert { + type: EventType.DELETE_PKI_ALERT; + metadata: { + pkiAlertId: string; + }; +} + +interface CreatePkiCollection { + type: EventType.CREATE_PKI_COLLECTION; + metadata: { + pkiCollectionId: string; + name: string; + }; +} + +interface GetPkiCollection { + type: EventType.GET_PKI_COLLECTION; + metadata: { + pkiCollectionId: string; + }; +} + +interface UpdatePkiCollection { + type: EventType.UPDATE_PKI_COLLECTION; + metadata: { + pkiCollectionId: string; + name?: string; + }; +} + +interface DeletePkiCollection { + type: EventType.DELETE_PKI_COLLECTION; + metadata: { + pkiCollectionId: string; + }; +} + +interface GetPkiCollectionItems { + type: EventType.GET_PKI_COLLECTION_ITEMS; + metadata: { + pkiCollectionId: string; + }; +} + +interface AddPkiCollectionItem { + type: EventType.ADD_PKI_COLLECTION_ITEM; + metadata: { + pkiCollectionItemId: string; + pkiCollectionId: string; + type: PkiItemType; + itemId: string; + }; +} + +interface DeletePkiCollectionItem { + type: EventType.DELETE_PKI_COLLECTION_ITEM; + metadata: { + pkiCollectionItemId: string; + pkiCollectionId: string; + }; +} + +interface OrgAdminAccessProjectEvent { + type: EventType.ORG_ADMIN_ACCESS_PROJECT; + metadata: { + userId: string; + username: string; + email: string; + projectId: string; + }; // no metadata yet +} + +interface 
CreateCertificateTemplate { + type: EventType.CREATE_CERTIFICATE_TEMPLATE; + metadata: { + certificateTemplateId: string; + caId: string; + pkiCollectionId?: string; + name: string; + commonName: string; + subjectAlternativeName: string; + ttl: string; + }; +} + +interface GetCertificateTemplate { + type: EventType.GET_CERTIFICATE_TEMPLATE; + metadata: { + certificateTemplateId: string; + }; +} + +interface UpdateCertificateTemplate { + type: EventType.UPDATE_CERTIFICATE_TEMPLATE; + metadata: { + certificateTemplateId: string; + caId: string; + pkiCollectionId?: string; + name: string; + commonName: string; + subjectAlternativeName: string; + ttl: string; + }; +} + +interface DeleteCertificateTemplate { + type: EventType.DELETE_CERTIFICATE_TEMPLATE; + metadata: { + certificateTemplateId: string; + }; +} + +interface CreateCertificateTemplateEstConfig { + type: EventType.CREATE_CERTIFICATE_TEMPLATE_EST_CONFIG; + metadata: { + certificateTemplateId: string; + isEnabled: boolean; + }; +} + +interface UpdateCertificateTemplateEstConfig { + type: EventType.UPDATE_CERTIFICATE_TEMPLATE_EST_CONFIG; + metadata: { + certificateTemplateId: string; + isEnabled: boolean; + }; +} + +interface GetCertificateTemplateEstConfig { + type: EventType.GET_CERTIFICATE_TEMPLATE_EST_CONFIG; + metadata: { + certificateTemplateId: string; + }; +} + +interface UpdateProjectSlackConfig { + type: EventType.UPDATE_PROJECT_SLACK_CONFIG; + metadata: { + id: string; + slackIntegrationId: string; + isAccessRequestNotificationEnabled: boolean; + accessRequestChannels: string; + isSecretRequestNotificationEnabled: boolean; + secretRequestChannels: string; + }; +} + +interface GetProjectSlackConfig { + type: EventType.GET_PROJECT_SLACK_CONFIG; + metadata: { + id: string; + }; +} + +export enum IntegrationSyncedEventTrigger { + MANUAL = "manual", + AUTO = "auto" +} + +interface IntegrationSyncedEvent { + type: EventType.INTEGRATION_SYNCED; + metadata: { + integrationId: string; + lastSyncJobId: string; + lastUsed: Date; + syncMessage: string; + isSynced: boolean; + }; +} + export type Event = | GetSecretsEvent | GetSecretEvent @@ -504,7 +837,43 @@ export type Event = | UpdateSecretImportEvent | DeleteSecretImportEvent | UpdateUserRole - | UpdateUserDeniedPermissions; + | UpdateUserDeniedPermissions + | CreateCa + | GetCa + | UpdateCa + | DeleteCa + | GetCaCsr + | GetCaCert + | SignIntermediate + | ImportCaCert + | GetCaCrl + | IssueCert + | GetCert + | DeleteCert + | RevokeCert + | GetCertBody + | CreatePkiAlert + | GetPkiAlert + | UpdatePkiAlert + | DeletePkiAlert + | CreatePkiCollection + | GetPkiCollection + | UpdatePkiCollection + | DeletePkiCollection + | GetPkiCollectionItems + | AddPkiCollectionItem + | DeletePkiCollectionItem + | OrgAdminAccessProjectEvent + | CreateCertificateTemplate + | UpdateCertificateTemplate + | GetCertificateTemplate + | DeleteCertificateTemplate + | UpdateCertificateTemplateEstConfig + | CreateCertificateTemplateEstConfig + | GetCertificateTemplateEstConfig + | UpdateProjectSlackConfig + | GetProjectSlackConfig + | IntegrationSyncedEvent; export type AuditLog = { id: string; @@ -517,13 +886,6 @@ export type AuditLog = { userAgentType: UserAgentType; createdAt: string; updatedAt: string; -}; - -export type AuditLogFilters = { - eventType?: EventType; - userAgentType?: UserAgentType; - actor?: string; - limit: number; - startDate?: Date; - endDate?: Date; + projectName?: string; + projectId?: string; }; diff --git a/frontend/src/hooks/api/auth/index.tsx b/frontend/src/hooks/api/auth/index.tsx 
index 505f7b05f0..66688cbdcf 100644 --- a/frontend/src/hooks/api/auth/index.tsx +++ b/frontend/src/hooks/api/auth/index.tsx @@ -1,5 +1,6 @@ export { useGetAuthToken, + useOauthTokenExchange, useResetPassword, useSelectOrganization, useSendMfaToken, @@ -7,4 +8,5 @@ export { useSendVerificationEmail, useVerifyMfaToken, useVerifyPasswordResetCode, - useVerifySignupEmailVerificationCode} from "./queries"; + useVerifySignupEmailVerificationCode +} from "./queries"; diff --git a/frontend/src/hooks/api/auth/queries.tsx b/frontend/src/hooks/api/auth/queries.tsx index cba815fae6..de3c60d461 100644 --- a/frontend/src/hooks/api/auth/queries.tsx +++ b/frontend/src/hooks/api/auth/queries.tsx @@ -5,7 +5,7 @@ import { apiRequest } from "@app/config/request"; import { setAuthToken } from "@app/reactQuery"; import { organizationKeys } from "../organization/queries"; -import { workspaceKeys } from "../workspace/queries"; +import { workspaceKeys } from "../workspace"; import { ChangePasswordDTO, CompleteAccountDTO, @@ -19,10 +19,13 @@ import { Login2Res, LoginLDAPDTO, LoginLDAPRes, + MfaMethod, ResetPasswordDTO, SendMfaTokenDTO, SRP1DTO, SRPR1Res, + TOauthTokenExchangeDTO, + UserAgentType, VerifyMfaTokenDTO, VerifyMfaTokenRes, VerifySignupInviteDTO @@ -59,22 +62,29 @@ export const useLogin1 = () => { }); }; -export const selectOrganization = async (data: { organizationId: string }) => { - const { data: res } = await apiRequest.post<{ token: string }>( - "/api/v3/auth/select-organization", - data - ); +export const selectOrganization = async (data: { + organizationId: string; + userAgent?: UserAgentType; +}) => { + const { data: res } = await apiRequest.post<{ + token: string; + isMfaEnabled: boolean; + mfaMethod?: MfaMethod; + }>("/api/v3/auth/select-organization", data); return res; }; export const useSelectOrganization = () => { const queryClient = useQueryClient(); return useMutation({ - mutationFn: async (details: { organizationId: string }) => { + mutationFn: async (details: { organizationId: string; userAgent?: UserAgentType }) => { const data = await selectOrganization(details); - SecurityClient.setToken(data.token); - SecurityClient.setProviderAuthToken(""); + // If a custom user agent is set, then this session is meant for another consuming application, not the web application. 
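// (editorial note, not part of the patch) The guard below means the web client
// persists the session token only for first-party, non-MFA logins; CLI logins
// and MFA-pending sessions read the token from the mutation result instead.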
+ if (!details.userAgent && !data.isMfaEnabled) { + SecurityClient.setToken(data.token); + SecurityClient.setProviderAuthToken(""); + } return data; }, @@ -92,6 +102,7 @@ export const useLogin2 = () => { mutationFn: async (details: { email: string; clientProof: string; + password: string; providerAuthToken?: string; }) => { return login2(details); @@ -99,6 +110,20 @@ export const useLogin2 = () => { }); }; +export const oauthTokenExchange = async (details: TOauthTokenExchangeDTO) => { + const { data } = await apiRequest.post("/api/v1/sso/token-exchange", details); + return data; +}; + +export const useOauthTokenExchange = () => { + // note: use after srp1 + return useMutation({ + mutationFn: async (details: TOauthTokenExchangeDTO) => { + return oauthTokenExchange(details); + } + }); +}; + export const srp1 = async (details: SRP1DTO) => { const { data } = await apiRequest.post("/api/v1/password/srp1", details); return data; @@ -131,10 +156,19 @@ export const useSendMfaToken = () => { }); }; -export const verifyMfaToken = async ({ email, mfaCode }: { email: string; mfaCode: string }) => { +export const verifyMfaToken = async ({ + email, + mfaCode, + mfaMethod +}: { + email: string; + mfaCode: string; + mfaMethod?: string; +}) => { const { data } = await apiRequest.post("/api/v2/auth/mfa/verify", { email, - mfaToken: mfaCode + mfaToken: mfaCode, + mfaMethod }); return data; @@ -142,10 +176,11 @@ export const verifyMfaToken = async ({ email, mfaCode }: { email: string; mfaCod export const useVerifyMfaToken = () => { return useMutation({ - mutationFn: async ({ email, mfaCode }) => { + mutationFn: async ({ email, mfaCode, mfaMethod }) => { return verifyMfaToken({ email, - mfaCode + mfaCode, + mfaMethod }); } }); @@ -279,3 +314,9 @@ export const useGetAuthToken = () => onSuccess: (data) => setAuthToken(data.token), retry: 0 }); + +export const checkUserTotpMfa = async () => { + const { data } = await apiRequest.get<{ isVerified: boolean }>("/api/v2/auth/mfa/check/totp"); + + return data.isVerified; +}; diff --git a/frontend/src/hooks/api/auth/types.ts b/frontend/src/hooks/api/auth/types.ts index ce1b18bc83..fd0604a88b 100644 --- a/frontend/src/hooks/api/auth/types.ts +++ b/frontend/src/hooks/api/auth/types.ts @@ -9,6 +9,7 @@ export type SendMfaTokenDTO = { export type VerifyMfaTokenDTO = { email: string; mfaCode: string; + mfaMethod: MfaMethod; }; export type VerifyMfaTokenRes = { @@ -23,6 +24,11 @@ export type VerifyMfaTokenRes = { tag: string; }; +export type TOauthTokenExchangeDTO = { + providerAuthToken: string; + email: string; +}; + export type Login1DTO = { email: string; clientPublicKey: string; @@ -34,6 +40,7 @@ export type Login2DTO = { email: string; clientProof: string; providerAuthToken?: string; + password: string; }; export type Login1Res = { @@ -86,6 +93,8 @@ export type CompleteAccountDTO = { encryptedPrivateKeyTag: string; salt: string; verifier: string; + password: string; + tokenMetadata?: string; }; export type CompleteAccountSignupDTO = CompleteAccountDTO & { @@ -101,6 +110,7 @@ export type VerifySignupInviteDTO = { }; export type ChangePasswordDTO = { + password: string; clientProof: string; protectedKey: string; protectedKeyIV: string; @@ -136,3 +146,12 @@ export type IssueBackupPrivateKeyDTO = { export type GetBackupEncryptedPrivateKeyDTO = { verificationToken: string; }; + +export enum UserAgentType { + CLI = "cli" +} + +export enum MfaMethod { + EMAIL = "email", + TOTP = "totp" +} diff --git a/frontend/src/hooks/api/ca/constants.tsx 
b/frontend/src/hooks/api/ca/constants.tsx
new file mode 100644
index 0000000000..9bb7b89d5c
--- /dev/null
+++ b/frontend/src/hooks/api/ca/constants.tsx
@@ -0,0 +1,23 @@
+import { CaStatus, CaType } from "./enums";
+
+export const caTypeToNameMap: { [K in CaType]: string } = {
+  [CaType.ROOT]: "Root",
+  [CaType.INTERMEDIATE]: "Intermediate"
+};
+
+export const caStatusToNameMap: { [K in CaStatus]: string } = {
+  [CaStatus.ACTIVE]: "Active",
+  [CaStatus.DISABLED]: "Disabled",
+  [CaStatus.PENDING_CERTIFICATE]: "Pending Certificate"
+};
+
+export const getCaStatusBadgeVariant = (status: CaStatus) => {
+  switch (status) {
+    case CaStatus.ACTIVE:
+      return "success";
+    case CaStatus.DISABLED:
+      return "danger";
+    default:
+      return "primary";
+  }
+};
diff --git a/frontend/src/hooks/api/ca/enums.tsx b/frontend/src/hooks/api/ca/enums.tsx
new file mode 100644
index 0000000000..35d86c4526
--- /dev/null
+++ b/frontend/src/hooks/api/ca/enums.tsx
@@ -0,0 +1,14 @@
+export enum CaType {
+  ROOT = "root",
+  INTERMEDIATE = "intermediate"
+}
+
+export enum CaStatus {
+  ACTIVE = "active",
+  DISABLED = "disabled",
+  PENDING_CERTIFICATE = "pending-certificate"
+}
+
+export enum CaRenewalType {
+  EXISTING = "existing"
+}
diff --git a/frontend/src/hooks/api/ca/index.tsx b/frontend/src/hooks/api/ca/index.tsx
new file mode 100644
index 0000000000..4993a8fdcb
--- /dev/null
+++ b/frontend/src/hooks/api/ca/index.tsx
@@ -0,0 +1,11 @@
+export { CaRenewalType, CaStatus, CaType } from "./enums";
+export {
+  useCreateCa,
+  useCreateCertificate,
+  useDeleteCa,
+  useImportCaCertificate,
+  useRenewCa,
+  useSignIntermediate,
+  useUpdateCa
+} from "./mutations";
+export { useGetCaById, useGetCaCert, useGetCaCerts, useGetCaCertTemplates, useGetCaCrls, useGetCaCsr } from "./queries";
diff --git a/frontend/src/hooks/api/ca/mutations.tsx b/frontend/src/hooks/api/ca/mutations.tsx
new file mode 100644
index 0000000000..0bbec617b1
--- /dev/null
+++ b/frontend/src/hooks/api/ca/mutations.tsx
@@ -0,0 +1,135 @@
+import { useMutation, useQueryClient } from "@tanstack/react-query";
+
+import { apiRequest } from "@app/config/request";
+
+import { workspaceKeys } from "../workspace";
+import { caKeys } from "./queries";
+import {
+  TCertificateAuthority,
+  TCreateCaDTO,
+  TCreateCertificateDTO,
+  TCreateCertificateResponse,
+  TDeleteCaDTO,
+  TImportCaCertificateDTO,
+  TImportCaCertificateResponse,
+  TRenewCaDTO,
+  TRenewCaResponse,
+  TSignIntermediateDTO,
+  TSignIntermediateResponse,
+  TUpdateCaDTO
+} from "./types";
+
+export const useCreateCa = () => {
+  const queryClient = useQueryClient();
+  return useMutation<TCertificateAuthority, {}, TCreateCaDTO>({
+    mutationFn: async (body) => {
+      const {
+        data: { ca }
+      } = await apiRequest.post<{ ca: TCertificateAuthority }>("/api/v1/pki/ca/", body);
+      return ca;
+    },
+    onSuccess: (_, { projectSlug }) => {
+      queryClient.invalidateQueries(workspaceKeys.getWorkspaceCas({ projectSlug }));
+    }
+  });
+};
+
+export const useUpdateCa = () => {
+  const queryClient = useQueryClient();
+  return useMutation<TCertificateAuthority, {}, TUpdateCaDTO>({
+    mutationFn: async ({ caId, projectSlug, ...body }) => {
+      const {
+        data: { ca }
+      } = await apiRequest.patch<{ ca: TCertificateAuthority }>(`/api/v1/pki/ca/${caId}`, body);
+      return ca;
+    },
+    onSuccess: ({ id }, { projectSlug }) => {
+      queryClient.invalidateQueries(workspaceKeys.getWorkspaceCas({ projectSlug }));
+      queryClient.invalidateQueries(caKeys.getCaById(id));
+    }
+  });
+};
+
+export const useDeleteCa = () => {
+  const queryClient = useQueryClient();
+  return useMutation<TCertificateAuthority, {}, TDeleteCaDTO>({
+    mutationFn: async ({ caId }) => {
+      const {
+        data: { ca }
+      } = await apiRequest.delete<{ ca: TCertificateAuthority }>(`/api/v1/pki/ca/${caId}`);
+      return ca;
+    },
+    onSuccess: (_, { projectSlug }) => {
+      queryClient.invalidateQueries(workspaceKeys.getWorkspaceCas({ projectSlug }));
+    }
+  });
+};
+
+export const useSignIntermediate = () => {
+  // TODO: consider renaming
+  return useMutation<TSignIntermediateResponse, {}, TSignIntermediateDTO>({
+    mutationFn: async (body) => {
+      const { data } = await apiRequest.post<TSignIntermediateResponse>(
+        `/api/v1/pki/ca/${body.caId}/sign-intermediate`,
+        body
+      );
+      return data;
+    }
+  });
+};
+
+export const useImportCaCertificate = () => {
+  const queryClient = useQueryClient();
+  return useMutation<TImportCaCertificateResponse, {}, TImportCaCertificateDTO>({
+    mutationFn: async ({ caId, ...body }) => {
+      const { data } = await apiRequest.post<TImportCaCertificateResponse>(
+        `/api/v1/pki/ca/${caId}/import-certificate`,
+        body
+      );
+      return data;
+    },
+    onSuccess: (_, { caId, projectSlug }) => {
+      queryClient.invalidateQueries(workspaceKeys.getWorkspaceCas({ projectSlug }));
+      queryClient.invalidateQueries(caKeys.getCaCerts(caId));
+      queryClient.invalidateQueries(caKeys.getCaCert(caId));
+    }
+  });
+};
+
+// consider renaming to issueCertificate
+export const useCreateCertificate = () => {
+  const queryClient = useQueryClient();
+  return useMutation<TCreateCertificateResponse, {}, TCreateCertificateDTO>({
+    mutationFn: async (body) => {
+      const { data } = await apiRequest.post<TCreateCertificateResponse>(
+        "/api/v1/pki/certificates/issue-certificate",
+        body
+      );
+      return data;
+    },
+    onSuccess: (_, { projectSlug }) => {
+      queryClient.invalidateQueries(workspaceKeys.forWorkspaceCertificates(projectSlug));
+    }
+  });
+};
+
+export const useRenewCa = () => {
+  const queryClient = useQueryClient();
+  return useMutation<TRenewCaResponse, {}, TRenewCaDTO>({
+    mutationFn: async (body) => {
+      const { data } = await apiRequest.post<TRenewCaResponse>(
+        `/api/v1/pki/ca/${body.caId}/renew`,
+        body
+      );
+      return data;
+    },
+    onSuccess: (_, { caId, projectSlug }) => {
+      queryClient.invalidateQueries(workspaceKeys.getWorkspaceCas({ projectSlug }));
+      queryClient.invalidateQueries(caKeys.getCaById(caId));
+      queryClient.invalidateQueries(caKeys.getCaCert(caId));
+      queryClient.invalidateQueries(caKeys.getCaCerts(caId));
+      queryClient.invalidateQueries(caKeys.getCaCsr(caId));
+      queryClient.invalidateQueries(caKeys.getCaCrl(caId));
+    }
+  });
+};
diff --git a/frontend/src/hooks/api/ca/queries.tsx b/frontend/src/hooks/api/ca/queries.tsx
new file mode 100644
index 0000000000..a1e6337763
--- /dev/null
+++ b/frontend/src/hooks/api/ca/queries.tsx
@@ -0,0 +1,107 @@
+import { useQuery } from "@tanstack/react-query";
+
+import { apiRequest } from "@app/config/request";
+
+import { TCertificateTemplate } from "../certificateTemplates/types";
+import { TCertificateAuthority } from "./types";
+
+export const caKeys = {
+  getCaById: (caId: string) => [{ caId }, "ca"],
+  getCaCerts: (caId: string) => [{ caId }, "ca-certs"],
+  getCaCrls: (caId: string) => [{ caId }, "ca-crls"],
+  getCaCert: (caId: string) => [{ caId }, "ca-cert"],
+  getCaCsr: (caId: string) => [{ caId }, "ca-csr"],
+  getCaCrl: (caId: string) => [{ caId }, "ca-crl"],
+  getCaCertTemplates: (caId: string) => [{ caId }, "ca-cert-templates"],
+  getCaEstConfig: (caId: string) => [{ caId }, "ca-est-config"]
+};
+
+export const useGetCaById = (caId: string) => {
+  return useQuery({
+    queryKey: caKeys.getCaById(caId),
+    queryFn: async () => {
+      const {
+        data: { ca }
+      } = await apiRequest.get<{ ca: TCertificateAuthority }>(`/api/v1/pki/ca/${caId}`);
+      return ca;
+    },
+    enabled: Boolean(caId)
+  });
+};
+
+export const useGetCaCerts = (caId: string) => {
+  return useQuery({
+    queryKey: caKeys.getCaCerts(caId),
+    queryFn: async () => {
+      const { data } = await apiRequest.get<
+        {
certificate: string; + certificateChain: string; + serialNumber: string; + version: number; + }[] + >(`/api/v1/pki/ca/${caId}/ca-certificates`); // TODO: consider updating endpoint structure + return data; + }, + enabled: Boolean(caId) + }); +}; + +export const useGetCaCert = (caId: string) => { + return useQuery({ + queryKey: caKeys.getCaCert(caId), + queryFn: async () => { + const { data } = await apiRequest.get<{ + certificate: string; + certificateChain: string; + serialNumber: string; + }>(`/api/v1/pki/ca/${caId}/certificate`); // TODO: consider updating endpoint structure + return data; + }, + enabled: Boolean(caId) + }); +}; + +export const useGetCaCsr = (caId: string) => { + return useQuery({ + queryKey: caKeys.getCaCsr(caId), + queryFn: async () => { + const { + data: { csr } + } = await apiRequest.get<{ + csr: string; + }>(`/api/v1/pki/ca/${caId}/csr`); + return csr; + }, + enabled: Boolean(caId) + }); +}; + +export const useGetCaCrls = (caId: string) => { + return useQuery({ + queryKey: caKeys.getCaCrls(caId), + queryFn: async () => { + const { data } = await apiRequest.get< + { + id: string; + crl: string; + }[] + >(`/api/v1/pki/ca/${caId}/crls`); + return data; + }, + enabled: Boolean(caId) + }); +}; + +export const useGetCaCertTemplates = (caId: string) => { + return useQuery({ + queryKey: caKeys.getCaCertTemplates(caId), + queryFn: async () => { + const { data } = await apiRequest.get<{ + certificateTemplates: TCertificateTemplate[]; + }>(`/api/v1/pki/ca/${caId}/certificate-templates`); + return data; + }, + enabled: Boolean(caId) + }); +}; \ No newline at end of file diff --git a/frontend/src/hooks/api/ca/types.ts b/frontend/src/hooks/api/ca/types.ts new file mode 100644 index 0000000000..25e5112e08 --- /dev/null +++ b/frontend/src/hooks/api/ca/types.ts @@ -0,0 +1,117 @@ +import { CertExtendedKeyUsage, CertKeyAlgorithm, CertKeyUsage } from "../certificates/enums"; +import { CaRenewalType, CaStatus, CaType } from "./enums"; + +export type TCertificateAuthority = { + id: string; + parentCaId?: string; + projectId: string; + type: CaType; + status: CaStatus; + friendlyName: string; + organization: string; + ou: string; + country: string; + province: string; + locality: string; + commonName: string; + dn: string; + maxPathLength?: number; + notAfter?: string; + notBefore?: string; + keyAlgorithm: CertKeyAlgorithm; + requireTemplateForIssuance: boolean; + activeCaCertId?: string; + createdAt: string; + updatedAt: string; +}; + +export type TCreateCaDTO = { + projectSlug: string; + type: string; + friendlyName?: string; + organization: string; + ou: string; + country: string; + province: string; + locality: string; + commonName: string; + notAfter?: string; + maxPathLength: number; + keyAlgorithm: CertKeyAlgorithm; + requireTemplateForIssuance: boolean; +}; + +export type TUpdateCaDTO = { + projectSlug: string; + caId: string; + status?: CaStatus; + requireTemplateForIssuance?: boolean; +}; + +export type TDeleteCaDTO = { + projectSlug: string; + caId: string; +}; + +export type TSignIntermediateDTO = { + caId: string; + csr: string; + maxPathLength: number; + notBefore?: string; + notAfter?: string; +}; + +export type TSignIntermediateResponse = { + certificate: string; + certificateChain: string; + issuingCaCertificate: string; + serialNumber: string; +}; + +export type TImportCaCertificateDTO = { + caId: string; + projectSlug: string; + certificate: string; + certificateChain: string; +}; + +export type TImportCaCertificateResponse = { + message: string; + caId: string; +}; + 
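+// Note: issuance can reference either a CA directly (caId) or a certificate template
+// (certificateTemplateId); both references are therefore optional on the DTO below.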
+export type TCreateCertificateDTO = {
+  projectSlug: string;
+  caId?: string;
+  certificateTemplateId?: string;
+  pkiCollectionId?: string;
+  friendlyName?: string;
+  commonName: string;
+  altNames: string; // sans
+  ttl: string; // string compatible with ms
+  notBefore?: string;
+  notAfter?: string;
+  keyUsages: CertKeyUsage[];
+  extendedKeyUsages: CertExtendedKeyUsage[];
+};
+
+export type TCreateCertificateResponse = {
+  certificate: string;
+  issuingCertificate: string;
+  certificateChain: string;
+  privateKey: string;
+  serialNumber: string;
+};
+
+export type TRenewCaDTO = {
+  projectSlug: string;
+  caId: string;
+  type: CaRenewalType;
+  notAfter: string;
+};
+
+export type TRenewCaResponse = {
+  certificate: string;
+  certificateChain: string;
+  serialNumber: string;
+};
diff --git a/frontend/src/hooks/api/certificateTemplates/index.tsx b/frontend/src/hooks/api/certificateTemplates/index.tsx
new file mode 100644
index 0000000000..61dfb35a2c
--- /dev/null
+++ b/frontend/src/hooks/api/certificateTemplates/index.tsx
@@ -0,0 +1,8 @@
+export {
+  useCreateCertTemplate,
+  useCreateEstConfig,
+  useDeleteCertTemplate,
+  useUpdateCertTemplate,
+  useUpdateEstConfig
+} from "./mutations";
+export { useGetCertTemplate, useGetEstConfig } from "./queries";
diff --git a/frontend/src/hooks/api/certificateTemplates/mutations.tsx b/frontend/src/hooks/api/certificateTemplates/mutations.tsx
new file mode 100644
index 0000000000..8545cf70c8
--- /dev/null
+++ b/frontend/src/hooks/api/certificateTemplates/mutations.tsx
@@ -0,0 +1,100 @@
+import { useMutation, useQueryClient } from "@tanstack/react-query";
+
+import { apiRequest } from "@app/config/request";
+
+import { caKeys } from "../ca/queries";
+import { workspaceKeys } from "../workspace";
+import { certTemplateKeys } from "./queries";
+import {
+  TCertificateTemplate,
+  TCreateCertificateTemplateDTO,
+  TCreateEstConfigDTO,
+  TDeleteCertificateTemplateDTO,
+  TUpdateCertificateTemplateDTO,
+  TUpdateEstConfigDTO
+} from "./types";
+
+export const useCreateCertTemplate = () => {
+  const queryClient = useQueryClient();
+  return useMutation<TCertificateTemplate, {}, TCreateCertificateTemplateDTO>({
+    mutationFn: async (data) => {
+      const { data: certificateTemplate } = await apiRequest.post<TCertificateTemplate>(
+        "/api/v1/pki/certificate-templates",
+        data
+      );
+      return certificateTemplate;
+    },
+    onSuccess: ({ caId }, { projectId }) => {
+      queryClient.invalidateQueries(workspaceKeys.getWorkspaceCertificateTemplates(projectId));
+      queryClient.invalidateQueries(caKeys.getCaCertTemplates(caId));
+    }
+  });
+};
+
+export const useUpdateCertTemplate = () => {
+  const queryClient = useQueryClient();
+  return useMutation<TCertificateTemplate, {}, TUpdateCertificateTemplateDTO>({
+    mutationFn: async (data) => {
+      const { data: certificateTemplate } = await apiRequest.patch<TCertificateTemplate>(
+        `/api/v1/pki/certificate-templates/${data.id}`,
+        data
+      );
+
+      return certificateTemplate;
+    },
+    onSuccess: ({ caId }, { projectId, id }) => {
+      queryClient.invalidateQueries(workspaceKeys.getWorkspaceCertificateTemplates(projectId));
+      queryClient.invalidateQueries(certTemplateKeys.getCertTemplateById(id));
+      queryClient.invalidateQueries(caKeys.getCaCertTemplates(caId));
+    }
+  });
+};
+
+export const useDeleteCertTemplate = () => {
+  const queryClient = useQueryClient();
+  return useMutation<TCertificateTemplate, {}, TDeleteCertificateTemplateDTO>({
+    mutationFn: async (data) => {
+      const { data: certificateTemplate } = await apiRequest.delete<TCertificateTemplate>(
+        `/api/v1/pki/certificate-templates/${data.id}`
+      );
+      return certificateTemplate;
+    },
+    onSuccess: ({ caId }, { projectId, id }) => {
+      queryClient.invalidateQueries(workspaceKeys.getWorkspaceCertificateTemplates(projectId));
+      queryClient.invalidateQueries(certTemplateKeys.getCertTemplateById(id));
+      queryClient.invalidateQueries(caKeys.getCaCertTemplates(caId));
+    }
+  });
+};
+
+export const useCreateEstConfig = () => {
+  const queryClient = useQueryClient();
+  return useMutation<{}, {}, TCreateEstConfigDTO>({
+    mutationFn: async (body) => {
+      const { data } = await apiRequest.post(
+        `/api/v1/pki/certificate-templates/${body.certificateTemplateId}/est-config`,
+        body
+      );
+      return data;
+    },
+    onSuccess: (_, { certificateTemplateId }) => {
+      queryClient.invalidateQueries(certTemplateKeys.getEstConfig(certificateTemplateId));
+    }
+  });
+};
+
+export const useUpdateEstConfig = () => {
+  const queryClient = useQueryClient();
+  return useMutation<{}, {}, TUpdateEstConfigDTO>({
+    mutationFn: async (body) => {
+      const { data } = await apiRequest.patch(
+        `/api/v1/pki/certificate-templates/${body.certificateTemplateId}/est-config`,
+        body
+      );
+      return data;
+    },
+    onSuccess: (_, { certificateTemplateId }) => {
+      queryClient.invalidateQueries(certTemplateKeys.getEstConfig(certificateTemplateId));
+    }
+  });
+};
diff --git a/frontend/src/hooks/api/certificateTemplates/queries.tsx b/frontend/src/hooks/api/certificateTemplates/queries.tsx
new file mode 100644
index 0000000000..7ee5bbd30a
--- /dev/null
+++ b/frontend/src/hooks/api/certificateTemplates/queries.tsx
@@ -0,0 +1,37 @@
+import { useQuery } from "@tanstack/react-query";
+
+import { apiRequest } from "@app/config/request";
+
+import { TCertificateTemplate, TEstConfig } from "./types";
+
+export const certTemplateKeys = {
+  getCertTemplateById: (id: string) => [{ id }, "cert-template"],
+  getEstConfig: (id: string) => [{ id }, "cert-template-est-config"]
+};
+
+export const useGetCertTemplate = (id: string) => {
+  return useQuery({
+    queryKey: certTemplateKeys.getCertTemplateById(id),
+    queryFn: async () => {
+      const { data: certificateTemplate } = await apiRequest.get<TCertificateTemplate>(
+        `/api/v1/pki/certificate-templates/${id}`
+      );
+      return certificateTemplate;
+    },
+    enabled: Boolean(id)
+  });
+};
+
+export const useGetEstConfig = (certificateTemplateId: string) => {
+  return useQuery({
+    queryKey: certTemplateKeys.getEstConfig(certificateTemplateId),
+    queryFn: async () => {
+      const { data: estConfig } = await apiRequest.get<TEstConfig>(
+        `/api/v1/pki/certificate-templates/${certificateTemplateId}/est-config`
+      );
+
+      return estConfig;
+    },
+    enabled: Boolean(certificateTemplateId)
+  });
+};
diff --git a/frontend/src/hooks/api/certificateTemplates/types.ts b/frontend/src/hooks/api/certificateTemplates/types.ts
new file mode 100644
index 0000000000..14367a280e
--- /dev/null
+++ b/frontend/src/hooks/api/certificateTemplates/types.ts
@@ -0,0 +1,69 @@
+import { CertExtendedKeyUsage, CertKeyUsage } from "../certificates/enums";
+
+export type TCertificateTemplate = {
+  id: string;
+  caId: string;
+  caName: string;
+  projectId: string;
+  pkiCollectionId?: string;
+  name: string;
+  commonName: string;
+  subjectAlternativeName: string;
+  ttl: string;
+  keyUsages: CertKeyUsage[];
+  extendedKeyUsages: CertExtendedKeyUsage[];
+};
+
+export type TCreateCertificateTemplateDTO = {
+  caId: string;
+  pkiCollectionId?: string;
+  name: string;
+  commonName: string;
+  subjectAlternativeName: string;
+  ttl: string;
+  projectId: string;
+  keyUsages: CertKeyUsage[];
+  extendedKeyUsages: CertExtendedKeyUsage[];
+};
+
+export type TUpdateCertificateTemplateDTO = {
+  id: string;
+  caId?: string;
+  pkiCollectionId?: string;
+  name?: string;
+  commonName?: string;
+  subjectAlternativeName?: string;
+  ttl?:
string; + projectId: string; + keyUsages?: CertKeyUsage[]; + extendedKeyUsages?: CertExtendedKeyUsage[]; +}; + +export type TDeleteCertificateTemplateDTO = { + id: string; + projectId: string; +}; + +export type TCreateEstConfigDTO = { + certificateTemplateId: string; + caChain?: string; + passphrase: string; + isEnabled: boolean; + disableBootstrapCertValidation: boolean; +}; + +export type TUpdateEstConfigDTO = { + certificateTemplateId: string; + caChain?: string; + passphrase?: string; + isEnabled?: boolean; + disableBootstrapCertValidation?: boolean; +}; + +export type TEstConfig = { + id: string; + certificateTemplateId: string; + caChain: string; + isEnabled: boolean; + disableBootstrapCertValidation: boolean; +}; diff --git a/frontend/src/hooks/api/certificates/constants.tsx b/frontend/src/hooks/api/certificates/constants.tsx new file mode 100644 index 0000000000..0384ea6cd6 --- /dev/null +++ b/frontend/src/hooks/api/certificates/constants.tsx @@ -0,0 +1,98 @@ +import { + CertExtendedKeyUsage, + CertKeyAlgorithm, + CertKeyUsage, + CertStatus, + CrlReason +} from "./enums"; + +export const certStatusToNameMap: { [K in CertStatus]: string } = { + [CertStatus.ACTIVE]: "Active", + [CertStatus.REVOKED]: "Revoked" +}; + +export const getCertStatusBadgeVariant = (status: CertStatus) => { + switch (status) { + case CertStatus.ACTIVE: + return "success"; + case CertStatus.REVOKED: + return "danger"; + default: + return "primary"; + } +}; + +export const certKeyAlgorithmToNameMap: { [K in CertKeyAlgorithm]: string } = { + [CertKeyAlgorithm.RSA_2048]: "RSA 2048", + [CertKeyAlgorithm.RSA_4096]: "RSA 4096", + [CertKeyAlgorithm.ECDSA_P256]: "ECDSA P256", + [CertKeyAlgorithm.ECDSA_P384]: "ECDSA P384" +}; + +export const certKeyAlgorithms = [ + { label: certKeyAlgorithmToNameMap[CertKeyAlgorithm.RSA_2048], value: CertKeyAlgorithm.RSA_2048 }, + { label: certKeyAlgorithmToNameMap[CertKeyAlgorithm.RSA_4096], value: CertKeyAlgorithm.RSA_4096 }, + { + label: certKeyAlgorithmToNameMap[CertKeyAlgorithm.ECDSA_P256], + value: CertKeyAlgorithm.ECDSA_P256 + }, + { + label: certKeyAlgorithmToNameMap[CertKeyAlgorithm.ECDSA_P384], + value: CertKeyAlgorithm.ECDSA_P384 + } +]; + +export const crlReasonToNameMap: { [K in CrlReason]: string } = { + [CrlReason.UNSPECIFIED]: "Unspecified", + [CrlReason.KEY_COMPROMISE]: "Key Compromise", + [CrlReason.CA_COMPROMISE]: "CA Compromise", + [CrlReason.AFFILIATION_CHANGED]: "Affiliation Changed", + [CrlReason.SUPERSEDED]: "Superseded", + [CrlReason.CESSATION_OF_OPERATION]: "Cessation of Operation", + [CrlReason.CERTIFICATE_HOLD]: "Certificate Hold", + // [CrlReason.REMOVE_FROM_CRL]: "Remove from CRL", + [CrlReason.PRIVILEGE_WITHDRAWN]: "Privilege Withdrawn", + [CrlReason.A_A_COMPROMISE]: "A/A Compromise" +}; + +export const crlReasons = [ + { label: crlReasonToNameMap[CrlReason.UNSPECIFIED], value: CrlReason.UNSPECIFIED }, + { label: crlReasonToNameMap[CrlReason.KEY_COMPROMISE], value: CrlReason.KEY_COMPROMISE }, + { label: crlReasonToNameMap[CrlReason.CA_COMPROMISE], value: CrlReason.CA_COMPROMISE }, + { + label: crlReasonToNameMap[CrlReason.AFFILIATION_CHANGED], + value: CrlReason.AFFILIATION_CHANGED + }, + { label: crlReasonToNameMap[CrlReason.SUPERSEDED], value: CrlReason.SUPERSEDED }, + { + label: crlReasonToNameMap[CrlReason.CESSATION_OF_OPERATION], + value: CrlReason.CESSATION_OF_OPERATION + }, + { label: crlReasonToNameMap[CrlReason.CERTIFICATE_HOLD], value: CrlReason.CERTIFICATE_HOLD }, + { + label: crlReasonToNameMap[CrlReason.PRIVILEGE_WITHDRAWN], + value: 
CrlReason.PRIVILEGE_WITHDRAWN + }, + { label: crlReasonToNameMap[CrlReason.A_A_COMPROMISE], value: CrlReason.A_A_COMPROMISE } +]; + +export const KEY_USAGES_OPTIONS = [ + { value: CertKeyUsage.DIGITAL_SIGNATURE, label: "Digital Signature" }, + { value: CertKeyUsage.KEY_ENCIPHERMENT, label: "Key Encipherment" }, + { value: CertKeyUsage.NON_REPUDIATION, label: "Non Repudiation" }, + { value: CertKeyUsage.DATA_ENCIPHERMENT, label: "Data Encipherment" }, + { value: CertKeyUsage.KEY_AGREEMENT, label: "Key Agreement" }, + { value: CertKeyUsage.KEY_CERT_SIGN, label: "Certificate Sign" }, + { value: CertKeyUsage.CRL_SIGN, label: "CRL Sign" }, + { value: CertKeyUsage.ENCIPHER_ONLY, label: "Encipher Only" }, + { value: CertKeyUsage.DECIPHER_ONLY, label: "Decipher Only" } +] as const; + +export const EXTENDED_KEY_USAGES_OPTIONS = [ + { value: CertExtendedKeyUsage.CLIENT_AUTH, label: "Client Auth" }, + { value: CertExtendedKeyUsage.SERVER_AUTH, label: "Server Auth" }, + { value: CertExtendedKeyUsage.EMAIL_PROTECTION, label: "Email Protection" }, + { value: CertExtendedKeyUsage.OCSP_SIGNING, label: "OCSP Signing" }, + { value: CertExtendedKeyUsage.CODE_SIGNING, label: "Code Signing" }, + { value: CertExtendedKeyUsage.TIMESTAMPING, label: "Timestamping" } +] as const; diff --git a/frontend/src/hooks/api/certificates/enums.tsx b/frontend/src/hooks/api/certificates/enums.tsx new file mode 100644 index 0000000000..566da75063 --- /dev/null +++ b/frontend/src/hooks/api/certificates/enums.tsx @@ -0,0 +1,45 @@ +export enum CertStatus { + ACTIVE = "active", + REVOKED = "revoked" +} + +export enum CertKeyAlgorithm { + RSA_2048 = "RSA_2048", + RSA_4096 = "RSA_4096", + ECDSA_P256 = "EC_prime256v1", + ECDSA_P384 = "EC_secp384r1" +} + +export enum CrlReason { + UNSPECIFIED = "UNSPECIFIED", + KEY_COMPROMISE = "KEY_COMPROMISE", + CA_COMPROMISE = "CA_COMPROMISE", + AFFILIATION_CHANGED = "AFFILIATION_CHANGED", + SUPERSEDED = "SUPERSEDED", + CESSATION_OF_OPERATION = "CESSATION_OF_OPERATION", + CERTIFICATE_HOLD = "CERTIFICATE_HOLD", + // REMOVE_FROM_CRL = "REMOVE_FROM_CRL", + PRIVILEGE_WITHDRAWN = "PRIVILEGE_WITHDRAWN", + A_A_COMPROMISE = "A_A_COMPROMISE" +} + +export enum CertKeyUsage { + DIGITAL_SIGNATURE = "digitalSignature", + KEY_ENCIPHERMENT = "keyEncipherment", + NON_REPUDIATION = "nonRepudiation", + DATA_ENCIPHERMENT = "dataEncipherment", + KEY_AGREEMENT = "keyAgreement", + KEY_CERT_SIGN = "keyCertSign", + CRL_SIGN = "cRLSign", + ENCIPHER_ONLY = "encipherOnly", + DECIPHER_ONLY = "decipherOnly" +} + +export enum CertExtendedKeyUsage { + CLIENT_AUTH = "clientAuth", + SERVER_AUTH = "serverAuth", + CODE_SIGNING = "codeSigning", + EMAIL_PROTECTION = "emailProtection", + TIMESTAMPING = "timeStamping", + OCSP_SIGNING = "ocspSigning" +} diff --git a/frontend/src/hooks/api/certificates/index.tsx b/frontend/src/hooks/api/certificates/index.tsx new file mode 100644 index 0000000000..dd922fd6a2 --- /dev/null +++ b/frontend/src/hooks/api/certificates/index.tsx @@ -0,0 +1,2 @@ +export { useDeleteCert, useRevokeCert } from "./mutations"; +export { useGetCert, useGetCertBody } from "./queries"; diff --git a/frontend/src/hooks/api/certificates/mutations.tsx b/frontend/src/hooks/api/certificates/mutations.tsx new file mode 100644 index 0000000000..d73a0cd15f --- /dev/null +++ b/frontend/src/hooks/api/certificates/mutations.tsx @@ -0,0 +1,43 @@ +import { useMutation, useQueryClient } from "@tanstack/react-query"; + +import { apiRequest } from "@app/config/request"; + +import { workspaceKeys } from "../workspace"; +import { 
TCertificate, TDeleteCertDTO, TRevokeCertDTO } from "./types";
+
+export const useDeleteCert = () => {
+  const queryClient = useQueryClient();
+  return useMutation<TCertificate, {}, TDeleteCertDTO>({
+    mutationFn: async ({ serialNumber }) => {
+      const {
+        data: { certificate }
+      } = await apiRequest.delete<{ certificate: TCertificate }>(
+        `/api/v1/pki/certificates/${serialNumber}`
+      );
+      return certificate;
+    },
+    onSuccess: (_, { projectSlug }) => {
+      queryClient.invalidateQueries(workspaceKeys.forWorkspaceCertificates(projectSlug));
+    }
+  });
+};
+
+export const useRevokeCert = () => {
+  const queryClient = useQueryClient();
+  return useMutation<TCertificate, {}, TRevokeCertDTO>({
+    mutationFn: async ({ serialNumber, revocationReason }) => {
+      const {
+        data: { certificate }
+      } = await apiRequest.post<{ certificate: TCertificate }>(
+        `/api/v1/pki/certificates/${serialNumber}/revoke`,
+        {
+          revocationReason
+        }
+      );
+      return certificate;
+    },
+    onSuccess: (_, { projectSlug }) => {
+      queryClient.invalidateQueries(workspaceKeys.forWorkspaceCertificates(projectSlug));
+    }
+  });
+};
diff --git a/frontend/src/hooks/api/certificates/queries.tsx b/frontend/src/hooks/api/certificates/queries.tsx
new file mode 100644
index 0000000000..50c751c06d
--- /dev/null
+++ b/frontend/src/hooks/api/certificates/queries.tsx
@@ -0,0 +1,40 @@
+import { useQuery } from "@tanstack/react-query";
+
+import { apiRequest } from "@app/config/request";
+
+import { TCertificate } from "./types";
+
+export const certKeys = {
+  getCertById: (serialNumber: string) => [{ serialNumber }, "cert"],
+  getCertBody: (serialNumber: string) => [{ serialNumber }, "certBody"]
+};
+
+export const useGetCert = (serialNumber: string) => {
+  return useQuery({
+    queryKey: certKeys.getCertById(serialNumber),
+    queryFn: async () => {
+      const {
+        data: { certificate }
+      } = await apiRequest.get<{ certificate: TCertificate }>(
+        `/api/v1/pki/certificates/${serialNumber}`
+      );
+      return certificate;
+    },
+    enabled: Boolean(serialNumber)
+  });
+};
+
+export const useGetCertBody = (serialNumber: string) => {
+  return useQuery({
+    queryKey: certKeys.getCertBody(serialNumber),
+    queryFn: async () => {
+      const { data } = await apiRequest.get<{
+        certificate: string;
+        certificateChain: string;
+        serialNumber: string;
+      }>(`/api/v1/pki/certificates/${serialNumber}/certificate`);
+      return data;
+    },
+    enabled: Boolean(serialNumber)
+  });
+};
diff --git a/frontend/src/hooks/api/certificates/types.ts b/frontend/src/hooks/api/certificates/types.ts
new file mode 100644
index 0000000000..a9bcf5fbcb
--- /dev/null
+++ b/frontend/src/hooks/api/certificates/types.ts
@@ -0,0 +1,27 @@
+import { CertExtendedKeyUsage, CertKeyUsage, CertStatus } from "./enums";
+
+export type TCertificate = {
+  id: string;
+  caId: string;
+  certificateTemplateId?: string;
+  status: CertStatus;
+  friendlyName: string;
+  commonName: string;
+  altNames: string;
+  serialNumber: string;
+  notBefore: string;
+  notAfter: string;
+  keyUsages: CertKeyUsage[];
+  extendedKeyUsages: CertExtendedKeyUsage[];
+};
+
+export type TDeleteCertDTO = {
+  projectSlug: string;
+  serialNumber: string;
+};
+
+export type TRevokeCertDTO = {
+  projectSlug: string;
+  serialNumber: string;
+  revocationReason: string;
+};
diff --git a/frontend/src/hooks/api/cmeks/index.ts b/frontend/src/hooks/api/cmeks/index.ts
new file mode 100644
index 0000000000..177955438b
--- /dev/null
+++ b/frontend/src/hooks/api/cmeks/index.ts
@@ -0,0 +1,3 @@
+export * from "./mutations";
+export * from "./queries";
+export * from "./types";
diff --git a/frontend/src/hooks/api/cmeks/mutations.tsx
b/frontend/src/hooks/api/cmeks/mutations.tsx
new file mode 100644
index 0000000000..f3a1e6eaeb
--- /dev/null
+++ b/frontend/src/hooks/api/cmeks/mutations.tsx
@@ -0,0 +1,90 @@
+import { useMutation, useQueryClient } from "@tanstack/react-query";
+import { encodeBase64 } from "tweetnacl-util";
+
+import { apiRequest } from "@app/config/request";
+import { cmekKeys } from "@app/hooks/api/cmeks/queries";
+import {
+  TCmekDecrypt,
+  TCmekDecryptResponse,
+  TCmekEncrypt,
+  TCmekEncryptResponse,
+  TCreateCmek,
+  TDeleteCmek,
+  TUpdateCmek
+} from "@app/hooks/api/cmeks/types";
+
+export const useCreateCmek = () => {
+  const queryClient = useQueryClient();
+  return useMutation({
+    mutationFn: async (payload: TCreateCmek) => {
+      const { data } = await apiRequest.post("/api/v1/kms/keys", payload);
+
+      return data;
+    },
+    onSuccess: (_, { projectId }) => {
+      queryClient.invalidateQueries(cmekKeys.getCmeksByProjectId({ projectId }));
+    }
+  });
+};
+
+export const useUpdateCmek = () => {
+  const queryClient = useQueryClient();
+  return useMutation({
+    mutationFn: async ({ keyId, name, description, isDisabled }: TUpdateCmek) => {
+      const { data } = await apiRequest.patch(`/api/v1/kms/keys/${keyId}`, {
+        name,
+        description,
+        isDisabled
+      });
+
+      return data;
+    },
+    onSuccess: (_, { projectId }) => {
+      queryClient.invalidateQueries(cmekKeys.getCmeksByProjectId({ projectId }));
+    }
+  });
+};
+
+export const useDeleteCmek = () => {
+  const queryClient = useQueryClient();
+  return useMutation({
+    mutationFn: async ({ keyId }: TDeleteCmek) => {
+      const { data } = await apiRequest.delete(`/api/v1/kms/keys/${keyId}`);
+
+      return data;
+    },
+    onSuccess: (_, { projectId }) => {
+      queryClient.invalidateQueries(cmekKeys.getCmeksByProjectId({ projectId }));
+    }
+  });
+};
+
+export const useCmekEncrypt = () => {
+  return useMutation({
+    mutationFn: async ({ keyId, plaintext, isBase64Encoded }: TCmekEncrypt) => {
+      const { data } = await apiRequest.post<TCmekEncryptResponse>(
+        `/api/v1/kms/keys/${keyId}/encrypt`,
+        {
+          plaintext: isBase64Encoded ? plaintext : encodeBase64(Buffer.from(plaintext))
+        }
+      );
+
+      return data;
+    }
+  });
+};
+
+export const useCmekDecrypt = () => {
+  return useMutation({
+    mutationFn: async ({ keyId, ciphertext }: TCmekDecrypt) => {
+      const { data } = await apiRequest.post<TCmekDecryptResponse>(
+        `/api/v1/kms/keys/${keyId}/decrypt`,
+        {
+          ciphertext
+        }
+      );
+
+      return data;
+    }
+  });
+};
diff --git a/frontend/src/hooks/api/cmeks/queries.tsx b/frontend/src/hooks/api/cmeks/queries.tsx
new file mode 100644
index 0000000000..5c2dac8952
--- /dev/null
+++ b/frontend/src/hooks/api/cmeks/queries.tsx
@@ -0,0 +1,53 @@
+import { useQuery, UseQueryOptions } from "@tanstack/react-query";
+
+import { apiRequest } from "@app/config/request";
+import { CmekOrderBy, TListProjectCmeksDTO, TProjectCmeksList } from "@app/hooks/api/cmeks/types";
+import { OrderByDirection } from "@app/hooks/api/generic/types";
+
+export const cmekKeys = {
+  all: ["cmek"] as const,
+  lists: () => [...cmekKeys.all, "list"] as const,
+  getCmeksByProjectId: ({ projectId, ...filters }: TListProjectCmeksDTO) =>
+    [...cmekKeys.lists(), projectId, filters] as const
+};
+
+export const useGetCmeksByProjectId = (
+  {
+    projectId,
+    offset = 0,
+    limit = 100,
+    orderBy = CmekOrderBy.Name,
+    orderDirection = OrderByDirection.ASC,
+    search = ""
+  }: TListProjectCmeksDTO,
+  options?: Omit<
+    UseQueryOptions<
+      TProjectCmeksList,
+      unknown,
+      TProjectCmeksList,
+      ReturnType<typeof cmekKeys.getCmeksByProjectId>
+    >,
+    "queryKey" | "queryFn"
+  >
+) => {
+  return useQuery({
+    queryKey: cmekKeys.getCmeksByProjectId({
+      projectId,
+      offset,
+      limit,
+      orderBy,
+      orderDirection,
+      search
+    }),
+    queryFn: async () => {
+      const { data } = await apiRequest.get<TProjectCmeksList>("/api/v1/kms/keys", {
+        params: { projectId, offset, limit, search, orderBy, orderDirection }
+      });
+
+      return data;
+    },
+    enabled: Boolean(projectId) && (options?.enabled ??
true),
+    keepPreviousData: true,
+    ...options
+  });
+};
diff --git a/frontend/src/hooks/api/cmeks/types.ts b/frontend/src/hooks/api/cmeks/types.ts
new file mode 100644
index 0000000000..c557c1fc28
--- /dev/null
+++ b/frontend/src/hooks/api/cmeks/types.ts
@@ -0,0 +1,58 @@
+import { OrderByDirection } from "@app/hooks/api/generic/types";
+
+export type TCmek = {
+  id: string;
+  name: string;
+  description?: string;
+  encryptionAlgorithm: EncryptionAlgorithm;
+  projectId: string;
+  isDisabled: boolean;
+  isReserved: boolean;
+  orgId: string;
+  version: number;
+  createdAt: string;
+  updatedAt: string;
+};
+
+type ProjectRef = { projectId: string };
+type KeyRef = { keyId: string };
+
+export type TCreateCmek = Pick<TCmek, "name" | "description" | "encryptionAlgorithm"> & ProjectRef;
+export type TUpdateCmek = KeyRef &
+  Partial<Pick<TCmek, "name" | "description" | "isDisabled">> &
+  ProjectRef;
+export type TDeleteCmek = KeyRef & ProjectRef;
+
+export type TCmekEncrypt = KeyRef & { plaintext: string; isBase64Encoded?: boolean };
+export type TCmekDecrypt = KeyRef & { ciphertext: string };
+
+export type TProjectCmeksList = {
+  keys: TCmek[];
+  totalCount: number;
+};
+
+export type TListProjectCmeksDTO = {
+  projectId: string;
+  offset?: number;
+  limit?: number;
+  orderBy?: CmekOrderBy;
+  orderDirection?: OrderByDirection;
+  search?: string;
+};
+
+export type TCmekEncryptResponse = {
+  ciphertext: string;
+};
+
+export type TCmekDecryptResponse = {
+  plaintext: string;
+};
+
+export enum CmekOrderBy {
+  Name = "name"
+}
+
+export enum EncryptionAlgorithm {
+  AES_GCM_256 = "aes-256-gcm",
+  AES_GCM_128 = "aes-128-gcm"
+}
diff --git a/frontend/src/hooks/api/dashboard/index.ts b/frontend/src/hooks/api/dashboard/index.ts
new file mode 100644
index 0000000000..83206bdf8e
--- /dev/null
+++ b/frontend/src/hooks/api/dashboard/index.ts
@@ -0,0 +1,5 @@
+export {
+  useGetProjectSecretsDetails,
+  useGetProjectSecretsOverview,
+  useGetProjectSecretsQuickSearch
+} from "./queries";
diff --git a/frontend/src/hooks/api/dashboard/queries.tsx b/frontend/src/hooks/api/dashboard/queries.tsx
new file mode 100644
index 0000000000..adff8bb0ed
--- /dev/null
+++ b/frontend/src/hooks/api/dashboard/queries.tsx
@@ -0,0 +1,388 @@
+import { useCallback } from "react";
+import { useQuery, UseQueryOptions } from "@tanstack/react-query";
+import axios from "axios";
+
+import { createNotification } from "@app/components/notifications";
+import { apiRequest } from "@app/config/request";
+import {
+  DashboardProjectSecretsByKeys,
+  DashboardProjectSecretsDetails,
+  DashboardProjectSecretsDetailsResponse,
+  DashboardProjectSecretsOverview,
+  DashboardProjectSecretsOverviewResponse,
+  DashboardSecretsOrderBy,
+  TDashboardProjectSecretsQuickSearch,
+  TDashboardProjectSecretsQuickSearchResponse,
+  TGetDashboardProjectSecretsByKeys,
+  TGetDashboardProjectSecretsDetailsDTO,
+  TGetDashboardProjectSecretsOverviewDTO,
+  TGetDashboardProjectSecretsQuickSearchDTO
+} from "@app/hooks/api/dashboard/types";
+import { OrderByDirection } from "@app/hooks/api/generic/types";
+import { mergePersonalSecrets } from "@app/hooks/api/secrets/queries";
+import { groupBy, unique } from "@app/lib/fn/array";
+
+export const dashboardKeys = {
+  all: () => ["dashboard"] as const,
+  getDashboardSecrets: ({
+    projectId,
+    secretPath
+  }: Pick<TGetDashboardProjectSecretsOverviewDTO, "projectId" | "secretPath">) =>
+    [...dashboardKeys.all(), { projectId, secretPath }] as const,
+  getProjectSecretsOverview: ({
+    projectId,
+    secretPath,
+    ...params
+  }: TGetDashboardProjectSecretsOverviewDTO) =>
+    [
+      ...dashboardKeys.getDashboardSecrets({ projectId, secretPath }),
+      "secrets-overview",
+      params
+    ] as const,
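+  // Unlike the overview key, the detail key below is additionally scoped by environment,
+  // so a single environment's secret view can be cached and invalidated on its own.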
+  getProjectSecretsDetails: ({
+    projectId,
+    secretPath,
+    environment,
+    ...params
+  }: TGetDashboardProjectSecretsDetailsDTO) =>
+    [
+      ...dashboardKeys.getDashboardSecrets({ projectId, secretPath }),
+      "secrets-details",
+      environment,
+      params
+    ] as const,
+  getProjectSecretsQuickSearch: ({
+    projectId,
+    secretPath,
+    ...params
+  }: TGetDashboardProjectSecretsQuickSearchDTO) =>
+    [
+      ...dashboardKeys.getDashboardSecrets({ projectId, secretPath }),
+      "quick-search",
+      params
+    ] as const
+};
+
+export const fetchProjectSecretsOverview = async ({
+  environments,
+  ...params
+}: TGetDashboardProjectSecretsOverviewDTO) => {
+  const { data } = await apiRequest.get<DashboardProjectSecretsOverviewResponse>(
+    "/api/v1/dashboard/secrets-overview",
+    {
+      params: {
+        ...params,
+        environments: encodeURIComponent(environments.join(","))
+      }
+    }
+  );
+
+  return data;
+};
+
+export const fetchProjectSecretsDetails = async ({
+  tags,
+  ...params
+}: TGetDashboardProjectSecretsDetailsDTO) => {
+  const { data } = await apiRequest.get<DashboardProjectSecretsDetailsResponse>(
+    "/api/v1/dashboard/secrets-details",
+    {
+      params: {
+        ...params,
+        tags: encodeURIComponent(
+          Object.entries(tags)
+            // eslint-disable-next-line @typescript-eslint/no-unused-vars
+            .filter(([_, enabled]) => enabled)
+            .map(([tag]) => tag)
+            .join(",")
+        )
+      }
+    }
+  );
+
+  return data;
+};
+
+export const fetchDashboardProjectSecretsByKeys = async ({
+  keys,
+  ...params
+}: TGetDashboardProjectSecretsByKeys) => {
+  const { data } = await apiRequest.get<DashboardProjectSecretsByKeys>(
+    "/api/v1/dashboard/secrets-by-keys",
+    {
+      params: {
+        ...params,
+        keys: encodeURIComponent(keys.join(","))
+      }
+    }
+  );
+
+  return data;
+};
+
+export const useGetProjectSecretsOverview = (
+  {
+    projectId,
+    secretPath,
+    offset = 0,
+    limit = 100,
+    orderBy = DashboardSecretsOrderBy.Name,
+    orderDirection = OrderByDirection.ASC,
+    search = "",
+    includeSecrets,
+    includeFolders,
+    includeDynamicSecrets,
+    environments
+  }: TGetDashboardProjectSecretsOverviewDTO,
+  options?: Omit<
+    UseQueryOptions<
+      DashboardProjectSecretsOverviewResponse,
+      unknown,
+      DashboardProjectSecretsOverview,
+      ReturnType<typeof dashboardKeys.getProjectSecretsOverview>
+    >,
+    "queryKey" | "queryFn"
+  >
+) => {
+  return useQuery({
+    ...options,
+    // wait for all values to be available
+    enabled: Boolean(projectId) && (options?.enabled ?? true) && Boolean(environments.length),
+    queryKey: dashboardKeys.getProjectSecretsOverview({
+      secretPath,
+      search,
+      limit,
+      orderBy,
+      orderDirection,
+      offset,
+      projectId,
+      includeSecrets,
+      includeFolders,
+      includeDynamicSecrets,
+      environments
+    }),
+    queryFn: () =>
+      fetchProjectSecretsOverview({
+        secretPath,
+        search,
+        limit,
+        orderBy,
+        orderDirection,
+        offset,
+        projectId,
+        includeSecrets,
+        includeFolders,
+        includeDynamicSecrets,
+        environments
+      }),
+    onError: (error) => {
+      if (axios.isAxiosError(error)) {
+        const serverResponse = error.response?.data as { message: string };
+        createNotification({
+          title: "Error fetching secret details",
+          type: "error",
+          text: serverResponse.message
+        });
+      }
+    },
+    select: useCallback((data: Awaited<ReturnType<typeof fetchProjectSecretsOverview>>) => {
+      const { secrets, ...select } = data;
+      const uniqueSecrets = secrets ? unique(secrets, (i) => i.secretKey) : [];
+
+      const uniqueFolders = select.folders ? unique(select.folders, (i) => i.name) : [];
+
+      const uniqueDynamicSecrets = select.dynamicSecrets
+        ? unique(select.dynamicSecrets, (i) => i.name)
+        : [];
+
+      return {
+        ...select,
+        secrets: secrets ? mergePersonalSecrets(secrets) : undefined,
+        totalUniqueSecretsInPage: uniqueSecrets.length,
+        totalUniqueDynamicSecretsInPage: uniqueDynamicSecrets.length,
+        totalUniqueFoldersInPage: uniqueFolders.length
+      };
+    }, []),
+    keepPreviousData: true
+  });
+};
+
+export const useGetProjectSecretsDetails = (
+  {
+    projectId,
+    secretPath,
+    environment,
+    offset = 0,
+    limit = 100,
+    orderBy = DashboardSecretsOrderBy.Name,
+    orderDirection = OrderByDirection.ASC,
+    search = "",
+    includeSecrets,
+    includeFolders,
+    includeImports,
+    includeDynamicSecrets,
+    tags
+  }: TGetDashboardProjectSecretsDetailsDTO,
+  options?: Omit<
+    UseQueryOptions<
+      DashboardProjectSecretsDetailsResponse,
+      unknown,
+      DashboardProjectSecretsDetails,
+      ReturnType<typeof dashboardKeys.getProjectSecretsDetails>
+    >,
+    "queryKey" | "queryFn"
+  >
+) => {
+  return useQuery({
+    ...options,
+    // wait for all values to be available
+    enabled: Boolean(projectId) && (options?.enabled ?? true),
+    queryKey: dashboardKeys.getProjectSecretsDetails({
+      secretPath,
+      search,
+      limit,
+      orderBy,
+      orderDirection,
+      offset,
+      projectId,
+      environment,
+      includeSecrets,
+      includeFolders,
+      includeImports,
+      includeDynamicSecrets,
+      tags
+    }),
+    queryFn: () =>
+      fetchProjectSecretsDetails({
+        secretPath,
+        search,
+        limit,
+        orderBy,
+        orderDirection,
+        offset,
+        projectId,
+        environment,
+        includeSecrets,
+        includeFolders,
+        includeImports,
+        includeDynamicSecrets,
+        tags
+      }),
+    onError: (error) => {
+      if (axios.isAxiosError(error)) {
+        const serverResponse = error.response?.data as { message: string };
+        createNotification({
+          title: "Error fetching secret details",
+          type: "error",
+          text: serverResponse.message
+        });
+      }
+    },
+    select: useCallback(
+      (data: Awaited<ReturnType<typeof fetchProjectSecretsDetails>>) => ({
+        ...data,
+        secrets: data.secrets ? mergePersonalSecrets(data.secrets) : undefined
+      }),
+      []
+    ),
+    keepPreviousData: true
+  });
+};
+
+export const fetchProjectSecretsQuickSearch = async ({
+  environments,
+  tags,
+  ...params
+}: TGetDashboardProjectSecretsQuickSearchDTO) => {
+  const { data } = await apiRequest.get<TDashboardProjectSecretsQuickSearchResponse>(
+    "/api/v1/dashboard/secrets-deep-search",
+    {
+      params: {
+        ...params,
+        environments: encodeURIComponent(environments.join(",")),
+        tags: encodeURIComponent(
+          Object.entries(tags)
+            // eslint-disable-next-line @typescript-eslint/no-unused-vars
+            .filter(([_, enabled]) => enabled)
+            .map(([tag]) => tag)
+            .join(",")
+        )
+      }
+    }
+  );
+
+  return data;
+};
+
+export const useGetProjectSecretsQuickSearch = (
+  {
+    projectId,
+    secretPath,
+    search = "",
+    environments,
+    tags
+  }: TGetDashboardProjectSecretsQuickSearchDTO,
+  options?: Omit<
+    UseQueryOptions<
+      TDashboardProjectSecretsQuickSearchResponse,
+      unknown,
+      TDashboardProjectSecretsQuickSearch,
+      ReturnType<typeof dashboardKeys.getProjectSecretsQuickSearch>
+    >,
+    "queryKey" | "queryFn"
+  >
+) => {
+  return useQuery({
+    ...options,
+    enabled:
+      Boolean(search?.trim() || Object.values(tags).length) &&
+      (options?.enabled ?? true) &&
+      Boolean(environments.length),
+    queryKey: dashboardKeys.getProjectSecretsQuickSearch({
+      secretPath,
+      search,
+      projectId,
+      environments,
+      tags
+    }),
+    queryFn: () =>
+      fetchProjectSecretsQuickSearch({
+        secretPath,
+        search,
+        projectId,
+        environments,
+        tags
+      }),
+    onError: (error) => {
+      if (axios.isAxiosError(error)) {
+        const serverResponse = error.response?.data as { message: string };
+        createNotification({
+          title: "Error fetching secrets deep search",
+          type: "error",
+          text: serverResponse.message
+        });
+      }
+    },
+    select: useCallback((data: Awaited<ReturnType<typeof fetchProjectSecretsQuickSearch>>) => {
+      const { secrets, folders, dynamicSecrets } = data;
+
+      const groupedFolders = groupBy(folders, (folder) => folder.path);
+      const groupedSecrets = groupBy(
+        mergePersonalSecrets(secrets),
+        (secret) => `${secret.path === "/" ? "" : secret.path}/${secret.key}`
+      );
+      const groupedDynamicSecrets = groupBy(
+        dynamicSecrets,
+        (dynamicSecret) =>
+          `${dynamicSecret.path === "/" ? "" : dynamicSecret.path}/${dynamicSecret.name}`
+      );
+
+      return {
+        folders: groupedFolders,
+        secrets: groupedSecrets,
+        dynamicSecrets: groupedDynamicSecrets
+      };
+    }, []),
+    keepPreviousData: true
+  });
+};
diff --git a/frontend/src/hooks/api/dashboard/types.ts b/frontend/src/hooks/api/dashboard/types.ts
new file mode 100644
index 0000000000..444614b82b
--- /dev/null
+++ b/frontend/src/hooks/api/dashboard/types.ts
@@ -0,0 +1,102 @@
+import { TDynamicSecret } from "@app/hooks/api/dynamicSecret/types";
+import { OrderByDirection } from "@app/hooks/api/generic/types";
+import { TSecretFolder } from "@app/hooks/api/secretFolders/types";
+import { TSecretImport } from "@app/hooks/api/secretImports/types";
+import { SecretV3Raw, SecretV3RawSanitized } from "@app/hooks/api/secrets/types";
+
+export type DashboardProjectSecretsOverviewResponse = {
+  folders?: (TSecretFolder & { environment: string })[];
+  dynamicSecrets?: (TDynamicSecret & { environment: string })[];
+  secrets?: SecretV3Raw[];
+  totalSecretCount?: number;
+  totalFolderCount?: number;
+  totalDynamicSecretCount?: number;
+  totalCount: number;
+  totalUniqueSecretsInPage: number;
+  totalUniqueDynamicSecretsInPage: number;
+  totalUniqueFoldersInPage: number;
+};
+
+export type DashboardProjectSecretsDetailsResponse = {
+  imports?: TSecretImport[];
+  folders?: TSecretFolder[];
+  dynamicSecrets?: TDynamicSecret[];
+  secrets?: SecretV3Raw[];
+  totalImportCount?: number;
+  totalFolderCount?: number;
+  totalDynamicSecretCount?: number;
+  totalSecretCount?: number;
+  totalCount: number;
+};
+
+export type DashboardProjectSecretsByKeys = {
+  secrets: SecretV3Raw[];
+};
+
+export type DashboardProjectSecretsOverview = Omit<
+  DashboardProjectSecretsOverviewResponse,
+  "secrets"
+> & {
+  secrets?: SecretV3RawSanitized[];
+};
+
+export type DashboardProjectSecretsDetails = Omit<
+  DashboardProjectSecretsDetailsResponse,
+  "secrets"
+> & {
+  secrets?: SecretV3RawSanitized[];
+};
+
+export enum DashboardSecretsOrderBy {
+  Name = "name"
+}
+
+export type TGetDashboardProjectSecretsOverviewDTO = {
+  projectId: string;
+  secretPath: string;
+  offset?: number;
+  limit?: number;
+  orderBy?: DashboardSecretsOrderBy;
+  orderDirection?: OrderByDirection;
+  search?: string;
+  includeSecrets?: boolean;
+  includeFolders?: boolean;
+  includeDynamicSecrets?: boolean;
+  environments: string[];
+};
+
+export type TGetDashboardProjectSecretsDetailsDTO = Omit<
+  TGetDashboardProjectSecretsOverviewDTO,
+  "environments"
+> & {
+  environment: string;
+  includeImports?: boolean;
+  tags: Record<string, boolean>;
+};
+
+export type TDashboardProjectSecretsQuickSearchResponse = {
+  folders: (TSecretFolder & { environment: string; path: string })[];
+  dynamicSecrets: (TDynamicSecret & { environment: string; path: string })[];
+  secrets: SecretV3Raw[];
+};
+
+export type TDashboardProjectSecretsQuickSearch = {
+  folders: Record<string, (TSecretFolder & { environment: string; path: string })[]>;
+  secrets: Record<string, SecretV3RawSanitized[]>;
+  dynamicSecrets: Record<string, (TDynamicSecret & { environment: string; path: string })[]>;
+};
+
+export type TGetDashboardProjectSecretsQuickSearchDTO = {
+  projectId: string;
+  secretPath: string;
+  tags: Record<string, boolean>;
+  search: string;
+  environments: string[];
+};
+
+export type TGetDashboardProjectSecretsByKeys = {
+  projectId: string;
+  secretPath: string;
+  environment: string;
+  keys: string[];
+};
diff --git a/frontend/src/hooks/api/dynamicSecret/mutation.ts b/frontend/src/hooks/api/dynamicSecret/mutation.ts
index 5f41e38f57..f8fbb4d055 100644
--- a/frontend/src/hooks/api/dynamicSecret/mutation.ts
+++ b/frontend/src/hooks/api/dynamicSecret/mutation.ts
@@ -1,6 +1,7 @@
 import { useMutation, useQueryClient } from "@tanstack/react-query";
 
 import { apiRequest } from "@app/config/request";
+import { dashboardKeys } from "@app/hooks/api/dashboard/queries";
 
 import { dynamicSecretKeys } from "./queries";
 import {
@@ -22,6 +23,8 @@ export const useCreateDynamicSecret = () => {
       return data.dynamicSecret;
     },
     onSuccess: (_, { path, environmentSlug, projectSlug }) => {
+      // TODO: optimize; we currently don't pass projectId
+      queryClient.invalidateQueries(dashboardKeys.all());
       queryClient.invalidateQueries(dynamicSecretKeys.list({ path, projectSlug, environmentSlug }));
     }
   });
@@ -39,6 +42,8 @@ export const useUpdateDynamicSecret = () => {
       return data.dynamicSecret;
     },
     onSuccess: (_, { path, environmentSlug, projectSlug }) => {
+      // TODO: optimize; we currently don't pass projectId
+      queryClient.invalidateQueries(dashboardKeys.all());
       queryClient.invalidateQueries(dynamicSecretKeys.list({ path, projectSlug, environmentSlug }));
     }
   });
@@ -56,6 +61,8 @@ export const useDeleteDynamicSecret = () => {
       return data.dynamicSecret;
     },
     onSuccess: (_, { path, environmentSlug, projectSlug }) => {
+      // TODO: optimize; we currently don't pass projectId
+      queryClient.invalidateQueries(dashboardKeys.all());
       queryClient.invalidateQueries(dynamicSecretKeys.list({ path, projectSlug, environmentSlug }));
     }
   });
diff --git a/frontend/src/hooks/api/dynamicSecret/queries.ts b/frontend/src/hooks/api/dynamicSecret/queries.ts
index 481b431cc5..f84fd07124 100644
--- a/frontend/src/hooks/api/dynamicSecret/queries.ts
+++ b/frontend/src/hooks/api/dynamicSecret/queries.ts
@@ -71,6 +71,34 @@
   });
 };
 
+export const useGetDynamicSecretProviderData = ({
+  tenantId,
+  applicationId,
+  clientSecret,
+  enabled
+}: {
+  tenantId: string;
+  applicationId: string;
+  clientSecret: string;
+  enabled: boolean;
+}) => {
+  return useQuery({
+    queryKey: ["users"],
+    queryFn: async () => {
+      const { data } = await apiRequest.post<{ id: string; email: string; name: string }[]>(
+        "/api/v1/dynamic-secrets/entra-id/users",
+        {
+          tenantId,
+          applicationId,
+          clientSecret
+        }
+      );
+      return data;
+    },
+    enabled
+  });
+};
+
 export const useGetDynamicSecretsOfAllEnv = ({
   path,
   projectSlug,
diff --git a/frontend/src/hooks/api/dynamicSecret/types.ts b/frontend/src/hooks/api/dynamicSecret/types.ts
index a9aab83183..7ac8d4147c 100644
--- a/frontend/src/hooks/api/dynamicSecret/types.ts
+++ b/frontend/src/hooks/api/dynamicSecret/types.ts
@@ -18,59 +18,219 @@ export type TDynamicSecret = {
 export enum DynamicSecretProviders {
   SqlDatabase = "sql-database",
   Cassandra =
"cassandra", - AwsIam = "aws-iam" + AwsIam = "aws-iam", + Redis = "redis", + AwsElastiCache = "aws-elasticache", + MongoAtlas = "mongo-db-atlas", + ElasticSearch = "elastic-search", + MongoDB = "mongo-db", + RabbitMq = "rabbit-mq", + AzureEntraId = "azure-entra-id", + Ldap = "ldap", + SapHana = "sap-hana", + Snowflake = "snowflake" } export enum SqlProviders { Postgres = "postgres", MySql = "mysql2", - Oracle = "oracledb" + Oracle = "oracledb", + MsSQL = "mssql" } export type TDynamicSecretProvider = | { - type: DynamicSecretProviders.SqlDatabase; - inputs: { - client: SqlProviders; - host: string; - port: number; - database: string; - username: string; - password: string; - creationStatement: string; - revocationStatement: string; - renewStatement?: string; - ca?: string | undefined; - }; - } + type: DynamicSecretProviders.SqlDatabase; + inputs: { + client: SqlProviders; + host: string; + port: number; + database: string; + username: string; + password: string; + creationStatement: string; + revocationStatement: string; + renewStatement?: string; + ca?: string | undefined; + }; + } | { - type: DynamicSecretProviders.Cassandra; - inputs: { - host: string; - port: number; - keyspace?: string; - localDataCenter: string; - username: string; - password: string; - creationStatement: string; - revocationStatement: string; - renewStatement?: string; - ca?: string | undefined; - }; - } + type: DynamicSecretProviders.Cassandra; + inputs: { + host: string; + port: number; + keyspace?: string; + localDataCenter: string; + username: string; + password: string; + creationStatement: string; + revocationStatement: string; + renewStatement?: string; + ca?: string | undefined; + }; + } | { - type: DynamicSecretProviders.AwsIam; - inputs: { - accessKey: string; - secretAccessKey: string; - region: string; - awsPath?: string; - policyDocument?: string; - userGroups?: string; - policyArns?: string; - }; - }; + type: DynamicSecretProviders.AwsIam; + inputs: { + accessKey: string; + secretAccessKey: string; + region: string; + awsPath?: string; + policyDocument?: string; + userGroups?: string; + policyArns?: string; + }; + } + | { + type: DynamicSecretProviders.Redis; + inputs: { + host: string; + port: number; + username: string; + password?: string; + creationStatement: string; + renewStatement?: string; + revocationStatement: string; + ca?: string | undefined; + }; + } + | { + type: DynamicSecretProviders.AwsElastiCache; + inputs: { + clusterName: string; + accessKeyId: string; + secretAccessKey: string; + region: string; + creationStatement: string; + revocationStatement: string; + ca?: string | undefined; + }; + } + | { + type: DynamicSecretProviders.MongoAtlas; + inputs: { + adminPublicKey: string; + adminPrivateKey: string; + groupId: string; + roles: { + databaseName: string; + roleName: string; + collectionName?: string; + }[]; + scopes?: { + name: string; + type: string; + }[]; + }; + } + | { + type: DynamicSecretProviders.MongoDB; + inputs: { + host: string; + port?: number; + database: string; + username: string; + password: string; + ca?: string | undefined; + roles: ( + | { + databaseName: string; + roleName: string; + } + | string + )[]; + }; + } + | { + type: DynamicSecretProviders.ElasticSearch; + inputs: { + host: string; + port: number; + ca?: string | undefined; + roles: string[]; + auth: + | { + type: "user"; + username: string; + password: string; + } + | { + type: "api-key"; + apiKey: string; + apiKeyId: string; + }; + }; + } + | { + type: DynamicSecretProviders.RabbitMq; + inputs: { + 
host: string; + port: number; + username: string; + password: string; + + tags: string[]; + virtualHost: { + name: string; + permissions: { + configure: string; + write: string; + read: string; + }; + }; + ca?: string; + }; + } + | { + type: DynamicSecretProviders.AzureEntraId; + inputs: { + tenantId: string; + userId: string; + email: string; + applicationId: string; + clientSecret: string; + }; + } + | { + type: DynamicSecretProviders.Ldap; + inputs: { + url: string; + binddn: string; + bindpass: string; + ca?: string | undefined; + credentialType: string; + creationLdif?: string; + revocationLdif?: string; + rollbackLdif?: string; + rotationLdif?: string; + }; + } + | { + type: DynamicSecretProviders.SapHana; + inputs: { + host: string; + port: number; + username: string; + password: string; + creationStatement: string; + revocationStatement: string; + renewStatement?: string; + ca?: string | undefined; + }; + } + | { + type: DynamicSecretProviders.Snowflake; + inputs: { + orgId: string; + accountId: string; + username: string; + password: string; + creationStatement: string; + revocationStatement: string; + renewStatement?: string; + }; + }; export type TCreateDynamicSecretDTO = { projectSlug: string; provider: TDynamicSecretProvider; diff --git a/frontend/src/hooks/api/externalGroupOrgRoleMappings/index.ts b/frontend/src/hooks/api/externalGroupOrgRoleMappings/index.ts new file mode 100644 index 0000000000..177955438b --- /dev/null +++ b/frontend/src/hooks/api/externalGroupOrgRoleMappings/index.ts @@ -0,0 +1,3 @@ +export * from "./mutations"; +export * from "./queries"; +export * from "./types"; diff --git a/frontend/src/hooks/api/externalGroupOrgRoleMappings/mutations.tsx b/frontend/src/hooks/api/externalGroupOrgRoleMappings/mutations.tsx new file mode 100644 index 0000000000..aac46726a1 --- /dev/null +++ b/frontend/src/hooks/api/externalGroupOrgRoleMappings/mutations.tsx @@ -0,0 +1,19 @@ +import { useMutation, useQueryClient } from "@tanstack/react-query"; + +import { apiRequest } from "@app/config/request"; +import { externalGroupOrgRoleMappingKeys } from "@app/hooks/api/externalGroupOrgRoleMappings/queries"; +import { TSyncExternalGroupOrgRoleMappingsDTO } from "@app/hooks/api/externalGroupOrgRoleMappings/types"; + +export const useUpdateExternalGroupOrgRoleMappings = () => { + const queryClient = useQueryClient(); + return useMutation({ + mutationFn: async (payload: TSyncExternalGroupOrgRoleMappingsDTO) => { + const { data } = await apiRequest.put("/api/v1/external-group-mappings", payload); + + return data; + }, + onSuccess: () => { + queryClient.invalidateQueries(externalGroupOrgRoleMappingKeys.list()); + } + }); +}; diff --git a/frontend/src/hooks/api/externalGroupOrgRoleMappings/queries.tsx b/frontend/src/hooks/api/externalGroupOrgRoleMappings/queries.tsx new file mode 100644 index 0000000000..620ec0447b --- /dev/null +++ b/frontend/src/hooks/api/externalGroupOrgRoleMappings/queries.tsx @@ -0,0 +1,33 @@ +import { useQuery, UseQueryOptions } from "@tanstack/react-query"; + +import { apiRequest } from "@app/config/request"; +import { TExternalGroupOrgRoleMappingList } from "@app/hooks/api/externalGroupOrgRoleMappings/types"; + +export const externalGroupOrgRoleMappingKeys = { + all: ["external-group-org-role-mapping"] as const, + list: () => [...externalGroupOrgRoleMappingKeys.all, "list"] as const +}; + +export const useGetExternalGroupOrgRoleMappings = ( + options?: Omit< + UseQueryOptions< + TExternalGroupOrgRoleMappingList, + unknown, + TExternalGroupOrgRoleMappingList, + 
ReturnType<typeof externalGroupOrgRoleMappingKeys.list>
+    >,
+    "queryKey" | "queryFn"
+  >
+) => {
+  return useQuery({
+    queryKey: externalGroupOrgRoleMappingKeys.list(),
+    queryFn: async () => {
+      const { data } = await apiRequest.get<TExternalGroupOrgRoleMappingList>(
+        "/api/v1/external-group-mappings"
+      );
+
+      return data;
+    },
+    ...options
+  });
+};
diff --git a/frontend/src/hooks/api/externalGroupOrgRoleMappings/types.ts b/frontend/src/hooks/api/externalGroupOrgRoleMappings/types.ts
new file mode 100644
index 0000000000..3130457426
--- /dev/null
+++ b/frontend/src/hooks/api/externalGroupOrgRoleMappings/types.ts
@@ -0,0 +1,18 @@
+export type TSyncExternalGroupOrgRoleMappingsDTO = {
+  mappings: {
+    groupName: string;
+    roleSlug: string;
+  }[];
+};
+
+export type TExternalGroupOrgRoleMapping = {
+  id: string;
+  groupName: string;
+  role: string;
+  roleId: string;
+  orgId: string;
+  createdAt: string;
+  updatedAt: string;
+};
+
+export type TExternalGroupOrgRoleMappingList = TExternalGroupOrgRoleMapping[];
diff --git a/frontend/src/hooks/api/generic/types.ts b/frontend/src/hooks/api/generic/types.ts
new file mode 100644
index 0000000000..1f09c5c0ad
--- /dev/null
+++ b/frontend/src/hooks/api/generic/types.ts
@@ -0,0 +1,4 @@
+export enum OrderByDirection {
+  ASC = "asc",
+  DESC = "desc"
+}
diff --git a/frontend/src/hooks/api/groups/mutations.tsx b/frontend/src/hooks/api/groups/mutations.tsx
index 32d718e370..445ae10bc6 100644
--- a/frontend/src/hooks/api/groups/mutations.tsx
+++ b/frontend/src/hooks/api/groups/mutations.tsx
@@ -3,128 +3,120 @@ import { useMutation, useQueryClient } from "@tanstack/react-query";
 
 import { apiRequest } from "@app/config/request";
 
 import { organizationKeys } from "../organization/queries";
+import { userKeys } from "../users/query-keys";
 import { groupKeys } from "./queries";
 import { TGroup } from "./types";
 
 export const useCreateGroup = () => {
-  const queryClient = useQueryClient();
-  return useMutation({
-    mutationFn: async ({
-      name,
-      slug,
-      role
-    }: {
-      name: string;
-      slug: string;
-      organizationId: string;
-      role?: string;
-    }) => {
-      const {
-        data: group
-      } = await apiRequest.post<TGroup>("/api/v1/groups", {
-        name,
-        slug,
-        role
-      });
-
-      return group;
-    },
-    onSuccess: (_, { organizationId }) => {
-      queryClient.invalidateQueries(organizationKeys.getOrgGroups(organizationId));
-    }
-  });
+  const queryClient = useQueryClient();
+  return useMutation({
+    mutationFn: async ({
+      name,
+      slug,
+      role
+    }: {
+      name: string;
+      slug: string;
+      organizationId: string;
+      role?: string;
+    }) => {
+      const { data: group } = await apiRequest.post<TGroup>("/api/v1/groups", {
+        name,
+        slug,
+        role
+      });
+
+      return group;
+    },
+    onSuccess: (_, { organizationId }) => {
+      queryClient.invalidateQueries(organizationKeys.getOrgGroups(organizationId));
+    }
+  });
 };
 
 export const useUpdateGroup = () => {
-  const queryClient = useQueryClient();
-  return useMutation({
-    mutationFn: async ({
-      currentSlug,
-      name,
-      slug,
-      role
-    }: {
-      currentSlug: string;
-      name?: string;
-      slug?: string;
-      role?: string;
-    }) => {
-      const {
-        data: group
-      } = await apiRequest.patch<TGroup>(`/api/v1/groups/${currentSlug}`, {
-        name,
-        slug,
-        role
-      });
-
-      return group;
-    },
-    onSuccess: ({ orgId }) => {
-      queryClient.invalidateQueries(organizationKeys.getOrgGroups(orgId));
-    }
-  });
+  const queryClient = useQueryClient();
+  return useMutation({
+    mutationFn: async ({
+      id,
+      name,
+      slug,
+      role
+    }: {
+      id: string;
+      name?: string;
+      slug?: string;
+      role?: string;
+    }) => {
+      const { data: group } = await apiRequest.patch<TGroup>(`/api/v1/groups/${id}`, {
+        name,
+        slug,
+        role
+      });
+
+
return group; + }, + onSuccess: ({ orgId }) => { + queryClient.invalidateQueries(organizationKeys.getOrgGroups(orgId)); + } + }); }; export const useDeleteGroup = () => { - const queryClient = useQueryClient(); - return useMutation({ - mutationFn: async ({ - slug - }: { - slug: string; - }) => { - const { - data: group - } = await apiRequest.delete(`/api/v1/groups/${slug}`); + const queryClient = useQueryClient(); + return useMutation({ + mutationFn: async ({ id }: { id: string }) => { + const { data: group } = await apiRequest.delete(`/api/v1/groups/${id}`); - return group; - }, - onSuccess: ({ orgId }) => { - queryClient.invalidateQueries(organizationKeys.getOrgGroups(orgId)); - } - }); + return group; + }, + onSuccess: ({ orgId }) => { + queryClient.invalidateQueries(organizationKeys.getOrgGroups(orgId)); + } + }); }; export const useAddUserToGroup = () => { - const queryClient = useQueryClient(); - return useMutation({ - mutationFn: async ({ - slug, - username - }: { - slug: string; - username: string; - }) => { - const { - data - } = await apiRequest.post(`/api/v1/groups/${slug}/users/${username}`); + const queryClient = useQueryClient(); + return useMutation({ + mutationFn: async ({ + groupId, + username + }: { + groupId: string; + username: string; + slug: string; + }) => { + const { data } = await apiRequest.post(`/api/v1/groups/${groupId}/users/${username}`); - return data; - }, - onSuccess: (_, { slug }) => { - queryClient.invalidateQueries(groupKeys.forGroupUserMemberships(slug)); - } - }); + return data; + }, + onSuccess: (_, { slug }) => { + queryClient.invalidateQueries(groupKeys.forGroupUserMemberships(slug)); + } + }); }; export const useRemoveUserFromGroup = () => { - const queryClient = useQueryClient(); - return useMutation({ - mutationFn: async ({ - slug, - username - }: { - slug: string; - username: string; - }) => { - const { - data - } = await apiRequest.delete(`/api/v1/groups/${slug}/users/${username}`); + const queryClient = useQueryClient(); + return useMutation({ + mutationFn: async ({ + username, + groupId + }: { + slug: string; + username: string; + groupId: string; + }) => { + const { data } = await apiRequest.delete( + `/api/v1/groups/${groupId}/users/${username}` + ); - return data; - }, - onSuccess: (_, { slug }) => { - queryClient.invalidateQueries(groupKeys.forGroupUserMemberships(slug)); - } - }); -}; \ No newline at end of file + return data; + }, + onSuccess: (_, { slug, username }) => { + queryClient.invalidateQueries(groupKeys.forGroupUserMemberships(slug)); + queryClient.invalidateQueries(userKeys.listUserGroupMemberships(username)); + } + }); +}; diff --git a/frontend/src/hooks/api/groups/queries.tsx b/frontend/src/hooks/api/groups/queries.tsx index ba05431359..b239b0a614 100644 --- a/frontend/src/hooks/api/groups/queries.tsx +++ b/frontend/src/hooks/api/groups/queries.tsx @@ -4,18 +4,19 @@ import { apiRequest } from "@app/config/request"; export const groupKeys = { allGroupUserMemberships: () => ["group-user-memberships"] as const, - forGroupUserMemberships: (slug: string) => [...groupKeys.allGroupUserMemberships(), slug] as const, + forGroupUserMemberships: (slug: string) => + [...groupKeys.allGroupUserMemberships(), slug] as const, specificGroupUserMemberships: ({ slug, offset, limit, - username + search }: { slug: string; offset: number; limit: number; - username: string; - }) => [...groupKeys.forGroupUserMemberships(slug), { offset, limit, username }] as const + search: string; + }) => [...groupKeys.forGroupUserMemberships(slug), { offset, 
limit, search }] as const }; type TUser = { @@ -28,38 +29,42 @@ type TUser = { }; export const useListGroupUsers = ({ + id, groupSlug, offset = 0, limit = 10, - username + search }: { + id: string; groupSlug: string; offset: number; limit: number; - username: string; + search: string; }) => { return useQuery({ queryKey: groupKeys.specificGroupUserMemberships({ slug: groupSlug, offset, limit, - username + search }), enabled: Boolean(groupSlug), + keepPreviousData: true, queryFn: async () => { const params = new URLSearchParams({ offset: String(offset), limit: String(limit), - username + search }); - - const { data } = await apiRequest.get<{ users: TUser[]; totalCount: number; }>( - `/api/v1/groups/${groupSlug}/users`, { + + const { data } = await apiRequest.get<{ users: TUser[]; totalCount: number }>( + `/api/v1/groups/${id}/users`, + { params } ); - + return data; - }, + } }); }; diff --git a/frontend/src/hooks/api/groups/types.ts b/frontend/src/hooks/api/groups/types.ts index 3db2357cd3..3f69b9a0e7 100644 --- a/frontend/src/hooks/api/groups/types.ts +++ b/frontend/src/hooks/api/groups/types.ts @@ -4,7 +4,7 @@ import { TOrgRole } from "../roles/types"; export type TGroupOrgMembership = TGroup & { customRole?: TOrgRole; -} +}; export type TGroup = { id: string; @@ -33,4 +33,11 @@ export type TGroupMembership = { }[]; createdAt: string; updatedAt: string; -}; \ No newline at end of file +}; + +export type TGroupWithProjectMemberships = { + id: string; + name: string; + slug: string; + orgId: string; +}; diff --git a/frontend/src/hooks/api/identities/constants.tsx b/frontend/src/hooks/api/identities/constants.tsx index 51495d4f24..0c57ee82c9 100644 --- a/frontend/src/hooks/api/identities/constants.tsx +++ b/frontend/src/hooks/api/identities/constants.tsx @@ -1,9 +1,11 @@ import { IdentityAuthMethod } from "./enums"; export const identityAuthToNameMap: { [I in IdentityAuthMethod]: string } = { + [IdentityAuthMethod.TOKEN_AUTH]: "Token Auth", [IdentityAuthMethod.UNIVERSAL_AUTH]: "Universal Auth", [IdentityAuthMethod.KUBERNETES_AUTH]: "Kubernetes Auth", [IdentityAuthMethod.GCP_AUTH]: "GCP Auth", [IdentityAuthMethod.AWS_AUTH]: "AWS Auth", - [IdentityAuthMethod.AZURE_AUTH]: "Azure Auth" + [IdentityAuthMethod.AZURE_AUTH]: "Azure Auth", + [IdentityAuthMethod.OIDC_AUTH]: "OIDC Auth" }; diff --git a/frontend/src/hooks/api/identities/enums.tsx b/frontend/src/hooks/api/identities/enums.tsx index 66af910939..5e445521a5 100644 --- a/frontend/src/hooks/api/identities/enums.tsx +++ b/frontend/src/hooks/api/identities/enums.tsx @@ -1,7 +1,9 @@ export enum IdentityAuthMethod { + TOKEN_AUTH = "token-auth", UNIVERSAL_AUTH = "universal-auth", KUBERNETES_AUTH = "kubernetes-auth", GCP_AUTH = "gcp-auth", AWS_AUTH = "aws-auth", - AZURE_AUTH = "azure-auth" + AZURE_AUTH = "azure-auth", + OIDC_AUTH = "oidc-auth" } diff --git a/frontend/src/hooks/api/identities/index.tsx b/frontend/src/hooks/api/identities/index.tsx index 41b03669b9..5c7bcc3e78 100644 --- a/frontend/src/hooks/api/identities/index.tsx +++ b/frontend/src/hooks/api/identities/index.tsx @@ -5,23 +5,40 @@ export { useAddIdentityAzureAuth, useAddIdentityGcpAuth, useAddIdentityKubernetesAuth, + useAddIdentityOidcAuth, + useAddIdentityTokenAuth, useAddIdentityUniversalAuth, useCreateIdentity, useCreateIdentityUniversalAuthClientSecret, + useCreateTokenIdentityTokenAuth, useDeleteIdentity, + useDeleteIdentityAwsAuth, + useDeleteIdentityAzureAuth, + useDeleteIdentityGcpAuth, + useDeleteIdentityKubernetesAuth, + useDeleteIdentityOidcAuth, + 
useDeleteIdentityTokenAuth, + useDeleteIdentityUniversalAuth, + useRevokeIdentityTokenAuthToken, useRevokeIdentityUniversalAuthClientSecret, useUpdateIdentity, useUpdateIdentityAwsAuth, useUpdateIdentityAzureAuth, useUpdateIdentityGcpAuth, useUpdateIdentityKubernetesAuth, - useUpdateIdentityUniversalAuth -} from "./mutations"; + useUpdateIdentityOidcAuth, + useUpdateIdentityTokenAuth, + useUpdateIdentityTokenAuthToken, + useUpdateIdentityUniversalAuth} from "./mutations"; export { useGetIdentityAwsAuth, useGetIdentityAzureAuth, + useGetIdentityById, useGetIdentityGcpAuth, useGetIdentityKubernetesAuth, + useGetIdentityOidcAuth, + useGetIdentityProjectMemberships, + useGetIdentityTokenAuth, + useGetIdentityTokensTokenAuth, useGetIdentityUniversalAuth, - useGetIdentityUniversalAuthClientSecrets -} from "./queries"; + useGetIdentityUniversalAuthClientSecrets} from "./queries"; diff --git a/frontend/src/hooks/api/identities/mutations.tsx b/frontend/src/hooks/api/identities/mutations.tsx index cb1fe4c170..21c4c560e1 100644 --- a/frontend/src/hooks/api/identities/mutations.tsx +++ b/frontend/src/hooks/api/identities/mutations.tsx @@ -9,25 +9,44 @@ import { AddIdentityAzureAuthDTO, AddIdentityGcpAuthDTO, AddIdentityKubernetesAuthDTO, + AddIdentityOidcAuthDTO, + AddIdentityTokenAuthDTO, AddIdentityUniversalAuthDTO, ClientSecretData, CreateIdentityDTO, CreateIdentityUniversalAuthClientSecretDTO, CreateIdentityUniversalAuthClientSecretRes, + CreateTokenIdentityTokenAuthDTO, + CreateTokenIdentityTokenAuthRes, + DeleteIdentityAwsAuthDTO, + DeleteIdentityAzureAuthDTO, DeleteIdentityDTO, + DeleteIdentityGcpAuthDTO, + DeleteIdentityKubernetesAuthDTO, + DeleteIdentityOidcAuthDTO, + DeleteIdentityTokenAuthDTO, DeleteIdentityUniversalAuthClientSecretDTO, + DeleteIdentityUniversalAuthDTO, Identity, + IdentityAccessToken, IdentityAwsAuth, IdentityAzureAuth, IdentityGcpAuth, IdentityKubernetesAuth, + IdentityOidcAuth, + IdentityTokenAuth, IdentityUniversalAuth, + RevokeTokenDTO, + RevokeTokenRes, UpdateIdentityAwsAuthDTO, UpdateIdentityAzureAuthDTO, UpdateIdentityDTO, UpdateIdentityGcpAuthDTO, UpdateIdentityKubernetesAuthDTO, - UpdateIdentityUniversalAuthDTO + UpdateIdentityOidcAuthDTO, + UpdateIdentityTokenAuthDTO, + UpdateIdentityUniversalAuthDTO, + UpdateTokenIdentityTokenAuthDTO } from "./types"; export const useCreateIdentity = () => { @@ -48,18 +67,20 @@ export const useCreateIdentity = () => { export const useUpdateIdentity = () => { const queryClient = useQueryClient(); return useMutation({ - mutationFn: async ({ identityId, name, role }) => { + mutationFn: async ({ identityId, name, role, metadata }) => { const { data: { identity } } = await apiRequest.patch(`/api/v1/identities/${identityId}`, { name, - role + role, + metadata }); return identity; }, - onSuccess: (_, { organizationId }) => { + onSuccess: (_, { organizationId, identityId }) => { queryClient.invalidateQueries(organizationKeys.getOrgIdentityMemberships(organizationId)); + queryClient.invalidateQueries(identitiesKeys.getIdentityById(identityId)); } }); }; @@ -103,8 +124,10 @@ export const useAddIdentityUniversalAuth = () => { }); return identityUniversalAuth; }, - onSuccess: (_, { organizationId }) => { + onSuccess: (_, { identityId, organizationId }) => { queryClient.invalidateQueries(organizationKeys.getOrgIdentityMemberships(organizationId)); + queryClient.invalidateQueries(identitiesKeys.getIdentityById(identityId)); + queryClient.invalidateQueries(identitiesKeys.getIdentityUniversalAuth(identityId)); } }); }; @@ -131,8 +154,27 @@ 
export const useUpdateIdentityUniversalAuth = () => { }); return identityUniversalAuth; }, - onSuccess: (_, { organizationId }) => { + onSuccess: (_, { identityId, organizationId }) => { queryClient.invalidateQueries(organizationKeys.getOrgIdentityMemberships(organizationId)); + queryClient.invalidateQueries(identitiesKeys.getIdentityById(identityId)); + queryClient.invalidateQueries(identitiesKeys.getIdentityUniversalAuth(identityId)); + } + }); +}; + +export const useDeleteIdentityUniversalAuth = () => { + const queryClient = useQueryClient(); + return useMutation({ + mutationFn: async ({ identityId }) => { + const { + data: { identityUniversalAuth } + } = await apiRequest.delete(`/api/v1/auth/universal-auth/identities/${identityId}`); + return identityUniversalAuth; + }, + onSuccess: (_, { organizationId, identityId }) => { + queryClient.invalidateQueries(organizationKeys.getOrgIdentityMemberships(organizationId)); + queryClient.invalidateQueries(identitiesKeys.getIdentityById(identityId)); + queryClient.invalidateQueries(identitiesKeys.getIdentityUniversalAuth(identityId)); } }); }; @@ -214,8 +256,10 @@ export const useAddIdentityGcpAuth = () => { return identityGcpAuth; }, - onSuccess: (_, { organizationId }) => { + onSuccess: (_, { identityId, organizationId }) => { queryClient.invalidateQueries(organizationKeys.getOrgIdentityMemberships(organizationId)); + queryClient.invalidateQueries(identitiesKeys.getIdentityById(identityId)); + queryClient.invalidateQueries(identitiesKeys.getIdentityGcpAuth(identityId)); } }); }; @@ -252,8 +296,27 @@ export const useUpdateIdentityGcpAuth = () => { return identityGcpAuth; }, - onSuccess: (_, { organizationId }) => { + onSuccess: (_, { identityId, organizationId }) => { queryClient.invalidateQueries(organizationKeys.getOrgIdentityMemberships(organizationId)); + queryClient.invalidateQueries(identitiesKeys.getIdentityById(identityId)); + queryClient.invalidateQueries(identitiesKeys.getIdentityGcpAuth(identityId)); + } + }); +}; + +export const useDeleteIdentityGcpAuth = () => { + const queryClient = useQueryClient(); + return useMutation({ + mutationFn: async ({ identityId }) => { + const { + data: { identityGcpAuth } + } = await apiRequest.delete(`/api/v1/auth/gcp-auth/identities/${identityId}`); + return identityGcpAuth; + }, + onSuccess: (_, { organizationId, identityId }) => { + queryClient.invalidateQueries(organizationKeys.getOrgIdentityMemberships(organizationId)); + queryClient.invalidateQueries(identitiesKeys.getIdentityById(identityId)); + queryClient.invalidateQueries(identitiesKeys.getIdentityGcpAuth(identityId)); } }); }; @@ -288,8 +351,10 @@ export const useAddIdentityAwsAuth = () => { return identityAwsAuth; }, - onSuccess: (_, { organizationId }) => { + onSuccess: (_, { identityId, organizationId }) => { queryClient.invalidateQueries(organizationKeys.getOrgIdentityMemberships(organizationId)); + queryClient.invalidateQueries(identitiesKeys.getIdentityById(identityId)); + queryClient.invalidateQueries(identitiesKeys.getIdentityAwsAuth(identityId)); } }); }; @@ -324,8 +389,132 @@ export const useUpdateIdentityAwsAuth = () => { return identityAwsAuth; }, - onSuccess: (_, { organizationId }) => { + onSuccess: (_, { identityId, organizationId }) => { queryClient.invalidateQueries(organizationKeys.getOrgIdentityMemberships(organizationId)); + queryClient.invalidateQueries(identitiesKeys.getIdentityById(identityId)); + queryClient.invalidateQueries(identitiesKeys.getIdentityAwsAuth(identityId)); + } + }); +}; + +export const 
useDeleteIdentityAwsAuth = () => { + const queryClient = useQueryClient(); + return useMutation({ + mutationFn: async ({ identityId }) => { + const { + data: { identityAwsAuth } + } = await apiRequest.delete(`/api/v1/auth/aws-auth/identities/${identityId}`); + return identityAwsAuth; + }, + onSuccess: (_, { organizationId, identityId }) => { + queryClient.invalidateQueries(organizationKeys.getOrgIdentityMemberships(organizationId)); + queryClient.invalidateQueries(identitiesKeys.getIdentityById(identityId)); + queryClient.invalidateQueries(identitiesKeys.getIdentityAwsAuth(identityId)); + } + }); +}; + +export const useUpdateIdentityOidcAuth = () => { + const queryClient = useQueryClient(); + return useMutation({ + mutationFn: async ({ + identityId, + accessTokenTTL, + accessTokenMaxTTL, + accessTokenNumUsesLimit, + accessTokenTrustedIps, + oidcDiscoveryUrl, + caCert, + boundIssuer, + boundAudiences, + boundClaims, + boundSubject + }) => { + const { + data: { identityOidcAuth } + } = await apiRequest.patch<{ identityOidcAuth: IdentityOidcAuth }>( + `/api/v1/auth/oidc-auth/identities/${identityId}`, + { + oidcDiscoveryUrl, + caCert, + boundIssuer, + boundAudiences, + boundClaims, + boundSubject, + accessTokenTTL, + accessTokenMaxTTL, + accessTokenNumUsesLimit, + accessTokenTrustedIps + } + ); + + return identityOidcAuth; + }, + onSuccess: (_, { identityId, organizationId }) => { + queryClient.invalidateQueries(organizationKeys.getOrgIdentityMemberships(organizationId)); + queryClient.invalidateQueries(identitiesKeys.getIdentityById(identityId)); + queryClient.invalidateQueries(identitiesKeys.getIdentityOidcAuth(identityId)); + } + }); +}; + +export const useAddIdentityOidcAuth = () => { + const queryClient = useQueryClient(); + return useMutation({ + mutationFn: async ({ + identityId, + oidcDiscoveryUrl, + caCert, + boundIssuer, + boundAudiences, + boundClaims, + boundSubject, + accessTokenTTL, + accessTokenMaxTTL, + accessTokenNumUsesLimit, + accessTokenTrustedIps + }) => { + const { + data: { identityOidcAuth } + } = await apiRequest.post<{ identityOidcAuth: IdentityOidcAuth }>( + `/api/v1/auth/oidc-auth/identities/${identityId}`, + { + oidcDiscoveryUrl, + caCert, + boundIssuer, + boundAudiences, + boundClaims, + boundSubject, + accessTokenTTL, + accessTokenMaxTTL, + accessTokenNumUsesLimit, + accessTokenTrustedIps + } + ); + + return identityOidcAuth; + }, + onSuccess: (_, { identityId, organizationId }) => { + queryClient.invalidateQueries(organizationKeys.getOrgIdentityMemberships(organizationId)); + queryClient.invalidateQueries(identitiesKeys.getIdentityById(identityId)); + queryClient.invalidateQueries(identitiesKeys.getIdentityOidcAuth(identityId)); + } + }); +}; + +export const useDeleteIdentityOidcAuth = () => { + const queryClient = useQueryClient(); + return useMutation({ + mutationFn: async ({ identityId }) => { + const { + data: { identityOidcAuth } + } = await apiRequest.delete(`/api/v1/auth/oidc-auth/identities/${identityId}`); + return identityOidcAuth; + }, + onSuccess: (_, { organizationId, identityId }) => { + queryClient.invalidateQueries(organizationKeys.getOrgIdentityMemberships(organizationId)); + queryClient.invalidateQueries(identitiesKeys.getIdentityById(identityId)); + queryClient.invalidateQueries(identitiesKeys.getIdentityOidcAuth(identityId)); } }); }; @@ -360,8 +549,10 @@ export const useAddIdentityAzureAuth = () => { return identityAzureAuth; }, - onSuccess: (_, { organizationId }) => { + onSuccess: (_, { identityId, organizationId }) => { 
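+ // configuring a new auth method affects the org identity table plus this identity's cached detail and auth-config queries, so all three are invalidated below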
queryClient.invalidateQueries(organizationKeys.getOrgIdentityMemberships(organizationId)); + queryClient.invalidateQueries(identitiesKeys.getIdentityById(identityId)); + queryClient.invalidateQueries(identitiesKeys.getIdentityAzureAuth(identityId)); } }); }; @@ -402,8 +593,10 @@ export const useAddIdentityKubernetesAuth = () => { return identityKubernetesAuth; }, - onSuccess: (_, { organizationId }) => { + onSuccess: (_, { identityId, organizationId }) => { queryClient.invalidateQueries(organizationKeys.getOrgIdentityMemberships(organizationId)); + queryClient.invalidateQueries(identitiesKeys.getIdentityById(identityId)); + queryClient.invalidateQueries(identitiesKeys.getIdentityKubernetesAuth(identityId)); } }); }; @@ -438,8 +631,27 @@ export const useUpdateIdentityAzureAuth = () => { return identityAzureAuth; }, - onSuccess: (_, { organizationId }) => { + onSuccess: (_, { identityId, organizationId }) => { queryClient.invalidateQueries(organizationKeys.getOrgIdentityMemberships(organizationId)); + queryClient.invalidateQueries(identitiesKeys.getIdentityById(identityId)); + queryClient.invalidateQueries(identitiesKeys.getIdentityAzureAuth(identityId)); + } + }); +}; + +export const useDeleteIdentityAzureAuth = () => { + const queryClient = useQueryClient(); + return useMutation<IdentityAzureAuth, {}, DeleteIdentityAzureAuthDTO>({ + mutationFn: async ({ identityId }) => { + const { + data: { identityAzureAuth } + } = await apiRequest.delete(`/api/v1/auth/azure-auth/identities/${identityId}`); + return identityAzureAuth; + }, + onSuccess: (_, { organizationId, identityId }) => { + queryClient.invalidateQueries(organizationKeys.getOrgIdentityMemberships(organizationId)); + queryClient.invalidateQueries(identitiesKeys.getIdentityById(identityId)); + queryClient.invalidateQueries(identitiesKeys.getIdentityAzureAuth(identityId)); } }); }; @@ -480,8 +692,164 @@ export const useUpdateIdentityKubernetesAuth = () => { return identityKubernetesAuth; }, - onSuccess: (_, { organizationId }) => { + onSuccess: (_, { identityId, organizationId }) => { queryClient.invalidateQueries(organizationKeys.getOrgIdentityMemberships(organizationId)); + queryClient.invalidateQueries(identitiesKeys.getIdentityById(identityId)); + queryClient.invalidateQueries(identitiesKeys.getIdentityKubernetesAuth(identityId)); + } + }); +}; + +export const useDeleteIdentityKubernetesAuth = () => { + const queryClient = useQueryClient(); + return useMutation<IdentityKubernetesAuth, {}, DeleteIdentityKubernetesAuthDTO>({ + mutationFn: async ({ identityId }) => { + const { + data: { identityKubernetesAuth } + } = await apiRequest.delete(`/api/v1/auth/kubernetes-auth/identities/${identityId}`); + return identityKubernetesAuth; + }, + onSuccess: (_, { organizationId, identityId }) => { + queryClient.invalidateQueries(organizationKeys.getOrgIdentityMemberships(organizationId)); + queryClient.invalidateQueries(identitiesKeys.getIdentityById(identityId)); + queryClient.invalidateQueries(identitiesKeys.getIdentityKubernetesAuth(identityId)); + } + }); +}; + +export const useAddIdentityTokenAuth = () => { + const queryClient = useQueryClient(); + return useMutation<IdentityTokenAuth, {}, AddIdentityTokenAuthDTO>({ + mutationFn: async ({ + identityId, + accessTokenTTL, + accessTokenMaxTTL, + accessTokenNumUsesLimit, + accessTokenTrustedIps + }) => { + const { + data: { identityTokenAuth } + } = await apiRequest.post<{ identityTokenAuth: IdentityTokenAuth }>( + `/api/v1/auth/token-auth/identities/${identityId}`, + { + accessTokenTTL, + accessTokenMaxTTL, + accessTokenNumUsesLimit, + accessTokenTrustedIps + } + ); + + return identityTokenAuth; + }, + onSuccess: (_, { identityId, organizationId }) =>
{ + queryClient.invalidateQueries(organizationKeys.getOrgIdentityMemberships(organizationId)); + queryClient.invalidateQueries(identitiesKeys.getIdentityById(identityId)); + queryClient.invalidateQueries(identitiesKeys.getIdentityTokenAuth(identityId)); + } + }); +}; + +export const useUpdateIdentityTokenAuth = () => { + const queryClient = useQueryClient(); + return useMutation<IdentityTokenAuth, {}, UpdateIdentityTokenAuthDTO>({ + mutationFn: async ({ + identityId, + accessTokenTTL, + accessTokenMaxTTL, + accessTokenNumUsesLimit, + accessTokenTrustedIps + }) => { + const { + data: { identityTokenAuth } + } = await apiRequest.patch<{ identityTokenAuth: IdentityTokenAuth }>( + `/api/v1/auth/token-auth/identities/${identityId}`, + { + accessTokenTTL, + accessTokenMaxTTL, + accessTokenNumUsesLimit, + accessTokenTrustedIps + } + ); + + return identityTokenAuth; + }, + onSuccess: (_, { identityId, organizationId }) => { + queryClient.invalidateQueries(organizationKeys.getOrgIdentityMemberships(organizationId)); + queryClient.invalidateQueries(identitiesKeys.getIdentityById(identityId)); + queryClient.invalidateQueries(identitiesKeys.getIdentityTokenAuth(identityId)); + } + }); +}; + +export const useDeleteIdentityTokenAuth = () => { + const queryClient = useQueryClient(); + return useMutation<IdentityTokenAuth, {}, DeleteIdentityTokenAuthDTO>({ + mutationFn: async ({ identityId }) => { + const { + data: { identityTokenAuth } + } = await apiRequest.delete(`/api/v1/auth/token-auth/identities/${identityId}`); + return identityTokenAuth; + }, + onSuccess: (_, { organizationId, identityId }) => { + queryClient.invalidateQueries(organizationKeys.getOrgIdentityMemberships(organizationId)); + queryClient.invalidateQueries(identitiesKeys.getIdentityById(identityId)); + queryClient.invalidateQueries(identitiesKeys.getIdentityTokenAuth(identityId)); + } + }); +}; + +export const useCreateTokenIdentityTokenAuth = () => { + const queryClient = useQueryClient(); + return useMutation<CreateTokenIdentityTokenAuthRes, {}, CreateTokenIdentityTokenAuthDTO>({ + mutationFn: async ({ identityId, name }) => { + const { data } = await apiRequest.post( + `/api/v1/auth/token-auth/identities/${identityId}/tokens`, + { + name + } + ); + + return data; + }, + onSuccess: (_, { identityId }) => { + queryClient.invalidateQueries(identitiesKeys.getIdentityTokensTokenAuth(identityId)); + } + }); +}; + +export const useUpdateIdentityTokenAuthToken = () => { + const queryClient = useQueryClient(); + return useMutation<IdentityAccessToken, {}, UpdateTokenIdentityTokenAuthDTO>({ + mutationFn: async ({ tokenId, name }) => { + const { + data: { token } + } = await apiRequest.patch<{ token: IdentityAccessToken }>( + `/api/v1/auth/token-auth/tokens/${tokenId}`, + { + name + } + ); + + return token; + }, + onSuccess: (_, { identityId }) => { + queryClient.invalidateQueries(identitiesKeys.getIdentityTokensTokenAuth(identityId)); + } + }); +}; + +export const useRevokeIdentityTokenAuthToken = () => { + const queryClient = useQueryClient(); + return useMutation<RevokeTokenRes, {}, RevokeTokenDTO>({ + mutationFn: async ({ tokenId }) => { + const { data } = await apiRequest.post( + `/api/v1/auth/token-auth/tokens/${tokenId}/revoke` + ); + + return data; + }, + onSuccess: (_, { identityId }) => { + queryClient.invalidateQueries(identitiesKeys.getIdentityTokensTokenAuth(identityId)); } }); }; diff --git a/frontend/src/hooks/api/identities/queries.tsx b/frontend/src/hooks/api/identities/queries.tsx index eb04227eb6..c5c4424076 100644 --- a/frontend/src/hooks/api/identities/queries.tsx +++ b/frontend/src/hooks/api/identities/queries.tsx @@ -1,16 +1,23 @@ -import { useQuery } from "@tanstack/react-query"; +import { useQuery, UseQueryOptions } from "@tanstack/react-query"; import { apiRequest } from
"@app/config/request"; import { ClientSecretData, + IdentityAccessToken, IdentityAwsAuth, IdentityAzureAuth, IdentityGcpAuth, IdentityKubernetesAuth, - IdentityUniversalAuth} from "./types"; + IdentityMembership, + IdentityMembershipOrg, + IdentityOidcAuth, + IdentityTokenAuth, + IdentityUniversalAuth +} from "./types"; export const identitiesKeys = { + getIdentityById: (identityId: string) => [{ identityId }, "identity"] as const, getIdentityUniversalAuth: (identityId: string) => [{ identityId }, "identity-universal-auth"] as const, getIdentityUniversalAuthClientSecrets: (identityId: string) => @@ -18,13 +25,56 @@ export const identitiesKeys = { getIdentityKubernetesAuth: (identityId: string) => [{ identityId }, "identity-kubernetes-auth"] as const, getIdentityGcpAuth: (identityId: string) => [{ identityId }, "identity-gcp-auth"] as const, + getIdentityOidcAuth: (identityId: string) => [{ identityId }, "identity-oidc-auth"] as const, getIdentityAwsAuth: (identityId: string) => [{ identityId }, "identity-aws-auth"] as const, - getIdentityAzureAuth: (identityId: string) => [{ identityId }, "identity-azure-auth"] as const + getIdentityAzureAuth: (identityId: string) => [{ identityId }, "identity-azure-auth"] as const, + getIdentityTokenAuth: (identityId: string) => [{ identityId }, "identity-token-auth"] as const, + getIdentityTokensTokenAuth: (identityId: string) => + [{ identityId }, "identity-tokens-token-auth"] as const, + getIdentityProjectMemberships: (identityId: string) => + [{ identityId }, "identity-project-memberships"] as const }; -export const useGetIdentityUniversalAuth = (identityId: string) => { +export const useGetIdentityById = (identityId: string) => { return useQuery({ enabled: Boolean(identityId), + queryKey: identitiesKeys.getIdentityById(identityId), + queryFn: async () => { + const { + data: { identity } + } = await apiRequest.get<{ identity: IdentityMembershipOrg }>( + `/api/v1/identities/${identityId}` + ); + return identity; + } + }); +}; + +export const useGetIdentityProjectMemberships = (identityId: string) => { + return useQuery({ + enabled: Boolean(identityId), + queryKey: identitiesKeys.getIdentityProjectMemberships(identityId), + queryFn: async () => { + const { + data: { identityMemberships } + } = await apiRequest.get<{ identityMemberships: IdentityMembership[] }>( + `/api/v1/identities/${identityId}/identity-memberships` + ); + return identityMemberships; + } + }); +}; + +export const useGetIdentityUniversalAuth = ( + identityId: string, + options?: UseQueryOptions< + IdentityUniversalAuth, + unknown, + IdentityUniversalAuth, + ReturnType + > +) => { + return useQuery({ queryKey: identitiesKeys.getIdentityUniversalAuth(identityId), queryFn: async () => { const { @@ -33,7 +83,11 @@ export const useGetIdentityUniversalAuth = (identityId: string) => { `/api/v1/auth/universal-auth/identities/${identityId}` ); return identityUniversalAuth; - } + }, + cacheTime: 0, + staleTime: 0, + ...options, + enabled: Boolean(identityId) && (options?.enabled ?? 
true) }); }; @@ -52,9 +106,16 @@ export const useGetIdentityUniversalAuthClientSecrets = (identityId: string) => }); }; -export const useGetIdentityGcpAuth = (identityId: string) => { +export const useGetIdentityGcpAuth = ( + identityId: string, + options?: UseQueryOptions< + IdentityGcpAuth, + unknown, + IdentityGcpAuth, + ReturnType + > +) => { return useQuery({ - enabled: Boolean(identityId), queryKey: identitiesKeys.getIdentityGcpAuth(identityId), queryFn: async () => { const { @@ -63,13 +124,24 @@ export const useGetIdentityGcpAuth = (identityId: string) => { `/api/v1/auth/gcp-auth/identities/${identityId}` ); return identityGcpAuth; - } + }, + staleTime: 0, + cacheTime: 0, + ...options, + enabled: Boolean(identityId) && (options?.enabled ?? true) }); }; -export const useGetIdentityAwsAuth = (identityId: string) => { +export const useGetIdentityAwsAuth = ( + identityId: string, + options?: UseQueryOptions< + IdentityAwsAuth, + unknown, + IdentityAwsAuth, + ReturnType + > +) => { return useQuery({ - enabled: Boolean(identityId), queryKey: identitiesKeys.getIdentityAwsAuth(identityId), queryFn: async () => { const { @@ -78,13 +150,24 @@ export const useGetIdentityAwsAuth = (identityId: string) => { `/api/v1/auth/aws-auth/identities/${identityId}` ); return identityAwsAuth; - } + }, + staleTime: 0, + cacheTime: 0, + ...options, + enabled: Boolean(identityId) && (options?.enabled ?? true) }); }; -export const useGetIdentityAzureAuth = (identityId: string) => { +export const useGetIdentityAzureAuth = ( + identityId: string, + options?: UseQueryOptions< + IdentityAzureAuth, + unknown, + IdentityAzureAuth, + ReturnType + > +) => { return useQuery({ - enabled: Boolean(identityId), queryKey: identitiesKeys.getIdentityAzureAuth(identityId), queryFn: async () => { const { @@ -93,13 +176,24 @@ export const useGetIdentityAzureAuth = (identityId: string) => { `/api/v1/auth/azure-auth/identities/${identityId}` ); return identityAzureAuth; - } + }, + staleTime: 0, + cacheTime: 0, + ...options, + enabled: Boolean(identityId) && (options?.enabled ?? true) }); }; -export const useGetIdentityKubernetesAuth = (identityId: string) => { +export const useGetIdentityKubernetesAuth = ( + identityId: string, + options?: UseQueryOptions< + IdentityKubernetesAuth, + unknown, + IdentityKubernetesAuth, + ReturnType + > +) => { return useQuery({ - enabled: Boolean(identityId), queryKey: identitiesKeys.getIdentityKubernetesAuth(identityId), queryFn: async () => { const { @@ -108,6 +202,77 @@ export const useGetIdentityKubernetesAuth = (identityId: string) => { `/api/v1/auth/kubernetes-auth/identities/${identityId}` ); return identityKubernetesAuth; + }, + staleTime: 0, + cacheTime: 0, + ...options, + enabled: Boolean(identityId) && (options?.enabled ?? true) + }); +}; + +export const useGetIdentityTokenAuth = ( + identityId: string, + options?: UseQueryOptions< + IdentityTokenAuth, + unknown, + IdentityTokenAuth, + ReturnType + > +) => { + return useQuery({ + queryKey: identitiesKeys.getIdentityTokenAuth(identityId), + queryFn: async () => { + const { + data: { identityTokenAuth } + } = await apiRequest.get<{ identityTokenAuth: IdentityTokenAuth }>( + `/api/v1/auth/token-auth/identities/${identityId}` + ); + return identityTokenAuth; + }, + staleTime: 0, + cacheTime: 0, + ...options, + enabled: Boolean(identityId) && (options?.enabled ?? 
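/* default: run whenever identityId is set, unless the caller explicitly passes enabled: false */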
true) + }); +}; + +export const useGetIdentityTokensTokenAuth = (identityId: string) => { + return useQuery({ + enabled: Boolean(identityId), + queryKey: identitiesKeys.getIdentityTokensTokenAuth(identityId), + queryFn: async () => { + const { + data: { tokens } + } = await apiRequest.get<{ tokens: IdentityAccessToken[] }>( + `/api/v1/auth/token-auth/identities/${identityId}/tokens` + ); + return tokens; } }); }; + +export const useGetIdentityOidcAuth = ( + identityId: string, + options?: UseQueryOptions< + IdentityOidcAuth, + unknown, + IdentityOidcAuth, + ReturnType + > +) => { + return useQuery({ + queryKey: identitiesKeys.getIdentityOidcAuth(identityId), + queryFn: async () => { + const { + data: { identityOidcAuth } + } = await apiRequest.get<{ identityOidcAuth: IdentityOidcAuth }>( + `/api/v1/auth/oidc-auth/identities/${identityId}` + ); + return identityOidcAuth; + }, + staleTime: 0, + cacheTime: 0, + ...options, + enabled: Boolean(identityId) && (options?.enabled ?? true) + }); +}; diff --git a/frontend/src/hooks/api/identities/types.ts b/frontend/src/hooks/api/identities/types.ts index 80d066c720..559a019749 100644 --- a/frontend/src/hooks/api/identities/types.ts +++ b/frontend/src/hooks/api/identities/types.ts @@ -1,4 +1,5 @@ import { TOrgRole } from "../roles/types"; +import { ProjectUserMembershipTemporaryMode, Workspace } from "../workspace/types"; import { IdentityAuthMethod } from "./enums"; export type IdentityTrustedIp = { @@ -11,15 +12,32 @@ export type IdentityTrustedIp = { export type Identity = { id: string; name: string; - authMethod?: IdentityAuthMethod; + authMethods: IdentityAuthMethod[]; createdAt: string; updatedAt: string; }; +export type IdentityAccessToken = { + id: string; + accessTokenTTL: number; + accessTokenMaxTTL: number; + accessTokenNumUses: number; + accessTokenNumUsesLimit: number; + accessTokenLastUsedAt: string | null; + accessTokenLastRenewedAt: string | null; + isAccessTokenRevoked: boolean; + identityUAClientSecretId: string | null; + identityId: string; + createdAt: string; + updatedAt: string; + name: string | null; +}; + export type IdentityMembershipOrg = { id: string; identity: Identity; organization: string; + metadata: { key: string; value: string; id: string }[]; role: "admin" | "member" | "viewer" | "no-access" | "custom"; customRole?: TOrgRole; createdAt: string; @@ -29,6 +47,7 @@ export type IdentityMembershipOrg = { export type IdentityMembership = { id: string; identity: Identity; + project: Pick; roles: Array< { id: string; @@ -47,7 +66,7 @@ export type IdentityMembership = { | { isTemporary: true; temporaryRange: string; - temporaryMode: string; + temporaryMode: ProjectUserMembershipTemporaryMode; temporaryAccessEndTime: string; temporaryAccessStartTime: string; } @@ -61,6 +80,7 @@ export type CreateIdentityDTO = { name: string; organizationId: string; role?: string; + metadata?: { key: string; value: string }[]; }; export type UpdateIdentityDTO = { @@ -68,6 +88,7 @@ export type UpdateIdentityDTO = { name?: string; role?: string; organizationId: string; + metadata?: { key: string; value: string }[]; }; export type DeleteIdentityDTO = { @@ -113,6 +134,11 @@ export type UpdateIdentityUniversalAuthDTO = { }[]; }; +export type DeleteIdentityUniversalAuthDTO = { + organizationId: string; + identityId: string; +}; + export type IdentityGcpAuth = { identityId: string; type: "iam" | "gce"; @@ -155,6 +181,64 @@ export type UpdateIdentityGcpAuthDTO = { }[]; }; +export type DeleteIdentityGcpAuthDTO = { + organizationId: string; + identityId: 
string; +}; + +export type IdentityOidcAuth = { + identityId: string; + oidcDiscoveryUrl: string; + caCert: string; + boundIssuer: string; + boundAudiences: string; + boundClaims: Record; + boundSubject: string; + accessTokenTTL: number; + accessTokenMaxTTL: number; + accessTokenNumUsesLimit: number; + accessTokenTrustedIps: IdentityTrustedIp[]; +}; + +export type AddIdentityOidcAuthDTO = { + organizationId: string; + identityId: string; + oidcDiscoveryUrl: string; + caCert: string; + boundIssuer: string; + boundAudiences: string; + boundClaims: Record; + boundSubject: string; + accessTokenTTL: number; + accessTokenMaxTTL: number; + accessTokenNumUsesLimit: number; + accessTokenTrustedIps: { + ipAddress: string; + }[]; +}; + +export type UpdateIdentityOidcAuthDTO = { + organizationId: string; + identityId: string; + oidcDiscoveryUrl?: string; + caCert?: string; + boundIssuer?: string; + boundAudiences?: string; + boundClaims?: Record; + boundSubject?: string; + accessTokenTTL?: number; + accessTokenMaxTTL?: number; + accessTokenNumUsesLimit?: number; + accessTokenTrustedIps?: { + ipAddress: string; + }[]; +}; + +export type DeleteIdentityOidcAuthDTO = { + organizationId: string; + identityId: string; +}; + export type IdentityAwsAuth = { identityId: string; type: "iam"; @@ -195,6 +279,11 @@ export type UpdateIdentityAwsAuthDTO = { }[]; }; +export type DeleteIdentityAwsAuthDTO = { + organizationId: string; + identityId: string; +}; + export type IdentityAzureAuth = { identityId: string; tenantId: string; @@ -234,6 +323,11 @@ export type UpdateIdentityAzureAuthDTO = { }[]; }; +export type DeleteIdentityAzureAuthDTO = { + organizationId: string; + identityId: string; +}; + export type IdentityKubernetesAuth = { identityId: string; kubernetesHost: string; @@ -282,6 +376,11 @@ export type UpdateIdentityKubernetesAuthDTO = { }[]; }; +export type DeleteIdentityKubernetesAuthDTO = { + organizationId: string; + identityId: string; +}; + export type CreateIdentityUniversalAuthClientSecretDTO = { identityId: string; description?: string; @@ -311,3 +410,70 @@ export type DeleteIdentityUniversalAuthClientSecretDTO = { identityId: string; clientSecretId: string; }; + +export type IdentityTokenAuth = { + identityId: string; + accessTokenTTL: number; + accessTokenMaxTTL: number; + accessTokenNumUsesLimit: number; + accessTokenTrustedIps: IdentityTrustedIp[]; +}; + +export type AddIdentityTokenAuthDTO = { + organizationId: string; + identityId: string; + accessTokenTTL: number; + accessTokenMaxTTL: number; + accessTokenNumUsesLimit: number; + accessTokenTrustedIps: { + ipAddress: string; + }[]; +}; + +export type UpdateIdentityTokenAuthDTO = { + organizationId: string; + identityId: string; + accessTokenTTL?: number; + accessTokenMaxTTL?: number; + accessTokenNumUsesLimit?: number; + accessTokenTrustedIps?: { + ipAddress: string; + }[]; +}; + +export type DeleteIdentityTokenAuthDTO = { + organizationId: string; + identityId: string; +}; + +export type CreateTokenIdentityTokenAuthDTO = { + identityId: string; + name: string; +}; + +export type CreateTokenIdentityTokenAuthRes = { + accessToken: string; + tokenType: string; + expiresIn: number; + accessTokenMaxTTL: number; +}; + +export type UpdateTokenIdentityTokenAuthDTO = { + identityId: string; + tokenId: string; + name?: string; +}; + +export type RevokeTokenDTO = { + identityId: string; + tokenId: string; +}; + +export type RevokeTokenRes = { + message: string; +}; + +export type TProjectIdentitiesList = { + identityMemberships: IdentityMembership[]; + 
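// total number of matching identity memberships (not just the returned page), for pagination controls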
totalCount: number; +}; diff --git a/frontend/src/hooks/api/identityProjectAdditionalPrivilege/mutation.tsx b/frontend/src/hooks/api/identityProjectAdditionalPrivilege/mutation.tsx index bb3f6ca88b..ab8a2e5d17 100644 --- a/frontend/src/hooks/api/identityProjectAdditionalPrivilege/mutation.tsx +++ b/frontend/src/hooks/api/identityProjectAdditionalPrivilege/mutation.tsx @@ -15,16 +15,11 @@ export const useCreateIdentityProjectAdditionalPrivilege = () => { return useMutation({ mutationFn: async (dto) => { - const { data } = await apiRequest.post( - "/api/v1/additional-privilege/identity/permanent", - dto - ); + const { data } = await apiRequest.post("/api/v2/identity-project-additional-privilege", dto); return data.privilege; }, - onSuccess: (_, { projectSlug, identityId }) => { - queryClient.invalidateQueries( - identitiyProjectPrivilegeKeys.list({ projectSlug, identityId }) - ); + onSuccess: (_, { projectId, identityId }) => { + queryClient.invalidateQueries(identitiyProjectPrivilegeKeys.list({ projectId, identityId })); } }); }; @@ -33,19 +28,22 @@ export const useUpdateIdentityProjectAdditionalPrivilege = () => { const queryClient = useQueryClient(); return useMutation({ - mutationFn: async ({ privilegeSlug, projectSlug, identityId, privilegeDetails }) => { - const { data: res } = await apiRequest.patch("/api/v1/additional-privilege/identity", { - privilegeSlug, - projectSlug, - identityId, - privilegeDetails - }); + mutationFn: async ({ projectId, privilegeId, identityId, permissions, slug, type }) => { + const { data: res } = await apiRequest.patch( + `/api/v2/identity-project-additional-privilege/${privilegeId}`, + { + privilegeId, + projectId, + identityId, + permissions, + slug, + type + } + ); return res.privilege; }, - onSuccess: (_, { projectSlug, identityId }) => { - queryClient.invalidateQueries( - identitiyProjectPrivilegeKeys.list({ projectSlug, identityId }) - ); + onSuccess: (_, { projectId, identityId }) => { + queryClient.invalidateQueries(identitiyProjectPrivilegeKeys.list({ projectId, identityId })); } }); }; @@ -54,20 +52,21 @@ export const useDeleteIdentityProjectAdditionalPrivilege = () => { const queryClient = useQueryClient(); return useMutation({ - mutationFn: async ({ identityId, projectSlug, privilegeSlug }) => { - const { data } = await apiRequest.delete("/api/v1/additional-privilege/identity", { - data: { - identityId, - projectSlug, - privilegeSlug + mutationFn: async ({ identityId, projectId, privilegeId }) => { + const { data } = await apiRequest.delete( + `/api/v2/identity-project-additional-privilege/${privilegeId}`, + { + data: { + identityId, + privilegeId, + projectId + } } - }); + ); return data.privilege; }, - onSuccess: (_, { projectSlug, identityId }) => { - queryClient.invalidateQueries( - identitiyProjectPrivilegeKeys.list({ projectSlug, identityId }) - ); + onSuccess: (_, { projectId, identityId }) => { + queryClient.invalidateQueries(identitiyProjectPrivilegeKeys.list({ projectId, identityId })); } }); }; diff --git a/frontend/src/hooks/api/identityProjectAdditionalPrivilege/queries.tsx b/frontend/src/hooks/api/identityProjectAdditionalPrivilege/queries.tsx index e4bd141fb3..8504f0423a 100644 --- a/frontend/src/hooks/api/identityProjectAdditionalPrivilege/queries.tsx +++ b/frontend/src/hooks/api/identityProjectAdditionalPrivilege/queries.tsx @@ -9,36 +9,36 @@ import { } from "./types"; export const identitiyProjectPrivilegeKeys = { - details: ({ identityId, privilegeSlug, projectSlug }: TGetIdentityProjectPrivilegeDetails) => + details: ({ 
identityId, privilegeId, projectId }: TGetIdentityProjectPrivilegeDetails) => [ "identity-user-privilege", { identityId, - projectSlug, - privilegeSlug + projectId, + privilegeId } ] as const, - list: ({ projectSlug, identityId }: TListIdentityProjectPrivileges) => - ["identity-user-privileges", { identityId, projectSlug }] as const + list: ({ projectId, identityId }: TListIdentityProjectPrivileges) => + ["identity-user-privileges", { identityId, projectId }] as const }; export const useGetIdentityProjectPrivilegeDetails = ({ - projectSlug, + projectId, identityId, - privilegeSlug + privilegeId }: TGetIdentityProjectPrivilegeDetails) => { return useQuery({ - enabled: Boolean(projectSlug && identityId && privilegeSlug), - queryKey: identitiyProjectPrivilegeKeys.details({ projectSlug, privilegeSlug, identityId }), + enabled: Boolean(projectId && identityId && privilegeId), + queryKey: identitiyProjectPrivilegeKeys.details({ projectId, privilegeId, identityId }), queryFn: async () => { const { data: { privilege } } = await apiRequest.get<{ privilege: TIdentityProjectPrivilege; - }>(`/api/v1/additional-privilege/identity/${privilegeSlug}`, { + }>(`/api/v2/identity-project-additional-privilege/${privilegeId}`, { params: { identityId, - projectSlug + projectId } }); return privilege; @@ -47,19 +47,19 @@ export const useGetIdentityProjectPrivilegeDetails = ({ }; export const useListIdentityProjectPrivileges = ({ - projectSlug, + projectId, identityId }: TListIdentityProjectPrivileges) => { return useQuery({ - enabled: Boolean(projectSlug && identityId), - queryKey: identitiyProjectPrivilegeKeys.list({ projectSlug, identityId }), + enabled: Boolean(projectId && identityId), + queryKey: identitiyProjectPrivilegeKeys.list({ projectId, identityId }), queryFn: async () => { const { data: { privileges } } = await apiRequest.get<{ privileges: Array; - }>("/api/v1/additional-privilege/identity", { - params: { identityId, projectSlug } + }>("/api/v2/identity-project-additional-privilege", { + params: { identityId, projectId } }); return privileges; } diff --git a/frontend/src/hooks/api/identityProjectAdditionalPrivilege/types.tsx b/frontend/src/hooks/api/identityProjectAdditionalPrivilege/types.tsx index df04f3e8ab..1e070e2bb7 100644 --- a/frontend/src/hooks/api/identityProjectAdditionalPrivilege/types.tsx +++ b/frontend/src/hooks/api/identityProjectAdditionalPrivilege/types.tsx @@ -28,48 +28,42 @@ export type TIdentityProjectPrivilege = { } ); -export type TProjectSpecificPrivilegePermission = { - conditions: { - environment: string; - secretPath?: { $glob: string }; - }; - actions: string[]; - subject: string; -}; - export type TCreateIdentityProjectPrivilegeDTO = { identityId: string; - projectSlug: string; + projectId: string; slug?: string; - isTemporary?: boolean; - temporaryMode?: IdentityProjectAdditionalPrivilegeTemporaryMode; - temporaryRange?: string; - temporaryAccessStartTime?: string; - privilegePermission: TProjectSpecificPrivilegePermission; + type: + | { + isTemporary: true; + temporaryMode?: IdentityProjectAdditionalPrivilegeTemporaryMode; + temporaryRange?: string; + temporaryAccessStartTime?: string; + } + | { + isTemporary: false; + }; + permissions: TProjectPermission[]; }; export type TUpdateIdentityProjectPrivlegeDTO = { - projectSlug: string; + projectId: string; identityId: string; - privilegeSlug: string; - privilegeDetails: Partial< - Omit - >; -}; + privilegeId: string; +} & Partial>; export type TDeleteIdentityProjectPrivilegeDTO = { - projectSlug: string; + projectId: 
string; identityId: string; - privilegeSlug: string; + privilegeId: string; }; export type TListIdentityUserPrivileges = { - projectSlug: string; + projectId: string; identityId: string; }; export type TGetIdentityProejctPrivilegeDetails = { - projectSlug: string; + projectId: string; identityId: string; - privilegeSlug: string; + privilegeId: string; }; diff --git a/frontend/src/hooks/api/index.tsx b/frontend/src/hooks/api/index.tsx index 61e8cb6665..551822f09b 100644 --- a/frontend/src/hooks/api/index.tsx +++ b/frontend/src/hooks/api/index.tsx @@ -5,6 +5,9 @@ export * from "./auditLogs"; export * from "./auditLogStreams"; export * from "./auth"; export * from "./bots"; +export * from "./ca"; +export * from "./certificates"; +export * from "./certificateTemplates"; export * from "./dynamicSecret"; export * from "./dynamicSecretLease"; export * from "./groups"; @@ -14,9 +17,15 @@ export * from "./incidentContacts"; export * from "./integrationAuth"; export * from "./integrations"; export * from "./keys"; +export * from "./kms"; export * from "./ldapConfig"; +export * from "./oidcConfig"; +export * from "./orgAdmin"; export * from "./organization"; +export * from "./pkiAlerts"; +export * from "./pkiCollections"; export * from "./projectUserAdditionalPrivilege"; +export * from "./rateLimit"; export * from "./roles"; export * from "./scim"; export * from "./secretApproval"; @@ -25,6 +34,7 @@ export * from "./secretFolders"; export * from "./secretImports"; export * from "./secretRotation"; export * from "./secrets"; +export * from "./secretSharing"; export * from "./secretSnapshots"; export * from "./serverDetails"; export * from "./serviceTokens"; @@ -34,4 +44,5 @@ export * from "./tags"; export * from "./trustedIps"; export * from "./users"; export * from "./webhooks"; +export * from "./workflowIntegrations"; export * from "./workspace"; diff --git a/frontend/src/hooks/api/integrationAuth/index.tsx b/frontend/src/hooks/api/integrationAuth/index.tsx index 5f0503eee7..0ae3511def 100644 --- a/frontend/src/hooks/api/integrationAuth/index.tsx +++ b/frontend/src/hooks/api/integrationAuth/index.tsx @@ -1,3 +1,4 @@ +export { useDuplicateIntegrationAuth } from "./mutations"; export { useAuthorizeIntegration, useDeleteIntegrationAuth, diff --git a/frontend/src/hooks/api/integrationAuth/mutations.tsx b/frontend/src/hooks/api/integrationAuth/mutations.tsx new file mode 100644 index 0000000000..b7a3f18bd6 --- /dev/null +++ b/frontend/src/hooks/api/integrationAuth/mutations.tsx @@ -0,0 +1,19 @@ +import { useMutation } from "@tanstack/react-query"; + +import { apiRequest } from "@app/config/request"; + +import { IntegrationAuth, TDuplicateIntegrationAuthDTO } from "./types"; + +// For now, this should only be used in the Github app integration flow. 
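+// A hedged usage sketch: the variable names below (existingAuth, targetProjectId)
+// are illustrative assumptions, not part of this file.
+//
+//   const { mutateAsync: duplicateIntegrationAuth } = useDuplicateIntegrationAuth();
+//   const integrationAuth = await duplicateIntegrationAuth({
+//     integrationAuthId: existingAuth.id, // auth produced by the GitHub App installation
+//     projectId: targetProjectId // project that should reuse the same installation
+//   });
+//
+// There is no onSuccess invalidation below, so callers are expected to refetch
+// or invalidate their own integration-auth queries after duplicating.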
+export const useDuplicateIntegrationAuth = () => { + return useMutation({ + mutationFn: async (body) => { + const { data } = await apiRequest.post<{ integrationAuth: IntegrationAuth }>( + `/api/v1/integration-auth/${body.integrationAuthId}/duplicate`, + body + ); + + return data.integrationAuth; + } + }); +}; diff --git a/frontend/src/hooks/api/integrationAuth/queries.tsx b/frontend/src/hooks/api/integrationAuth/queries.tsx index d800e53130..69f2d99d89 100644 --- a/frontend/src/hooks/api/integrationAuth/queries.tsx +++ b/frontend/src/hooks/api/integrationAuth/queries.tsx @@ -1,10 +1,11 @@ -import { useMutation, useQuery, useQueryClient } from "@tanstack/react-query"; +import { useMutation, useQuery, useQueryClient, UseQueryOptions } from "@tanstack/react-query"; import { apiRequest } from "@app/config/request"; -import { workspaceKeys } from "../workspace/queries"; +import { workspaceKeys } from "../workspace"; import { App, + BitBucketEnvironment, BitBucketWorkspace, ChecklyGroup, Environment, @@ -94,6 +95,17 @@ const integrationAuthKeys = { }) => [{ integrationAuthId, appId }, "integrationAuthRailwayServices"] as const, getIntegrationAuthBitBucketWorkspaces: (integrationAuthId: string) => [{ integrationAuthId }, "integrationAuthBitbucketWorkspaces"] as const, + getIntegrationAuthBitBucketEnvironments: ( + integrationAuthId: string, + workspaceSlug: string, + repoSlug: string + ) => + [ + { integrationAuthId }, + workspaceSlug, + repoSlug, + "integrationAuthBitbucketEnvironments" + ] as const, getIntegrationAuthNorthflankSecretGroups: ({ integrationAuthId, appId @@ -120,16 +132,22 @@ const fetchIntegrationAuthById = async (integrationAuthId: string) => { const fetchIntegrationAuthApps = async ({ integrationAuthId, teamId, + azureDevOpsOrgName, workspaceSlug }: { integrationAuthId: string; teamId?: string; + azureDevOpsOrgName?: string; workspaceSlug?: string; }) => { const params: Record = {}; if (teamId) { params.teamId = teamId; } + if (azureDevOpsOrgName) { + params.azureDevOpsOrgName = azureDevOpsOrgName; + } + if (workspaceSlug) { params.workspaceSlug = workspaceSlug; } @@ -397,6 +415,25 @@ const fetchIntegrationAuthBitBucketWorkspaces = async (integrationAuthId: string return workspaces; }; +const fetchIntegrationAuthBitBucketEnvironments = async ( + integrationAuthId: string, + workspaceSlug: string, + repoSlug: string +) => { + const { + data: { environments } + } = await apiRequest.get<{ environments: BitBucketEnvironment[] }>( + `/api/v1/integration-auth/${integrationAuthId}/bitbucket/environments`, + { + params: { + workspaceSlug, + repoSlug + } + } + ); + return environments; +}; + const fetchIntegrationAuthNorthflankSecretGroups = async ({ integrationAuthId, appId @@ -452,10 +489,12 @@ export const useGetIntegrationAuthById = (integrationAuthId: string) => { export const useGetIntegrationAuthApps = ({ integrationAuthId, teamId, + azureDevOpsOrgName, workspaceSlug }: { integrationAuthId: string; teamId?: string; + azureDevOpsOrgName?: string; workspaceSlug?: string; }) => { return useQuery({ @@ -464,6 +503,7 @@ export const useGetIntegrationAuthApps = ({ fetchIntegrationAuthApps({ integrationAuthId, teamId, + azureDevOpsOrgName, workspaceSlug }), enabled: true @@ -718,6 +758,30 @@ export const useGetIntegrationAuthBitBucketWorkspaces = (integrationAuthId: stri }); }; +export const useGetIntegrationAuthBitBucketEnvironments = ( + { + integrationAuthId, + workspaceSlug, + repoSlug + }: { + integrationAuthId: string; + workspaceSlug: string; + repoSlug: string; + }, + options?: 
UseQueryOptions +) => { + return useQuery({ + queryKey: integrationAuthKeys.getIntegrationAuthBitBucketEnvironments( + integrationAuthId, + workspaceSlug, + repoSlug + ), + queryFn: () => + fetchIntegrationAuthBitBucketEnvironments(integrationAuthId, workspaceSlug, repoSlug), + ...options + }); +}; + export const useGetIntegrationAuthNorthflankSecretGroups = ({ integrationAuthId, appId @@ -768,11 +832,13 @@ export const useAuthorizeIntegration = () => { workspaceId, code, integration, + installationId, url }: { workspaceId: string; code: string; integration: string; + installationId?: string; url?: string; }) => { const { @@ -781,6 +847,7 @@ export const useAuthorizeIntegration = () => { workspaceId, code, integration, + installationId, url }); @@ -802,6 +869,7 @@ export const useSaveIntegrationAccessToken = () => { refreshToken, accessId, accessToken, + awsAssumeIamRoleArn, url, namespace }: { @@ -810,6 +878,7 @@ export const useSaveIntegrationAccessToken = () => { refreshToken?: string; accessId?: string; accessToken?: string; + awsAssumeIamRoleArn?: string; url?: string; namespace?: string; }) => { @@ -821,6 +890,7 @@ export const useSaveIntegrationAccessToken = () => { refreshToken, accessId, accessToken, + awsAssumeIamRoleArn, url, namespace }); diff --git a/frontend/src/hooks/api/integrationAuth/types.ts b/frontend/src/hooks/api/integrationAuth/types.ts index b73528384c..ec24ce8440 100644 --- a/frontend/src/hooks/api/integrationAuth/types.ts +++ b/frontend/src/hooks/api/integrationAuth/types.ts @@ -9,6 +9,10 @@ export type IntegrationAuth = { keyEncoding: string; url?: string; teamId?: string; + metadata: { + installationName?: string; + installationId?: string; + }; }; export type App = { @@ -75,6 +79,12 @@ export type BitBucketWorkspace = { slug: string; }; +export type BitBucketEnvironment = { + uuid: string; + name: string; + slug: string; +}; + export type NorthflankSecretGroup = { name: string; groupId: string; @@ -84,3 +94,8 @@ export type TeamCityBuildConfig = { name: string; buildConfigId: string; }; + +export type TDuplicateIntegrationAuthDTO = { + integrationAuthId: string; + projectId: string; +}; diff --git a/frontend/src/hooks/api/integrations/index.tsx b/frontend/src/hooks/api/integrations/index.tsx index f91d85644d..9d43c33ad2 100644 --- a/frontend/src/hooks/api/integrations/index.tsx +++ b/frontend/src/hooks/api/integrations/index.tsx @@ -1 +1,6 @@ -export { useCreateIntegration, useDeleteIntegration, useGetCloudIntegrations } from "./queries"; +export { + useCreateIntegration, + useDeleteIntegration, + useGetCloudIntegrations, + useGetIntegration +} from "./queries"; diff --git a/frontend/src/hooks/api/integrations/queries.tsx b/frontend/src/hooks/api/integrations/queries.tsx index 81d0f00cae..56131f6419 100644 --- a/frontend/src/hooks/api/integrations/queries.tsx +++ b/frontend/src/hooks/api/integrations/queries.tsx @@ -1,13 +1,14 @@ -import { useMutation, useQuery, useQueryClient } from "@tanstack/react-query"; +import { useMutation, useQuery, useQueryClient, UseQueryOptions } from "@tanstack/react-query"; import { createNotification } from "@app/components/notifications"; import { apiRequest } from "@app/config/request"; -import { workspaceKeys } from "../workspace/queries"; -import { TCloudIntegration } from "./types"; +import { workspaceKeys } from "../workspace"; +import { TCloudIntegration, TIntegrationWithEnv } from "./types"; export const integrationQueryKeys = { - getIntegrations: () => ["integrations"] as const + getIntegrations: () => ["integrations"] as 
const, + getIntegration: (id: string) => ["integration", id] as const }; const fetchIntegrations = async () => { @@ -18,6 +19,14 @@ const fetchIntegrations = async () => { return data.integrationOptions; }; +const fetchIntegration = async (id: string) => { + const { data } = await apiRequest.get<{ integration: TIntegrationWithEnv }>( + `/api/v1/integration/${id}` + ); + + return data.integration; +}; + export const useGetCloudIntegrations = () => useQuery({ queryKey: integrationQueryKeys.getIntegrations(), @@ -71,6 +80,8 @@ export const useCreateIntegration = () => { key: string; value: string; }[]; + githubVisibility?: string; + githubVisibilityRepoIds?: string[]; kmsKeyId?: string; shouldDisableDelete?: boolean; shouldMaskSecrets?: boolean; @@ -110,8 +121,15 @@ export const useCreateIntegration = () => { export const useDeleteIntegration = () => { const queryClient = useQueryClient(); - return useMutation<{}, {}, { id: string; workspaceId: string }>({ - mutationFn: ({ id }) => apiRequest.delete(`/api/v1/integration/${id}`), + return useMutation< + {}, + {}, + { id: string; workspaceId: string; shouldDeleteIntegrationSecrets: boolean } + >({ + mutationFn: ({ id, shouldDeleteIntegrationSecrets }) => + apiRequest.delete( + `/api/v1/integration/${id}?shouldDeleteIntegrationSecrets=${shouldDeleteIntegrationSecrets}` + ), onSuccess: (_, { workspaceId }) => { queryClient.invalidateQueries(workspaceKeys.getWorkspaceIntegrations(workspaceId)); queryClient.invalidateQueries(workspaceKeys.getWorkspaceAuthorization(workspaceId)); @@ -119,6 +137,26 @@ export const useDeleteIntegration = () => { }); }; +export const useGetIntegration = ( + integrationId: string, + options?: Omit< + UseQueryOptions< + TIntegrationWithEnv, + unknown, + TIntegrationWithEnv, + ReturnType + >, + "queryFn" | "queryKey" + > +) => { + return useQuery({ + ...options, + enabled: Boolean(integrationId && options?.enabled === undefined ? 
true : options?.enabled), + queryKey: integrationQueryKeys.getIntegration(integrationId), + queryFn: () => fetchIntegration(integrationId) + }); +}; + export const useSyncIntegration = () => { return useMutation<{}, {}, { id: string; workspaceId: string; lastUsed: string }>({ mutationFn: ({ id }) => apiRequest.post(`/api/v1/integration/${id}/sync`), diff --git a/frontend/src/hooks/api/integrations/types.ts b/frontend/src/hooks/api/integrations/types.ts index 21e6bff26b..bfaa73884f 100644 --- a/frontend/src/hooks/api/integrations/types.ts +++ b/frontend/src/hooks/api/integrations/types.ts @@ -34,13 +34,37 @@ export type TIntegration = { syncMessage?: string; __v: number; metadata?: { + githubVisibility?: string; + githubVisibilityRepoIds?: string[]; + shouldAutoRedeploy?: boolean; + secretAWSTag?: { + key: string; + value: string; + }[]; + + kmsKeyId?: string; secretSuffix?: string; + secretPrefix?: string; syncBehavior?: IntegrationSyncBehavior; + initialSyncBehavior: IntegrationSyncBehavior; mappingBehavior?: IntegrationMappingBehavior; scope: string; org: string; project: string; environment: string; + + shouldDisableDelete?: boolean; + shouldMaskSecrets?: boolean; + shouldProtectSecrets?: boolean; + shouldEnableDelete?: boolean; + }; +}; + +export type TIntegrationWithEnv = TIntegration & { + environment: { + id: string; + name: string; + slug: string; }; }; diff --git a/frontend/src/hooks/api/kms/index.tsx b/frontend/src/hooks/api/kms/index.tsx new file mode 100644 index 0000000000..84b238a3b7 --- /dev/null +++ b/frontend/src/hooks/api/kms/index.tsx @@ -0,0 +1,8 @@ +export { + useAddExternalKms, + useLoadProjectKmsBackup, + useRemoveExternalKms, + useUpdateExternalKms, + useUpdateProjectKms +} from "./mutations"; +export { useGetActiveProjectKms, useGetExternalKmsById, useGetExternalKmsList } from "./queries"; diff --git a/frontend/src/hooks/api/kms/mutations.tsx b/frontend/src/hooks/api/kms/mutations.tsx new file mode 100644 index 0000000000..f0c623ceb1 --- /dev/null +++ b/frontend/src/hooks/api/kms/mutations.tsx @@ -0,0 +1,98 @@ +import { useMutation, useQueryClient } from "@tanstack/react-query"; + +import { apiRequest } from "@app/config/request"; + +import { kmsKeys } from "./queries"; +import { AddExternalKmsType, KmsType } from "./types"; + +export const useAddExternalKms = (orgId: string) => { + const queryClient = useQueryClient(); + return useMutation({ + mutationFn: async ({ name, description, provider }: AddExternalKmsType) => { + const { data } = await apiRequest.post("/api/v1/external-kms", { + name, + description, + provider + }); + + return data; + }, + onSuccess: () => { + queryClient.invalidateQueries(kmsKeys.getExternalKmsList(orgId)); + } + }); +}; + +export const useUpdateExternalKms = (orgId: string) => { + const queryClient = useQueryClient(); + return useMutation({ + mutationFn: async ({ + kmsId, + name, + description, + provider + }: { + kmsId: string; + } & AddExternalKmsType) => { + const { data } = await apiRequest.patch(`/api/v1/external-kms/${kmsId}`, { + name, + description, + provider + }); + + return data; + }, + onSuccess: (_, { kmsId }) => { + queryClient.invalidateQueries(kmsKeys.getExternalKmsList(orgId)); + queryClient.invalidateQueries(kmsKeys.getExternalKmsById(kmsId)); + } + }); +}; + +export const useRemoveExternalKms = (orgId: string) => { + const queryClient = useQueryClient(); + return useMutation({ + mutationFn: async (kmsId: string) => { + const { data } = await apiRequest.delete(`/api/v1/external-kms/${kmsId}`); + + return data; + }, + 
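+ // usage sketch (hypothetical caller; `currentOrg` and `kmsId` are assumed to be in scope):
+ //   const removeKms = useRemoveExternalKms(currentOrg.id);
+ //   removeKms.mutate(kmsId);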
onSuccess: () => { + queryClient.invalidateQueries(kmsKeys.getExternalKmsList(orgId)); + } + }); +}; + +export const useUpdateProjectKms = (projectId: string) => { + const queryClient = useQueryClient(); + return useMutation({ + mutationFn: async ( + updatedData: { type: KmsType.Internal } | { type: KmsType.External; kmsId: string } + ) => { + const { data } = await apiRequest.patch(`/api/v1/workspace/${projectId}/kms`, { + kms: updatedData + }); + + return data; + }, + onSuccess: () => { + queryClient.invalidateQueries(kmsKeys.getActiveProjectKms(projectId)); + } + }); +}; + +export const useLoadProjectKmsBackup = (projectId: string) => { + const queryClient = useQueryClient(); + return useMutation({ + mutationFn: async (backup: string) => { + const { data } = await apiRequest.post(`/api/v1/workspace/${projectId}/kms/backup`, { + backup + }); + + return data; + }, + onSuccess: () => { + queryClient.invalidateQueries(kmsKeys.getActiveProjectKms(projectId)); + } + }); +}; diff --git a/frontend/src/hooks/api/kms/queries.tsx b/frontend/src/hooks/api/kms/queries.tsx new file mode 100644 index 0000000000..9efa9cc512 --- /dev/null +++ b/frontend/src/hooks/api/kms/queries.tsx @@ -0,0 +1,64 @@ +import { useQuery } from "@tanstack/react-query"; + +import { apiRequest } from "@app/config/request"; + +import { Kms, KmsListEntry } from "./types"; + +export const kmsKeys = { + getExternalKmsList: (orgId: string) => ["get-all-external-kms", { orgId }], + getExternalKmsById: (id: string) => ["get-external-kms", { id }], + getActiveProjectKms: (projectId: string) => ["get-active-project-kms", { projectId }] +}; + +export const useGetExternalKmsList = (orgId: string, { enabled }: { enabled?: boolean } = {}) => { + return useQuery({ + queryKey: kmsKeys.getExternalKmsList(orgId), + enabled, + queryFn: async () => { + const { + data: { externalKmsList } + } = await apiRequest.get<{ externalKmsList: KmsListEntry[] }>("/api/v1/external-kms"); + return externalKmsList; + } + }); +}; + +export const useGetExternalKmsById = (kmsId: string) => { + return useQuery({ + queryKey: kmsKeys.getExternalKmsById(kmsId), + enabled: Boolean(kmsId), + queryFn: async () => { + const { + data: { externalKms } + } = await apiRequest.get<{ externalKms: Kms }>(`/api/v1/external-kms/${kmsId}`); + return externalKms; + } + }); +}; + +export const useGetActiveProjectKms = (projectId: string) => { + return useQuery({ + queryKey: kmsKeys.getActiveProjectKms(projectId), + enabled: Boolean(projectId), + queryFn: async () => { + const { + data: { secretManagerKmsKey } + } = await apiRequest.get<{ + secretManagerKmsKey: { + id: string; + name: string; + isExternal: string; + }; + }>(`/api/v1/workspace/${projectId}/kms`); + return secretManagerKmsKey; + } + }); +}; + +export const fetchProjectKmsBackup = async (projectId: string) => { + const { data } = await apiRequest.get<{ + secretManager: string; + }>(`/api/v1/workspace/${projectId}/kms/backup`); + + return data; +}; diff --git a/frontend/src/hooks/api/kms/types.ts b/frontend/src/hooks/api/kms/types.ts new file mode 100644 index 0000000000..513b59e2a1 --- /dev/null +++ b/frontend/src/hooks/api/kms/types.ts @@ -0,0 +1,102 @@ +import slugify from "@sindresorhus/slugify"; +import { z } from "zod"; + +export type Kms = { + id: string; + description: string; + orgId: string; + name: string; + external: { + id: string; + status: string; + statusDetails: string; + provider: string; + providerInput: Record; + }; +}; + +export type KmsListEntry = { + id: string; + description: string; + 
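+ // display fields for the KMS list view; credential inputs live only on the full Kms type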
isDisabled: boolean; + createdAt: string; + updatedAt: string; + name: string; + externalKms: { + provider: string; + status: string; + statusDetails: string; + }; +}; + +export enum KmsType { + Internal = "internal", + External = "external" +} + +export enum ExternalKmsProvider { + AWS = "aws" +} + +export const INTERNAL_KMS_KEY_ID = "internal"; + +export enum KmsAwsCredentialType { + AssumeRole = "assume-role", + AccessKey = "access-key" +} + +export const ExternalKmsAwsSchema = z.object({ + credential: z + .discriminatedUnion("type", [ + z.object({ + type: z.literal(KmsAwsCredentialType.AccessKey), + data: z.object({ + accessKey: z.string().trim().min(1).describe("AWS user account access key"), + secretKey: z.string().trim().min(1).describe("AWS user account secret key") + }) + }), + z.object({ + type: z.literal(KmsAwsCredentialType.AssumeRole), + data: z.object({ + assumeRoleArn: z + .string() + .trim() + .min(1) + .describe("AWS user role to be assumed by Infisical"), + externalId: z + .string() + .trim() + .min(1) + .optional() + .describe("AWS assume role external ID for further security in authentication") + }) + }) + ]) + .describe("AWS credential information to connect"), + awsRegion: z.string().min(1).trim().describe("AWS region to connect"), + kmsKeyId: z + .string() + .trim() + .optional() + .describe( + "A pre-existing AWS KMS key ID to be used for encryption. If not provided, a KMS key will be generated." + ) +}); + +export const ExternalKmsInputSchema = z.discriminatedUnion("type", [ + z.object({ type: z.literal(ExternalKmsProvider.AWS), inputs: ExternalKmsAwsSchema }) +]); + +export const AddExternalKmsSchema = z.object({ + name: z + .string() + .trim() + .min(1) + .refine((v) => slugify(v) === v, { + message: "Alias must be a valid slug" + }), + description: z.string().trim().optional(), + provider: ExternalKmsInputSchema +}); + +export type AddExternalKmsType = z.infer<typeof AddExternalKmsSchema>; diff --git a/frontend/src/hooks/api/ldapConfig/mutations.tsx b/frontend/src/hooks/api/ldapConfig/mutations.tsx index a93286cdb1..2014f52bd0 100644 --- a/frontend/src/hooks/api/ldapConfig/mutations.tsx +++ b/frontend/src/hooks/api/ldapConfig/mutations.tsx @@ -13,6 +13,7 @@ export const useCreateLDAPConfig = () => { url, bindDN, bindPass, + uniqueUserAttribute, searchBase, searchFilter, groupSearchBase, @@ -24,6 +25,7 @@ export const useCreateLDAPConfig = () => { url: string; bindDN: string; bindPass: string; + uniqueUserAttribute: string; searchBase: string; searchFilter: string; groupSearchBase: string; @@ -36,6 +38,7 @@ export const useCreateLDAPConfig = () => { url, bindDN, bindPass, + uniqueUserAttribute, searchBase, searchFilter, groupSearchBase, @@ -60,6 +63,7 @@ export const useUpdateLDAPConfig = () => { url, bindDN, bindPass, + uniqueUserAttribute, searchBase, searchFilter, groupSearchBase, @@ -71,6 +75,7 @@ export const useUpdateLDAPConfig = () => { url?: string; bindDN?: string; bindPass?: string; + uniqueUserAttribute?: string; searchBase?: string; searchFilter?: string; groupSearchBase?: string; @@ -83,6 +88,7 @@ export const useUpdateLDAPConfig = () => { url, bindDN, bindPass, + uniqueUserAttribute, searchBase, searchFilter, groupSearchBase, diff --git a/frontend/src/hooks/api/ldapConfig/queries.tsx b/frontend/src/hooks/api/ldapConfig/queries.tsx index c84f90aacf..e92a7a1c76 100644 --- a/frontend/src/hooks/api/ldapConfig/queries.tsx +++ b/frontend/src/hooks/api/ldapConfig/queries.tsx @@ -13,9 +13,15 @@ export const useGetLDAPConfig = (organizationId: string) => { return useQuery({ queryKey:
ldapConfigKeys.getLDAPConfig(organizationId), queryFn: async () => { - const { data } = await apiRequest.get(`/api/v1/ldap/config?organizationId=${organizationId}`); + try { + const { data } = await apiRequest.get( + `/api/v1/ldap/config?organizationId=${organizationId}` + ); - return data; + return data; + } catch (err) { + return null; + } }, enabled: true }); diff --git a/frontend/src/hooks/api/migration/mutations.tsx b/frontend/src/hooks/api/migration/mutations.tsx new file mode 100644 index 0000000000..41d17b0bdb --- /dev/null +++ b/frontend/src/hooks/api/migration/mutations.tsx @@ -0,0 +1,37 @@ +import { useMutation, useQueryClient } from "@tanstack/react-query"; + +import { apiRequest } from "@app/config/request"; + +import { workspaceKeys } from "../workspace"; + +export const useImportEnvKey = () => { + const queryClient = useQueryClient(); + + return useMutation({ + mutationFn: async ({ file, decryptionKey }: { file: File; decryptionKey: string }) => { + const formData = new FormData(); + + formData.append("decryptionKey", decryptionKey); + formData.append("file", file); + + try { + const response = await apiRequest.post("/api/v3/migrate/env-key/", formData, { + headers: { + "Content-Type": "multipart/form-data" + }, + onUploadProgress: (progressEvent) => { + const percentCompleted = Math.round((progressEvent.loaded * 100) / progressEvent.total); + console.log(`Upload Progress: ${percentCompleted}%`); + } + }); + + console.log("Upload successful:", response.data); + } catch (error) { + console.error("Upload failed:", error); + } + }, + onSuccess: () => { + queryClient.invalidateQueries(workspaceKeys.getAllUserWorkspace); + } + }); +}; diff --git a/frontend/src/hooks/api/oidcConfig/index.tsx b/frontend/src/hooks/api/oidcConfig/index.tsx new file mode 100644 index 0000000000..b69c251208 --- /dev/null +++ b/frontend/src/hooks/api/oidcConfig/index.tsx @@ -0,0 +1 @@ +export * from "./queries"; diff --git a/frontend/src/hooks/api/oidcConfig/mutations.tsx b/frontend/src/hooks/api/oidcConfig/mutations.tsx new file mode 100644 index 0000000000..9f57c0332b --- /dev/null +++ b/frontend/src/hooks/api/oidcConfig/mutations.tsx @@ -0,0 +1,113 @@ +import { useMutation, useQueryClient } from "@tanstack/react-query"; + +import { apiRequest } from "@app/config/request"; + +import { organizationKeys } from "../organization/queries"; +import { oidcConfigKeys } from "./queries"; + +export const useUpdateOIDCConfig = () => { + const queryClient = useQueryClient(); + return useMutation({ + mutationFn: async ({ + issuer, + authorizationEndpoint, + configurationType, + discoveryURL, + jwksUri, + tokenEndpoint, + userinfoEndpoint, + allowedEmailDomains, + clientId, + clientSecret, + isActive, + orgSlug + }: { + allowedEmailDomains?: string; + issuer?: string; + authorizationEndpoint?: string; + discoveryURL?: string; + jwksUri?: string; + tokenEndpoint?: string; + userinfoEndpoint?: string; + clientId?: string; + clientSecret?: string; + isActive?: boolean; + configurationType?: string; + orgSlug: string; + }) => { + const { data } = await apiRequest.patch("/api/v1/sso/oidc/config", { + issuer, + allowedEmailDomains, + authorizationEndpoint, + discoveryURL, + configurationType, + jwksUri, + tokenEndpoint, + userinfoEndpoint, + clientId, + orgSlug, + clientSecret, + isActive + }); + + return data; + }, + onSuccess(_, dto) { + queryClient.invalidateQueries(oidcConfigKeys.getOIDCConfig(dto.orgSlug)); + queryClient.invalidateQueries(organizationKeys.getUserOrganizations); + } + }); +}; + +export const 
useCreateOIDCConfig = () => { + const queryClient = useQueryClient(); + return useMutation({ + mutationFn: async ({ + issuer, + configurationType, + discoveryURL, + authorizationEndpoint, + allowedEmailDomains, + jwksUri, + tokenEndpoint, + userinfoEndpoint, + clientId, + clientSecret, + isActive, + orgSlug + }: { + issuer?: string; + configurationType: string; + discoveryURL?: string; + authorizationEndpoint?: string; + jwksUri?: string; + tokenEndpoint?: string; + userinfoEndpoint?: string; + clientId: string; + clientSecret: string; + isActive: boolean; + orgSlug: string; + allowedEmailDomains?: string; + }) => { + const { data } = await apiRequest.post("/api/v1/sso/oidc/config", { + issuer, + configurationType, + discoveryURL, + authorizationEndpoint, + allowedEmailDomains, + jwksUri, + tokenEndpoint, + userinfoEndpoint, + clientId, + clientSecret, + isActive, + orgSlug + }); + + return data; + }, + onSuccess(_, dto) { + queryClient.invalidateQueries(oidcConfigKeys.getOIDCConfig(dto.orgSlug)); + } + }); +}; diff --git a/frontend/src/hooks/api/oidcConfig/queries.tsx b/frontend/src/hooks/api/oidcConfig/queries.tsx new file mode 100644 index 0000000000..49db7c5d46 --- /dev/null +++ b/frontend/src/hooks/api/oidcConfig/queries.tsx @@ -0,0 +1,27 @@ +import { useQuery } from "@tanstack/react-query"; + +import { apiRequest } from "@app/config/request"; + +import { OIDCConfigData } from "./types"; + +export const oidcConfigKeys = { + getOIDCConfig: (orgSlug: string) => [{ orgSlug }, "organization-oidc"] as const +}; + +export const useGetOIDCConfig = (orgSlug: string) => { + return useQuery({ + queryKey: oidcConfigKeys.getOIDCConfig(orgSlug), + queryFn: async () => { + try { + const { data } = await apiRequest.get( + `/api/v1/sso/oidc/config?orgSlug=${orgSlug}` + ); + + return data; + } catch (err) { + return null; + } + }, + enabled: true + }); +}; diff --git a/frontend/src/hooks/api/oidcConfig/types.ts b/frontend/src/hooks/api/oidcConfig/types.ts new file mode 100644 index 0000000000..1b8ede5e3e --- /dev/null +++ b/frontend/src/hooks/api/oidcConfig/types.ts @@ -0,0 +1,15 @@ +export type OIDCConfigData = { + id: string; + issuer: string; + authorizationEndpoint: string; + configurationType: string; + discoveryURL: string; + jwksUri: string; + tokenEndpoint: string; + userinfoEndpoint: string; + isActive: boolean; + orgId: string; + clientId: string; + clientSecret: string; + allowedEmailDomains?: string; +}; diff --git a/frontend/src/hooks/api/orgAdmin/index.tsx b/frontend/src/hooks/api/orgAdmin/index.tsx new file mode 100644 index 0000000000..57eed413ac --- /dev/null +++ b/frontend/src/hooks/api/orgAdmin/index.tsx @@ -0,0 +1,2 @@ +export { useOrgAdminAccessProject } from "./mutation"; +export { useOrgAdminGetProjects } from "./queries"; diff --git a/frontend/src/hooks/api/orgAdmin/mutation.tsx b/frontend/src/hooks/api/orgAdmin/mutation.tsx new file mode 100644 index 0000000000..9fa93722ed --- /dev/null +++ b/frontend/src/hooks/api/orgAdmin/mutation.tsx @@ -0,0 +1,15 @@ +import { useMutation } from "@tanstack/react-query"; + +import { apiRequest } from "@app/config/request"; + +import { TOrgAdminAccessProjectDTO } from "./types"; + +export const useOrgAdminAccessProject = () => + useMutation({ + mutationFn: async ({ projectId }: TOrgAdminAccessProjectDTO) => { + const { data } = await apiRequest.post( + `/api/v1/organization-admin/projects/${projectId}/grant-admin-access` + ); + return data; + } + }); diff --git a/frontend/src/hooks/api/orgAdmin/queries.tsx 
b/frontend/src/hooks/api/orgAdmin/queries.tsx new file mode 100644 index 0000000000..2856de0a2e --- /dev/null +++ b/frontend/src/hooks/api/orgAdmin/queries.tsx @@ -0,0 +1,30 @@ +import { useQuery } from "@tanstack/react-query"; + +import { apiRequest } from "@app/config/request"; + +import { Workspace } from "../types"; +import { TOrgAdminGetProjectsDTO } from "./types"; + +export const orgAdminQueryKeys = { + getProjects: (filter: TOrgAdminGetProjectsDTO) => ["org-admin-projects", filter] as const +}; + +export const useOrgAdminGetProjects = ({ search, offset, limit = 50 }: TOrgAdminGetProjectsDTO) => { + return useQuery({ + queryKey: orgAdminQueryKeys.getProjects({ search, offset, limit }), + queryFn: async () => { + const { data } = await apiRequest.get<{ projects: Workspace[]; count: number }>( + "/api/v1/organization-admin/projects", + { + params: { + limit, + offset, + search + } + } + ); + + return data; + } + }); +}; diff --git a/frontend/src/hooks/api/orgAdmin/types.ts b/frontend/src/hooks/api/orgAdmin/types.ts new file mode 100644 index 0000000000..87626a4662 --- /dev/null +++ b/frontend/src/hooks/api/orgAdmin/types.ts @@ -0,0 +1,9 @@ +export type TOrgAdminGetProjectsDTO = { + limit?: number; + offset?: number; + search?: string; +}; + +export type TOrgAdminAccessProjectDTO = { + projectId: string; +}; diff --git a/frontend/src/hooks/api/organization/index.ts b/frontend/src/hooks/api/organization/index.ts index f7a898ef00..fece19e5f2 100644 --- a/frontend/src/hooks/api/organization/index.ts +++ b/frontend/src/hooks/api/organization/index.ts @@ -8,8 +8,9 @@ export { useDeleteOrgTaxId, useGetIdentityMembershipOrgs, useGetOrganizationGroups, - useGetOrganizations, + useGetOrganizations, useGetOrgBillingDetails, + useGetOrgIntegrationAuths, useGetOrgInvoices, useGetOrgLicenses, useGetOrgPlanBillingInfo, @@ -20,4 +21,4 @@ export { useGetOrgTrialUrl, useUpdateOrg, useUpdateOrgBillingDetails -} from "./queries"; \ No newline at end of file +} from "./queries"; diff --git a/frontend/src/hooks/api/organization/queries.tsx b/frontend/src/hooks/api/organization/queries.tsx index 5c57428dfa..4923177ba6 100644 --- a/frontend/src/hooks/api/organization/queries.tsx +++ b/frontend/src/hooks/api/organization/queries.tsx @@ -1,19 +1,23 @@ -import { useMutation, useQuery, useQueryClient } from "@tanstack/react-query"; +import { useMutation, useQuery, useQueryClient, UseQueryOptions } from "@tanstack/react-query"; import { apiRequest } from "@app/config/request"; +import { OrderByDirection } from "@app/hooks/api/generic/types"; import { TGroupOrgMembership } from "../groups/types"; -import { IdentityMembershipOrg } from "../identities/types"; +import { IntegrationAuth } from "../types"; import { BillingDetails, Invoice, License, Organization, + OrgIdentityOrderBy, OrgPlanTable, PlanBillingInfo, PmtMethod, ProductsTable, TaxID, + TListOrgIdentitiesDTO, + TOrgIdentitiesList, UpdateOrgDTO } from "./types"; @@ -30,7 +34,14 @@ export const organizationKeys = { getOrgLicenses: (orgId: string) => [{ orgId }, "organization-licenses"] as const, getOrgIdentityMemberships: (orgId: string) => [{ orgId }, "organization-identity-memberships"] as const, - getOrgGroups: (orgId: string) => [{ orgId }, "organization-groups"] as const + // allows invalidation using above key without knowing params + getOrgIdentityMembershipsWithParams: ({ + organizationId: orgId, + ...params + }: TListOrgIdentitiesDTO) => + [...organizationKeys.getOrgIdentityMemberships(orgId), params] as const, + getOrgGroups: (orgId: string) => [{ 
orgId }, "organization-groups"] as const, + getOrgIntegrationAuths: (orgId: string) => [{ orgId }, "integration-auths"] as const }; export const fetchOrganizations = async () => { @@ -73,12 +84,24 @@ export const useCreateOrg = (options: { invalidate: boolean } = { invalidate: tr export const useUpdateOrg = () => { const queryClient = useQueryClient(); return useMutation<{}, {}, UpdateOrgDTO>({ - mutationFn: ({ name, authEnforced, scimEnabled, slug, orgId }) => { + mutationFn: ({ + name, + authEnforced, + scimEnabled, + slug, + orgId, + defaultMembershipRoleSlug, + enforceMfa, + selectedMfaMethod + }) => { return apiRequest.patch(`/api/v1/organization/${orgId}`, { name, authEnforced, scimEnabled, - slug + slug, + defaultMembershipRoleSlug, + enforceMfa, + selectedMfaMethod }); }, onSuccess: () => { @@ -360,19 +383,51 @@ export const useGetOrgLicenses = (organizationId: string) => { }); }; -export const useGetIdentityMembershipOrgs = (organizationId: string) => { +export const useGetIdentityMembershipOrgs = ( + { + organizationId, + offset = 0, + limit = 100, + orderBy = OrgIdentityOrderBy.Name, + orderDirection = OrderByDirection.ASC, + search = "" + }: TListOrgIdentitiesDTO, + options?: Omit< + UseQueryOptions< + TOrgIdentitiesList, + unknown, + TOrgIdentitiesList, + ReturnType<typeof organizationKeys.getOrgIdentityMembershipsWithParams> + >, + "queryKey" | "queryFn" + > +) => { + const params = new URLSearchParams({ + offset: String(offset), + limit: String(limit), + orderBy: String(orderBy), + orderDirection: String(orderDirection), + search: String(search) + }); return useQuery({ - queryKey: organizationKeys.getOrgIdentityMemberships(organizationId), + queryKey: organizationKeys.getOrgIdentityMembershipsWithParams({ + organizationId, + offset, + limit, + orderBy, + orderDirection, + search + }), queryFn: async () => { - const { - data: { identityMemberships } - } = await apiRequest.get<{ identityMemberships: IdentityMembershipOrg[] }>( - `/api/v2/organizations/${organizationId}/identity-memberships` + const { data } = await apiRequest.get<TOrgIdentitiesList>( + `/api/v2/organizations/${organizationId}/identity-memberships`, + { params } + ); - return identityMemberships; + return data; }, - enabled: true + enabled: true, + ...options }); }; @@ -422,3 +477,21 @@ export const useGetOrganizationGroups = (organizationId: string) => { } }); }; + +export const useGetOrgIntegrationAuths = <TData = IntegrationAuth[]>( + organizationId: string, + select?: (data: IntegrationAuth[]) => TData +) => { + return useQuery({ + queryKey: organizationKeys.getOrgIntegrationAuths(organizationId), + queryFn: async () => { + const { data } = await apiRequest.get<{ authorizations: IntegrationAuth[] }>( + `/api/v1/organization/${organizationId}/integration-authorizations` + ); + + return data.authorizations; + }, + enabled: Boolean(organizationId), + select + }); +}; diff --git a/frontend/src/hooks/api/organization/types.ts b/frontend/src/hooks/api/organization/types.ts index 9c26ce02ba..5fbbd18634 100644 --- a/frontend/src/hooks/api/organization/types.ts +++ b/frontend/src/hooks/api/organization/types.ts @@ -1,11 +1,20 @@ +import { OrderByDirection } from "@app/hooks/api/generic/types"; +import { IdentityMembershipOrg } from "@app/hooks/api/identities/types"; + +import { MfaMethod } from "../auth/types"; + export type Organization = { id: string; name: string; createAt: string; updatedAt: string; authEnforced: boolean; + orgAuthMethod: string; scimEnabled: boolean; slug: string; + defaultMembershipRole: string; + enforceMfa: boolean; + selectedMfaMethod?: MfaMethod; }; export type UpdateOrgDTO = { @@ -14,6 +23,9 @@
export type UpdateOrgDTO = { authEnforced?: boolean; scimEnabled?: boolean; slug?: string; + defaultMembershipRoleSlug?: string; + enforceMfa?: boolean; + selectedMfaMethod?: MfaMethod; }; export type BillingDetails = { @@ -102,3 +114,22 @@ export type ProductsTable = { head: ProductsTableHead[]; rows: ProductsTableRow[]; }; + +export type TListOrgIdentitiesDTO = { + organizationId: string; + offset?: number; + limit?: number; + orderBy?: OrgIdentityOrderBy; + orderDirection?: OrderByDirection; + search?: string; +}; + +export type TOrgIdentitiesList = { + identityMemberships: IdentityMembershipOrg[]; + totalCount: number; +}; + +export enum OrgIdentityOrderBy { + Name = "name" + // Role = "role" +} diff --git a/frontend/src/hooks/api/pkiAlerts/index.tsx b/frontend/src/hooks/api/pkiAlerts/index.tsx new file mode 100644 index 0000000000..f88f716795 --- /dev/null +++ b/frontend/src/hooks/api/pkiAlerts/index.tsx @@ -0,0 +1,2 @@ +export { useCreatePkiAlert, useDeletePkiAlert, useUpdatePkiAlert } from "./mutations"; +export { useGetPkiAlertById } from "./queries"; diff --git a/frontend/src/hooks/api/pkiAlerts/mutations.tsx b/frontend/src/hooks/api/pkiAlerts/mutations.tsx new file mode 100644 index 0000000000..54aa8eeffe --- /dev/null +++ b/frontend/src/hooks/api/pkiAlerts/mutations.tsx @@ -0,0 +1,51 @@ +import { useMutation, useQueryClient } from "@tanstack/react-query"; + +import { apiRequest } from "@app/config/request"; + +import { workspaceKeys } from "../workspace"; +import { pkiAlertKeys } from "./queries"; +import { TCreatePkiAlertDTO, TDeletePkiAlertDTO, TPkiAlert, TUpdatePkiAlertDTO } from "./types"; + +export const useCreatePkiAlert = () => { + const queryClient = useQueryClient(); + return useMutation({ + mutationFn: async (body) => { + const { data: alert } = await apiRequest.post("/api/v1/pki/alerts", body); + return alert; + }, + onSuccess: (_, { projectId }) => { + queryClient.invalidateQueries(workspaceKeys.getWorkspacePkiAlerts(projectId)); + } + }); +}; + +export const useUpdatePkiAlert = () => { + const queryClient = useQueryClient(); + return useMutation({ + mutationFn: async ({ alertId, ...body }) => { + const { data: alert } = await apiRequest.patch( + `/api/v1/pki/alerts/${alertId}`, + body + ); + return alert; + }, + onSuccess: (_, { projectId, alertId }) => { + queryClient.invalidateQueries(workspaceKeys.getWorkspacePkiAlerts(projectId)); + queryClient.invalidateQueries(pkiAlertKeys.getPkiAlertById(alertId)); + } + }); +}; + +export const useDeletePkiAlert = () => { + const queryClient = useQueryClient(); + return useMutation({ + mutationFn: async ({ alertId }) => { + const { data: alert } = await apiRequest.delete(`/api/v1/pki/alerts/${alertId}`); + return alert; + }, + onSuccess: (_, { projectId, alertId }) => { + queryClient.invalidateQueries(workspaceKeys.getWorkspacePkiAlerts(projectId)); + queryClient.invalidateQueries(pkiAlertKeys.getPkiAlertById(alertId)); + } + }); +}; diff --git a/frontend/src/hooks/api/pkiAlerts/queries.tsx b/frontend/src/hooks/api/pkiAlerts/queries.tsx new file mode 100644 index 0000000000..db324e96d1 --- /dev/null +++ b/frontend/src/hooks/api/pkiAlerts/queries.tsx @@ -0,0 +1,20 @@ +import { useQuery } from "@tanstack/react-query"; + +import { apiRequest } from "@app/config/request"; + +import { TPkiAlert } from "./types"; + +export const pkiAlertKeys = { + getPkiAlertById: (alertId: string) => [{ alertId }, "alert"] +}; + +export const useGetPkiAlertById = (alertId: string) => { + return useQuery({ + queryKey: 
pkiAlertKeys.getPkiAlertById(alertId), + queryFn: async () => { + const { data: alert } = await apiRequest.get(`/api/v1/pki/alerts/${alertId}`); + return alert; + }, + enabled: Boolean(alertId) + }); +}; diff --git a/frontend/src/hooks/api/pkiAlerts/types.ts b/frontend/src/hooks/api/pkiAlerts/types.ts new file mode 100644 index 0000000000..744c4e598e --- /dev/null +++ b/frontend/src/hooks/api/pkiAlerts/types.ts @@ -0,0 +1,32 @@ +export type TPkiAlert = { + id: string; + name: string; + projectId: string; + pkiCollectionId: string; + alertBeforeDays: number; + recipientEmails: string; + createdAt: string; + updatedAt: string; +}; + +export type TCreatePkiAlertDTO = { + projectId: string; + name: string; + pkiCollectionId: string; + alertBeforeDays: number; + emails: string[]; +}; + +export type TUpdatePkiAlertDTO = { + alertId: string; + projectId: string; + pkiCollectionId?: string; + name?: string; + alertBeforeDays?: number; + emails?: string[]; +}; + +export type TDeletePkiAlertDTO = { + alertId: string; + projectId: string; +}; diff --git a/frontend/src/hooks/api/pkiCollections/constants.tsx b/frontend/src/hooks/api/pkiCollections/constants.tsx new file mode 100644 index 0000000000..c103dc3866 --- /dev/null +++ b/frontend/src/hooks/api/pkiCollections/constants.tsx @@ -0,0 +1,9 @@ +export enum PkiItemType { + CERTIFICATE = "certificate", + CA = "ca" +} + +export const pkiItemTypeToNameMap: { [K in PkiItemType]: string } = { + [PkiItemType.CA]: "CA", + [PkiItemType.CERTIFICATE]: "Certificate" +}; diff --git a/frontend/src/hooks/api/pkiCollections/index.tsx b/frontend/src/hooks/api/pkiCollections/index.tsx new file mode 100644 index 0000000000..cf638d8fad --- /dev/null +++ b/frontend/src/hooks/api/pkiCollections/index.tsx @@ -0,0 +1,7 @@ +export { + useAddItemToPkiCollection, + useCreatePkiCollection, + useDeletePkiCollection, + useRemoveItemFromPkiCollection, + useUpdatePkiCollection} from "./mutations"; +export { useGetPkiCollectionById, useListPkiCollectionItems } from "./queries"; diff --git a/frontend/src/hooks/api/pkiCollections/mutations.tsx b/frontend/src/hooks/api/pkiCollections/mutations.tsx new file mode 100644 index 0000000000..496a170519 --- /dev/null +++ b/frontend/src/hooks/api/pkiCollections/mutations.tsx @@ -0,0 +1,98 @@ +import { useMutation, useQueryClient } from "@tanstack/react-query"; + +import { apiRequest } from "@app/config/request"; + +import { workspaceKeys } from "../workspace"; +import { pkiCollectionKeys } from "./queries"; +import { + TAddItemToPkiCollectionDTO, + TCreatePkiCollectionDTO, + TDeletePkiCollectionDTO, + TPkiCollection, + TPkiCollectionItem, + TRemoveItemFromPkiCollectionDTO, + TUpdatePkiCollectionTO +} from "./types"; + +export const useCreatePkiCollection = () => { + const queryClient = useQueryClient(); + return useMutation({ + mutationFn: async (body) => { + const { data: pkiCollection } = await apiRequest.post( + "/api/v1/pki/collections", + body + ); + return pkiCollection; + }, + onSuccess: (_, { projectId }) => { + queryClient.invalidateQueries(workspaceKeys.getWorkspacePkiCollections(projectId)); + } + }); +}; + +export const useUpdatePkiCollection = () => { + const queryClient = useQueryClient(); + return useMutation({ + mutationFn: async ({ collectionId, ...body }) => { + const { data: pkiCollection } = await apiRequest.patch( + `/api/v1/pki/collections/${collectionId}`, + body + ); + return pkiCollection; + }, + onSuccess: (_, { projectId, collectionId }) => { + 
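+ // refresh both the project-level collection list and this collection's detail query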
queryClient.invalidateQueries(workspaceKeys.getWorkspacePkiCollections(projectId)); + queryClient.invalidateQueries(pkiCollectionKeys.getPkiCollectionById(collectionId)); + } + }); +}; + +export const useDeletePkiCollection = () => { + const queryClient = useQueryClient(); + return useMutation({ + mutationFn: async ({ collectionId }) => { + const { data: pkiCollection } = await apiRequest.delete( + `/api/v1/pki/collections/${collectionId}` + ); + return pkiCollection; + }, + onSuccess: (_, { projectId, collectionId }) => { + queryClient.invalidateQueries(workspaceKeys.getWorkspacePkiCollections(projectId)); + queryClient.invalidateQueries(pkiCollectionKeys.getPkiCollectionById(collectionId)); + } + }); +}; + +export const useAddItemToPkiCollection = () => { + const queryClient = useQueryClient(); + return useMutation({ + mutationFn: async ({ collectionId, type, itemId }) => { + const { data: pkiCollectionItem } = await apiRequest.post( + `/api/v1/pki/collections/${collectionId}/items`, + { + type, + itemId + } + ); + return pkiCollectionItem; + }, + onSuccess: (_, { collectionId }) => { + queryClient.invalidateQueries(pkiCollectionKeys.getPkiCollectionItems(collectionId)); + } + }); +}; + +export const useRemoveItemFromPkiCollection = () => { + const queryClient = useQueryClient(); + return useMutation({ + mutationFn: async ({ collectionId, itemId }) => { + const { data: pkiCollectionItem } = await apiRequest.delete( + `/api/v1/pki/collections/${collectionId}/items/${itemId}` + ); + return pkiCollectionItem; + }, + onSuccess: (_, { collectionId }) => { + queryClient.invalidateQueries(pkiCollectionKeys.getPkiCollectionItems(collectionId)); + } + }); +}; diff --git a/frontend/src/hooks/api/pkiCollections/queries.tsx b/frontend/src/hooks/api/pkiCollections/queries.tsx new file mode 100644 index 0000000000..1098627673 --- /dev/null +++ b/frontend/src/hooks/api/pkiCollections/queries.tsx @@ -0,0 +1,85 @@ +import { useQuery } from "@tanstack/react-query"; + +import { apiRequest } from "@app/config/request"; + +import { PkiItemType } from "./constants"; +import { TPkiCollection, TPkiCollectionItem } from "./types"; + +export const pkiCollectionKeys = { + getPkiCollectionById: (collectionId: string) => [{ collectionId }, "pki-collection"] as const, + getPkiCollectionItems: (collectionId: string) => + [{ collectionId }, "pki-collection-items"] as const, + specificPkiCollectionItems: ({ + collectionId, + type, + offset, + limit + }: { + collectionId: string; + type?: PkiItemType; + offset: number; + limit: number; + }) => + [ + ...pkiCollectionKeys.getPkiCollectionItems(collectionId), + { offset, limit, type }, + "pki-collection-items-2" + ] as const +}; + +export const useGetPkiCollectionById = (collectionId: string) => { + return useQuery({ + queryKey: pkiCollectionKeys.getPkiCollectionById(collectionId), + queryFn: async () => { + const { data: pkiCollection } = await apiRequest.get( + `/api/v1/pki/collections/${collectionId}` + ); + return pkiCollection; + }, + enabled: Boolean(collectionId) + }); +}; + +export const useListPkiCollectionItems = ({ + collectionId, + type, + offset, + limit +}: { + collectionId: string; + type?: PkiItemType; + offset: number; + limit: number; +}) => { + return useQuery({ + queryKey: pkiCollectionKeys.specificPkiCollectionItems({ + collectionId, + offset, + limit, + type + }), + queryFn: async () => { + const params = new URLSearchParams({ + offset: String(offset), + limit: String(limit), + ...(type ? 
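+ // include the type filter in the query string only when one was supplied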
{ type } : {}) + }); + + const { + data: { collectionItems, totalCount } + } = await apiRequest.get<{ + collectionItems: (TPkiCollectionItem & { + notBefore: string; + notAfter: string; + friendlyName: string; + })[]; + totalCount: number; + }>(`/api/v1/pki/collections/${collectionId}/items`, { + params + }); + + return { collectionItems, totalCount }; + }, + enabled: Boolean(collectionId) + }); +}; diff --git a/frontend/src/hooks/api/pkiCollections/types.ts b/frontend/src/hooks/api/pkiCollections/types.ts new file mode 100644 index 0000000000..98a98d520b --- /dev/null +++ b/frontend/src/hooks/api/pkiCollections/types.ts @@ -0,0 +1,46 @@ +export type TPkiCollection = { + id: string; + name: string; + description: string; + projectId: string; + createdAt: string; + updatedAt: string; +}; + +export type TCreatePkiCollectionDTO = { + projectId: string; + name: string; + description: string; +}; + +export type TUpdatePkiCollectionTO = { + collectionId: string; + projectId: string; + name?: string; + description?: string; +}; + +export type TDeletePkiCollectionDTO = { + collectionId: string; + projectId: string; +}; + +export type TPkiCollectionItem = { + id: string; + collectionId: string; + type: string; + itemId: string; + createdAt: string; + updatedAt: string; +}; + +export type TAddItemToPkiCollectionDTO = { + collectionId: string; + type: string; + itemId: string; +}; + +export type TRemoveItemFromPkiCollectionDTO = { + collectionId: string; + itemId: string; +}; diff --git a/frontend/src/hooks/api/policies/enums.ts b/frontend/src/hooks/api/policies/enums.ts new file mode 100644 index 0000000000..f91bcb98c2 --- /dev/null +++ b/frontend/src/hooks/api/policies/enums.ts @@ -0,0 +1,9 @@ +export enum EnforcementLevel { + Hard = "hard", + Soft = "soft" +} + +export enum PolicyType { + ChangePolicy = "change", + AccessPolicy = "access" +} diff --git a/frontend/src/hooks/api/projectTemplates/index.ts b/frontend/src/hooks/api/projectTemplates/index.ts new file mode 100644 index 0000000000..177955438b --- /dev/null +++ b/frontend/src/hooks/api/projectTemplates/index.ts @@ -0,0 +1,3 @@ +export * from "./mutations"; +export * from "./queries"; +export * from "./types"; diff --git a/frontend/src/hooks/api/projectTemplates/mutations.tsx b/frontend/src/hooks/api/projectTemplates/mutations.tsx new file mode 100644 index 0000000000..7056f738ed --- /dev/null +++ b/frontend/src/hooks/api/projectTemplates/mutations.tsx @@ -0,0 +1,58 @@ +import { useMutation, useQueryClient } from "@tanstack/react-query"; + +import { apiRequest } from "@app/config/request"; +import { projectTemplateKeys } from "@app/hooks/api/projectTemplates/queries"; +import { + TCreateProjectTemplateDTO, + TDeleteProjectTemplateDTO, + TProjectTemplateResponse, + TUpdateProjectTemplateDTO +} from "@app/hooks/api/projectTemplates/types"; + +export const useCreateProjectTemplate = () => { + const queryClient = useQueryClient(); + return useMutation({ + mutationFn: async (payload: TCreateProjectTemplateDTO) => { + const { data } = await apiRequest.post( + "/api/v1/project-templates", + payload + ); + + return data.projectTemplate; + }, + onSuccess: () => queryClient.invalidateQueries(projectTemplateKeys.list()) + }); +}; + +export const useUpdateProjectTemplate = () => { + const queryClient = useQueryClient(); + return useMutation({ + mutationFn: async ({ templateId, ...params }: TUpdateProjectTemplateDTO) => { + const { data } = await apiRequest.patch( + `/api/v1/project-templates/${templateId}`, + params + ); + + return 
data.projectTemplate; + }, + onSuccess: (_, { templateId }) => { + queryClient.invalidateQueries(projectTemplateKeys.list()); + queryClient.invalidateQueries(projectTemplateKeys.byId(templateId)); + } + }); +}; + +export const useDeleteProjectTemplate = () => { + const queryClient = useQueryClient(); + return useMutation({ + mutationFn: async ({ templateId }: TDeleteProjectTemplateDTO) => { + const { data } = await apiRequest.delete(`/api/v1/project-templates/${templateId}`); + + return data; + }, + onSuccess: (_, { templateId }) => { + queryClient.invalidateQueries(projectTemplateKeys.list()); + queryClient.invalidateQueries(projectTemplateKeys.byId(templateId)); + } + }); +}; diff --git a/frontend/src/hooks/api/projectTemplates/queries.tsx b/frontend/src/hooks/api/projectTemplates/queries.tsx new file mode 100644 index 0000000000..f5863915a5 --- /dev/null +++ b/frontend/src/hooks/api/projectTemplates/queries.tsx @@ -0,0 +1,61 @@ +import { useQuery, UseQueryOptions } from "@tanstack/react-query"; + +import { apiRequest } from "@app/config/request"; +import { + TListProjectTemplates, + TProjectTemplate, + TProjectTemplateResponse +} from "@app/hooks/api/projectTemplates/types"; + +export const projectTemplateKeys = { + all: ["project-template"] as const, + list: () => [...projectTemplateKeys.all, "list"] as const, + byId: (templateId: string) => [...projectTemplateKeys.all, templateId] as const +}; + +export const useListProjectTemplates = ( + options?: Omit< + UseQueryOptions< + TProjectTemplate[], + unknown, + TProjectTemplate[], + ReturnType<typeof projectTemplateKeys.list> + >, + "queryKey" | "queryFn" + > +) => { + return useQuery({ + queryKey: projectTemplateKeys.list(), + queryFn: async () => { + const { data } = await apiRequest.get<TListProjectTemplates>("/api/v1/project-templates"); + + return data.projectTemplates; + }, + ...options + }); +}; + +export const useGetProjectTemplateById = ( + templateId: string, + options?: Omit< + UseQueryOptions< + TProjectTemplate, + unknown, + TProjectTemplate, + ReturnType<typeof projectTemplateKeys.byId> + >, + "queryKey" | "queryFn" + > +) => { + return useQuery({ + queryKey: projectTemplateKeys.byId(templateId), + queryFn: async () => { + const { data } = await apiRequest.get<TProjectTemplateResponse>( + `/api/v1/project-templates/${templateId}` + ); + + return data.projectTemplate; + }, + ...options + }); }; diff --git a/frontend/src/hooks/api/projectTemplates/types.ts b/frontend/src/hooks/api/projectTemplates/types.ts new file mode 100644 index 0000000000..37c6cc9024 --- /dev/null +++ b/frontend/src/hooks/api/projectTemplates/types.ts @@ -0,0 +1,31 @@ +import { TProjectRole } from "@app/hooks/api/roles/types"; + +export type TProjectTemplate = { + id: string; + name: string; + description?: string; + roles: Pick[]; + environments: { name: string; slug: string; position: number }[]; + createdAt: string; + updatedAt: string; +}; + +export type TListProjectTemplates = { projectTemplates: TProjectTemplate[] }; +export type TProjectTemplateResponse = { projectTemplate: TProjectTemplate }; + +export type TCreateProjectTemplateDTO = { + name: string; + description?: string; +}; + +export type TUpdateProjectTemplateDTO = Partial< + Pick +> & { templateId: string }; + +export type TDeleteProjectTemplateDTO = { + templateId: string; +}; + +export enum InfisicalProjectTemplate { + Default = "default" +} diff --git a/frontend/src/hooks/api/projectUserAdditionalPrivilege/mutation.tsx b/frontend/src/hooks/api/projectUserAdditionalPrivilege/mutation.tsx index fb21a425e0..de0a1a51e3 100644 --- a/frontend/src/hooks/api/projectUserAdditionalPrivilege/mutation.tsx +++
b/frontend/src/hooks/api/projectUserAdditionalPrivilege/mutation.tsx @@ -1,4 +1,3 @@ -import { packRules } from "@casl/ability/extra"; import { useMutation, useQueryClient } from "@tanstack/react-query"; import { apiRequest } from "@app/config/request"; @@ -16,10 +15,7 @@ export const useCreateProjectUserAdditionalPrivilege = () => { return useMutation<{ privilege: TProjectUserPrivilege }, {}, TCreateProjectUserPrivilegeDTO>({ mutationFn: async (dto) => { - const { data } = await apiRequest.post("/api/v1/additional-privilege/users/permanent", { - ...dto, - permissions: packRules(dto.permissions) - }); + const { data } = await apiRequest.post("/api/v1/user-project-additional-privilege", dto); return data.privilege; }, onSuccess: (_, { projectMembershipId }) => { @@ -34,8 +30,8 @@ export const useUpdateProjectUserAdditionalPrivilege = () => { return useMutation<{ privilege: TProjectUserPrivilege }, {}, TUpdateProjectUserPrivlegeDTO>({ mutationFn: async (dto) => { const { data } = await apiRequest.patch( - `/api/v1/additional-privilege/users/${dto.privilegeId}`, - { ...dto, permissions: dto.permissions ? packRules(dto.permissions) : undefined } + `/api/v1/user-project-additional-privilege/${dto.privilegeId}`, + dto ); return data.privilege; }, @@ -51,7 +47,7 @@ export const useDeleteProjectUserAdditionalPrivilege = () => { return useMutation<{ privilege: TProjectUserPrivilege }, {}, TDeleteProjectUserPrivilegeDTO>({ mutationFn: async (dto) => { const { data } = await apiRequest.delete( - `/api/v1/additional-privilege/users/${dto.privilegeId}` + `/api/v1/user-project-additional-privilege/${dto.privilegeId}` ); return data.privilege; }, diff --git a/frontend/src/hooks/api/projectUserAdditionalPrivilege/queries.tsx b/frontend/src/hooks/api/projectUserAdditionalPrivilege/queries.tsx index 41c9a0dcc2..fd31015cf6 100644 --- a/frontend/src/hooks/api/projectUserAdditionalPrivilege/queries.tsx +++ b/frontend/src/hooks/api/projectUserAdditionalPrivilege/queries.tsx @@ -1,4 +1,3 @@ -import { PackRule, unpackRules } from "@casl/ability/extra"; import { useQuery } from "@tanstack/react-query"; import { apiRequest } from "@app/config/request"; @@ -16,12 +15,9 @@ const fetchProjectUserPrivilegeDetails = async (privilegeId: string) => { const { data: { privilege } } = await apiRequest.get<{ - privilege: Omit & { permissions: unknown }; - }>(`/api/v1/additional-privilege/users/${privilegeId}`); - return { - ...privilege, - permissions: unpackRules(privilege.permissions as PackRule[]) - }; + privilege: Omit & { permissions: TProjectPermission[] }; + }>(`/api/v1/user-project-additional-privilege/${privilegeId}`); + return privilege; }; export const useGetProjectUserPrivilegeDetails = (privilegeId: string) => { @@ -41,10 +37,10 @@ export const useListProjectUserPrivileges = (projectMembershipId: string) => { data: { privileges } } = await apiRequest.get<{ privileges: Array & { permissions: unknown }>; - }>("/api/v1/additional-privilege/users", { params: { projectMembershipId } }); + }>("/api/v1/user-project-additional-privilege", { params: { projectMembershipId } }); return privileges.map((el) => ({ ...el, - permissions: unpackRules(el.permissions as PackRule[]) + permissions: el.permissions as TProjectPermission[] })); } }); diff --git a/frontend/src/hooks/api/projectUserAdditionalPrivilege/types.tsx b/frontend/src/hooks/api/projectUserAdditionalPrivilege/types.tsx index b757a07ab4..c3faae3696 100644 --- a/frontend/src/hooks/api/projectUserAdditionalPrivilege/types.tsx +++ 
b/frontend/src/hooks/api/projectUserAdditionalPrivilege/types.tsx @@ -12,29 +12,35 @@ export type TProjectUserPrivilege = { updatedAt: Date; permissions?: TProjectPermission[]; } & ( - | { + | { isTemporary: true; temporaryMode: string; temporaryRange: string; temporaryAccessStartTime: string; temporaryAccessEndTime?: string; } - | { + | { isTemporary: false; temporaryMode?: null; temporaryRange?: null; temporaryAccessStartTime?: null; temporaryAccessEndTime?: null; } - ); +); export type TCreateProjectUserPrivilegeDTO = { projectMembershipId: string; slug?: string; - isTemporary?: boolean; - temporaryMode?: ProjectUserAdditionalPrivilegeTemporaryMode; - temporaryRange?: string; - temporaryAccessStartTime?: string; + type: + | { + isTemporary: true; + temporaryMode?: ProjectUserAdditionalPrivilegeTemporaryMode; + temporaryRange?: string; + temporaryAccessStartTime?: string; + } + | { + isTemporary: false; + }; permissions: TProjectPermission[]; }; diff --git a/frontend/src/hooks/api/rateLimit/index.ts b/frontend/src/hooks/api/rateLimit/index.ts new file mode 100644 index 0000000000..f3f81b1c96 --- /dev/null +++ b/frontend/src/hooks/api/rateLimit/index.ts @@ -0,0 +1,2 @@ +export { useUpdateRateLimit } from "./mutation"; +export { useGetRateLimit } from "./queries"; diff --git a/frontend/src/hooks/api/rateLimit/mutation.ts b/frontend/src/hooks/api/rateLimit/mutation.ts new file mode 100644 index 0000000000..22a7f9898a --- /dev/null +++ b/frontend/src/hooks/api/rateLimit/mutation.ts @@ -0,0 +1,21 @@ +import { useMutation, useQueryClient } from "@tanstack/react-query"; + +import { apiRequest } from "@app/config/request"; + +import { rateLimitQueryKeys } from "./queries"; +import { TRateLimit } from "./types"; + +export const useUpdateRateLimit = () => { + const queryClient = useQueryClient(); + + return useMutation({ + mutationFn: async (opt) => { + const { data } = await apiRequest.put<{ rateLimit: TRateLimit }>("/api/v1/rate-limit", opt); + return data.rateLimit; + }, + onSuccess: (data) => { + queryClient.setQueryData(rateLimitQueryKeys.rateLimit(), data); + queryClient.invalidateQueries(rateLimitQueryKeys.rateLimit()); + } + }); +}; diff --git a/frontend/src/hooks/api/rateLimit/queries.ts b/frontend/src/hooks/api/rateLimit/queries.ts new file mode 100644 index 0000000000..5a52ff74bf --- /dev/null +++ b/frontend/src/hooks/api/rateLimit/queries.ts @@ -0,0 +1,34 @@ +import { useQuery, UseQueryOptions } from "@tanstack/react-query"; + +import { apiRequest } from "@app/config/request"; + +import { TRateLimit } from "./types"; + +export const rateLimitQueryKeys = { + rateLimit: () => ["rate-limit"] as const +}; + +const fetchRateLimit = async () => { + const { data } = await apiRequest.get<{ rateLimit: TRateLimit }>("/api/v1/rate-limit"); + return data.rateLimit; +}; + +export const useGetRateLimit = ({ + options = {} +}: { + options?: Omit< + UseQueryOptions< + TRateLimit, + unknown, + TRateLimit, + ReturnType<typeof rateLimitQueryKeys.rateLimit> + >, + "queryKey" | "queryFn" + >; +} = {}) => + useQuery({ + queryKey: rateLimitQueryKeys.rateLimit(), + queryFn: fetchRateLimit, + ...options, + enabled: options?.enabled ??
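+ // stay enabled unless the caller explicitly opts out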
true + }); diff --git a/frontend/src/hooks/api/rateLimit/types.ts b/frontend/src/hooks/api/rateLimit/types.ts new file mode 100644 index 0000000000..53b075ce43 --- /dev/null +++ b/frontend/src/hooks/api/rateLimit/types.ts @@ -0,0 +1,9 @@ +export type TRateLimit = { + readRateLimit: number; + writeRateLimit: number; + secretsRateLimit: number; + authRateLimit: number; + inviteUserRateLimit: number; + mfaRateLimit: number; + publicEndpointLimit: number; +}; diff --git a/frontend/src/hooks/api/roles/index.tsx b/frontend/src/hooks/api/roles/index.tsx index 53c05a7b6b..fd8e4e7050 100644 --- a/frontend/src/hooks/api/roles/index.tsx +++ b/frontend/src/hooks/api/roles/index.tsx @@ -7,6 +7,7 @@ export { useUpdateProjectRole } from "./mutation"; export { + useGetOrgRole, useGetOrgRoles, useGetProjectRoleBySlug, useGetProjectRoles, diff --git a/frontend/src/hooks/api/roles/mutation.tsx b/frontend/src/hooks/api/roles/mutation.tsx index ae3e170de7..1562fbf1af 100644 --- a/frontend/src/hooks/api/roles/mutation.tsx +++ b/frontend/src/hooks/api/roles/mutation.tsx @@ -9,6 +9,8 @@ import { TCreateProjectRoleDTO, TDeleteOrgRoleDTO, TDeleteProjectRoleDTO, + TOrgRole, + TProjectRole, TUpdateOrgRoleDTO, TUpdateProjectRoleDTO } from "./types"; @@ -16,11 +18,15 @@ import { export const useCreateProjectRole = () => { const queryClient = useQueryClient(); - return useMutation({ - mutationFn: ({ projectSlug, ...dto }: TCreateProjectRoleDTO) => - apiRequest.post(`/api/v1/workspace/${projectSlug}/roles`, dto), - onSuccess: (_, { projectSlug }) => { - queryClient.invalidateQueries(roleQueryKeys.getProjectRoles(projectSlug)); + return useMutation({ + mutationFn: async ({ projectId, ...dto }: TCreateProjectRoleDTO) => { + const { + data: { role } + } = await apiRequest.post(`/api/v2/workspace/${projectId}/roles`, dto); + return role; + }, + onSuccess: (_, { projectId }) => { + queryClient.invalidateQueries(roleQueryKeys.getProjectRoles(projectId)); } }); }; @@ -28,23 +34,33 @@ export const useCreateProjectRole = () => { export const useUpdateProjectRole = () => { const queryClient = useQueryClient(); - return useMutation({ - mutationFn: ({ id, projectSlug, ...dto }: TUpdateProjectRoleDTO) => - apiRequest.patch(`/api/v1/workspace/${projectSlug}/roles/${id}`, dto), - onSuccess: (_, { projectSlug }) => { - queryClient.invalidateQueries(roleQueryKeys.getProjectRoles(projectSlug)); + return useMutation({ + mutationFn: async ({ id, projectId, ...dto }: TUpdateProjectRoleDTO) => { + const { + data: { role } + } = await apiRequest.patch(`/api/v2/workspace/${projectId}/roles/${id}`, dto); + return role; + }, + onSuccess: (_, { projectId, slug }) => { + queryClient.invalidateQueries(roleQueryKeys.getProjectRoles(projectId)); + if (slug) { + queryClient.invalidateQueries(roleQueryKeys.getProjectRoleBySlug(projectId, slug)); + } } }); }; export const useDeleteProjectRole = () => { const queryClient = useQueryClient(); - - return useMutation({ - mutationFn: ({ projectSlug, id }: TDeleteProjectRoleDTO) => - apiRequest.delete(`/api/v1/workspace/${projectSlug}/roles/${id}`), - onSuccess: (_, { projectSlug }) => { - queryClient.invalidateQueries(roleQueryKeys.getProjectRoles(projectSlug)); + return useMutation({ + mutationFn: async ({ projectId, id }: TDeleteProjectRoleDTO) => { + const { + data: { role } + } = await apiRequest.delete(`/api/v2/workspace/${projectId}/roles/${id}`); + return role; + }, + onSuccess: (_, { projectId }) => { + queryClient.invalidateQueries(roleQueryKeys.getProjectRoles(projectId)); } }); }; @@ -52,12 +68,17 
@@ export const useDeleteProjectRole = () => { export const useCreateOrgRole = () => { const queryClient = useQueryClient(); - return useMutation({ - mutationFn: ({ orgId, permissions, ...dto }: TCreateOrgRoleDTO) => - apiRequest.post(`/api/v1/organization/${orgId}/roles`, { + return useMutation({ + mutationFn: async ({ orgId, permissions, ...dto }: TCreateOrgRoleDTO) => { + const { + data: { role } + } = await apiRequest.post(`/api/v1/organization/${orgId}/roles`, { ...dto, permissions: permissions.length ? packRules(permissions) : [] - }), + }); + + return role; + }, onSuccess: (_, { orgId }) => { queryClient.invalidateQueries(roleQueryKeys.getOrgRoles(orgId)); } @@ -67,14 +88,20 @@ export const useCreateOrgRole = () => { export const useUpdateOrgRole = () => { const queryClient = useQueryClient(); - return useMutation({ - mutationFn: ({ id, orgId, permissions, ...dto }: TUpdateOrgRoleDTO) => - apiRequest.patch(`/api/v1/organization/${orgId}/roles/${id}`, { + return useMutation({ + mutationFn: async ({ id, orgId, permissions, ...dto }: TUpdateOrgRoleDTO) => { + const { + data: { role } + } = await apiRequest.patch(`/api/v1/organization/${orgId}/roles/${id}`, { ...dto, - permissions: permissions?.length ? packRules(permissions) : [] - }), - onSuccess: (_, { orgId }) => { + permissions: permissions?.length ? packRules(permissions) : undefined + }); + + return role; + }, + onSuccess: (_, { id, orgId }) => { queryClient.invalidateQueries(roleQueryKeys.getOrgRoles(orgId)); + queryClient.invalidateQueries(roleQueryKeys.getOrgRole(orgId, id)); } }); }; @@ -82,13 +109,19 @@ export const useUpdateOrgRole = () => { export const useDeleteOrgRole = () => { const queryClient = useQueryClient(); - return useMutation({ - mutationFn: ({ orgId, id }: TDeleteOrgRoleDTO) => - apiRequest.delete(`/api/v1/organization/${orgId}/roles/${id}`, { + return useMutation({ + mutationFn: async ({ orgId, id }: TDeleteOrgRoleDTO) => { + const { + data: { role } + } = await apiRequest.delete(`/api/v1/organization/${orgId}/roles/${id}`, { data: { orgId } - }), - onSuccess: (_, { orgId }) => { + }); + + return role; + }, + onSuccess: (_, { id, orgId }) => { queryClient.invalidateQueries(roleQueryKeys.getOrgRoles(orgId)); + queryClient.invalidateQueries(roleQueryKeys.getOrgRole(orgId, id)); } }); }; diff --git a/frontend/src/hooks/api/roles/queries.tsx b/frontend/src/hooks/api/roles/queries.tsx index f04af697db..7333cb4afc 100644 --- a/frontend/src/hooks/api/roles/queries.tsx +++ b/frontend/src/hooks/api/roles/queries.tsx @@ -7,6 +7,8 @@ import picomatch from "picomatch"; import { apiRequest } from "@app/config/request"; import { OrgPermissionSet } from "@app/context/OrgPermissionContext/types"; import { ProjectPermissionSet } from "@app/context/ProjectPermissionContext/types"; +import { groupBy } from "@app/lib/fn/array"; +import { omit } from "@app/lib/fn/object"; import { OrgUser, TProjectMembership } from "../users/types"; import { @@ -36,40 +38,41 @@ const glob: JsInterpreter> = (node, object, context) => { const conditionsMatcher = buildMongoQueryMatcher({ $glob }, { glob }); export const roleQueryKeys = { - getProjectRoles: (projectSlug: string) => ["roles", { projectSlug }] as const, - getProjectRoleBySlug: (projectSlug: string, roleSlug: string) => - ["roles", { projectSlug, roleSlug }] as const, + getProjectRoles: (projectId: string) => ["roles", { projectId }] as const, + getProjectRoleBySlug: (projectId: string, roleSlug: string) => + ["roles", { projectId, roleSlug }] as const, getOrgRoles: (orgId: string) => 
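+ // keyed per organization; the org role mutations above invalidate this cache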
["org-roles", { orgId }] as const, + getOrgRole: (orgId: string, roleId: string) => [{ orgId, roleId }, "org-role"] as const, getUserOrgPermissions: ({ orgId }: TGetUserOrgPermissionsDTO) => ["user-permissions", { orgId }] as const, getUserProjectPermissions: ({ workspaceId }: TGetUserProjectPermissionDTO) => ["user-project-permissions", { workspaceId }] as const }; -const getProjectRoles = async (projectId: string) => { +export const getProjectRoles = async (projectId: string) => { const { data } = await apiRequest.get<{ roles: Array> }>( - `/api/v1/workspace/${projectId}/roles` + `/api/v2/workspace/${projectId}/roles` ); return data.roles; }; -export const useGetProjectRoles = (projectSlug: string) => +export const useGetProjectRoles = (projectId: string) => useQuery({ - queryKey: roleQueryKeys.getProjectRoles(projectSlug), - queryFn: () => getProjectRoles(projectSlug), - enabled: Boolean(projectSlug) + queryKey: roleQueryKeys.getProjectRoles(projectId), + queryFn: () => getProjectRoles(projectId), + enabled: Boolean(projectId) }); -export const useGetProjectRoleBySlug = (projectSlug: string, roleSlug: string) => +export const useGetProjectRoleBySlug = (projectId: string, roleSlug: string) => useQuery({ - queryKey: roleQueryKeys.getProjectRoleBySlug(projectSlug, roleSlug), + queryKey: roleQueryKeys.getProjectRoleBySlug(projectId, roleSlug), queryFn: async () => { const { data } = await apiRequest.get<{ role: TProjectRole }>( - `/api/v1/workspace/${projectSlug}/roles/slug/${roleSlug}` + `/api/v2/workspace/${projectId}/roles/slug/${roleSlug}` ); return data.role; }, - enabled: Boolean(projectSlug && roleSlug) + enabled: Boolean(projectId && roleSlug) }); const getOrgRoles = async (orgId: string) => { @@ -89,6 +92,21 @@ export const useGetOrgRoles = (orgId: string, enable = true) => enabled: Boolean(orgId) && enable }); +export const useGetOrgRole = (orgId: string, roleId: string) => + useQuery({ + queryKey: roleQueryKeys.getOrgRole(orgId, roleId), + queryFn: async () => { + const { data } = await apiRequest.get<{ + role: Omit & { permissions: unknown }; + }>(`/api/v1/organization/${orgId}/roles/${roleId}`); + return { + ...data.role, + permissions: unpackRules(data.role.permissions as PackRule[]) + }; + }, + enabled: Boolean(orgId && roleId) + }); + const getUserOrgPermissions = async ({ orgId }: TGetUserOrgPermissionsDTO) => { if (orgId === "") return { permissions: [], membership: null }; @@ -130,8 +148,32 @@ export const useGetUserProjectPermissions = ({ workspaceId }: TGetUserProjectPer enabled: Boolean(workspaceId), select: (data) => { const rule = unpackRules>>(data.permissions); - const ability = createMongoAbility(rule, { conditionsMatcher }); + const negatedRules = groupBy( + rule.filter((i) => i.inverted && i.conditions), + (i) => `${i.subject}-${JSON.stringify(i.conditions)}` + ); + const ability = createMongoAbility(rule, { + // this allows in frontend to skip some rules using * + conditionsMatcher: (rules) => { + return (entity) => { + // skip validation if its negated rules + const isNegatedRule = + // eslint-disable-next-line no-underscore-dangle + negatedRules?.[`${entity.__caslSubjectType__}-${JSON.stringify(rules)}`]; + if (isNegatedRule) { + const baseMatcher = conditionsMatcher(rules); + return baseMatcher(entity); + } + const rulesStrippedOfWildcard = omit( + rules, + Object.keys(entity).filter((el) => entity[el]?.includes("*")) + ); + const baseMatcher = conditionsMatcher(rulesStrippedOfWildcard); + return baseMatcher(entity); + }; + } + }); const membership = { 
...data.membership, roles: data.membership.roles.map(({ role }) => role) diff --git a/frontend/src/hooks/api/roles/types.ts b/frontend/src/hooks/api/roles/types.ts index e2d1b533ad..0a48c9f977 100644 --- a/frontend/src/hooks/api/roles/types.ts +++ b/frontend/src/hooks/api/roles/types.ts @@ -40,7 +40,8 @@ export type TPermission = { export type TProjectPermission = { conditions?: Record; - action: string; + inverted?: boolean; + action: string | string[]; subject: string | string[]; }; @@ -71,7 +72,7 @@ export type TDeleteOrgRoleDTO = { }; export type TCreateProjectRoleDTO = { - projectSlug: string; + projectId: string; name: string; description?: string; slug: string; @@ -79,11 +80,11 @@ export type TCreateProjectRoleDTO = { }; export type TUpdateProjectRoleDTO = { - projectSlug: string; + projectId: string; id: string; } & Partial>; export type TDeleteProjectRoleDTO = { - projectSlug: string; + projectId: string; id: string; }; diff --git a/frontend/src/hooks/api/secretApproval/mutation.tsx b/frontend/src/hooks/api/secretApproval/mutation.tsx index e0a8df95da..ceebd34931 100644 --- a/frontend/src/hooks/api/secretApproval/mutation.tsx +++ b/frontend/src/hooks/api/secretApproval/mutation.tsx @@ -9,14 +9,23 @@ export const useCreateSecretApprovalPolicy = () => { const queryClient = useQueryClient(); return useMutation<{}, {}, TCreateSecretPolicyDTO>({ - mutationFn: async ({ environment, workspaceId, approvals, approvers, secretPath, name }) => { + mutationFn: async ({ + environment, + workspaceId, + approvals, + approvers, + secretPath, + name, + enforcementLevel + }) => { const { data } = await apiRequest.post("/api/v1/secret-approvals", { environment, workspaceId, approvals, approvers, secretPath, - name + name, + enforcementLevel }); return data; }, @@ -30,12 +39,13 @@ export const useUpdateSecretApprovalPolicy = () => { const queryClient = useQueryClient(); return useMutation<{}, {}, TUpdateSecretPolicyDTO>({ - mutationFn: async ({ id, approvers, approvals, secretPath, name }) => { + mutationFn: async ({ id, approvers, approvals, secretPath, name, enforcementLevel }) => { const { data } = await apiRequest.patch(`/api/v1/secret-approvals/${id}`, { approvals, approvers, secretPath, - name + name, + enforcementLevel }); return data; }, diff --git a/frontend/src/hooks/api/secretApproval/types.ts b/frontend/src/hooks/api/secretApproval/types.ts index 38e8d7ee8e..fcf83086c7 100644 --- a/frontend/src/hooks/api/secretApproval/types.ts +++ b/frontend/src/hooks/api/secretApproval/types.ts @@ -1,3 +1,4 @@ +import { EnforcementLevel } from "../policies/enums"; import { WorkspaceEnv } from "../workspace/types"; export type TSecretApprovalPolicy = { @@ -7,10 +8,22 @@ export type TSecretApprovalPolicy = { envId: string; environment: WorkspaceEnv; secretPath?: string; - approvers: string[]; approvals: number; + approvers: Approver[]; + updatedAt: Date; + enforcementLevel: EnforcementLevel; }; +export enum ApproverType{ + User = "user", + Group = "group" +} + +export type Approver ={ + id: string; + type: ApproverType; +} + export type TGetSecretApprovalPoliciesDTO = { workspaceId: string; }; @@ -26,16 +39,18 @@ export type TCreateSecretPolicyDTO = { name?: string; environment: string; secretPath?: string | null; - approvers?: string[]; + approvers?: Approver[]; approvals?: number; + enforcementLevel: EnforcementLevel; }; export type TUpdateSecretPolicyDTO = { id: string; name?: string; - approvers?: string[]; + approvers?: Approver[]; secretPath?: string | null; approvals?: number; + enforcementLevel?: 
EnforcementLevel; // for invalidating list workspaceId: string; }; diff --git a/frontend/src/hooks/api/secretApprovalRequest/mutation.tsx b/frontend/src/hooks/api/secretApprovalRequest/mutation.tsx index 94358ed798..f3b3896446 100644 --- a/frontend/src/hooks/api/secretApprovalRequest/mutation.tsx +++ b/frontend/src/hooks/api/secretApprovalRequest/mutation.tsx @@ -46,8 +46,10 @@ export const usePerformSecretApprovalRequestMerge = () => { const queryClient = useQueryClient(); return useMutation<{}, {}, TPerformSecretApprovalRequestMerge>({ - mutationFn: async ({ id }) => { - const { data } = await apiRequest.post(`/api/v1/secret-approval-requests/${id}/merge`); + mutationFn: async ({ id, bypassReason }) => { + const { data } = await apiRequest.post(`/api/v1/secret-approval-requests/${id}/merge`, { + bypassReason + }); return data; }, onSuccess: (_, { id, workspaceId }) => { diff --git a/frontend/src/hooks/api/secretApprovalRequest/queries.tsx b/frontend/src/hooks/api/secretApprovalRequest/queries.tsx index aaf84941af..a153160185 100644 --- a/frontend/src/hooks/api/secretApprovalRequest/queries.tsx +++ b/frontend/src/hooks/api/secretApprovalRequest/queries.tsx @@ -1,3 +1,4 @@ +/* eslint-disable no-param-reassign */ import { useInfiniteQuery, UseInfiniteQueryOptions, @@ -12,16 +13,13 @@ import { import { apiRequest } from "@app/config/request"; import { UserWsKeyPair } from "../keys/types"; -import { decryptSecrets } from "../secrets/queries"; -import { DecryptedSecret } from "../secrets/types"; +import { EncryptedSecret, SecretType, SecretV3RawSanitized } from "../secrets/types"; import { - CommitType, TGetSecretApprovalRequestCount, TGetSecretApprovalRequestDetails, TGetSecretApprovalRequestList, TSecretApprovalRequest, - TSecretApprovalRequestCount, - TSecretApprovalSecChangeData + TSecretApprovalRequestCount } from "./types"; export const secretApprovalRequestKeys = { @@ -45,8 +43,8 @@ export const secretApprovalRequestKeys = { ] }; -export const decryptSecretApprovalSecret = ( - encSecret: TSecretApprovalSecChangeData, +export const decryptSecrets = ( + encryptedSecrets: EncryptedSecret[], decryptFileKey: UserWsKeyPair ) => { const PRIVATE_KEY = localStorage.getItem("PRIVATE_KEY") as string; @@ -57,34 +55,64 @@ export const decryptSecretApprovalSecret = ( privateKey: PRIVATE_KEY }); - const secretKey = decryptSymmetric({ - ciphertext: encSecret.secretKeyCiphertext, - iv: encSecret.secretKeyIV, - tag: encSecret.secretKeyTag, - key + const personalSecrets: Record = {}; + const secrets: SecretV3RawSanitized[] = []; + encryptedSecrets.forEach((encSecret) => { + const secretKey = decryptSymmetric({ + ciphertext: encSecret.secretKeyCiphertext, + iv: encSecret.secretKeyIV, + tag: encSecret.secretKeyTag, + key + }); + + const secretValue = decryptSymmetric({ + ciphertext: encSecret.secretValueCiphertext, + iv: encSecret.secretValueIV, + tag: encSecret.secretValueTag, + key + }); + + const secretComment = decryptSymmetric({ + ciphertext: encSecret.secretCommentCiphertext, + iv: encSecret.secretCommentIV, + tag: encSecret.secretCommentTag, + key + }); + + const decryptedSecret: SecretV3RawSanitized = { + id: encSecret.id, + env: encSecret.environment, + key: secretKey, + value: secretValue, + tags: encSecret.tags, + comment: secretComment, + reminderRepeatDays: encSecret.secretReminderRepeatDays, + reminderNote: encSecret.secretReminderNote, + createdAt: encSecret.createdAt, + updatedAt: encSecret.updatedAt, + version: encSecret.version, + skipMultilineEncoding: encSecret.skipMultilineEncoding + }; 
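+
+      // personal secrets never surface as rows of their own; stash them by key
+      // so the pass below can layer them onto the matching shared secret as an
+      // override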
+ + if (encSecret.type === SecretType.Personal) { + personalSecrets[decryptedSecret.key] = { + id: encSecret.id, + value: secretValue + }; + } else { + secrets.push(decryptedSecret); + } }); - const secretValue = decryptSymmetric({ - ciphertext: encSecret.secretValueCiphertext, - iv: encSecret.secretValueIV, - tag: encSecret.secretValueTag, - key + secrets.forEach((sec) => { + if (personalSecrets?.[sec.key]) { + sec.idOverride = personalSecrets[sec.key].id; + sec.valueOverride = personalSecrets[sec.key].value; + sec.overrideAction = "modified"; + } }); - const secretComment = decryptSymmetric({ - ciphertext: encSecret.secretCommentCiphertext, - iv: encSecret.secretCommentIV, - tag: encSecret.secretCommentTag, - key - }); - return { - id: encSecret.id, - version: encSecret.version, - secretKey, - secretValue, - secretComment, - tags: encSecret.tags - }; + return secrets; }; const fetchSecretApprovalRequestList = async ({ @@ -166,14 +194,13 @@ const fetchSecretApprovalRequestDetails = async ({ export const useGetSecretApprovalRequestDetails = ({ id, - decryptKey, options = {} }: TGetSecretApprovalRequestDetails & { options?: Omit< UseQueryOptions< TSecretApprovalRequest, unknown, - TSecretApprovalRequest, + TSecretApprovalRequest, ReturnType >, "queryKey" | "queryFn" @@ -182,17 +209,7 @@ export const useGetSecretApprovalRequestDetails = ({ useQuery({ queryKey: secretApprovalRequestKeys.detail({ id }), queryFn: () => fetchSecretApprovalRequestDetails({ id }), - select: (data) => ({ - ...data, - commits: data.commits.map(({ secretVersion, op, secret, ...newVersion }) => ({ - op, - secret, - secretVersion: secretVersion ? decryptSecrets([secretVersion], decryptKey)[0] : undefined, - newVersion: - op !== CommitType.DELETE ? decryptSecretApprovalSecret(newVersion, decryptKey) : undefined - })) - }), - enabled: Boolean(id && decryptKey) && (options?.enabled ?? true) + enabled: Boolean(id) && (options?.enabled ?? 
true) }); const fetchSecretApprovalRequestCount = async ({ workspaceId }: TGetSecretApprovalRequestCount) => { diff --git a/frontend/src/hooks/api/secretApprovalRequest/types.ts b/frontend/src/hooks/api/secretApprovalRequest/types.ts index 8c2ba69636..c8a6851151 100644 --- a/frontend/src/hooks/api/secretApprovalRequest/types.ts +++ b/frontend/src/hooks/api/secretApprovalRequest/types.ts @@ -1,6 +1,5 @@ -import { UserWsKeyPair } from "../keys/types"; import { TSecretApprovalPolicy } from "../secretApproval/types"; -import { EncryptedSecret } from "../secrets/types"; +import { SecretV3Raw } from "../secrets/types"; import { WsTag } from "../tags/types"; export enum ApprovalStatus { @@ -17,15 +16,9 @@ export enum CommitType { export type TSecretApprovalSecChangeData = { id: string; - secretKeyCiphertext: string; - secretKeyIV: string; - secretKeyTag: string; - secretValueCiphertext: string; - secretValueIV: string; - secretValueTag: string; - secretCommentIV: string; - secretCommentTag: string; - secretCommentCiphertext: string; + secretKey: string; + secretValue?: string; + secretComment?: string; skipMultilineEncoding?: boolean; algorithm: "aes-256-gcm"; keyEncoding: "utf8" | "base64"; @@ -37,20 +30,24 @@ export type TSecretApprovalSecChange = { id: string; version: number; secretKey: string; - secretValue: string; - secretComment: string; + secretValue?: string; + secretComment?: string; tags?: string[]; }; -export type TSecretApprovalRequest = { +export type TSecretApprovalRequest = { id: string; isReplicated?: boolean; slug: string; createdAt: string; - committerId: string; + committerUserId: string; reviewers: { - member: string; + userId: string; status: ApprovalStatus; + email: string; + firstName: string; + lastName: string; + username: string; }[]; workspace: string; environment: string; @@ -58,13 +55,35 @@ export type TSecretApprovalRequest = { secretPath: string; hasMerged: boolean; status: "open" | "close"; - policy: TSecretApprovalPolicy; - statusChangeBy: string; + policy: Omit & { + approvers: { + userId: string; + email: string; + firstName: string; + lastName: string; + username: string; + }[]; + }; + statusChangedByUserId: string; + statusChangedByUser?: { + userId: string; + email: string; + firstName: string; + lastName: string; + username: string; + }; + committerUser: { + userId: string; + email: string; + firstName: string; + lastName: string; + username: string; + }; conflicts: Array<{ secretId: string; op: CommitType.UPDATE }>; commits: ({ // if there is no secret means it was creation secret?: { version: number }; - secretVersion: J; + secretVersion: SecretV3Raw; // if there is no new version its for Delete op: CommitType; } & TSecretApprovalSecChangeData)[]; @@ -90,7 +109,6 @@ export type TGetSecretApprovalRequestCount = { export type TGetSecretApprovalRequestDetails = { id: string; - decryptKey: UserWsKeyPair; }; export type TUpdateSecretApprovalReviewStatusDTO = { @@ -107,4 +125,5 @@ export type TUpdateSecretApprovalRequestStatusDTO = { export type TPerformSecretApprovalRequestMerge = { id: string; workspaceId: string; + bypassReason?: string; }; diff --git a/frontend/src/hooks/api/secretFolders/queries.tsx b/frontend/src/hooks/api/secretFolders/queries.tsx index 236a13a26b..d85bc558a8 100644 --- a/frontend/src/hooks/api/secretFolders/queries.tsx +++ b/frontend/src/hooks/api/secretFolders/queries.tsx @@ -8,6 +8,7 @@ import { } from "@tanstack/react-query"; import { apiRequest } from "@app/config/request"; +import { dashboardKeys } from 
"@app/hooks/api/dashboard/queries"; import { secretSnapshotKeys } from "../secretSnapshots/queries"; import { @@ -124,6 +125,12 @@ export const useCreateFolder = () => { return data; }, onSuccess: (_, { projectId, environment, path }) => { + queryClient.invalidateQueries( + dashboardKeys.getDashboardSecrets({ + projectId, + secretPath: path ?? "/" + }) + ); queryClient.invalidateQueries( folderQueryKeys.getSecretFolders({ projectId, environment, path }) ); @@ -151,6 +158,12 @@ export const useUpdateFolder = () => { return data; }, onSuccess: (_, { projectId, environment, path }) => { + queryClient.invalidateQueries( + dashboardKeys.getDashboardSecrets({ + projectId, + secretPath: path ?? "/" + }) + ); queryClient.invalidateQueries( folderQueryKeys.getSecretFolders({ projectId, environment, path }) ); @@ -179,6 +192,12 @@ export const useDeleteFolder = () => { return data; }, onSuccess: (_, { path = "/", projectId, environment }) => { + queryClient.invalidateQueries( + dashboardKeys.getDashboardSecrets({ + projectId, + secretPath: path + }) + ); queryClient.invalidateQueries( folderQueryKeys.getSecretFolders({ projectId, environment, path }) ); @@ -206,6 +225,12 @@ export const useUpdateFolderBatch = () => { }, onSuccess: (_, { projectId, folders }) => { folders.forEach((folder) => { + queryClient.invalidateQueries( + dashboardKeys.getDashboardSecrets({ + projectId, + secretPath: folder.path ?? "/" + }) + ); queryClient.invalidateQueries( folderQueryKeys.getSecretFolders({ projectId, diff --git a/frontend/src/hooks/api/secretImports/mutation.tsx b/frontend/src/hooks/api/secretImports/mutation.tsx index 04f1f01e6a..4bee1a4ed5 100644 --- a/frontend/src/hooks/api/secretImports/mutation.tsx +++ b/frontend/src/hooks/api/secretImports/mutation.tsx @@ -1,6 +1,7 @@ import { useMutation, useQueryClient } from "@tanstack/react-query"; import { apiRequest } from "@app/config/request"; +import { dashboardKeys } from "@app/hooks/api/dashboard/queries"; import { secretImportKeys } from "./queries"; import { @@ -31,6 +32,9 @@ export const useCreateSecretImport = () => { queryClient.invalidateQueries( secretImportKeys.getSecretImportSecrets({ projectId, environment, path }) ); + queryClient.invalidateQueries( + dashboardKeys.getDashboardSecrets({ projectId, secretPath: path ?? "/" }) + ); } }); }; @@ -55,6 +59,9 @@ export const useUpdateSecretImport = () => { queryClient.invalidateQueries( secretImportKeys.getSecretImportSecrets({ environment, path, projectId }) ); + queryClient.invalidateQueries( + dashboardKeys.getDashboardSecrets({ projectId, secretPath: path ?? "/" }) + ); } }); }; @@ -93,6 +100,9 @@ export const useDeleteSecretImport = () => { queryClient.invalidateQueries( secretImportKeys.getSecretImportSecrets({ projectId, environment, path }) ); + queryClient.invalidateQueries( + dashboardKeys.getDashboardSecrets({ projectId, secretPath: path ?? 
"/" }) + ); } }); }; diff --git a/frontend/src/hooks/api/secretImports/queries.tsx b/frontend/src/hooks/api/secretImports/queries.tsx index 2879c859bf..c2ddd2fbe2 100644 --- a/frontend/src/hooks/api/secretImports/queries.tsx +++ b/frontend/src/hooks/api/secretImports/queries.tsx @@ -1,10 +1,6 @@ import { useCallback } from "react"; import { useQueries, useQuery, UseQueryOptions } from "@tanstack/react-query"; -import { - decryptAssymmetric, - decryptSymmetric -} from "@app/components/utilities/cryptography/crypto"; import { apiRequest } from "@app/config/request"; import { @@ -75,7 +71,7 @@ const fetchImportedSecrets = async ( directory?: string ) => { const { data } = await apiRequest.get<{ secrets: TImportedSecrets[] }>( - "/api/v1/secret-imports/secrets", + "/api/v1/secret-imports/secrets/raw", { params: { workspaceId, @@ -107,7 +103,6 @@ const fetchImportedFolders = async ({ export const useGetImportedSecretsSingleEnv = ({ environment, - decryptFileKey, path, projectId, options = {} @@ -123,78 +118,40 @@ export const useGetImportedSecretsSingleEnv = ({ >; }) => useQuery({ - enabled: - Boolean(projectId) && - Boolean(environment) && - Boolean(decryptFileKey) && - (options?.enabled ?? true), + enabled: Boolean(projectId) && Boolean(environment) && (options?.enabled ?? true), queryKey: secretImportKeys.getSecretImportSecrets({ environment, path, projectId }), queryFn: () => fetchImportedSecrets(projectId, environment, path), - select: useCallback( - (data: TImportedSecrets[]) => { - const PRIVATE_KEY = localStorage.getItem("PRIVATE_KEY") as string; - const latestKey = decryptFileKey; - const key = decryptAssymmetric({ - ciphertext: latestKey.encryptedKey, - nonce: latestKey.nonce, - publicKey: latestKey.sender.publicKey, - privateKey: PRIVATE_KEY - }); - - return data.map((el) => ({ - environment: el.environment, - secretPath: el.secretPath, - environmentInfo: el.environmentInfo, - folderId: el.folderId, - secrets: el.secrets.map((encSecret) => { - const secretKey = decryptSymmetric({ - ciphertext: encSecret.secretKeyCiphertext, - iv: encSecret.secretKeyIV, - tag: encSecret.secretKeyTag, - key - }); - - const secretValue = decryptSymmetric({ - ciphertext: encSecret.secretValueCiphertext, - iv: encSecret.secretValueIV, - tag: encSecret.secretValueTag, - key - }); - - const secretComment = decryptSymmetric({ - ciphertext: encSecret.secretCommentCiphertext, - iv: encSecret.secretCommentIV, - tag: encSecret.secretCommentTag, - key - }); - - return { - id: encSecret.id, - env: encSecret.environment, - key: secretKey, - value: secretValue, - tags: encSecret.tags, - comment: secretComment, - createdAt: encSecret.createdAt, - updatedAt: encSecret.updatedAt, - version: encSecret.version - }; - }) - })); - }, - [decryptFileKey] - ) + select: (data: TImportedSecrets[]) => { + return data.map((el) => ({ + environment: el.environment, + secretPath: el.secretPath, + environmentInfo: el.environmentInfo, + folderId: el.folderId, + secrets: el.secrets.map((encSecret) => { + return { + id: encSecret.id, + env: encSecret.environment, + key: encSecret.secretKey, + value: encSecret.secretValue, + tags: encSecret.tags, + comment: encSecret.secretComment, + createdAt: encSecret.createdAt, + updatedAt: encSecret.updatedAt, + version: encSecret.version + }; + }) + })); + } }); export const useGetImportedSecretsAllEnvs = ({ projectId, environments, - path = "/", - decryptFileKey + path = "/" }: TGetSecretImportsAllEnvs) => { const secretImports = useQueries({ queries: environments.map((env) => ({ @@ -207,70 
+164,54 @@ export const useGetImportedSecretsAllEnvs = ({ enabled: Boolean(projectId) && Boolean(env), // eslint-disable-next-line react-hooks/rules-of-hooks select: useCallback( - (data: TImportedSecrets[]) => { - const PRIVATE_KEY = localStorage.getItem("PRIVATE_KEY") as string; - const latestKey = decryptFileKey; - const key = decryptAssymmetric({ - ciphertext: latestKey.encryptedKey, - nonce: latestKey.nonce, - publicKey: latestKey.sender.publicKey, - privateKey: PRIVATE_KEY - }); - - return data.map((el) => ({ + (data: Awaited>) => + data.map((el) => ({ environment: el.environment, secretPath: el.secretPath, environmentInfo: el.environmentInfo, folderId: el.folderId, secrets: el.secrets.map((encSecret) => { - const secretKey = decryptSymmetric({ - ciphertext: encSecret.secretKeyCiphertext, - iv: encSecret.secretKeyIV, - tag: encSecret.secretKeyTag, - key - }); - - const secretValue = decryptSymmetric({ - ciphertext: encSecret.secretValueCiphertext, - iv: encSecret.secretValueIV, - tag: encSecret.secretValueTag, - key - }); - - const secretComment = decryptSymmetric({ - ciphertext: encSecret.secretCommentCiphertext, - iv: encSecret.secretCommentIV, - tag: encSecret.secretCommentTag, - key - }); - return { id: encSecret.id, env: encSecret.environment, - key: secretKey, - value: secretValue, + key: encSecret.secretKey, + value: encSecret.secretValue, tags: encSecret.tags, - comment: secretComment, + comment: encSecret.secretComment, createdAt: encSecret.createdAt, updatedAt: encSecret.updatedAt, version: encSecret.version }; }) - })); - }, - [decryptFileKey] + })), + [] ) })) }); + const getEnvImportedSecretKeyCount = useCallback( + (env: string) => { + const selectedEnvIndex = environments.indexOf(env); + let totalSecrets = 0; + + if (selectedEnvIndex !== -1) { + secretImports?.[selectedEnvIndex]?.data?.forEach((secret) => { + totalSecrets += secret.secrets.length; + }); + } + + return totalSecrets; + }, + [(secretImports || []).map((response) => response.data)] + ); + const isImportedSecretPresentInEnv = useCallback( - (secPath: string, envSlug: string, secretName: string) => { + (envSlug: string, secretName: string) => { const selectedEnvIndex = environments.indexOf(envSlug); if (selectedEnvIndex !== -1) { - const isPresent = secretImports?.[selectedEnvIndex]?.data?.find( - ({ secretPath, secrets }) => - secretPath === secPath && secrets.some((s) => s.key === secretName) + const isPresent = secretImports?.[selectedEnvIndex]?.data?.find(({ secrets }) => + secrets.some((s) => s.key === secretName) ); return Boolean(isPresent); @@ -280,7 +221,33 @@ export const useGetImportedSecretsAllEnvs = ({ [(secretImports || []).map((response) => response.data)] ); - return { secretImports, isImportedSecretPresentInEnv }; + const getImportedSecretByKey = useCallback( + (envSlug: string, secretName: string) => { + const selectedEnvIndex = environments.indexOf(envSlug); + + if (selectedEnvIndex !== -1) { + const secret = secretImports?.[selectedEnvIndex]?.data?.find(({ secrets }) => + secrets.find((s) => s.key === secretName) + ); + + if (!secret) return undefined; + + return { + secret: secret?.secrets.find((s) => s.key === secretName), + environmentInfo: secret?.environmentInfo + }; + } + return undefined; + }, + [(secretImports || []).map((response) => response.data)] + ); + + return { + secretImports, + isImportedSecretPresentInEnv, + getImportedSecretByKey, + getEnvImportedSecretKeyCount + }; }; export const useGetImportedFoldersByEnv = ({ diff --git 
a/frontend/src/hooks/api/secretImports/types.ts b/frontend/src/hooks/api/secretImports/types.ts index 1a6c06dd3b..1bf60bd4e3 100644 --- a/frontend/src/hooks/api/secretImports/types.ts +++ b/frontend/src/hooks/api/secretImports/types.ts @@ -1,5 +1,4 @@ -import { UserWsKeyPair } from "../keys/types"; -import { EncryptedSecret } from "../secrets/types"; +import { SecretV3Raw } from "../secrets/types"; import { WorkspaceEnv } from "../workspace/types"; export type TSecretImport = { @@ -28,7 +27,7 @@ export type TImportedSecrets = { environmentInfo: WorkspaceEnv; secretPath: string; folderId: string; - secrets: EncryptedSecret[]; + secrets: SecretV3Raw[]; }; export type TGetSecretImports = { @@ -39,7 +38,6 @@ export type TGetSecretImports = { export type TGetSecretImportsAllEnvs = { projectId: string; - decryptFileKey: UserWsKeyPair; path?: string; environments: string[]; }; @@ -48,7 +46,6 @@ export type TGetImportedSecrets = { projectId: string; environment: string; path?: string; - decryptFileKey: UserWsKeyPair; }; export type TuseGetImportedFoldersByEnv = { diff --git a/frontend/src/hooks/api/secretRotation/queries.tsx b/frontend/src/hooks/api/secretRotation/queries.tsx index a3c8e270e6..d612189ad7 100644 --- a/frontend/src/hooks/api/secretRotation/queries.tsx +++ b/frontend/src/hooks/api/secretRotation/queries.tsx @@ -1,14 +1,9 @@ -import { useCallback } from "react"; import { useQuery, UseQueryOptions } from "@tanstack/react-query"; -import { - decryptAssymmetric, - decryptSymmetric -} from "@app/components/utilities/cryptography/crypto"; import { apiRequest } from "@app/config/request"; import { - TGetSecretRotationList, + TGetSecretRotationListDTO, TGetSecretRotationProviders, TSecretRotation, TSecretRotationProviderList @@ -19,7 +14,7 @@ export const secretRotationKeys = { { workspaceId }, "secret-rotation-providers" ], - list: ({ workspaceId }: Omit) => + list: ({ workspaceId }: Omit) => [{ workspaceId }, "secret-rotations"] as const }; @@ -53,7 +48,7 @@ export const useGetSecretRotationProviders = ({ const fetchSecretRotations = async ({ workspaceId -}: Omit) => { +}: Omit) => { const { data } = await apiRequest.get<{ secretRotations: TSecretRotation[] }>( "/api/v1/secret-rotations", { params: { workspaceId } } @@ -63,14 +58,13 @@ const fetchSecretRotations = async ({ export const useGetSecretRotations = ({ workspaceId, - decryptFileKey, options = {} -}: TGetSecretRotationList & { +}: TGetSecretRotationListDTO & { options?: Omit< UseQueryOptions< TSecretRotation[], unknown, - TSecretRotation<{ key: string }>[], + TSecretRotation[], ReturnType >, "queryKey" | "queryFn" @@ -80,31 +74,5 @@ export const useGetSecretRotations = ({ ...options, queryKey: secretRotationKeys.list({ workspaceId }), enabled: Boolean(workspaceId) && (options?.enabled ?? 
true), - queryFn: async () => fetchSecretRotations({ workspaceId }), - select: useCallback( - (data: TSecretRotation[]) => { - const PRIVATE_KEY = localStorage.getItem("PRIVATE_KEY") as string; - const decryptKey = decryptAssymmetric({ - ciphertext: decryptFileKey.encryptedKey, - nonce: decryptFileKey.nonce, - publicKey: decryptFileKey.sender.publicKey, - privateKey: PRIVATE_KEY - }); - return data.map((el) => ({ - ...el, - outputs: el.outputs.map(({ key, secret }) => ({ - key, - secret: { - key: decryptSymmetric({ - ciphertext: secret.secretValueCiphertext, - iv: secret.secretValueIV, - tag: secret.secretValueTag, - key: decryptKey - }) - } - })) - })); - }, - [decryptFileKey] - ) + queryFn: async () => fetchSecretRotations({ workspaceId }) }); diff --git a/frontend/src/hooks/api/secretRotation/types.ts b/frontend/src/hooks/api/secretRotation/types.ts index b8963a95eb..07a4c70df6 100644 --- a/frontend/src/hooks/api/secretRotation/types.ts +++ b/frontend/src/hooks/api/secretRotation/types.ts @@ -1,5 +1,3 @@ -import { UserWsKeyPair } from "../keys/types"; -import { EncryptedSecret } from "../secrets/types"; import { WorkspaceEnv } from "../workspace/types"; export enum TProviderFunctionTypes { @@ -74,7 +72,7 @@ export type TDbProviderTemplate = { outputs: Record; }; -export type TSecretRotation = { +export type TSecretRotation = { id: string; interval: number; provider: string; @@ -85,7 +83,11 @@ export type TSecretRotation = { secretPath: string; outputs: Array<{ key: string; - secret: T; + secret: { + version: number; + id: string; + secretKey: string; + }; }>; status?: "success" | "failed"; lastRotatedAt?: string; @@ -103,9 +105,8 @@ export type TGetSecretRotationProviders = { workspaceId: string; }; -export type TGetSecretRotationList = { +export type TGetSecretRotationListDTO = { workspaceId: string; - decryptFileKey: UserWsKeyPair; }; export type TCreateSecretRotationDTO = { diff --git a/frontend/src/hooks/api/secretSharing/mutations.ts b/frontend/src/hooks/api/secretSharing/mutations.ts index e21cc08f6d..e805abfd29 100644 --- a/frontend/src/hooks/api/secretSharing/mutations.ts +++ b/frontend/src/hooks/api/secretSharing/mutations.ts @@ -2,34 +2,51 @@ import { useMutation, useQueryClient } from "@tanstack/react-query"; import { apiRequest } from "@app/config/request"; -import { TCreateSharedSecretRequest, TDeleteSharedSecretRequest, TSharedSecret } from "./types"; +import { secretSharingKeys } from "./queries"; +import { + TCreatedSharedSecret, + TCreateSharedSecretRequest, + TDeleteSharedSecretRequest, + TSharedSecret +} from "./types"; export const useCreateSharedSecret = () => { const queryClient = useQueryClient(); return useMutation({ mutationFn: async (inputData: TCreateSharedSecretRequest) => { - const { data } = await apiRequest.post("/api/v1/secret-sharing", inputData); + const { data } = await apiRequest.post( + "/api/v1/secret-sharing", + inputData + ); return data; }, - onSuccess: () => queryClient.invalidateQueries(["sharedSecrets"]) + onSuccess: () => queryClient.invalidateQueries(secretSharingKeys.allSharedSecrets()) + }); +}; + +export const useCreatePublicSharedSecret = () => { + const queryClient = useQueryClient(); + return useMutation({ + mutationFn: async (inputData: TCreateSharedSecretRequest) => { + const { data } = await apiRequest.post( + "/api/v1/secret-sharing/public", + inputData + ); + return data; + }, + onSuccess: () => queryClient.invalidateQueries(secretSharingKeys.allSharedSecrets()) }); }; export const useDeleteSharedSecret = () => { const queryClient 
= useQueryClient(); - return useMutation< - TSharedSecret, - { message: string }, - { sharedSecretId: string } - >({ + return useMutation({ mutationFn: async ({ sharedSecretId }: TDeleteSharedSecretRequest) => { const { data } = await apiRequest.delete( `/api/v1/secret-sharing/${sharedSecretId}` ); return data; }, - onSuccess: () => { - queryClient.invalidateQueries(["sharedSecrets"]); - } + onSuccess: () => queryClient.invalidateQueries(secretSharingKeys.allSharedSecrets()) }); }; diff --git a/frontend/src/hooks/api/secretSharing/queries.ts b/frontend/src/hooks/api/secretSharing/queries.ts index c7970fabc1..479f3ec165 100644 --- a/frontend/src/hooks/api/secretSharing/queries.ts +++ b/frontend/src/hooks/api/secretSharing/queries.ts @@ -4,27 +4,66 @@ import { apiRequest } from "@app/config/request"; import { TSharedSecret, TViewSharedSecretResponse } from "./types"; -export const useGetSharedSecrets = () => { +export const secretSharingKeys = { + allSharedSecrets: () => ["sharedSecrets"] as const, + specificSharedSecrets: ({ offset, limit }: { offset: number; limit: number }) => + [...secretSharingKeys.allSharedSecrets(), { offset, limit }] as const, + getSecretById: (arg: { id: string; hashedHex: string | null; password?: string }) => [ + "shared-secret", + arg + ] +}; + +export const useGetSharedSecrets = ({ + offset = 0, + limit = 25 +}: { + offset: number; + limit: number; +}) => { return useQuery({ - queryKey: ["sharedSecrets"], + queryKey: secretSharingKeys.specificSharedSecrets({ offset, limit }), queryFn: async () => { - const { data } = await apiRequest.get("/api/v1/secret-sharing/"); + const params = new URLSearchParams({ + offset: String(offset), + limit: String(limit) + }); + + const { data } = await apiRequest.get<{ secrets: TSharedSecret[]; totalCount: number }>( + "/api/v1/secret-sharing/", + { + params + } + ); return data; } }); }; -export const useGetActiveSharedSecretByIdAndHashedHex = (id: string, hashedHex: string) => { - return useQuery({ - queryFn: async () => { - const { data } = await apiRequest.get( - `/api/v1/secret-sharing/public/${id}?hashedHex=${hashedHex}` +export const useGetActiveSharedSecretById = ({ + sharedSecretId, + hashedHex, + password +}: { + sharedSecretId: string; + hashedHex: string | null; + password?: string; +}) => { + return useQuery( + secretSharingKeys.getSecretById({ id: sharedSecretId, hashedHex, password }), + async () => { + const { data } = await apiRequest.post( + `/api/v1/secret-sharing/public/${sharedSecretId}`, + { + ...(hashedHex && { hashedHex }), + password + } ); - return { - encryptedValue: data.encryptedValue, - iv: data.iv, - tag: data.tag, - }; + + return data; + }, + { + enabled: Boolean(sharedSecretId) } - }); + ); }; diff --git a/frontend/src/hooks/api/secretSharing/types.ts b/frontend/src/hooks/api/secretSharing/types.ts index 424e3525c7..b9843a711c 100644 --- a/frontend/src/hooks/api/secretSharing/types.ts +++ b/frontend/src/hooks/api/secretSharing/types.ts @@ -4,23 +4,45 @@ export type TSharedSecret = { orgId: string; createdAt: Date; updatedAt: Date; -} & TCreateSharedSecretRequest; - -export type TCreateSharedSecretRequest = { + name: string | null; + lastViewedAt?: Date; + expiresAt: Date; + expiresAfterViews: number | null; encryptedValue: string; iv: string; tag: string; - hashedHex: string; +}; + +export type TCreatedSharedSecret = { + id: string; +}; + +export type TCreateSharedSecretRequest = { + name?: string; + password?: string; + secretValue: string; expiresAt: Date; - expiresAfterViews: number; + 
expiresAfterViews?: number; + accessType?: SecretSharingAccessType; }; export type TViewSharedSecretResponse = { - encryptedValue: string; - iv: string; - tag: string; + isPasswordProtected: boolean; + secret: { + secretValue?: string; + encryptedValue: string; + iv: string; + tag: string; + accessType: SecretSharingAccessType; + orgName?: string; + }; }; export type TDeleteSharedSecretRequest = { sharedSecretId: string; }; + +export enum SecretSharingAccessType { + Anyone = "anyone", + Organization = "organization" +} diff --git a/frontend/src/hooks/api/secretSnapshots/queries.tsx b/frontend/src/hooks/api/secretSnapshots/queries.tsx index ca1ec76fd4..f45d6db5e3 100644 --- a/frontend/src/hooks/api/secretSnapshots/queries.tsx +++ b/frontend/src/hooks/api/secretSnapshots/queries.tsx @@ -1,13 +1,9 @@ /* eslint-disable no-param-reassign */ import { useInfiniteQuery, useMutation, useQuery, useQueryClient } from "@tanstack/react-query"; -import { - decryptAssymmetric, - decryptSymmetric -} from "@app/components/utilities/cryptography/crypto"; import { apiRequest } from "@app/config/request"; -import { DecryptedSecret } from "../secrets/types"; +import { SecretType, SecretV3RawSanitized } from "../secrets/types"; import { TGetSecretSnapshotsDTO, TSecretRollbackDTO, @@ -65,55 +61,33 @@ const fetchSnapshotEncSecrets = async (snapshotId: string) => { return res.data.secretSnapshot; }; -export const useGetSnapshotSecrets = ({ decryptFileKey, snapshotId }: TSnapshotDataProps) => +export const useGetSnapshotSecrets = ({ snapshotId }: TSnapshotDataProps) => useQuery({ queryKey: secretSnapshotKeys.snapshotData(snapshotId), - enabled: Boolean(snapshotId && decryptFileKey), + enabled: Boolean(snapshotId), queryFn: () => fetchSnapshotEncSecrets(snapshotId), select: (data) => { - const PRIVATE_KEY = localStorage.getItem("PRIVATE_KEY") as string; - const latestKey = decryptFileKey; - const key = decryptAssymmetric({ - ciphertext: latestKey.encryptedKey, - nonce: latestKey.nonce, - publicKey: latestKey.sender.publicKey, - privateKey: PRIVATE_KEY - }); - - const sharedSecrets: DecryptedSecret[] = []; + const sharedSecrets: SecretV3RawSanitized[] = []; const personalSecrets: Record = {}; - data.secretVersions.forEach((encSecret) => { - const secretKey = decryptSymmetric({ - ciphertext: encSecret.secretKeyCiphertext, - iv: encSecret.secretKeyIV, - tag: encSecret.secretKeyTag, - key - }); - - const secretValue = decryptSymmetric({ - ciphertext: encSecret.secretValueCiphertext, - iv: encSecret.secretValueIV, - tag: encSecret.secretValueTag, - key - }); - - const secretComment = ""; - + data.secretVersions.forEach((secretVersion) => { const decryptedSecret = { - id: encSecret.secretId, + id: secretVersion.secretId, env: data.environment.slug, - key: secretKey, - value: secretValue, - tags: encSecret.tags, - comment: secretComment, - createdAt: encSecret.createdAt, - updatedAt: encSecret.updatedAt, + key: secretVersion.secretKey, + value: secretVersion.secretValue || "", + tags: secretVersion.tags, + comment: secretVersion.secretComment, + createdAt: secretVersion.createdAt, + updatedAt: secretVersion.updatedAt, type: "modified", - version: encSecret.version + version: secretVersion.version }; - if (encSecret.type === "personal") { - personalSecrets[decryptedSecret.key] = { id: encSecret.secretId, value: secretValue }; + if (secretVersion.type === SecretType.Personal) { + personalSecrets[decryptedSecret.key] = { + id: secretVersion.secretId, + value: secretVersion.secretValue || "" + }; } else { 
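+          // non-personal (shared) versions are the rows actually returned for
+          // the snapshot view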
sharedSecrets.push(decryptedSecret); } diff --git a/frontend/src/hooks/api/secretSnapshots/types.ts b/frontend/src/hooks/api/secretSnapshots/types.ts index 2eb956a8a6..54bf152cc7 100644 --- a/frontend/src/hooks/api/secretSnapshots/types.ts +++ b/frontend/src/hooks/api/secretSnapshots/types.ts @@ -1,5 +1,4 @@ -import { UserWsKeyPair } from "../keys/types"; -import { EncryptedSecretVersion } from "../secrets/types"; +import { SecretVersions } from "../secrets/types"; import { WorkspaceEnv } from "../types"; export type TSecretSnapshot = { @@ -12,7 +11,7 @@ export type TSecretSnapshot = { export type TSnapshotData = Omit & { id: string; - secretVersions: EncryptedSecretVersion[]; + secretVersions: SecretVersions[]; folderVersion: Array<{ name: string; id: string }>; environment: WorkspaceEnv; }; @@ -20,7 +19,6 @@ export type TSnapshotData = Omit & { export type TSnapshotDataProps = { snapshotId: string; env: string; - decryptFileKey: UserWsKeyPair; }; export type TGetSecretSnapshotsDTO = { diff --git a/frontend/src/hooks/api/secrets/constants.ts b/frontend/src/hooks/api/secrets/constants.ts new file mode 100644 index 0000000000..97280c9026 --- /dev/null +++ b/frontend/src/hooks/api/secrets/constants.ts @@ -0,0 +1 @@ +export const ERROR_NOT_ALLOWED_READ_SECRETS = "You are not allowed to read on secrets"; diff --git a/frontend/src/hooks/api/secrets/index.ts b/frontend/src/hooks/api/secrets/index.ts index b58e8779aa..74d6c4439a 100644 --- a/frontend/src/hooks/api/secrets/index.ts +++ b/frontend/src/hooks/api/secrets/index.ts @@ -4,7 +4,12 @@ export { useCreateSecretV3, useDeleteSecretBatch, useDeleteSecretV3, + useMoveSecrets, useUpdateSecretBatch, useUpdateSecretV3 } from "./mutations"; -export { useGetProjectSecrets, useGetProjectSecretsAllEnv, useGetSecretVersion } from "./queries"; +export { + useGetProjectSecrets, + useGetProjectSecretsAllEnv, + useGetSecretReferenceTree, + useGetSecretVersion} from "./queries"; diff --git a/frontend/src/hooks/api/secrets/mutations.tsx b/frontend/src/hooks/api/secrets/mutations.tsx index e397c7b550..7bc3849be4 100644 --- a/frontend/src/hooks/api/secrets/mutations.tsx +++ b/frontend/src/hooks/api/secrets/mutations.tsx @@ -1,70 +1,21 @@ -import crypto from "crypto"; - import { MutationOptions, useMutation, useQueryClient } from "@tanstack/react-query"; -import { - decryptAssymmetric, - encryptSymmetric -} from "@app/components/utilities/cryptography/crypto"; import { apiRequest } from "@app/config/request"; +import { dashboardKeys } from "@app/hooks/api/dashboard/queries"; import { secretApprovalRequestKeys } from "../secretApprovalRequest/queries"; import { secretSnapshotKeys } from "../secretSnapshots/queries"; import { secretKeys } from "./queries"; import { - CreateSecretDTO, TCreateSecretBatchDTO, TCreateSecretsV3DTO, TDeleteSecretBatchDTO, TDeleteSecretsV3DTO, + TMoveSecretsDTO, TUpdateSecretBatchDTO, TUpdateSecretsV3DTO } from "./types"; -const encryptSecret = (randomBytes: string, key: string, value?: string, comment?: string) => { - // encrypt key - const { - ciphertext: secretKeyCiphertext, - iv: secretKeyIV, - tag: secretKeyTag - } = encryptSymmetric({ - plaintext: key, - key: randomBytes - }); - - // encrypt value - const { - ciphertext: secretValueCiphertext, - iv: secretValueIV, - tag: secretValueTag - } = encryptSymmetric({ - plaintext: value ?? "", - key: randomBytes - }); - - // encrypt comment - const { - ciphertext: secretCommentCiphertext, - iv: secretCommentIV, - tag: secretCommentTag - } = encryptSymmetric({ - plaintext: comment ?? 
"", - key: randomBytes - }); - - return { - secretKeyCiphertext, - secretKeyIV, - secretKeyTag, - secretValueCiphertext, - secretValueIV, - secretValueTag, - secretCommentCiphertext, - secretCommentIV, - secretCommentTag - }; -}; - export const useCreateSecretV3 = ({ options }: { @@ -77,35 +28,28 @@ export const useCreateSecretV3 = ({ type, environment, workspaceId, - secretName, + secretKey, secretValue, - latestFileKey, secretComment, - skipMultilineEncoding + skipMultilineEncoding, + tagIds }) => { - const PRIVATE_KEY = localStorage.getItem("PRIVATE_KEY") as string; - - const randomBytes = latestFileKey - ? decryptAssymmetric({ - ciphertext: latestFileKey.encryptedKey, - nonce: latestFileKey.nonce, - publicKey: latestFileKey.sender.publicKey, - privateKey: PRIVATE_KEY - }) - : crypto.randomBytes(16).toString("hex"); - - const reqBody = { - workspaceId, - environment, - type, + const { data } = await apiRequest.post(`/api/v3/secrets/raw/${secretKey}`, { secretPath, - ...encryptSecret(randomBytes, secretName, secretValue, secretComment), - skipMultilineEncoding - }; - const { data } = await apiRequest.post(`/api/v3/secrets/${secretName}`, reqBody); + type, + environment, + workspaceId, + secretValue, + secretComment, + skipMultilineEncoding, + tagIds + }); return data; }, onSuccess: (_, { workspaceId, environment, secretPath }) => { + queryClient.invalidateQueries( + dashboardKeys.getDashboardSecrets({ projectId: workspaceId, secretPath }) + ); queryClient.invalidateQueries( secretKeys.getProjectSecret({ workspaceId, environment, secretPath }) ); @@ -131,47 +75,36 @@ export const useUpdateSecretV3 = ({ mutationFn: async ({ secretPath = "/", type, - secretId, environment, workspaceId, - secretName, + secretKey, secretValue, - latestFileKey, - tags, + tagIds, secretComment, secretReminderRepeatDays, secretReminderNote, newSecretName, skipMultilineEncoding }) => { - const PRIVATE_KEY = localStorage.getItem("PRIVATE_KEY") as string; - - const randomBytes = latestFileKey - ? decryptAssymmetric({ - ciphertext: latestFileKey.encryptedKey, - nonce: latestFileKey.nonce, - publicKey: latestFileKey.sender.publicKey, - privateKey: PRIVATE_KEY - }) - : crypto.randomBytes(16).toString("hex"); - - const reqBody = { + const { data } = await apiRequest.patch(`/api/v3/secrets/raw/${secretKey}`, { workspaceId, environment, type, secretReminderNote, secretReminderRepeatDays, secretPath, - secretId, - ...encryptSecret(randomBytes, newSecretName ?? 
secretName, secretValue, secretComment), - tags, skipMultilineEncoding, - secretName: newSecretName - }; - const { data } = await apiRequest.patch(`/api/v3/secrets/${secretName}`, reqBody); + newSecretName, + secretComment, + tagIds, + secretValue + }); return data; }, onSuccess: (_, { workspaceId, environment, secretPath }) => { + queryClient.invalidateQueries( + dashboardKeys.getDashboardSecrets({ projectId: workspaceId, secretPath }) + ); queryClient.invalidateQueries( secretKeys.getProjectSecret({ workspaceId, environment, secretPath }) ); @@ -200,23 +133,24 @@ export const useDeleteSecretV3 = ({ type, environment, workspaceId, - secretName, + secretKey, secretId }) => { - const reqBody = { - workspaceId, - environment, - type, - secretPath, - secretId - }; - - const { data } = await apiRequest.delete(`/api/v3/secrets/${secretName}`, { - data: reqBody + const { data } = await apiRequest.delete(`/api/v3/secrets/raw/${secretKey}`, { + data: { + workspaceId, + environment, + type, + secretPath, + secretId + } }); return data; }, onSuccess: (_, { workspaceId, environment, secretPath }) => { + queryClient.invalidateQueries( + dashboardKeys.getDashboardSecrets({ projectId: workspaceId, secretPath }) + ); queryClient.invalidateQueries( secretKeys.getProjectSecret({ workspaceId, environment, secretPath }) ); @@ -240,36 +174,19 @@ export const useCreateSecretBatch = ({ const queryClient = useQueryClient(); return useMutation<{}, {}, TCreateSecretBatchDTO>({ - mutationFn: async ({ secretPath = "/", workspaceId, environment, secrets, latestFileKey }) => { - const PRIVATE_KEY = localStorage.getItem("PRIVATE_KEY") as string; - const randomBytes = latestFileKey - ? decryptAssymmetric({ - ciphertext: latestFileKey.encryptedKey, - nonce: latestFileKey.nonce, - publicKey: latestFileKey.sender.publicKey, - privateKey: PRIVATE_KEY - }) - : crypto.randomBytes(16).toString("hex"); - - const reqBody = { + mutationFn: async ({ secretPath = "/", workspaceId, environment, secrets }) => { + const { data } = await apiRequest.post("/api/v3/secrets/batch/raw", { workspaceId, environment, secretPath, - secrets: secrets.map( - ({ secretName, secretValue, secretComment, metadata, type, skipMultilineEncoding }) => ({ - secretName, - ...encryptSecret(randomBytes, secretName, secretValue, secretComment), - type, - metadata, - skipMultilineEncoding - }) - ) - }; - - const { data } = await apiRequest.post("/api/v3/secrets/batch", reqBody); + secrets + }); return data; }, onSuccess: (_, { workspaceId, environment, secretPath }) => { + queryClient.invalidateQueries( + dashboardKeys.getDashboardSecrets({ projectId: workspaceId, secretPath }) + ); queryClient.invalidateQueries( secretKeys.getProjectSecret({ workspaceId, environment, secretPath }) ); @@ -293,36 +210,19 @@ export const useUpdateSecretBatch = ({ const queryClient = useQueryClient(); return useMutation<{}, {}, TUpdateSecretBatchDTO>({ - mutationFn: async ({ secretPath = "/", workspaceId, environment, secrets, latestFileKey }) => { - const PRIVATE_KEY = localStorage.getItem("PRIVATE_KEY") as string; - const randomBytes = latestFileKey - ? 
decryptAssymmetric({ - ciphertext: latestFileKey.encryptedKey, - nonce: latestFileKey.nonce, - publicKey: latestFileKey.sender.publicKey, - privateKey: PRIVATE_KEY - }) - : crypto.randomBytes(16).toString("hex"); - - const reqBody = { + mutationFn: async ({ secretPath = "/", workspaceId, environment, secrets }) => { + const { data } = await apiRequest.patch("/api/v3/secrets/batch/raw", { workspaceId, environment, secretPath, - secrets: secrets.map( - ({ secretName, secretValue, secretComment, type, tags, skipMultilineEncoding }) => ({ - secretName, - ...encryptSecret(randomBytes, secretName, secretValue, secretComment), - type, - tags, - skipMultilineEncoding - }) - ) - }; - - const { data } = await apiRequest.patch("/api/v3/secrets/batch", reqBody); + secrets + }); return data; }, onSuccess: (_, { workspaceId, environment, secretPath }) => { + queryClient.invalidateQueries( + dashboardKeys.getDashboardSecrets({ projectId: workspaceId, secretPath }) + ); queryClient.invalidateQueries( secretKeys.getProjectSecret({ workspaceId, environment, secretPath }) ); @@ -347,19 +247,20 @@ export const useDeleteSecretBatch = ({ return useMutation<{}, {}, TDeleteSecretBatchDTO>({ mutationFn: async ({ secretPath = "/", workspaceId, environment, secrets }) => { - const reqBody = { - workspaceId, - environment, - secretPath, - secrets - }; - - const { data } = await apiRequest.delete("/api/v3/secrets/batch", { - data: reqBody + const { data } = await apiRequest.delete("/api/v3/secrets/batch/raw", { + data: { + workspaceId, + environment, + secretPath, + secrets + } }); return data; }, onSuccess: (_, { workspaceId, environment, secretPath }) => { + queryClient.invalidateQueries( + dashboardKeys.getDashboardSecrets({ projectId: workspaceId, secretPath }) + ); queryClient.invalidateQueries( secretKeys.getProjectSecret({ workspaceId, environment, secretPath }) ); @@ -375,7 +276,80 @@ export const useDeleteSecretBatch = ({ }); }; -export const createSecret = async (dto: CreateSecretDTO) => { +export const useMoveSecrets = ({ + options +}: { + options?: Omit, "mutationFn">; +} = {}) => { + const queryClient = useQueryClient(); + + return useMutation< + { + isSourceUpdated: boolean; + isDestinationUpdated: boolean; + }, + {}, + TMoveSecretsDTO + >({ + mutationFn: async ({ + sourceEnvironment, + sourceSecretPath, + projectSlug, + destinationEnvironment, + destinationSecretPath, + secretIds, + shouldOverwrite + }) => { + const { data } = await apiRequest.post<{ + isSourceUpdated: boolean; + isDestinationUpdated: boolean; + }>("/api/v3/secrets/move", { + sourceEnvironment, + sourceSecretPath, + projectSlug, + destinationEnvironment, + destinationSecretPath, + secretIds, + shouldOverwrite + }); + + return data; + }, + onSuccess: (_, { projectId, sourceEnvironment, sourceSecretPath }) => { + queryClient.invalidateQueries( + dashboardKeys.getDashboardSecrets({ + projectId, + secretPath: sourceSecretPath + }) + ); + queryClient.invalidateQueries( + secretKeys.getProjectSecret({ + workspaceId: projectId, + environment: sourceEnvironment, + secretPath: sourceSecretPath + }) + ); + queryClient.invalidateQueries( + secretSnapshotKeys.list({ + environment: sourceEnvironment, + workspaceId: projectId, + directory: sourceSecretPath + }) + ); + queryClient.invalidateQueries( + secretSnapshotKeys.count({ + environment: sourceEnvironment, + workspaceId: projectId, + directory: sourceSecretPath + }) + ); + queryClient.invalidateQueries(secretApprovalRequestKeys.count({ workspaceId: projectId })); + }, + ...options + }); +}; + 
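+// A minimal usage sketch for the hook above; the component wiring and names
+// like `currentWorkspace` and `selectedSecretIds` are illustrative and not
+// part of this change:
+//
+//   const moveSecrets = useMoveSecrets();
+//   await moveSecrets.mutateAsync({
+//     projectSlug: currentWorkspace.slug,
+//     projectId: currentWorkspace.id,
+//     sourceEnvironment: "dev",
+//     sourceSecretPath: "/",
+//     destinationEnvironment: "staging",
+//     destinationSecretPath: "/app",
+//     secretIds: selectedSecretIds,
+//     shouldOverwrite: false
+//   });
+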
+export const createSecret = async (dto: TCreateSecretsV3DTO) => { const { data } = await apiRequest.post(`/api/v3/secrets/${dto.secretKey}`, dto); return data; }; diff --git a/frontend/src/hooks/api/secrets/queries.tsx b/frontend/src/hooks/api/secrets/queries.tsx index 28999389e9..b3b3a4164b 100644 --- a/frontend/src/hooks/api/secrets/queries.tsx +++ b/frontend/src/hooks/api/secrets/queries.tsx @@ -1,86 +1,80 @@ /* eslint-disable no-param-reassign */ import { useCallback, useMemo } from "react"; import { useQueries, useQuery, UseQueryOptions } from "@tanstack/react-query"; +import axios from "axios"; -import { - decryptAssymmetric, - decryptSymmetric -} from "@app/components/utilities/cryptography/crypto"; +import { createNotification } from "@app/components/notifications"; import { apiRequest } from "@app/config/request"; +import { useToggle } from "@app/hooks/useToggle"; -import { UserWsKeyPair } from "../keys/types"; +import { ERROR_NOT_ALLOWED_READ_SECRETS } from "./constants"; import { - DecryptedSecret, - EncryptedSecret, - EncryptedSecretVersion, GetSecretVersionsDTO, + SecretType, + SecretV3Raw, + SecretV3RawResponse, + SecretV3RawSanitized, + SecretVersions, TGetProjectSecretsAllEnvDTO, TGetProjectSecretsDTO, - TGetProjectSecretsKey + TGetProjectSecretsKey, + TGetSecretReferenceTreeDTO, + TSecretReferenceTraceNode } from "./types"; export const secretKeys = { // this is also used in secretSnapshot part getProjectSecret: ({ workspaceId, environment, secretPath }: TGetProjectSecretsKey) => [{ workspaceId, environment, secretPath }, "secrets"] as const, - getSecretVersion: (secretId: string) => [{ secretId }, "secret-versions"] as const + getSecretVersion: (secretId: string) => [{ secretId }, "secret-versions"] as const, + getSecretReferenceTree: (dto: TGetSecretReferenceTreeDTO) => ["secret-reference-tree", dto] }; -export const decryptSecrets = ( - encryptedSecrets: EncryptedSecret[], - decryptFileKey: UserWsKeyPair -) => { - const PRIVATE_KEY = localStorage.getItem("PRIVATE_KEY") as string; - const key = decryptAssymmetric({ - ciphertext: decryptFileKey.encryptedKey, - nonce: decryptFileKey.nonce, - publicKey: decryptFileKey.sender.publicKey, - privateKey: PRIVATE_KEY +export const fetchProjectSecrets = async ({ + workspaceId, + environment, + secretPath, + includeImports, + expandSecretReferences +}: TGetProjectSecretsKey) => { + const { data } = await apiRequest.get("/api/v3/secrets/raw", { + params: { + environment, + workspaceId, + secretPath, + expandSecretReferences, + include_imports: includeImports + } }); - const personalSecrets: Record = {}; - const secrets: DecryptedSecret[] = []; - encryptedSecrets.forEach((encSecret) => { - const secretKey = decryptSymmetric({ - ciphertext: encSecret.secretKeyCiphertext, - iv: encSecret.secretKeyIV, - tag: encSecret.secretKeyTag, - key - }); + return data; +}; - const secretValue = decryptSymmetric({ - ciphertext: encSecret.secretValueCiphertext, - iv: encSecret.secretValueIV, - tag: encSecret.secretValueTag, - key - }); - - const secretComment = decryptSymmetric({ - ciphertext: encSecret.secretCommentCiphertext, - iv: encSecret.secretCommentIV, - tag: encSecret.secretCommentTag, - key - }); - - const decryptedSecret: DecryptedSecret = { - id: encSecret.id, - env: encSecret.environment, - key: secretKey, - value: secretValue, - tags: encSecret.tags, - comment: secretComment, - reminderRepeatDays: encSecret.secretReminderRepeatDays, - reminderNote: encSecret.secretReminderNote, - createdAt: encSecret.createdAt, - updatedAt: 
encSecret.updatedAt, - version: encSecret.version, - skipMultilineEncoding: encSecret.skipMultilineEncoding +export const mergePersonalSecrets = (rawSecrets: SecretV3Raw[]) => { + const personalSecrets: Record = {}; + const secrets: SecretV3RawSanitized[] = []; + rawSecrets.forEach((el) => { + const decryptedSecret: SecretV3RawSanitized = { + id: el.id, + env: el.environment, + key: el.secretKey, + value: el.secretValue, + tags: el.tags || [], + comment: el.secretComment || "", + reminderRepeatDays: el.secretReminderRepeatDays, + reminderNote: el.secretReminderNote, + createdAt: el.createdAt, + updatedAt: el.updatedAt, + version: el.version, + skipMultilineEncoding: el.skipMultilineEncoding, + path: el.secretPath }; - if (encSecret.type === "personal") { + if (el.type === SecretType.Personal) { personalSecrets[decryptedSecret.key] = { - id: encSecret.id, - value: secretValue + id: el.id, + value: el.secretValue, + env: el.environment }; } else { secrets.push(decryptedSecret); @@ -88,9 +82,10 @@ export const decryptSecrets = ( }); secrets.forEach((sec) => { - if (personalSecrets?.[sec.key]) { - sec.idOverride = personalSecrets[sec.key].id; - sec.valueOverride = personalSecrets[sec.key].value; + const personalSecret = personalSecrets?.[sec.key]; + if (personalSecret && personalSecret.env === sec.env) { + sec.idOverride = personalSecret.id; + sec.valueOverride = personalSecret.value; sec.overrideAction = "modified"; } }); @@ -98,33 +93,17 @@ export const decryptSecrets = ( return secrets; }; -export const fetchProjectEncryptedSecrets = async ({ - workspaceId, - environment, - secretPath -}: TGetProjectSecretsKey) => { - const { data } = await apiRequest.get<{ secrets: EncryptedSecret[] }>("/api/v3/secrets", { - params: { - environment, - workspaceId, - secretPath - } - }); - - return data.secrets; -}; export const useGetProjectSecrets = ({ workspaceId, environment, - decryptFileKey, secretPath, options }: TGetProjectSecretsDTO & { options?: Omit< UseQueryOptions< - EncryptedSecret[], + SecretV3RawResponse, unknown, - DecryptedSecret[], + SecretV3RawSanitized[], ReturnType >, "queryKey" | "queryFn" @@ -133,28 +112,65 @@ export const useGetProjectSecrets = ({ useQuery({ ...options, // wait for all values to be available - enabled: Boolean(decryptFileKey && workspaceId && environment) && (options?.enabled ?? true), + enabled: Boolean(workspaceId && environment) && (options?.enabled ?? 
true), queryKey: secretKeys.getProjectSecret({ workspaceId, environment, secretPath }), - queryFn: async () => fetchProjectEncryptedSecrets({ workspaceId, environment, secretPath }), - select: (secrets: EncryptedSecret[]) => decryptSecrets(secrets, decryptFileKey) + queryFn: () => fetchProjectSecrets({ workspaceId, environment, secretPath }), + onError: (error) => { + if (axios.isAxiosError(error)) { + const serverResponse = error.response?.data as { message: string }; + createNotification({ + title: "Error fetching secrets", + type: "error", + text: serverResponse.message + }); + } + }, + select: useCallback( + (data: Awaited>) => mergePersonalSecrets(data.secrets), + [] + ) }); export const useGetProjectSecretsAllEnv = ({ workspaceId, envs, - decryptFileKey, secretPath }: TGetProjectSecretsAllEnvDTO) => { + const [isErrorHandled, setIsErrorHandled] = useToggle(false); + const secrets = useQueries({ queries: envs.map((environment) => ({ - queryKey: secretKeys.getProjectSecret({ workspaceId, environment, secretPath }), - enabled: Boolean(decryptFileKey && workspaceId && environment), - queryFn: async () => fetchProjectEncryptedSecrets({ workspaceId, environment, secretPath }), - select: (secs: EncryptedSecret[]) => - decryptSecrets(secs, decryptFileKey).reduce>( - (prev, curr) => ({ ...prev, [curr.key]: curr }), - {} - ) + queryKey: secretKeys.getProjectSecret({ + workspaceId, + environment, + secretPath + }), + enabled: Boolean(workspaceId && environment), + onError: (error: unknown) => { + if (axios.isAxiosError(error) && !isErrorHandled) { + const serverResponse = error.response?.data as { message: string }; + if (serverResponse.message !== ERROR_NOT_ALLOWED_READ_SECRETS) { + createNotification({ + title: "Error fetching secrets", + type: "error", + text: serverResponse.message + }); + } + + setIsErrorHandled.on(); + } + }, + queryFn: () => fetchProjectSecrets({ workspaceId, environment, secretPath }), + staleTime: 60 * 1000, + // eslint-disable-next-line react-hooks/rules-of-hooks + select: useCallback( + (data: Awaited>) => + mergePersonalSecrets(data.secrets).reduce>( + (prev, curr) => ({ ...prev, [curr.key]: curr }), + {} + ), + [] + ) })) }); @@ -194,7 +210,7 @@ export const useGetProjectSecretsAllEnv = ({ }; const fetchEncryptedSecretVersion = async (secretId: string, offset: number, limit: number) => { - const { data } = await apiRequest.get<{ secretVersions: EncryptedSecretVersion[] }>( + const { data } = await apiRequest.get<{ secretVersions: SecretVersions[] }>( `/api/v1/secret/${secretId}/secret-versions`, { params: { @@ -208,33 +224,40 @@ const fetchEncryptedSecretVersion = async (secretId: string, offset: number, lim export const useGetSecretVersion = (dto: GetSecretVersionsDTO) => useQuery({ - enabled: Boolean(dto.secretId && dto.decryptFileKey), + enabled: Boolean(dto.secretId), queryKey: secretKeys.getSecretVersion(dto.secretId), queryFn: () => fetchEncryptedSecretVersion(dto.secretId, dto.offset, dto.limit), - select: useCallback( - (data: EncryptedSecretVersion[]) => { - const PRIVATE_KEY = localStorage.getItem("PRIVATE_KEY") as string; - const latestKey = dto.decryptFileKey; - const key = decryptAssymmetric({ - ciphertext: latestKey.encryptedKey, - nonce: latestKey.nonce, - publicKey: latestKey.sender.publicKey, - privateKey: PRIVATE_KEY - }); - - return data - .map((el) => ({ - createdAt: el.createdAt, - id: el.id, - value: decryptSymmetric({ - ciphertext: el.secretValueCiphertext, - iv: el.secretValueIV, - tag: el.secretValueTag, - key - }) - })) - .sort((a, b) => 
+
+const fetchSecretReferenceTree = async ({
+  secretPath,
+  projectId,
+  secretKey,
+  environmentSlug
+}: TGetSecretReferenceTreeDTO) => {
+  const { data } = await apiRequest.get<{ tree: TSecretReferenceTraceNode; value: string }>(
+    `/api/v3/secrets/raw/${secretKey}/secret-reference-tree`,
+    {
+      params: {
+        secretPath,
+        workspaceId: projectId,
+        environment: environmentSlug
+      }
+    }
+  );
+  return data;
+};
+
+export const useGetSecretReferenceTree = (dto: TGetSecretReferenceTreeDTO) =>
+  useQuery({
+    enabled:
+      Boolean(dto.environmentSlug) &&
+      Boolean(dto.secretPath) &&
+      Boolean(dto.projectId) &&
+      Boolean(dto.secretKey),
+    queryKey: secretKeys.getSecretReferenceTree(dto),
+    queryFn: () => fetchSecretReferenceTree(dto)
+  });
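Reviewer note (not part of the patch): TSecretReferenceTraceNode (defined in types.ts below) is recursive, so a depth-first walk is enough to flatten the tree returned by useGetSecretReferenceTree. A sketch; the import path assumes the codebase's @app alias:

import type { TSecretReferenceTraceNode } from "@app/hooks/api/secrets/types";

// Collects an "env:path/key" label for every node the secret expands through.
const collectReferences = (
  node: TSecretReferenceTraceNode,
  acc: string[] = []
): string[] => {
  acc.push(`${node.environment}:${node.secretPath}/${node.key}`);
  node.children.forEach((child) => collectReferences(child, acc));
  return acc;
};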
diff --git a/frontend/src/hooks/api/secrets/types.ts b/frontend/src/hooks/api/secrets/types.ts
index f36872e43a..f4dfb07d14 100644
--- a/frontend/src/hooks/api/secrets/types.ts
+++ b/frontend/src/hooks/api/secrets/types.ts
@@ -1,11 +1,15 @@
-import type { UserWsKeyPair } from "../keys/types";
 import type { WsTag } from "../tags/types";

+export enum SecretType {
+  Shared = "shared",
+  Personal = "personal"
+}
+
 export type EncryptedSecret = {
   id: string;
   version: number;
   workspace: string;
-  type: "shared" | "personal";
+  type: SecretType;
   environment: string;
   secretKeyCiphertext: string;
   secretKeyIV: string;
@@ -25,18 +29,20 @@
   tags: WsTag[];
 };

-export type DecryptedSecret = {
+// both personal and shared secret stitched together for dashboard
+export type SecretV3RawSanitized = {
   id: string;
   version: number;
   key: string;
-  value: string;
-  comment: string;
+  value?: string;
+  comment?: string;
   reminderRepeatDays?: number | null;
   reminderNote?: string | null;
-  tags: WsTag[];
+  tags?: WsTag[];
   createdAt: string;
   updatedAt: string;
   env: string;
+  path?: string;
   valueOverride?: string;
   idOverride?: string;
   overrideAction?: string;
@@ -44,20 +50,47 @@
   skipMultilineEncoding?: boolean;
 };

-export type EncryptedSecretVersion = {
+export type SecretV3Raw = {
+  id: string;
+  _id: string;
+  workspace: string;
+  environment: string;
+  version: number;
+  type: string;
+  secretKey: string;
+  secretPath: string;
+  secretValue?: string;
+  secretComment?: string;
+  secretReminderNote?: string;
+  secretReminderRepeatDays?: number;
+  skipMultilineEncoding?: boolean;
+  metadata?: Record<string, string>;
+  tags?: WsTag[];
+  createdAt: string;
+  updatedAt: string;
+};
+
+export type SecretV3RawResponse = {
+  secrets: SecretV3Raw[];
+  imports: {
+    secretPath: string;
+    environment: string;
+    folderId: string;
+    secrets: SecretV3Raw[];
+  }[];
+};
+
+export type SecretVersions = {
   id: string;
   secretId: string;
   version: number;
   workspace: string;
-  type: string;
+  type: SecretType;
   isDeleted: boolean;
   envId: string;
-  secretKeyCiphertext: string;
-  secretKeyIV: string;
-  secretKeyTag: string;
-  secretValueCiphertext: string;
-  secretValueIV: string;
-  secretValueTag: string;
+  secretKey: string;
+  secretValue?: string;
+  secretComment?: string;
   tags: WsTag[];
   __v: number;
   skipMultilineEncoding?: boolean;
@@ -70,16 +103,15 @@ export type TGetProjectSecretsKey = {
   workspaceId: string;
   environment: string;
   secretPath?: string;
+  includeImports?: boolean;
+  expandSecretReferences?: boolean;
 };

-export type TGetProjectSecretsDTO = {
-  decryptFileKey: UserWsKeyPair;
-} & TGetProjectSecretsKey;
+export type TGetProjectSecretsDTO = TGetProjectSecretsKey;

 export type TGetProjectSecretsAllEnvDTO = {
   workspaceId: string;
   envs: string[];
-  decryptFileKey: UserWsKeyPair;
   folderId?: string;
   secretPath?: string;
   isPaused?: boolean;
@@ -89,44 +121,41 @@ export type GetSecretVersionsDTO = {
   secretId: string;
   limit: number;
   offset: number;
-  decryptFileKey: UserWsKeyPair;
 };

 export type TCreateSecretsV3DTO = {
-  latestFileKey: UserWsKeyPair;
-  secretName: string;
+  secretKey: string;
   secretValue: string;
   secretComment: string;
   skipMultilineEncoding?: boolean;
   secretPath: string;
   workspaceId: string;
   environment: string;
-  type: string;
+  type: SecretType;
+  tagIds?: string[];
 };

 export type TUpdateSecretsV3DTO = {
-  latestFileKey: UserWsKeyPair;
   workspaceId: string;
   environment: string;
-  type: string;
   secretPath: string;
+  type: SecretType;
   skipMultilineEncoding?: boolean;
   newSecretName?: string;
-  secretName: string;
-  secretId?: string;
+  secretKey: string;
   secretValue: string;
   secretComment?: string;
   secretReminderRepeatDays?: number | null;
   secretReminderNote?: string | null;
-  tags?: string[];
+  tagIds?: string[];
 };

 export type TDeleteSecretsV3DTO = {
   workspaceId: string;
   environment: string;
-  type: "shared" | "personal";
+  type: SecretType;
   secretPath: string;
-  secretName: string;
+  secretKey: string;
   secretId?: string;
 };

@@ -134,13 +163,13 @@ export type TCreateSecretBatchDTO = {
   workspaceId: string;
   environment: string;
   secretPath: string;
-  latestFileKey: UserWsKeyPair;
   secrets: Array<{
-    secretName: string;
+    secretKey: string;
     secretValue: string;
     secretComment: string;
     skipMultilineEncoding?: boolean;
-    type: "shared" | "personal";
+    type: SecretType;
+    tagIds?: string[];
     metadata?: {
       source?: string;
     };
@@ -151,14 +180,16 @@ export type TUpdateSecretBatchDTO = {
   workspaceId: string;
   environment: string;
   secretPath: string;
-  latestFileKey: UserWsKeyPair;
   secrets: Array<{
-    type: "shared" | "personal";
-    secretName: string;
-    skipMultilineEncoding?: boolean;
+    type: SecretType;
+    secretKey: string;
     secretValue: string;
-    secretComment: string;
-    tags?: string[];
+    secretComment?: string;
+    skipMultilineEncoding?: boolean;
+    tagIds?: string[];
+    metadata?: {
+      source?: string;
+    };
   }>;
 };

@@ -167,27 +198,33 @@ export type TDeleteSecretBatchDTO = {
   environment: string;
   secretPath: string;
   secrets: Array<{
-    secretName: string;
-    type: "shared" | "personal";
+    secretKey: string;
+    type: SecretType;
   }>;
 };

-export type CreateSecretDTO = {
-  workspaceId: string;
-  environment: string;
-  type: "shared" | "personal";
-  secretKey: string;
-  secretKeyCiphertext: string;
-  secretKeyIV: string;
-  secretKeyTag: string;
-  secretValueCiphertext: string;
-  secretValueIV: string;
-  secretValueTag: string;
-  secretCommentCiphertext: string;
-  secretCommentIV: string;
-  secretCommentTag: string;
-  secretPath: string;
-  metadata?: {
-    source?: string;
-  };
+export type TMoveSecretsDTO = {
+  projectSlug: string;
+  projectId: string;
+  sourceEnvironment: string;
+  sourceSecretPath: string;
+  destinationEnvironment: string;
+  destinationSecretPath: string;
+  secretIds: string[];
+  shouldOverwrite: boolean;
+};
+
+export type TGetSecretReferenceTreeDTO = {
+  secretKey: string;
+  secretPath: string;
+  environmentSlug: string;
+  projectId: string;
+};
+
+export type TSecretReferenceTraceNode = {
+  key: string;
+  value?: string;
+  environment: string;
+  secretPath: string;
+  children: TSecretReferenceTraceNode[];
 };
diff --git 
a/frontend/src/hooks/api/serverDetails/types.ts b/frontend/src/hooks/api/serverDetails/types.ts index 911526404c..3e22c2684b 100644 --- a/frontend/src/hooks/api/serverDetails/types.ts +++ b/frontend/src/hooks/api/serverDetails/types.ts @@ -4,5 +4,5 @@ export type ServerStatus = { emailConfigured: boolean; secretScanningConfigured: boolean; redisConfigured: boolean; - samlDefaultOrgSlug: boolean + samlDefaultOrgSlug: string; }; diff --git a/frontend/src/hooks/api/ssoConfig/queries.tsx b/frontend/src/hooks/api/ssoConfig/queries.tsx index 3074c5edaa..cbb8e0abe1 100644 --- a/frontend/src/hooks/api/ssoConfig/queries.tsx +++ b/frontend/src/hooks/api/ssoConfig/queries.tsx @@ -11,9 +11,15 @@ export const useGetSSOConfig = (organizationId: string) => { return useQuery({ queryKey: ssoConfigKeys.getSSOConfig(organizationId), queryFn: async () => { - const { data } = await apiRequest.get(`/api/v1/sso/config?organizationId=${organizationId}`); + try { + const { data } = await apiRequest.get( + `/api/v1/sso/config?organizationId=${organizationId}` + ); - return data; + return data; + } catch (err) { + return null; + } }, enabled: true }); diff --git a/frontend/src/hooks/api/subscriptions/types.ts b/frontend/src/hooks/api/subscriptions/types.ts index 45414292da..b1c4e224d2 100644 --- a/frontend/src/hooks/api/subscriptions/types.ts +++ b/frontend/src/hooks/api/subscriptions/types.ts @@ -2,6 +2,8 @@ export type SubscriptionPlan = { id: string; membersUsed: number; memberLimit: number; + identitiesUsed: number; + identityLimit: number; auditLogs: boolean; dynamicSecret: boolean; auditLogsRetentionDays: number; @@ -21,18 +23,26 @@ export type SubscriptionPlan = { workspacesUsed: number; environmentLimit: number; samlSSO: boolean; + hsm: boolean; + oidcSSO: boolean; scim: boolean; ldap: boolean; groups: boolean; status: - | "incomplete" - | "incomplete_expired" - | "trialing" - | "active" - | "past_due" - | "canceled" - | "unpaid" - | null; + | "incomplete" + | "incomplete_expired" + | "trialing" + | "active" + | "past_due" + | "canceled" + | "unpaid" + | null; trial_end: number | null; has_used_trial: boolean; + caCrl: boolean; + instanceUserManagement: boolean; + externalKms: boolean; + pkiEst: boolean; + enforceMfa: boolean; + projectTemplates: boolean; }; diff --git a/frontend/src/hooks/api/tags/queries.tsx b/frontend/src/hooks/api/tags/queries.tsx index 311ed09416..d1c4b533d6 100644 --- a/frontend/src/hooks/api/tags/queries.tsx +++ b/frontend/src/hooks/api/tags/queries.tsx @@ -28,11 +28,10 @@ export const useCreateWsTag = () => { const queryClient = useQueryClient(); return useMutation({ - mutationFn: async ({ workspaceID, tagName, tagColor, tagSlug }) => { + mutationFn: async ({ workspaceID, tagColor, tagSlug }) => { const { data } = await apiRequest.post<{ workspaceTag: WsTag }>( `/api/v1/workspace/${workspaceID}/tags`, { - name: tagName, color: tagColor || "", slug: tagSlug } diff --git a/frontend/src/hooks/api/tags/types.ts b/frontend/src/hooks/api/tags/types.ts index 9b4f70587d..72d710cfa0 100644 --- a/frontend/src/hooks/api/tags/types.ts +++ b/frontend/src/hooks/api/tags/types.ts @@ -2,7 +2,6 @@ export type UserWsTags = WsTag[]; export type WsTag = { id: string; - name: string; slug: string; color?: string; projectId: string; @@ -16,7 +15,6 @@ export type WorkspaceTag = { id: string; name: string; slug: string }; export type CreateTagDTO = { workspaceID: string; tagSlug: string; - tagName: string; tagColor: string; }; diff --git a/frontend/src/hooks/api/userEngagement/index.ts 
b/frontend/src/hooks/api/userEngagement/index.ts new file mode 100644 index 0000000000..5a4c29fa88 --- /dev/null +++ b/frontend/src/hooks/api/userEngagement/index.ts @@ -0,0 +1 @@ +export { useCreateUserWish } from "./mutations"; diff --git a/frontend/src/hooks/api/userEngagement/mutations.tsx b/frontend/src/hooks/api/userEngagement/mutations.tsx new file mode 100644 index 0000000000..d876e65c8a --- /dev/null +++ b/frontend/src/hooks/api/userEngagement/mutations.tsx @@ -0,0 +1,14 @@ +import { useMutation } from "@tanstack/react-query"; + +import { apiRequest } from "@app/config/request"; + +import { TCreateUserWishDto } from "./types"; + +export const useCreateUserWish = () => { + return useMutation<{}, {}, TCreateUserWishDto>({ + mutationFn: async (dto) => { + const { data } = await apiRequest.post("/api/v1/user-engagement/me/wish", dto); + return data; + } + }); +}; diff --git a/frontend/src/hooks/api/userEngagement/types.ts b/frontend/src/hooks/api/userEngagement/types.ts new file mode 100644 index 0000000000..ad94d03d2e --- /dev/null +++ b/frontend/src/hooks/api/userEngagement/types.ts @@ -0,0 +1,3 @@ +export type TCreateUserWishDto = { + text: string; +}; diff --git a/frontend/src/hooks/api/users/index.tsx b/frontend/src/hooks/api/users/index.tsx index a8ad89f4cf..b9d9f159b8 100644 --- a/frontend/src/hooks/api/users/index.tsx +++ b/frontend/src/hooks/api/users/index.tsx @@ -6,23 +6,28 @@ export { } from "./mutation"; export { fetchOrgUsers, - useAddUserToOrg, + useAddUsersToOrg, useCreateAPIKey, useDeleteAPIKey, + useDeleteMe, useDeleteOrgMembership, - useDeleteUser, useGetMyAPIKeys, useGetMyAPIKeysV2, useGetMyIp, useGetMyOrganizationProjects, useGetMySessions, + useGetOrgMembership, + useGetOrgMembershipProjectMemberships, useGetOrgUsers, useGetUser, useGetUserAction, + useGetUserTotpRegistration, + useListUserGroupMemberships, useLogoutUser, useRegisterUserAction, useRevokeMySessions, - useUpdateMfaEnabled, - useUpdateOrgUserRole, - useUpdateUserAuthMethods + useUpdateOrgMembership, + useUpdateUserAuthMethods, + useUpdateUserMfa } from "./queries"; +export { userKeys } from "./query-keys"; diff --git a/frontend/src/hooks/api/users/mutation.tsx b/frontend/src/hooks/api/users/mutation.tsx index 20e986aab3..e8cf41acbd 100644 --- a/frontend/src/hooks/api/users/mutation.tsx +++ b/frontend/src/hooks/api/users/mutation.tsx @@ -6,7 +6,8 @@ import { } from "@app/components/utilities/cryptography/crypto"; import { apiRequest } from "@app/config/request"; -import { workspaceKeys } from "../workspace/queries"; +import { workspaceKeys } from "../workspace"; +import { userKeys } from "./query-keys"; import { AddUserToWsDTOE2EE, AddUserToWsDTONonE2EE } from "./types"; export const useAddUserToWsE2EE = () => { @@ -50,14 +51,16 @@ export const useAddUserToWsNonE2EE = () => { const queryClient = useQueryClient(); return useMutation<{}, {}, AddUserToWsDTONonE2EE>({ - mutationFn: async ({ projectId, usernames }) => { + mutationFn: async ({ projectId, usernames, roleSlugs }) => { const { data } = await apiRequest.post(`/api/v2/workspace/${projectId}/memberships`, { - usernames + usernames, + roleSlugs }); return data; }, - onSuccess: (_, { projectId }) => { + onSuccess: (_, { orgId, projectId }) => { queryClient.invalidateQueries(workspaceKeys.getWorkspaceUsers(projectId)); + queryClient.invalidateQueries(userKeys.allOrgMembershipProjectMemberships(orgId)); } }); }; @@ -88,3 +91,66 @@ export const useVerifyEmailVerificationCode = () => { } }); }; + +export const useUpdateUserProjectFavorites = () => { + 
const queryClient = useQueryClient(); + return useMutation({ + mutationFn: async ({ + orgId, + projectFavorites + }: { + orgId: string; + projectFavorites: string[]; + }) => { + await apiRequest.put("/api/v1/user/me/project-favorites", { + orgId, + projectFavorites + }); + + return {}; + }, + onSuccess: (_, { orgId }) => { + queryClient.invalidateQueries(userKeys.userProjectFavorites(orgId)); + } + }); +}; + +export const useVerifyUserTotpRegistration = () => { + return useMutation({ + mutationFn: async ({ totp }: { totp: string }) => { + await apiRequest.post("/api/v1/user/me/totp/verify", { + totp + }); + + return {}; + } + }); +}; + +export const useDeleteUserTotpConfiguration = () => { + const queryClient = useQueryClient(); + return useMutation({ + mutationFn: async () => { + await apiRequest.delete("/api/v1/user/me/totp"); + + return {}; + }, + onSuccess: () => { + queryClient.invalidateQueries(userKeys.totpConfiguration); + } + }); +}; + +export const useCreateNewTotpRecoveryCodes = () => { + const queryClient = useQueryClient(); + return useMutation({ + mutationFn: async () => { + await apiRequest.post("/api/v1/user/me/totp/recovery-codes"); + + return {}; + }, + onSuccess: () => { + queryClient.invalidateQueries(userKeys.totpConfiguration); + } + }); +}; diff --git a/frontend/src/hooks/api/users/queries.tsx b/frontend/src/hooks/api/users/queries.tsx index a443c67500..98d8e4b565 100644 --- a/frontend/src/hooks/api/users/queries.tsx +++ b/frontend/src/hooks/api/users/queries.tsx @@ -1,9 +1,15 @@ import { useMutation, useQuery, useQueryClient } from "@tanstack/react-query"; +import { AxiosError } from "axios"; import { apiRequest } from "@app/config/request"; +import { SessionStorageKeys } from "@app/const"; import { setAuthToken } from "@app/reactQuery"; import { APIKeyDataV2 } from "../apiKeys/types"; +import { MfaMethod } from "../auth/types"; +import { TGroupWithProjectMemberships } from "../groups/types"; +import { workspaceKeys } from "../workspace"; +import { userKeys } from "./query-keys"; import { AddUserToOrgDTO, APIKeyData, @@ -13,22 +19,12 @@ import { OrgUser, RenameUserDTO, TokenVersion, - UpdateOrgUserRoleDTO, + TWorkspaceUser, + UpdateOrgMembershipDTO, User, UserEnc } from "./types"; -export const userKeys = { - getUser: ["user"] as const, - userAction: ["user-action"] as const, - getOrgUsers: (orgId: string) => [{ orgId }, "user"], - myIp: ["ip"] as const, - myAPIKeys: ["api-keys"] as const, - myAPIKeysV2: ["api-keys-v2"] as const, - mySessions: ["sessions"] as const, - myOrganizationProjects: (orgId: string) => [{ orgId }, "organization-projects"] as const -}; - export const fetchUserDetails = async () => { const { data } = await apiRequest.get<{ user: User & UserEnc }>("/api/v1/user"); @@ -37,7 +33,7 @@ export const fetchUserDetails = async () => { export const useGetUser = () => useQuery(userKeys.getUser, fetchUserDetails); -export const useDeleteUser = () => { +export const useDeleteMe = () => { const queryClient = useQueryClient(); return useMutation({ @@ -73,6 +69,14 @@ export const fetchUserAction = async (action: string) => { return data.userAction || ""; }; +export const fetchUserProjectFavorites = async (orgId: string) => { + const { data } = await apiRequest.get<{ projectFavorites: string[] }>( + `/api/v1/user/me/project-favorites?orgId=${orgId}` + ); + + return data.projectFavorites; +}; + export const useRenameUser = () => { const queryClient = useQueryClient(); @@ -121,6 +125,12 @@ export const fetchOrgUsers = async (orgId: string) => { return 
data.users;
 };

+export const useGetUserProjectFavorites = (orgId: string) =>
+  useQuery({
+    queryKey: userKeys.userProjectFavorites(orgId),
+    queryFn: () => fetchUserProjectFavorites(orgId)
+  });
+
 export const useGetOrgUsers = (orgId: string) =>
   useQuery({
     queryKey: userKeys.getOrgUsers(orgId),
@@ -130,12 +140,15 @@

 // mutation
 // TODO(akhilmhdh): move all mutation to mutation file
-export const useAddUserToOrg = () => {
+export const useAddUsersToOrg = () => {
   const queryClient = useQueryClient();
   type Response = {
     data: {
       message: string;
-      completeInviteLink: string | undefined;
+      completeInviteLinks?: {
+        email: string;
+        link: string;
+      }[];
     };
   };

@@ -143,12 +156,54 @@
     mutationFn: (dto) => {
       return apiRequest.post("/api/v1/invite-org/signup", dto);
     },
-    onSuccess: (_, { organizationId }) => {
+    onSuccess: (_, { organizationId, projects }) => {
       queryClient.invalidateQueries(userKeys.getOrgUsers(organizationId));
+
+      projects?.forEach((project) => {
+        if (project.slug) {
+          queryClient.invalidateQueries(workspaceKeys.getWorkspaceGroupMemberships(project.slug));
+        }
+        queryClient.invalidateQueries(workspaceKeys.getWorkspaceUsers(project.id));
+      });
     }
   });
 };

+export const useGetOrgMembership = (organizationId: string, orgMembershipId: string) => {
+  return useQuery({
+    queryKey: userKeys.getOrgMembership(organizationId, orgMembershipId),
+    queryFn: async () => {
+      const {
+        data: { membership }
+      } = await apiRequest.get<{ membership: OrgUser }>(
+        `/api/v2/organizations/${organizationId}/memberships/${orgMembershipId}`
+      );
+
+      return membership;
+    },
+    enabled: Boolean(organizationId) && Boolean(orgMembershipId)
+  });
+};
+
+export const useGetOrgMembershipProjectMemberships = (
+  organizationId: string,
+  orgMembershipId: string
+) => {
+  return useQuery({
+    queryKey: userKeys.forOrgMembershipProjectMemberships(organizationId, orgMembershipId),
+    queryFn: async () => {
+      const {
+        data: { memberships }
+      } = await apiRequest.get<{ memberships: TWorkspaceUser[] }>(
+        `/api/v2/organizations/${organizationId}/memberships/${orgMembershipId}/project-memberships`
+      );
+
+      return memberships;
+    },
+    enabled: Boolean(organizationId) && Boolean(orgMembershipId)
+  });
+};
+
 export const useDeleteOrgMembership = () => {
   const queryClient = useQueryClient();

@@ -162,24 +217,44 @@
   });
 };

-export const useUpdateOrgUserRole = () => {
+export const useDeactivateOrgMembership = () => {
   const queryClient = useQueryClient();

-  return useMutation<{}, {}, UpdateOrgUserRoleDTO>({
-    mutationFn: ({ organizationId, membershipId, role }) => {
+  return useMutation<{}, {}, DeletOrgMembershipDTO>({
+    mutationFn: ({ membershipId, orgId }) => {
+      return apiRequest.post(
+        `/api/v2/organizations/${orgId}/memberships/${membershipId}/deactivate`
+      );
+    },
+    onSuccess: (_, { orgId, membershipId }) => {
+      queryClient.invalidateQueries(userKeys.getOrgUsers(orgId));
+      queryClient.invalidateQueries(userKeys.getOrgMembership(orgId, membershipId));
+    }
+  });
+};
+
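Reviewer note (not part of the patch): a sketch of calling the new deactivate mutation from a member-settings view; ids are hypothetical, and the onSuccess invalidations above refresh both the org user list and the membership detail query.

const DeactivateMemberButton = ({ orgId, membershipId }: { orgId: string; membershipId: string }) => {
  const deactivateOrgMembership = useDeactivateOrgMembership();

  return (
    <button type="button" onClick={() => deactivateOrgMembership.mutate({ orgId, membershipId })}>
      Deactivate
    </button>
  );
};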
+export const useUpdateOrgMembership = () => {
+  const queryClient = useQueryClient();
+
+  return useMutation<{}, {}, UpdateOrgMembershipDTO>({
+    mutationFn: ({ organizationId, membershipId, role, isActive, metadata }) => {
       return apiRequest.patch(
         `/api/v2/organizations/${organizationId}/memberships/${membershipId}`,
         {
-          role
+          role,
+          isActive,
+          metadata
         }
       );
     },
-    onSuccess: (_, { organizationId }) => {
+    onSuccess: (_, { organizationId, membershipId }) => {
       queryClient.invalidateQueries(userKeys.getOrgUsers(organizationId));
+      queryClient.invalidateQueries(userKeys.getOrgMembership(organizationId, membershipId));
     },
     // to remove old states
-    onError: (_, { organizationId }) => {
+    onError: (_, { organizationId, membershipId }) => {
       queryClient.invalidateQueries(userKeys.getOrgUsers(organizationId));
+      queryClient.invalidateQueries(userKeys.getOrgMembership(organizationId, membershipId));
     }
   });
 };
@@ -213,6 +288,7 @@ export const useLogoutUser = (keepQueryClient?: boolean) => {
       localStorage.removeItem("PRIVATE_KEY");
       localStorage.removeItem("orgData.id");
       localStorage.removeItem("projectData.id");
+      sessionStorage.removeItem(SessionStorageKeys.CLI_TERMINAL_TOKEN);

       if (!keepQueryClient) {
         queryClient.clear();
@@ -316,14 +392,21 @@ export const useRevokeMySessions = () => {
   });
 };

-export const useUpdateMfaEnabled = () => {
+export const useUpdateUserMfa = () => {
   const queryClient = useQueryClient();

   return useMutation({
-    mutationFn: async ({ isMfaEnabled }: { isMfaEnabled: boolean }) => {
+    mutationFn: async ({
+      isMfaEnabled,
+      selectedMfaMethod
+    }: {
+      isMfaEnabled?: boolean;
+      selectedMfaMethod?: MfaMethod;
+    }) => {
       const {
         data: { user }
       } = await apiRequest.patch("/api/v2/users/me/mfa", {
-        isMfaEnabled
+        isMfaEnabled,
+        selectedMfaMethod
       });

       return user;
@@ -351,3 +434,60 @@ export const useGetMyOrganizationProjects = (orgId: string) => {
     enabled: true
   });
 };
+
+export const fetchMyPrivateKey = async () => {
+  const {
+    data: { privateKey }
+  } = await apiRequest.get<{ privateKey: string }>("/api/v1/user/private-key");
+
+  return privateKey;
+};
+
+export const useListUserGroupMemberships = (username: string) => {
+  return useQuery({
+    queryKey: userKeys.listUserGroupMemberships(username),
+    queryFn: async () => {
+      const { data } = await apiRequest.get<TGroupWithProjectMemberships[]>(
+        `/api/v1/user/me/${username}/groups`
+      );
+
+      return data;
+    }
+  });
+};
+
+export const useGetUserTotpRegistration = () => {
+  return useQuery({
+    queryKey: userKeys.totpRegistration,
+    queryFn: async () => {
+      const { data } = await apiRequest.post<{ otpUrl: string; recoveryCodes: string[] }>(
+        "/api/v1/user/me/totp/register"
+      );
+
+      return data;
+    }
+  });
+};
+
+export const useGetUserTotpConfiguration = () => {
+  return useQuery({
+    queryKey: userKeys.totpConfiguration,
+    queryFn: async () => {
+      try {
+        const { data } = await apiRequest.get<{ isVerified: boolean; recoveryCodes: string[] }>(
+          "/api/v1/user/me/totp"
+        );
+
+        return data;
+      } catch (error) {
+        if (error instanceof AxiosError && [404, 400].includes(error.response?.data?.statusCode)) {
+          return {
+            isVerified: false,
+            recoveryCodes: []
+          };
+        }
+        throw error;
+      }
+    }
+  });
+};
diff --git a/frontend/src/hooks/api/users/query-keys.tsx b/frontend/src/hooks/api/users/query-keys.tsx
new file mode 100644
index 0000000000..34d969b495
--- /dev/null
+++ b/frontend/src/hooks/api/users/query-keys.tsx
@@ -0,0 +1,23 @@
+export const userKeys = {
+  getUser: ["user"] as const,
+  getPrivateKey: ["user"] as const,
+  userAction: ["user-action"] as const,
+  userProjectFavorites: (orgId: string) => [{ orgId }, "user-project-favorites"] as const,
+  getOrgMembership: (orgId: string, orgMembershipId: string) =>
+    [{ orgId, orgMembershipId }, "org-membership"] as const,
+  allOrgMembershipProjectMemberships: (orgId: string) => [orgId, "all-user-memberships"] as const,
+  forOrgMembershipProjectMemberships: (orgId: string, orgMembershipId: string) =>
+    
[...userKeys.allOrgMembershipProjectMemberships(orgId), { orgMembershipId }] as const, + getOrgMembershipProjectMemberships: (orgId: string, username: string) => + [{ orgId, username }, "org-membership-project-memberships"] as const, + getOrgUsers: (orgId: string) => [{ orgId }, "user"], + myIp: ["ip"] as const, + myAPIKeys: ["api-keys"] as const, + myAPIKeysV2: ["api-keys-v2"] as const, + mySessions: ["sessions"] as const, + listUsers: ["user-list"] as const, + totpRegistration: ["totp-registration"], + totpConfiguration: ["totp-configuration"], + listUserGroupMemberships: (username: string) => [{ username }, "user-group-memberships"] as const, + myOrganizationProjects: (orgId: string) => [{ orgId }, "organization-projects"] as const +}; diff --git a/frontend/src/hooks/api/users/types.ts b/frontend/src/hooks/api/users/types.ts index 649af434cf..594287c9cd 100644 --- a/frontend/src/hooks/api/users/types.ts +++ b/frontend/src/hooks/api/users/types.ts @@ -1,4 +1,6 @@ +import { MfaMethod } from "../auth/types"; import { UserWsKeyPair } from "../keys/types"; +import { ProjectUserMembershipTemporaryMode } from "../workspace/types"; export enum AuthMethod { EMAIL = "email", @@ -9,7 +11,9 @@ export enum AuthMethod { AZURE_SAML = "azure-saml", JUMPCLOUD_SAML = "jumpcloud-saml", KEYCLOAK_SAML = "keycloak-saml", - LDAP = "ldap" + LDAP = "ldap", + OIDC = "oidc", + SAML = "saml" } export type User = { @@ -23,13 +27,15 @@ export type User = { authProvider?: AuthMethod; authMethods: AuthMethod[]; isMfaEnabled: boolean; + selectedMfaMethod?: MfaMethod; seenIps: string[]; id: string; }; export enum UserAliasType { LDAP = "ldap", - SAML = "saml" + SAML = "saml", + OIDC = "oidc" } export type UserEnc = { @@ -45,13 +51,16 @@ export type UserEnc = { export type OrgUser = { id: string; + metadata: { key: string; value: string; id: string }[]; user: { username: string; email?: string; + isEmailVerified: boolean; firstName: string; lastName: string; id: string; publicKey: string; + superAdmin: boolean; }; inviteEmail: string; organization: string; @@ -59,11 +68,11 @@ export type OrgUser = { status: "invited" | "accepted" | "verified" | "completed"; deniedPermissions: any[]; roleId: string; + isActive: boolean; }; export type TProjectMembership = { id: string; - role: string; createdAt: string; updatedAt: string; projectId: string; @@ -80,6 +89,13 @@ export type TWorkspaceUser = { id: string; publicKey: string; }; + createdAt: string; + projectId: string; + isGroupMember: boolean; + project: { + id: string; + name: string; + }; inviteEmail: string; organization: string; roles: ( @@ -103,7 +119,7 @@ export type TWorkspaceUser = { customRoleSlug: string; isTemporary: true; temporaryRange: string; - temporaryMode: string; + temporaryMode: ProjectUserMembershipTemporaryMode; temporaryAccessEndTime: string; temporaryAccessStartTime: string; } @@ -125,12 +141,16 @@ export type AddUserToWsDTOE2EE = { export type AddUserToWsDTONonE2EE = { projectId: string; usernames: string[]; + roleSlugs?: string[]; + orgId: string; }; -export type UpdateOrgUserRoleDTO = { +export type UpdateOrgMembershipDTO = { organizationId: string; membershipId: string; - role: string; + role?: string; + isActive?: boolean; + metadata?: { key: string; value: string }[]; }; export type DeletOrgMembershipDTO = { @@ -139,8 +159,12 @@ export type DeletOrgMembershipDTO = { }; export type AddUserToOrgDTO = { - inviteeEmail: string; + inviteeEmails: string[]; + organizationRoleSlug: string; organizationId: string; + + // We need the slug in order to 
invalidate the groups query. `slug` is only used for invalidation purposes.
+  projects?: { id: string; slug?: string; projectRoleSlug: string[] }[];
 };

 export type CreateAPIKeyRes = {
diff --git a/frontend/src/hooks/api/webhooks/types.ts b/frontend/src/hooks/api/webhooks/types.ts
index 447ed4fc56..86183bf1ba 100644
--- a/frontend/src/hooks/api/webhooks/types.ts
+++ b/frontend/src/hooks/api/webhooks/types.ts
@@ -1,5 +1,11 @@
+export enum WebhookType {
+  GENERAL = "general",
+  SLACK = "slack"
+}
+
 export type TWebhook = {
   id: string;
+  type: WebhookType;
   projectId: string;
   environment: {
     slug: string;
@@ -22,6 +28,7 @@ export type TCreateWebhookDto = {
   webhookUrl: string;
   webhookSecretKey?: string;
   secretPath: string;
+  type: WebhookType;
 };

 export type TUpdateWebhookDto = {
diff --git a/frontend/src/hooks/api/workflowIntegrations/index.ts b/frontend/src/hooks/api/workflowIntegrations/index.ts
new file mode 100644
index 0000000000..e4730bd7d9
--- /dev/null
+++ b/frontend/src/hooks/api/workflowIntegrations/index.ts
@@ -0,0 +1,13 @@
+export {
+  useDeleteSlackIntegration,
+  useUpdateProjectSlackConfig,
+  useUpdateSlackIntegration
+} from "./mutation";
+export {
+  fetchSlackInstallUrl,
+  fetchSlackReinstallUrl,
+  useGetSlackIntegrationById,
+  useGetSlackIntegrationChannels,
+  useGetSlackIntegrations,
+  useGetWorkflowIntegrations
+} from "./queries";
diff --git a/frontend/src/hooks/api/workflowIntegrations/mutation.tsx b/frontend/src/hooks/api/workflowIntegrations/mutation.tsx
new file mode 100644
index 0000000000..bf43325ff0
--- /dev/null
+++ b/frontend/src/hooks/api/workflowIntegrations/mutation.tsx
@@ -0,0 +1,60 @@
+import { useMutation, useQueryClient } from "@tanstack/react-query";
+
+import { apiRequest } from "@app/config/request";
+
+import { workspaceKeys } from "../workspace/query-keys";
+import { workflowIntegrationKeys } from "./queries";
+import {
+  TDeleteSlackIntegrationDTO,
+  TUpdateProjectSlackConfigDTO,
+  TUpdateSlackIntegrationDTO
+} from "./types";
+
+export const useUpdateSlackIntegration = () => {
+  const queryClient = useQueryClient();
+
+  return useMutation<{}, {}, TUpdateSlackIntegrationDTO>({
+    mutationFn: async (dto) => {
+      const { data } = await apiRequest.patch(`/api/v1/workflow-integrations/slack/${dto.id}`, dto);
+
+      return data;
+    },
+    onSuccess: (_, { orgId, id }) => {
+      queryClient.invalidateQueries(workflowIntegrationKeys.getSlackIntegration(id));
+      queryClient.invalidateQueries(workflowIntegrationKeys.getSlackIntegrations(orgId));
+    }
+  });
+};
+
+export const useDeleteSlackIntegration = () => {
+  const queryClient = useQueryClient();
+
+  return useMutation<{}, {}, TDeleteSlackIntegrationDTO>({
+    mutationFn: async (dto) => {
+      const { data } = await apiRequest.delete(`/api/v1/workflow-integrations/slack/${dto.id}`);
+
+      return data;
+    },
+    onSuccess: (_, { orgId, id }) => {
+      queryClient.invalidateQueries(workflowIntegrationKeys.getSlackIntegration(id));
+      queryClient.invalidateQueries(workflowIntegrationKeys.getIntegrations(orgId));
+    }
+  });
+};
+
+export const useUpdateProjectSlackConfig = () => {
+  const queryClient = useQueryClient();
+  return useMutation({
+    mutationFn: async (dto: TUpdateProjectSlackConfigDTO) => {
+      const { data } = await apiRequest.put(
+        `/api/v1/workspace/${dto.workspaceId}/slack-config`,
+        dto
+      );
+
+      return data;
+    },
+    onSuccess: (_, { workspaceId }) => {
+      queryClient.invalidateQueries(workspaceKeys.getWorkspaceSlackConfig(workspaceId));
+    }
+  });
+};
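Reviewer note (not part of the patch): a sketch of wiring a project to a Slack workspace with the mutation above, e.g. from an async submit handler; all ids and channel names are hypothetical.

const updateProjectSlackConfig = useUpdateProjectSlackConfig();

await updateProjectSlackConfig.mutateAsync({
  workspaceId: "ws-123",
  slackIntegrationId: "slack-abc",
  isAccessRequestNotificationEnabled: true,
  accessRequestChannels: "secrets-approvals",
  isSecretRequestNotificationEnabled: false,
  secretRequestChannels: ""
});
// onSuccess invalidates getWorkspaceSlackConfig, so the settings panel refetches.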
diff --git a/frontend/src/hooks/api/workflowIntegrations/queries.tsx b/frontend/src/hooks/api/workflowIntegrations/queries.tsx
new file mode 100644
index 0000000000..bcf092c73b
--- /dev/null
+++ b/frontend/src/hooks/api/workflowIntegrations/queries.tsx
@@ -0,0 +1,95 @@
+import { useQuery } from "@tanstack/react-query";
+
+import { apiRequest } from "@app/config/request";
+
+import { SlackIntegration, SlackIntegrationChannel, WorkflowIntegration } from "./types";
+
+export const workflowIntegrationKeys = {
+  getIntegrations: (orgId?: string) => [{ orgId }, "workflow-integrations"],
+  getSlackIntegrations: (orgId?: string) => [{ orgId }, "slack-workflow-integrations"],
+  getSlackIntegration: (id?: string) => [{ id }, "slack-workflow-integration"],
+  getSlackIntegrationChannels: (id?: string) => [{ id }, "slack-workflow-integration-channels"]
+};
+
+export const fetchSlackInstallUrl = async ({
+  slug,
+  description
+}: {
+  slug: string;
+  description?: string;
+}) => {
+  const { data } = await apiRequest.get("/api/v1/workflow-integrations/slack/install", {
+    params: {
+      slug,
+      description
+    }
+  });
+
+  return data;
+};
+
+export const fetchSlackReinstallUrl = async ({ id }: { id: string }) => {
+  const { data } = await apiRequest.get("/api/v1/workflow-integrations/slack/reinstall", {
+    params: {
+      id
+    }
+  });
+
+  return data;
+};
+
+export const fetchSlackIntegrations = async () => {
+  const { data } = await apiRequest.get<SlackIntegration[]>("/api/v1/workflow-integrations/slack");
+
+  return data;
+};
+
+export const fetchSlackIntegrationById = async (id?: string) => {
+  const { data } = await apiRequest.get<SlackIntegration>(
+    `/api/v1/workflow-integrations/slack/${id}`
+  );
+
+  return data;
+};
+
+export const fetchSlackIntegrationChannels = async (id?: string) => {
+  const { data } = await apiRequest.get<SlackIntegrationChannel[]>(
+    `/api/v1/workflow-integrations/slack/${id}/channels`
+  );
+
+  return data;
+};
+
+export const fetchWorkflowIntegrations = async () => {
+  const { data } = await apiRequest.get<WorkflowIntegration[]>("/api/v1/workflow-integrations");
+
+  return data;
+};
+
+export const useGetSlackIntegrations = (orgId?: string) =>
+  useQuery({
+    queryKey: workflowIntegrationKeys.getSlackIntegrations(orgId),
+    queryFn: () => fetchSlackIntegrations(),
+    enabled: Boolean(orgId)
+  });
+
+export const useGetSlackIntegrationById = (id?: string) =>
+  useQuery({
+    queryKey: workflowIntegrationKeys.getSlackIntegration(id),
+    queryFn: () => fetchSlackIntegrationById(id),
+    enabled: Boolean(id)
+  });
+
+export const useGetSlackIntegrationChannels = (id?: string) =>
+  useQuery({
+    queryKey: workflowIntegrationKeys.getSlackIntegrationChannels(id),
+    queryFn: () => fetchSlackIntegrationChannels(id),
+    enabled: Boolean(id)
+  });
+
+export const useGetWorkflowIntegrations = (id?: string) =>
+  useQuery({
+    queryKey: workflowIntegrationKeys.getIntegrations(id),
+    queryFn: () => fetchWorkflowIntegrations(),
+    enabled: Boolean(id)
+  });
diff --git a/frontend/src/hooks/api/workflowIntegrations/types.ts b/frontend/src/hooks/api/workflowIntegrations/types.ts
new file mode 100644
index 0000000000..e301937761
--- /dev/null
+++ b/frontend/src/hooks/api/workflowIntegrations/types.ts
@@ -0,0 +1,52 @@
+export enum WorkflowIntegrationPlatform {
+  SLACK = "slack"
+}
+
+export type WorkflowIntegration = {
+  id: string;
+  slug: string;
+  description: string;
+  integration: WorkflowIntegrationPlatform;
+};
+
+export type SlackIntegration = {
+  id: string;
+  slug: string;
+  description: string;
+  teamName: string;
+};
+
+export type SlackIntegrationChannel = {
+  id: string;
+  name: string;
+};
+
+export type TUpdateSlackIntegrationDTO = {
+  id: string;
+  
orgId: string; + slug?: string; + description?: string; +}; + +export type TDeleteSlackIntegrationDTO = { + id: string; + orgId: string; +}; + +export type ProjectSlackConfig = { + id: string; + slackIntegrationId: string; + isAccessRequestNotificationEnabled: boolean; + accessRequestChannels: string; + isSecretRequestNotificationEnabled: boolean; + secretRequestChannels: string; +}; + +export type TUpdateProjectSlackConfigDTO = { + workspaceId: string; + slackIntegrationId: string; + isAccessRequestNotificationEnabled: boolean; + accessRequestChannels: string; + isSecretRequestNotificationEnabled: boolean; + secretRequestChannels: string; +}; diff --git a/frontend/src/hooks/api/workspace/index.tsx b/frontend/src/hooks/api/workspace/index.tsx index b0cadac23f..8d44d8b746 100644 --- a/frontend/src/hooks/api/workspace/index.tsx +++ b/frontend/src/hooks/api/workspace/index.tsx @@ -1,6 +1,8 @@ export { useAddGroupToWorkspace, useDeleteGroupFromWorkspace, + useLeaveProject, + useMigrateProjectToV3, useUpdateGroupWorkspaceRole } from "./mutations"; export { @@ -16,17 +18,25 @@ export { useGetUserWorkspaces, useGetWorkspaceAuthorizations, useGetWorkspaceById, + useGetWorkspaceIdentityMembershipDetails, useGetWorkspaceIdentityMemberships, useGetWorkspaceIndexStatus, useGetWorkspaceIntegrations, useGetWorkspaceSecrets, + useGetWorkspaceSlackConfig, + useGetWorkspaceUserDetails, useGetWorkspaceUsers, + useListWorkspaceCas, + useListWorkspaceCertificates, + useListWorkspaceCertificateTemplates, useListWorkspaceGroups, + useListWorkspacePkiAlerts, + useListWorkspacePkiCollections, useNameWorkspaceSecrets, useRenameWorkspace, useToggleAutoCapitalization, useUpdateIdentityWorkspaceRole, useUpdateUserWorkspaceRole, useUpdateWsEnvironment, - useUpgradeProject -} from "./queries"; + useUpgradeProject} from "./queries"; +export { workspaceKeys } from "./query-keys"; diff --git a/frontend/src/hooks/api/workspace/mutations.tsx b/frontend/src/hooks/api/workspace/mutations.tsx index 11853157f8..ae88295910 100644 --- a/frontend/src/hooks/api/workspace/mutations.tsx +++ b/frontend/src/hooks/api/workspace/mutations.tsx @@ -2,30 +2,32 @@ import { useMutation, useQueryClient } from "@tanstack/react-query"; import { apiRequest } from "@app/config/request"; -import { workspaceKeys } from "./queries"; +import { userKeys } from "../users/query-keys"; +import { workspaceKeys } from "./query-keys"; import { TUpdateWorkspaceGroupRoleDTO } from "./types"; export const useAddGroupToWorkspace = () => { const queryClient = useQueryClient(); return useMutation({ mutationFn: async ({ - groupSlug, - projectSlug, + groupId, + projectId, role }: { - groupSlug: string; - projectSlug: string; + groupId: string; + projectId: string; role?: string; }) => { const { data: { groupMembership } - } = await apiRequest.post(`/api/v2/workspace/${projectSlug}/groups/${groupSlug}`, { + } = await apiRequest.post(`/api/v2/workspace/${projectId}/groups/${groupId}`, { role }); + return groupMembership; }, - onSuccess: (_, { projectSlug }) => { - queryClient.invalidateQueries(workspaceKeys.getWorkspaceGroupMemberships(projectSlug)); + onSuccess: (_, { projectId }) => { + queryClient.invalidateQueries(workspaceKeys.getWorkspaceGroupMemberships(projectId)); } }); }; @@ -33,17 +35,17 @@ export const useAddGroupToWorkspace = () => { export const useUpdateGroupWorkspaceRole = () => { const queryClient = useQueryClient(); return useMutation({ - mutationFn: async ({ groupSlug, projectSlug, roles }: TUpdateWorkspaceGroupRoleDTO) => { + mutationFn: async ({ 
groupId, projectId, roles }: TUpdateWorkspaceGroupRoleDTO) => { const { data: { groupMembership } - } = await apiRequest.patch(`/api/v2/workspace/${projectSlug}/groups/${groupSlug}`, { + } = await apiRequest.patch(`/api/v2/workspace/${projectId}/groups/${groupId}`, { roles }); return groupMembership; }, - onSuccess: (_, { projectSlug }) => { - queryClient.invalidateQueries(workspaceKeys.getWorkspaceGroupMemberships(projectSlug)); + onSuccess: (_, { projectId }) => { + queryClient.invalidateQueries(workspaceKeys.getWorkspaceGroupMemberships(projectId)); } }); }; @@ -51,14 +53,49 @@ export const useUpdateGroupWorkspaceRole = () => { export const useDeleteGroupFromWorkspace = () => { const queryClient = useQueryClient(); return useMutation({ - mutationFn: async ({ groupSlug, projectSlug }: { groupSlug: string; projectSlug: string }) => { + mutationFn: async ({ + groupId, + projectId + }: { + groupId: string; + projectId: string; + username?: string; + }) => { const { data: { groupMembership } - } = await apiRequest.delete(`/api/v2/workspace/${projectSlug}/groups/${groupSlug}`); + } = await apiRequest.delete(`/api/v2/workspace/${projectId}/groups/${groupId}`); return groupMembership; }, - onSuccess: (_, { projectSlug }) => { - queryClient.invalidateQueries(workspaceKeys.getWorkspaceGroupMemberships(projectSlug)); + onSuccess: (_, { projectId, username }) => { + queryClient.invalidateQueries(workspaceKeys.getWorkspaceGroupMemberships(projectId)); + + if (username) { + queryClient.invalidateQueries(userKeys.listUserGroupMemberships(username)); + } + } + }); +}; + +export const useLeaveProject = () => { + const queryClient = useQueryClient(); + return useMutation<{}, {}, { workspaceId: string }>({ + mutationFn: ({ workspaceId }) => { + return apiRequest.delete(`/api/v1/workspace/${workspaceId}/leave`); + }, + onSuccess: () => { + queryClient.invalidateQueries(workspaceKeys.getAllUserWorkspace); + } + }); +}; + +export const useMigrateProjectToV3 = () => { + const queryClient = useQueryClient(); + return useMutation<{}, {}, { workspaceId: string }>({ + mutationFn: ({ workspaceId }) => { + return apiRequest.post(`/api/v1/workspace/${workspaceId}/migrate-v3`); + }, + onSuccess: () => { + queryClient.invalidateQueries(workspaceKeys.getAllUserWorkspace); } }); }; diff --git a/frontend/src/hooks/api/workspace/queries.tsx b/frontend/src/hooks/api/workspace/queries.tsx index 71dbb8e01e..f721d74083 100644 --- a/frontend/src/hooks/api/workspace/queries.tsx +++ b/frontend/src/hooks/api/workspace/queries.tsx @@ -1,47 +1,43 @@ -import { useMutation, useQuery, useQueryClient } from "@tanstack/react-query"; +import { useMutation, useQuery, useQueryClient, UseQueryOptions } from "@tanstack/react-query"; import { apiRequest } from "@app/config/request"; +import { OrderByDirection } from "@app/hooks/api/generic/types"; +import { CaStatus } from "../ca/enums"; +import { TCertificateAuthority } from "../ca/types"; +import { TCertificate } from "../certificates/types"; +import { TCertificateTemplate } from "../certificateTemplates/types"; import { TGroupMembership } from "../groups/types"; -import { IdentityMembership } from "../identities/types"; +import { identitiesKeys } from "../identities/queries"; +import { IdentityMembership, TProjectIdentitiesList } from "../identities/types"; import { IntegrationAuth } from "../integrationAuth/types"; import { TIntegration } from "../integrations/types"; +import { TPkiAlert } from "../pkiAlerts/types"; +import { TPkiCollection } from "../pkiCollections/types"; import { 
EncryptedSecret } from "../secrets/types";
+import { userKeys } from "../users/query-keys";
 import { TWorkspaceUser } from "../users/types";
+import { ProjectSlackConfig } from "../workflowIntegrations/types";
+import { workspaceKeys } from "./query-keys";
 import {
   CreateEnvironmentDTO,
   CreateWorkspaceDTO,
   DeleteEnvironmentDTO,
   DeleteWorkspaceDTO,
   NameWorkspaceSecretsDTO,
+  ProjectIdentityOrderBy,
   RenameWorkspaceDTO,
   TGetUpgradeProjectStatusDTO,
+  TListProjectIdentitiesDTO,
   ToggleAutoCapitalizationDTO,
   TUpdateWorkspaceIdentityRoleDTO,
   TUpdateWorkspaceUserRoleDTO,
+  UpdateAuditLogsRetentionDTO,
   UpdateEnvironmentDTO,
+  UpdatePitVersionLimitDTO,
   Workspace
 } from "./types";

-export const workspaceKeys = {
-  getWorkspaceById: (workspaceId: string) => [{ workspaceId }, "workspace"] as const,
-  getWorkspaceSecrets: (workspaceId: string) => [{ workspaceId }, "workspace-secrets"] as const,
-  getWorkspaceIndexStatus: (workspaceId: string) =>
-    [{ workspaceId }, "workspace-index-status"] as const,
-  getProjectUpgradeStatus: (workspaceId: string) => [{ workspaceId }, "workspace-upgrade-status"],
-  getWorkspaceMemberships: (orgId: string) => [{ orgId }, "workspace-memberships"],
-  getWorkspaceAuthorization: (workspaceId: string) => [{ workspaceId }, "workspace-authorizations"],
-  getWorkspaceIntegrations: (workspaceId: string) => [{ workspaceId }, "workspace-integrations"],
-  getAllUserWorkspace: ["workspaces"] as const,
-  getWorkspaceAuditLogs: (workspaceId: string) =>
-    [{ workspaceId }, "workspace-audit-logs"] as const,
-  getWorkspaceUsers: (workspaceId: string) => [{ workspaceId }, "workspace-users"] as const,
-  getWorkspaceIdentityMemberships: (workspaceId: string) =>
-    [{ workspaceId }, "workspace-identity-memberships"] as const,
-  getWorkspaceGroupMemberships: (workspaceId: string) =>
-    [{ workspaceId }, "workspace-groups"] as const
-};
-
 const fetchWorkspaceById = async (workspaceId: string) => {
   const { data } = await apiRequest.get<{ workspace: Workspace }>(
     `/api/v1/workspace/${workspaceId}`
@@ -106,8 +102,12 @@
   });
 };

-const fetchUserWorkspaces = async () => {
-  const { data } = await apiRequest.get<{ workspaces: Workspace[] }>("/api/v1/workspace");
+const fetchUserWorkspaces = async (includeRoles?: boolean) => {
+  const { data } = await apiRequest.get<{ workspaces: Workspace[] }>("/api/v1/workspace", {
+    params: {
+      includeRoles
+    }
+  });

   return data.workspaces;
 };
@@ -127,16 +127,20 @@
   });
 };

-export const useGetWorkspaceById = (workspaceId: string) => {
+export const useGetWorkspaceById = (
+  workspaceId: string,
+  dto?: { refetchInterval?: number | false }
+) => {
   return useQuery({
     queryKey: workspaceKeys.getWorkspaceById(workspaceId),
     queryFn: () => fetchWorkspaceById(workspaceId),
-    enabled: true
+    enabled: Boolean(workspaceId),
+    refetchInterval: dto?.refetchInterval
   });
 };

-export const useGetUserWorkspaces = () =>
-  useQuery(workspaceKeys.getAllUserWorkspace, fetchUserWorkspaces);
+export const useGetUserWorkspaces = (includeRoles?: boolean) =>
+  useQuery(workspaceKeys.getAllUserWorkspace, () => fetchUserWorkspaces(includeRoles));

 const fetchUserWorkspaceMemberships = async (orgId: string) => {
   const { data } = await apiRequest.get<Record<string, Workspace[]>>(
@@ -203,18 +207,22 @@ export const useGetWorkspaceIntegrations = (workspaceId: string) =>
   });

 export const createWorkspace = ({
-  projectName
+  projectName,
+  kmsKeyId,
+  template
 }: CreateWorkspaceDTO): Promise<{ data: { project: Workspace } 
}> => { - return apiRequest.post("/api/v2/workspace", { projectName }); + return apiRequest.post("/api/v2/workspace", { projectName, kmsKeyId, template }); }; export const useCreateWorkspace = () => { const queryClient = useQueryClient(); return useMutation<{ data: { project: Workspace } }, {}, CreateWorkspaceDTO>({ - mutationFn: async ({ projectName }) => + mutationFn: async ({ projectName, kmsKeyId, template }) => createWorkspace({ - projectName + projectName, + kmsKeyId, + template }), onSuccess: () => { queryClient.invalidateQueries(workspaceKeys.getAllUserWorkspace); @@ -249,6 +257,36 @@ export const useToggleAutoCapitalization = () => { }); }; +export const useUpdateWorkspaceVersionLimit = () => { + const queryClient = useQueryClient(); + + return useMutation<{}, {}, UpdatePitVersionLimitDTO>({ + mutationFn: ({ projectSlug, pitVersionLimit }) => { + return apiRequest.put(`/api/v1/workspace/${projectSlug}/version-limit`, { + pitVersionLimit + }); + }, + onSuccess: () => { + queryClient.invalidateQueries(workspaceKeys.getAllUserWorkspace); + } + }); +}; + +export const useUpdateWorkspaceAuditLogsRetention = () => { + const queryClient = useQueryClient(); + + return useMutation<{}, {}, UpdateAuditLogsRetentionDTO>({ + mutationFn: ({ projectSlug, auditLogsRetentionDays }) => { + return apiRequest.put(`/api/v1/workspace/${projectSlug}/audit-logs-retention`, { + auditLogsRetentionDays + }); + }, + onSuccess: () => { + queryClient.invalidateQueries(workspaceKeys.getAllUserWorkspace); + } + }); +}; + export const useDeleteWorkspace = () => { const queryClient = useQueryClient(); @@ -258,6 +296,7 @@ export const useDeleteWorkspace = () => { }, onSuccess: () => { queryClient.invalidateQueries(workspaceKeys.getAllUserWorkspace); + queryClient.invalidateQueries(["org-admin-projects"]); } }); }; @@ -308,14 +347,19 @@ export const useDeleteWsEnvironment = () => { }); }; -export const useGetWorkspaceUsers = (workspaceId: string) => { +export const useGetWorkspaceUsers = (workspaceId: string, includeGroupMembers?: boolean) => { return useQuery({ queryKey: workspaceKeys.getWorkspaceUsers(workspaceId), queryFn: async () => { const { data: { users } } = await apiRequest.get<{ users: TWorkspaceUser[] }>( - `/api/v1/workspace/${workspaceId}/users` + `/api/v1/workspace/${workspaceId}/users`, + { + params: { + includeGroupMembers + } + } ); return users; }, @@ -323,6 +367,21 @@ export const useGetWorkspaceUsers = (workspaceId: string) => { }); }; +export const useGetWorkspaceUserDetails = (workspaceId: string, membershipId: string) => { + return useQuery({ + queryKey: workspaceKeys.getWorkspaceUserDetails(workspaceId, membershipId), + queryFn: async () => { + const { + data: { membership } + } = await apiRequest.get<{ membership: TWorkspaceUser }>( + `/api/v1/workspace/${workspaceId}/memberships/${membershipId}` + ); + return membership; + }, + enabled: Boolean(workspaceId) && Boolean(membershipId) + }); +}; + export const useDeleteUserFromWorkspace = () => { const queryClient = useQueryClient(); @@ -333,6 +392,7 @@ export const useDeleteUserFromWorkspace = () => { }: { workspaceId: string; usernames: string[]; + orgId: string; }) => { const { data: { deletedMembership } @@ -341,8 +401,9 @@ export const useDeleteUserFromWorkspace = () => { }); return deletedMembership; }, - onSuccess: (_, { workspaceId }) => { + onSuccess: (_, { orgId, workspaceId }) => { queryClient.invalidateQueries(workspaceKeys.getWorkspaceUsers(workspaceId)); + 
queryClient.invalidateQueries(userKeys.allOrgMembershipProjectMemberships(orgId));
     }
   });
 };
@@ -361,8 +422,11 @@ export const useUpdateUserWorkspaceRole = () => {
       );
       return membership;
     },
-    onSuccess: (_, { workspaceId }) => {
+    onSuccess: (_, { workspaceId, membershipId }) => {
       queryClient.invalidateQueries(workspaceKeys.getWorkspaceUsers(workspaceId));
+      queryClient.invalidateQueries(
+        workspaceKeys.getWorkspaceUserDetails(workspaceId, membershipId)
+      );
     }
   });
 };
@@ -390,8 +454,9 @@ export const useAddIdentityToWorkspace = () => {

       return identityMembership;
     },
-    onSuccess: (_, { workspaceId }) => {
+    onSuccess: (_, { identityId, workspaceId }) => {
       queryClient.invalidateQueries(workspaceKeys.getWorkspaceIdentityMemberships(workspaceId));
+      queryClient.invalidateQueries(identitiesKeys.getIdentityProjectMemberships(identityId));
     }
   });
 };
@@ -411,8 +476,12 @@ export const useUpdateIdentityWorkspaceRole = () => {

       return identityMembership;
     },
-    onSuccess: (_, { workspaceId }) => {
+    onSuccess: (_, { identityId, workspaceId }) => {
       queryClient.invalidateQueries(workspaceKeys.getWorkspaceIdentityMemberships(workspaceId));
+      queryClient.invalidateQueries(identitiesKeys.getIdentityProjectMemberships(identityId));
+      queryClient.invalidateQueries(
+        workspaceKeys.getWorkspaceIdentityMembershipDetails(workspaceId, identityId)
+      );
     }
   });
 };
@@ -434,38 +503,216 @@ export const useDeleteIdentityFromWorkspace = () => {
       );
       return identityMembership;
     },
-    onSuccess: (_, { workspaceId }) => {
+    onSuccess: (_, { identityId, workspaceId }) => {
       queryClient.invalidateQueries(workspaceKeys.getWorkspaceIdentityMemberships(workspaceId));
+      queryClient.invalidateQueries(identitiesKeys.getIdentityProjectMemberships(identityId));
     }
   });
 };

-export const useGetWorkspaceIdentityMemberships = (workspaceId: string) => {
+export const useGetWorkspaceIdentityMemberships = (
+  {
+    workspaceId,
+    offset = 0,
+    limit = 100,
+    orderBy = ProjectIdentityOrderBy.Name,
+    orderDirection = OrderByDirection.ASC,
+    search = ""
+  }: TListProjectIdentitiesDTO,
+  options?: Omit<
+    UseQueryOptions<
+      TProjectIdentitiesList,
+      unknown,
+      TProjectIdentitiesList,
+      ReturnType<typeof workspaceKeys.getWorkspaceIdentityMembershipsWithParams>
+    >,
+    "queryKey" | "queryFn"
+  >
+) => {
   return useQuery({
-    queryKey: workspaceKeys.getWorkspaceIdentityMemberships(workspaceId),
+    queryKey: workspaceKeys.getWorkspaceIdentityMembershipsWithParams({
+      workspaceId,
+      offset,
+      limit,
+      orderBy,
+      orderDirection,
+      search
+    }),
     queryFn: async () => {
-      const {
-        data: { identityMemberships }
-      } = await apiRequest.get<{ identityMemberships: IdentityMembership[] }>(
-        `/api/v2/workspace/${workspaceId}/identity-memberships`
+      const params = new URLSearchParams({
+        offset: String(offset),
+        limit: String(limit),
+        orderBy: String(orderBy),
+        orderDirection: String(orderDirection),
+        search: String(search)
+      });
+
+      const { data } = await apiRequest.get<TProjectIdentitiesList>(
+        `/api/v2/workspace/${workspaceId}/identity-memberships`,
+        { params }
       );

-      return identityMemberships;
+      return data;
     },
-    enabled: true
+    enabled: true,
+    ...options
   });
 };

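Reviewer note (not part of the patch): the hook above is designed to pair with the usePagination helper introduced later in this diff; a sketch, assuming the list response exposes identityMemberships and totalCount, with a hypothetical workspace id:

const { offset, limit, orderBy, orderDirection, debouncedSearch } = usePagination(
  ProjectIdentityOrderBy.Name
);

const { data } = useGetWorkspaceIdentityMemberships({
  workspaceId: "ws-123", // hypothetical id
  offset,
  limit,
  orderBy,
  orderDirection,
  search: debouncedSearch
});
// data?.identityMemberships feeds the table; data?.totalCount drives the pager.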
-export const useListWorkspaceGroups = (projectSlug: string) => {
+export const useGetWorkspaceIdentityMembershipDetails = (projectId: string, identityId: string) => {
   return useQuery({
-    queryKey: workspaceKeys.getWorkspaceGroupMemberships(projectSlug),
+    enabled: Boolean(projectId && identityId),
+    queryKey: workspaceKeys.getWorkspaceIdentityMembershipDetails(projectId, identityId),
+    queryFn: async () => {
+      const {
+        data: { identityMembership }
+      } = await apiRequest.get<{ identityMembership: IdentityMembership }>(
+        `/api/v2/workspace/${projectId}/identity-memberships/${identityId}`
+      );
+      return identityMembership;
+    }
+  });
+};
+
+export const useListWorkspaceGroups = (projectId: string) => {
+  return useQuery({
+    queryKey: workspaceKeys.getWorkspaceGroupMemberships(projectId),
     queryFn: async () => {
       const {
         data: { groupMemberships }
       } = await apiRequest.get<{ groupMemberships: TGroupMembership[] }>(
-        `/api/v2/workspace/${projectSlug}/groups`
+        `/api/v2/workspace/${projectId}/groups`
       );
       return groupMemberships;
     },
     enabled: true
   });
 };
+
+export const useListWorkspaceCas = ({
+  projectSlug,
+  status
+}: {
+  projectSlug: string;
+  status?: CaStatus;
+}) => {
+  return useQuery({
+    queryKey: workspaceKeys.specificWorkspaceCas({
+      projectSlug,
+      status
+    }),
+    queryFn: async () => {
+      const params = new URLSearchParams({
+        ...(status && { status })
+      });
+
+      const {
+        data: { cas }
+      } = await apiRequest.get<{ cas: TCertificateAuthority[] }>(
+        `/api/v2/workspace/${projectSlug}/cas`,
+        {
+          params
+        }
+      );
+      return cas;
+    },
+    enabled: Boolean(projectSlug)
+  });
+};
+
+export const useListWorkspaceCertificates = ({
+  projectSlug,
+  offset,
+  limit
+}: {
+  projectSlug: string;
+  offset: number;
+  limit: number;
+}) => {
+  return useQuery({
+    queryKey: workspaceKeys.specificWorkspaceCertificates({
+      slug: projectSlug,
+      offset,
+      limit
+    }),
+    queryFn: async () => {
+      const params = new URLSearchParams({
+        offset: String(offset),
+        limit: String(limit)
+      });
+
+      const {
+        data: { certificates, totalCount }
+      } = await apiRequest.get<{ certificates: TCertificate[]; totalCount: number }>(
+        `/api/v2/workspace/${projectSlug}/certificates`,
+        {
+          params
+        }
+      );
+
+      return { certificates, totalCount };
+    },
+    enabled: Boolean(projectSlug)
+  });
+};
+
+export const useListWorkspacePkiAlerts = ({ workspaceId }: { workspaceId: string }) => {
+  return useQuery({
+    queryKey: workspaceKeys.getWorkspacePkiAlerts(workspaceId),
+    queryFn: async () => {
+      const {
+        data: { alerts }
+      } = await apiRequest.get<{ alerts: TPkiAlert[] }>(
+        `/api/v2/workspace/${workspaceId}/pki-alerts`
+      );
+
+      return { alerts };
+    },
+    enabled: Boolean(workspaceId)
+  });
+};
+
+export const useListWorkspacePkiCollections = ({ workspaceId }: { workspaceId: string }) => {
+  return useQuery({
+    queryKey: workspaceKeys.getWorkspacePkiCollections(workspaceId),
+    queryFn: async () => {
+      const {
+        data: { collections }
+      } = await apiRequest.get<{ collections: TPkiCollection[] }>(
+        `/api/v2/workspace/${workspaceId}/pki-collections`
+      );

+      return { collections };
+    },
+    enabled: Boolean(workspaceId)
+  });
+};
+
+export const useListWorkspaceCertificateTemplates = ({ workspaceId }: { workspaceId: string }) => {
+  return useQuery({
+    queryKey: workspaceKeys.getWorkspaceCertificateTemplates(workspaceId),
+    queryFn: async () => {
+      const {
+        data: { certificateTemplates }
+      } = await apiRequest.get<{ certificateTemplates: TCertificateTemplate[] }>(
+        `/api/v2/workspace/${workspaceId}/certificate-templates`
+      );
+
+      return { certificateTemplates };
+    },
+    enabled: Boolean(workspaceId)
+  });
+};
+
+export const useGetWorkspaceSlackConfig = ({ workspaceId }: { workspaceId: string }) => {
+  return useQuery({
+    queryKey: workspaceKeys.getWorkspaceSlackConfig(workspaceId),
+    queryFn: async () => {
+      const { data } = await apiRequest.get<ProjectSlackConfig>(
+        `/api/v1/workspace/${workspaceId}/slack-config`
+      );
+
+      return data;
+    },
+    enabled: Boolean(workspaceId)
+  });
+};
diff --git a/frontend/src/hooks/api/workspace/query-keys.tsx 
b/frontend/src/hooks/api/workspace/query-keys.tsx new file mode 100644 index 0000000000..f5a02ec2b6 --- /dev/null +++ b/frontend/src/hooks/api/workspace/query-keys.tsx @@ -0,0 +1,56 @@ +import { TListProjectIdentitiesDTO } from "@app/hooks/api/workspace/types"; + +import type { CaStatus } from "../ca"; + +export const workspaceKeys = { + getWorkspaceById: (workspaceId: string) => [{ workspaceId }, "workspace"] as const, + getWorkspaceSecrets: (workspaceId: string) => [{ workspaceId }, "workspace-secrets"] as const, + getWorkspaceIndexStatus: (workspaceId: string) => + [{ workspaceId }, "workspace-index-status"] as const, + getProjectUpgradeStatus: (workspaceId: string) => [{ workspaceId }, "workspace-upgrade-status"], + getWorkspaceMemberships: (orgId: string) => [{ orgId }, "workspace-memberships"], + getWorkspaceAuthorization: (workspaceId: string) => [{ workspaceId }, "workspace-authorizations"], + getWorkspaceIntegrations: (workspaceId: string) => [{ workspaceId }, "workspace-integrations"], + getAllUserWorkspace: ["workspaces"] as const, + getWorkspaceAuditLogs: (workspaceId: string) => + [{ workspaceId }, "workspace-audit-logs"] as const, + getWorkspaceUsers: (workspaceId: string) => [{ workspaceId }, "workspace-users"] as const, + getWorkspaceUserDetails: (workspaceId: string, membershipId: string) => + [{ workspaceId, membershipId }, "workspace-user-details"] as const, + getWorkspaceIdentityMemberships: (workspaceId: string) => + [{ workspaceId }, "workspace-identity-memberships"] as const, + getWorkspaceIdentityMembershipDetails: (workspaceId: string, identityId: string) => + [{ workspaceId, identityId }, "workspace-identity-membership-details"] as const, + // allows invalidation using above key without knowing params + getWorkspaceIdentityMembershipsWithParams: ({ + workspaceId, + ...params + }: TListProjectIdentitiesDTO) => + [...workspaceKeys.getWorkspaceIdentityMemberships(workspaceId), params] as const, + getWorkspaceGroupMemberships: (workspaceId: string) => + [{ workspaceId }, "workspace-groups"] as const, + getWorkspaceCas: ({ projectSlug }: { projectSlug: string }) => + [{ projectSlug }, "workspace-cas"] as const, + specificWorkspaceCas: ({ projectSlug, status }: { projectSlug: string; status?: CaStatus }) => + [...workspaceKeys.getWorkspaceCas({ projectSlug }), { status }] as const, + allWorkspaceCertificates: () => ["workspace-certificates"] as const, + forWorkspaceCertificates: (slug: string) => + [...workspaceKeys.allWorkspaceCertificates(), slug] as const, + specificWorkspaceCertificates: ({ + slug, + offset, + limit + }: { + slug: string; + offset: number; + limit: number; + }) => [...workspaceKeys.forWorkspaceCertificates(slug), { offset, limit }] as const, + getWorkspacePkiAlerts: (workspaceId: string) => + [{ workspaceId }, "workspace-pki-alerts"] as const, + getWorkspacePkiCollections: (workspaceId: string) => + [{ workspaceId }, "workspace-pki-collections"] as const, + getWorkspaceCertificateTemplates: (workspaceId: string) => + [{ workspaceId }, "workspace-certificate-templates"] as const, + getWorkspaceSlackConfig: (workspaceId: string) => + [{ workspaceId }, "workspace-slack-config"] as const +}; diff --git a/frontend/src/hooks/api/workspace/types.ts b/frontend/src/hooks/api/workspace/types.ts index 8be9beed0d..b763eb8e3a 100644 --- a/frontend/src/hooks/api/workspace/types.ts +++ b/frontend/src/hooks/api/workspace/types.ts @@ -1,6 +1,11 @@ +import { OrderByDirection } from "@app/hooks/api/generic/types"; + +import { TProjectRole } from "../roles/types"; + 
diff --git a/frontend/src/hooks/api/workspace/types.ts b/frontend/src/hooks/api/workspace/types.ts
index 8be9beed0d..b763eb8e3a 100644
--- a/frontend/src/hooks/api/workspace/types.ts
+++ b/frontend/src/hooks/api/workspace/types.ts
@@ -1,6 +1,11 @@
+import { OrderByDirection } from "@app/hooks/api/generic/types";
+
+import { TProjectRole } from "../roles/types";
+
 export enum ProjectVersion {
   V1 = 1,
-  V2 = 2
+  V2 = 2,
+  V3 = 3
 }
 
 export enum ProjectUserMembershipTemporaryMode {
@@ -14,9 +19,15 @@ export type Workspace = {
   orgId: string;
   version: ProjectVersion;
   upgradeStatus: string | null;
+  updatedAt: string;
   autoCapitalization: boolean;
   environments: WorkspaceEnv[];
+  pitVersionLimit: number;
+  auditLogsRetentionDays: number;
   slug: string;
+  createdAt: string;
+
+  roles?: TProjectRole[];
 };
 
 export type WorkspaceEnv = {
@@ -45,9 +56,13 @@ export type TGetUpgradeProjectStatusDTO = {
 // mutation dto
 export type CreateWorkspaceDTO = {
   projectName: string;
+  kmsKeyId?: string;
+  template?: string;
 };
 
 export type RenameWorkspaceDTO = { workspaceID: string; newWorkspaceName: string };
+export type UpdatePitVersionLimitDTO = { projectSlug: string; pitVersionLimit: number };
+export type UpdateAuditLogsRetentionDTO = { projectSlug: string; auditLogsRetentionDays: number };
 export type ToggleAutoCapitalizationDTO = { workspaceID: string; state: boolean };
 
 export type DeleteWorkspaceDTO = { workspaceID: string };
@@ -113,8 +128,8 @@ export type TUpdateWorkspaceIdentityRoleDTO = {
 };
 
 export type TUpdateWorkspaceGroupRoleDTO = {
-  groupSlug: string;
-  projectSlug: string;
+  groupId: string;
+  projectId: string;
   roles: (
     | {
         role: string;
@@ -128,4 +143,17 @@ export type TUpdateWorkspaceGroupRoleDTO = {
         temporaryAccessStartTime: string;
       }
   )[];
-};
\ No newline at end of file
+};
+
+export type TListProjectIdentitiesDTO = {
+  workspaceId: string;
+  offset?: number;
+  limit?: number;
+  orderBy?: ProjectIdentityOrderBy;
+  orderDirection?: OrderByDirection;
+  search?: string;
+};
+
+export enum ProjectIdentityOrderBy {
+  Name = "name"
+}
diff --git a/frontend/src/hooks/index.ts b/frontend/src/hooks/index.ts
index 578e9ffc6a..44d9ef5366 100644
--- a/frontend/src/hooks/index.ts
+++ b/frontend/src/hooks/index.ts
@@ -1,7 +1,9 @@
 export { useDebounce } from "./useDebounce";
 export { useLeaveConfirm } from "./useLeaveConfirm";
+export { usePagination } from "./usePagination";
 export { usePersistentState } from "./usePersistentState";
 export { usePopUp } from "./usePopUp";
+export { useResetPageHelper } from "./useResetPageHelper";
 export { useSyntaxHighlight } from "./useSyntaxHighlight";
 export { useTimedReset } from "./useTimedReset";
 export { useToggle } from "./useToggle";
diff --git a/frontend/src/hooks/useDebounce.tsx b/frontend/src/hooks/useDebounce.tsx
index 3187632106..8537ee3480 100644
--- a/frontend/src/hooks/useDebounce.tsx
+++ b/frontend/src/hooks/useDebounce.tsx
@@ -1,7 +1,10 @@
-import { useEffect, useState } from "react";
+import { Dispatch, SetStateAction, useEffect, useState } from "react";
 
 // Ref: https://usehooks.com/useDebounce/
-export const useDebounce = <T,>(value: T, delay = 500): T => {
+export const useDebounce = <T,>(
+  value: T,
+  delay = 500
+): [T, Dispatch<SetStateAction<T>>] => {
   // State and setters for debounced value
   const [debouncedValue, setDebouncedValue] = useState<T>(value);
 
@@ -22,5 +25,5 @@ export const useDebounce = <T,>(value: T, delay = 500): T => {
     [value, delay] // Only re-call effect if value or delay changes
   );
 
-  return debouncedValue;
+  return [debouncedValue, setDebouncedValue];
 };
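Worth flagging for reviewers: the `useDebounce` change is breaking for every existing caller, which previously consumed the value directly. The new setter exists so a caller can reset the debounced copy without waiting out the delay. A sketch of the updated call-site shape (the wrapper hook name is illustrative):

```tsx
import { useState } from "react";

import { useDebounce } from "@app/hooks";

// Illustrative fragment: the tuple return is the breaking change to absorb.
const useSearchState = () => {
  const [search, setSearch] = useState("");
  const [debouncedSearch, setDebouncedSearch] = useDebounce(search);

  const clearSearch = () => {
    setSearch("");
    // Reset the debounced copy immediately instead of waiting out the
    // default 500 ms delay, so dependent queries refetch right away.
    setDebouncedSearch("");
  };

  return { search, setSearch, debouncedSearch, clearSearch };
};
```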
diff --git a/frontend/src/hooks/usePagination.tsx b/frontend/src/hooks/usePagination.tsx
new file mode 100644
index 0000000000..3d3002b4cf
--- /dev/null
+++ b/frontend/src/hooks/usePagination.tsx
@@ -0,0 +1,31 @@
+import { useState } from "react";
+
+import { OrderByDirection } from "@app/hooks/api/generic/types";
+import { useDebounce } from "@app/hooks/useDebounce";
+
+export const usePagination = <T extends string>(initialOrderBy: T) => {
+  const [page, setPage] = useState(1);
+  const [perPage, setPerPage] = useState(100);
+  const [orderDirection, setOrderDirection] = useState(OrderByDirection.ASC);
+  const [orderBy, setOrderBy] = useState<T>(initialOrderBy);
+  const [search, setSearch] = useState("");
+  const [debouncedSearch] = useDebounce(search);
+
+  const offset = (page - 1) * perPage;
+
+  return {
+    offset,
+    limit: perPage,
+    page,
+    setPage,
+    perPage,
+    setPerPage,
+    orderDirection,
+    setOrderDirection,
+    debouncedSearch,
+    search,
+    setSearch,
+    orderBy,
+    setOrderBy
+  };
+};
diff --git a/frontend/src/hooks/usePopUp.tsx b/frontend/src/hooks/usePopUp.tsx
index e9d8257e34..28780db9e8 100644
--- a/frontend/src/hooks/usePopUp.tsx
+++ b/frontend/src/hooks/usePopUp.tsx
@@ -13,7 +13,7 @@ interface UsePopUpProps {
 export type UsePopUpState<T extends Readonly<string[]> | UsePopUpProps[]> = {
   [P in T extends UsePopUpProps[] ? T[number]["name"] : T[number]]: {
     isOpen: boolean;
-    data?: unknown;
+    data?: any;
   };
 };
diff --git a/frontend/src/hooks/useResetPageHelper.ts b/frontend/src/hooks/useResetPageHelper.ts
new file mode 100644
index 0000000000..12478fd54f
--- /dev/null
+++ b/frontend/src/hooks/useResetPageHelper.ts
@@ -0,0 +1,16 @@
+import { Dispatch, SetStateAction, useEffect } from "react";
+
+export const useResetPageHelper = ({
+  totalCount,
+  offset,
+  setPage
+}: {
+  totalCount: number;
+  offset: number;
+  setPage: Dispatch<SetStateAction<number>>;
+}) => {
+  useEffect(() => {
+    // reset page if no longer valid
+    if (totalCount <= offset) setPage(1);
+  }, [totalCount]);
+};
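`usePagination` and `useResetPageHelper` are meant to be composed: the first owns page/perPage state and derives the offset, the second snaps back to page 1 whenever a shrinking result set strands the current offset (`totalCount <= offset`). A sketch wiring them to the certificates query from earlier in this diff; the wrapper name and the `"commonName"` order-by key are illustrative:

```tsx
import { usePagination, useResetPageHelper } from "@app/hooks";
import { useListWorkspaceCertificates } from "@app/hooks/api";

// Illustrative wrapper: pagination state drives the query, and the helper
// resets the page when a delete or filter leaves the offset past the end.
export const useCertificatePage = (projectSlug: string) => {
  const { offset, limit, page, setPage, perPage, setPerPage } = usePagination("commonName");

  const { data } = useListWorkspaceCertificates({ projectSlug, offset, limit });

  useResetPageHelper({ totalCount: data?.totalCount ?? 0, offset, setPage });

  return { certificates: data?.certificates ?? [], page, setPage, perPage, setPerPage };
};
```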
diff --git a/frontend/src/hooks/useToggle.tsx b/frontend/src/hooks/useToggle.tsx
index 83d73bcb18..ecc243ade0 100644
--- a/frontend/src/hooks/useToggle.tsx
+++ b/frontend/src/hooks/useToggle.tsx
@@ -8,6 +8,7 @@ type UseToggleReturn = [
     on: VoidFn;
     off: VoidFn;
     toggle: VoidFn;
+    timedToggle: (timeout?: number) => void;
   }
 ];
 
@@ -26,5 +27,13 @@ export const useToggle = (initialState = false): UseToggleReturn => {
     setValue((prev) => (typeof isOpen === "boolean" ? isOpen : !prev));
   }, []);
 
-  return [value, { on, off, toggle }];
+  const timedToggle = useCallback((timeout = 2000) => {
+    setValue((prev) => !prev);
+
+    setTimeout(() => {
+      setValue(false);
+    }, timeout);
+  }, []);
+
+  return [value, { on, off, toggle, timedToggle }];
 };
diff --git a/frontend/src/hooks/utils/index.ts b/frontend/src/hooks/utils/index.ts
new file mode 100644
index 0000000000..db6645c41c
--- /dev/null
+++ b/frontend/src/hooks/utils/index.ts
@@ -0,0 +1 @@
+export * from "./secrets-overview";
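`timedToggle` covers the flash-feedback case, e.g. a copy button flipping to a checkmark and back. A sketch (the hook name is illustrative):

```tsx
import { useToggle } from "@app/hooks";

// Illustrative copy-button state: show "copied" briefly, then auto-reset.
const useCopyFeedback = () => {
  const [isCopied, { timedToggle }] = useToggle();

  const copy = async (text: string) => {
    await navigator.clipboard.writeText(text);
    timedToggle(); // flips to true; setValue(false) fires after 2000 ms
  };

  return { isCopied, copy };
};
```

One design note: as written, `timedToggle` always resets to `false` and never cancels earlier timers, so overlapping calls race. That is acceptable for short-lived UI feedback, but callers should not rely on it for state that must stay set.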
0; + }, + [secrets] + ); + + const getSecretByKey = useCallback( + (env: string, key: string) => { + const sec = secrets?.find((s) => s.env === env && s.key === key); + return sec; + }, + [secrets] + ); + + return { secKeys, getSecretByKey, getEnvSecretKeyCount }; +}; diff --git a/frontend/src/layouts/AdminLayout/AdminLayout.tsx b/frontend/src/layouts/AdminLayout/AdminLayout.tsx index f806b6e791..a8320a5253 100644 --- a/frontend/src/layouts/AdminLayout/AdminLayout.tsx +++ b/frontend/src/layouts/AdminLayout/AdminLayout.tsx @@ -157,7 +157,7 @@ export const AdminLayout = ({ children }: LayoutProps) => { {user?.superAdmin && ( - Admin Panel + Server Admin Console )} diff --git a/frontend/src/layouts/AppLayout/AppLayout.tsx b/frontend/src/layouts/AppLayout/AppLayout.tsx index 1b5df00379..d6164cba68 100644 --- a/frontend/src/layouts/AppLayout/AppLayout.tsx +++ b/frontend/src/layouts/AppLayout/AppLayout.tsx @@ -5,13 +5,13 @@ /* eslint-disable no-var */ /* eslint-disable func-names */ -import { useEffect, useMemo } from "react"; +import { useEffect, useMemo, useState } from "react"; import { Controller, useForm } from "react-hook-form"; import { useTranslation } from "react-i18next"; -import Image from "next/image"; import Link from "next/link"; import { useRouter } from "next/router"; import { faGithub, faSlack } from "@fortawesome/free-brands-svg-icons"; +import { faStar } from "@fortawesome/free-regular-svg-icons"; import { faAngleDown, faArrowLeft, @@ -21,9 +21,11 @@ import { faEnvelope, faInfinity, faInfo, + faInfoCircle, faMobile, faPlus, - faQuestion + faQuestion, + faStar as faSolidStar } from "@fortawesome/free-solid-svg-icons"; import { FontAwesomeIcon } from "@fortawesome/react-fontawesome"; import { yupResolver } from "@hookform/resolvers/yup"; @@ -34,7 +36,12 @@ import * as yup from "yup"; import { createNotification } from "@app/components/notifications"; import { OrgPermissionCan } from "@app/components/permissions"; import { tempLocalStorage } from "@app/components/utilities/checks/tempLocalStorage"; +import SecurityClient from "@app/components/utilities/SecurityClient"; import { + Accordion, + AccordionContent, + AccordionItem, + AccordionTrigger, Button, Checkbox, DropdownMenu, @@ -50,31 +57,41 @@ import { SelectItem, UpgradePlanModal } from "@app/components/v2"; -import { UpgradeOverlay } from "@app/components/v2/UpgradeOverlay"; import { OrgPermissionActions, OrgPermissionSubjects, useOrganization, + useOrgPermission, useSubscription, useUser, useWorkspace } from "@app/context"; -import { usePopUp } from "@app/hooks"; +import { usePopUp, useToggle } from "@app/hooks"; import { fetchOrgUsers, useAddUserToWsNonE2EE, useCreateWorkspace, useGetAccessRequestsCount, + useGetExternalKmsList, useGetOrgTrialUrl, useGetSecretApprovalRequestCount, - useGetUserAction, useLogoutUser, - useRegisterUserAction, useSelectOrganization } from "@app/hooks/api"; +import { MfaMethod } from "@app/hooks/api/auth/types"; +import { INTERNAL_KMS_KEY_ID } from "@app/hooks/api/kms/types"; +import { InfisicalProjectTemplate, useListProjectTemplates } from "@app/hooks/api/projectTemplates"; +import { Workspace } from "@app/hooks/api/types"; +import { useUpdateUserProjectFavorites } from "@app/hooks/api/users/mutation"; +import { useGetUserProjectFavorites } from "@app/hooks/api/users/queries"; +import { AuthMethod } from "@app/hooks/api/users/types"; +import { InsecureConnectionBanner } from "@app/layouts/AppLayout/components/InsecureConnectionBanner"; import { navigateUserToOrg } from 
"@app/views/Login/Login.utils"; +import { Mfa } from "@app/views/Login/Mfa"; import { CreateOrgModal } from "@app/views/Org/components"; +import { WishForm } from "./components/WishForm/WishForm"; + interface LayoutProps { children: React.ReactNode; } @@ -109,7 +126,9 @@ const formSchema = yup.object({ .label("Project Name") .trim() .max(64, "Too long, maximum length is 64 characters"), - addMembers: yup.bool().required().label("Add Members") + addMembers: yup.bool().required().label("Add Members"), + kmsKeyId: yup.string().label("KMS Key ID"), + template: yup.string().label("Project Template Name") }); type TAddProjectFormData = yup.InferType; @@ -122,14 +141,34 @@ export const AppLayout = ({ children }: LayoutProps) => { const { workspaces, currentWorkspace } = useWorkspace(); const { orgs, currentOrg } = useOrganization(); + const { data: projectFavorites } = useGetUserProjectFavorites(currentOrg?.id!); + const { mutateAsync: updateUserProjectFavorites } = useUpdateUserProjectFavorites(); + const [shouldShowMfa, toggleShowMfa] = useToggle(false); + const [requiredMfaMethod, setRequiredMfaMethod] = useState(MfaMethod.EMAIL); + const [mfaSuccessCallback, setMfaSuccessCallback] = useState<() => void>(() => {}); + + const workspacesWithFaveProp = useMemo( + () => + workspaces + .map((w): Workspace & { isFavorite: boolean } => ({ + ...w, + isFavorite: Boolean(projectFavorites?.includes(w.id)) + })) + .sort((a, b) => Number(b.isFavorite) - Number(a.isFavorite)), + [workspaces, projectFavorites] + ); + const { user } = useUser(); const { subscription } = useSubscription(); const workspaceId = currentWorkspace?.id || ""; const projectSlug = currentWorkspace?.slug || ""; - const { data: updateClosed } = useGetUserAction("december_update_closed"); const { data: secretApprovalReqCount } = useGetSecretApprovalRequestCount({ workspaceId }); const { data: accessApprovalRequestCount } = useGetAccessRequestsCount({ projectSlug }); + const { permission } = useOrgPermission(); + const { data: externalKmsList } = useGetExternalKmsList(currentOrg?.id!, { + enabled: permission.can(OrgPermissionActions.Read, OrgPermissionSubjects.Kms) + }); const pendingRequestsCount = useMemo(() => { return (secretApprovalReqCount?.open || 0) + (accessApprovalRequestCount?.pendingCount || 0); @@ -155,18 +194,16 @@ export const AppLayout = ({ children }: LayoutProps) => { reset, handleSubmit } = useForm({ - resolver: yupResolver(formSchema) + resolver: yupResolver(formSchema), + defaultValues: { + kmsKeyId: INTERNAL_KMS_KEY_ID + } }); const { t } = useTranslation(); - const registerUserAction = useRegisterUserAction(); const { mutateAsync: selectOrganization } = useSelectOrganization(); - const closeUpdate = async () => { - await registerUserAction.mutateAsync("december_update_closed"); - }; - const logout = useLogoutUser(); const logOutUser = async () => { try { @@ -179,10 +216,20 @@ export const AppLayout = ({ children }: LayoutProps) => { }; const changeOrg = async (orgId: string) => { - await selectOrganization({ + const { token, isMfaEnabled, mfaMethod } = await selectOrganization({ organizationId: orgId }); + if (isMfaEnabled) { + SecurityClient.setMfaToken(token); + if (mfaMethod) { + setRequiredMfaMethod(mfaMethod); + } + toggleShowMfa.on(); + setMfaSuccessCallback(() => () => changeOrg(orgId)); + return; + } + await navigateUserToOrg(router, orgId); }; @@ -203,6 +250,7 @@ export const AppLayout = ({ children }: LayoutProps) => { (!orgs?.map((org) => org.id)?.includes(router.query.id as string) && 
         !router.asPath.includes("project") &&
         !router.asPath.includes("personal") &&
+        !router.asPath.includes("secret-scanning") &&
         !router.asPath.includes("integration"))
     ) {
       router.push(`/org/${currentOrg?.id}/overview`);
@@ -233,7 +281,16 @@ export const AppLayout = ({ children }: LayoutProps) => {
     putUserInOrg();
   }, [router.query.id]);
 
-  const onCreateProject = async ({ name, addMembers }: TAddProjectFormData) => {
+  const canReadProjectTemplates = permission.can(
+    OrgPermissionActions.Read,
+    OrgPermissionSubjects.ProjectTemplates
+  );
+
+  const { data: projectTemplates = [] } = useListProjectTemplates({
+    enabled: Boolean(canReadProjectTemplates && subscription?.projectTemplates)
+  });
+
+  const onCreateProject = async ({ name, addMembers, kmsKeyId, template }: TAddProjectFormData) => {
     // type check
     if (!currentOrg) return;
     if (!user) return;
@@ -243,16 +300,21 @@ export const AppLayout = ({ children }: LayoutProps) => {
           project: { id: newProjectId }
         }
       } = await createWs.mutateAsync({
-        projectName: name
+        projectName: name,
+        kmsKeyId: kmsKeyId !== INTERNAL_KMS_KEY_ID ? kmsKeyId : undefined,
+        template
       });
 
       if (addMembers) {
         const orgUsers = await fetchOrgUsers(currentOrg.id);
         await addUsersToProject.mutateAsync({
           usernames: orgUsers
-            .map((member) => member.user.username)
-            .filter((username) => username !== user.username),
-          projectId: newProjectId
+            .filter(
+              (member) => member.user.username !== user.username && member.status === "accepted"
+            )
+            .map((member) => member.user.username),
+          projectId: newProjectId,
+          orgId: currentOrg.id
         });
       }
 
@@ -262,23 +324,68 @@
       // eslint-disable-next-line no-promise-executor-return -- We do this because the function returns too fast, which sometimes causes an error when the user is redirected.
       await new Promise((resolve) => setTimeout(resolve, 2_000));
 
-      createNotification({ text: "Workspace created", type: "success" });
+      createNotification({ text: "Project created", type: "success" });
 
       handlePopUpClose("addNewWs");
       router.push(`/project/${newProjectId}/secrets/overview`);
     } catch (err) {
       console.error(err);
-      createNotification({ text: "Failed to create workspace", type: "error" });
+      createNotification({ text: "Failed to create project", type: "error" });
     }
   };
 
+  const addProjectToFavorites = async (projectId: string) => {
+    try {
+      if (currentOrg?.id) {
+        await updateUserProjectFavorites({
+          orgId: currentOrg?.id,
+          projectFavorites: [...(projectFavorites || []), projectId]
+        });
+      }
+    } catch (err) {
+      createNotification({
+        text: "Failed to add project to favorites.",
+        type: "error"
+      });
+    }
+  };
+
+  const removeProjectFromFavorites = async (projectId: string) => {
+    try {
+      if (currentOrg?.id) {
+        await updateUserProjectFavorites({
+          orgId: currentOrg?.id,
+          projectFavorites: [...(projectFavorites || []).filter((entry) => entry !== projectId)]
+        });
+      }
+    } catch (err) {
+      createNotification({
+        text: "Failed to remove project from favorites.",
+        type: "error"
+      });
+    }
+  };
+
+  if (shouldShowMfa) {
+    return (
+      <Mfa
+        email={user.email as string}
+        method={requiredMfaMethod}
+        successCallback={mfaSuccessCallback}
+        closeMfa={() => toggleShowMfa.off()}
+      />
+    );
+  }
+
   return (
     <>
+      {!window.isSecureContext && <InsecureConnectionBanner />}
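One subtlety in `changeOrg` above deserves a note: `setMfaSuccessCallback(() => () => changeOrg(orgId))` is double-wrapped on purpose. React treats a function passed to a state setter as an updater, so storing a callback in state requires wrapping it in another function. A distilled sketch (hook and names are illustrative):

```tsx
import { useState } from "react";

const useStoredCallback = () => {
  const [onMfaSuccess, setOnMfaSuccess] = useState<() => void>(() => {});

  const arm = (retry: () => void) => {
    // setOnMfaSuccess(retry) would be wrong: React would invoke `retry`
    // as a state *updater* and store its return value instead.
    setOnMfaSuccess(() => retry);
  };

  return { onMfaSuccess, arm };
};
```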