Mirror of https://github.com/Infisical/infisical.git (synced 2026-01-06 22:23:53 -05:00)
Merge branch 'main' into ENG-4257
150 .env.dev.example (Normal file)
@@ -0,0 +1,150 @@
# Keys
# Required key for platform encryption/decryption ops
# THIS IS A SAMPLE ENCRYPTION KEY AND SHOULD NEVER BE USED FOR PRODUCTION
ENCRYPTION_KEY=VVHnGZ0w98WLgISK4XSJcagezuG6EWRFTk48KE4Y5Mw=

# JWT
# Required secrets to sign JWT tokens
# THIS IS A SAMPLE AUTH_SECRET KEY AND SHOULD NEVER BE USED FOR PRODUCTION
AUTH_SECRET=5lrMXKKWCVocS/uerPsl7V+TX/aaUaI7iDkgl3tSmLE=

# Postgres creds
POSTGRES_PASSWORD=infisical
POSTGRES_USER=infisical
POSTGRES_DB=infisical

# Required
DB_CONNECTION_URI=postgres://${POSTGRES_USER}:${POSTGRES_PASSWORD}@db:5432/${POSTGRES_DB}

# Redis
REDIS_URL=redis://redis:6379

# Website URL
# Required
SITE_URL=http://localhost:8080

# Mail/SMTP
SMTP_HOST=
SMTP_PORT=
SMTP_FROM_ADDRESS=
SMTP_FROM_NAME=
SMTP_USERNAME=
SMTP_PASSWORD=

# Integration
# Optional only if integration is used
CLIENT_ID_HEROKU=
CLIENT_ID_VERCEL=
CLIENT_ID_NETLIFY=
CLIENT_ID_GITHUB=
CLIENT_ID_GITHUB_APP=
CLIENT_SLUG_GITHUB_APP=
CLIENT_ID_GITLAB=
CLIENT_ID_BITBUCKET=
CLIENT_SECRET_HEROKU=
CLIENT_SECRET_VERCEL=
CLIENT_SECRET_NETLIFY=
CLIENT_SECRET_GITHUB=
CLIENT_SECRET_GITHUB_APP=
CLIENT_SECRET_GITLAB=
CLIENT_SECRET_BITBUCKET=
CLIENT_SLUG_VERCEL=

CLIENT_PRIVATE_KEY_GITHUB_APP=
CLIENT_APP_ID_GITHUB_APP=

# Sentry (optional) for monitoring errors
SENTRY_DSN=

# Infisical Cloud-specific configs
# Ignore - Not applicable for self-hosted version
POSTHOG_HOST=
POSTHOG_PROJECT_API_KEY=

# SSO-specific variables
CLIENT_ID_GOOGLE_LOGIN=
CLIENT_SECRET_GOOGLE_LOGIN=

CLIENT_ID_GITHUB_LOGIN=
CLIENT_SECRET_GITHUB_LOGIN=

CLIENT_ID_GITLAB_LOGIN=
CLIENT_SECRET_GITLAB_LOGIN=

CAPTCHA_SECRET=

NEXT_PUBLIC_CAPTCHA_SITE_KEY=

OTEL_TELEMETRY_COLLECTION_ENABLED=false
OTEL_EXPORT_TYPE=prometheus
OTEL_EXPORT_OTLP_ENDPOINT=
OTEL_OTLP_PUSH_INTERVAL=

OTEL_COLLECTOR_BASIC_AUTH_USERNAME=
OTEL_COLLECTOR_BASIC_AUTH_PASSWORD=

PLAIN_API_KEY=
PLAIN_WISH_LABEL_IDS=

SSL_CLIENT_CERTIFICATE_HEADER_KEY=

ENABLE_MSSQL_SECRET_ROTATION_ENCRYPT=true

# App Connections

# aws assume-role connection
INF_APP_CONNECTION_AWS_ACCESS_KEY_ID=
INF_APP_CONNECTION_AWS_SECRET_ACCESS_KEY=

# github oauth connection
INF_APP_CONNECTION_GITHUB_OAUTH_CLIENT_ID=
INF_APP_CONNECTION_GITHUB_OAUTH_CLIENT_SECRET=

#github app connection
INF_APP_CONNECTION_GITHUB_APP_CLIENT_ID=
INF_APP_CONNECTION_GITHUB_APP_CLIENT_SECRET=
INF_APP_CONNECTION_GITHUB_APP_PRIVATE_KEY=
INF_APP_CONNECTION_GITHUB_APP_SLUG=
INF_APP_CONNECTION_GITHUB_APP_ID=

#gitlab app connection
INF_APP_CONNECTION_GITLAB_OAUTH_CLIENT_ID=
INF_APP_CONNECTION_GITLAB_OAUTH_CLIENT_SECRET=

#github radar app connection
INF_APP_CONNECTION_GITHUB_RADAR_APP_CLIENT_ID=
INF_APP_CONNECTION_GITHUB_RADAR_APP_CLIENT_SECRET=
INF_APP_CONNECTION_GITHUB_RADAR_APP_PRIVATE_KEY=
INF_APP_CONNECTION_GITHUB_RADAR_APP_SLUG=
INF_APP_CONNECTION_GITHUB_RADAR_APP_ID=
INF_APP_CONNECTION_GITHUB_RADAR_APP_WEBHOOK_SECRET=

#gcp app connection
INF_APP_CONNECTION_GCP_SERVICE_ACCOUNT_CREDENTIAL=

# azure app connections
INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_ID=
INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_SECRET=

INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_ID=
INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_SECRET=

INF_APP_CONNECTION_AZURE_CLIENT_SECRETS_CLIENT_ID=
INF_APP_CONNECTION_AZURE_CLIENT_SECRETS_CLIENT_SECRET=

INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_ID=
INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_SECRET=

# heroku app connection
INF_APP_CONNECTION_HEROKU_OAUTH_CLIENT_ID=
INF_APP_CONNECTION_HEROKU_OAUTH_CLIENT_SECRET=

# datadog
SHOULD_USE_DATADOG_TRACER=
DATADOG_PROFILING_ENABLED=
DATADOG_ENV=
DATADOG_SERVICE=
DATADOG_HOSTNAME=

# kubernetes
KUBERNETES_AUTO_FETCH_SERVICE_ACCOUNT_TOKEN=false
@@ -1,7 +1,7 @@
# Keys
# Required key for platform encryption/decryption ops
# THIS IS A SAMPLE ENCRYPTION KEY AND SHOULD NEVER BE USED FOR PRODUCTION
ENCRYPTION_KEY=VVHnGZ0w98WLgISK4XSJcagezuG6EWRFTk48KE4Y5Mw=
ENCRYPTION_KEY=f13dbc92aaaf86fa7cb0ed8ac3265f47

# JWT
# Required secrets to sign JWT tokens
@@ -21,7 +21,7 @@ REDIS_URL=redis://redis:6379

# Website URL
# Required
SITE_URL=http://localhost:8080
SITE_URL=http://localhost:80

# Mail/SMTP
SMTP_HOST=

59 .github/workflows/release-k8-operator-helm.yml (vendored)
@@ -1,59 +0,0 @@
name: Release K8 Operator Helm Chart
on:
workflow_dispatch:

jobs:
test-helm:
name: Test Helm Chart
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v3
with:
fetch-depth: 0

- name: Set up Helm
uses: azure/setup-helm@v4.2.0
with:
version: v3.17.0

- uses: actions/setup-python@v5.3.0
with:
python-version: "3.x"
check-latest: true

- name: Set up chart-testing
uses: helm/chart-testing-action@v2.7.0

- name: Run chart-testing (lint)
run: ct lint --config ct.yaml --charts helm-charts/secrets-operator

- name: Create kind cluster
uses: helm/kind-action@v1.12.0

- name: Run chart-testing (install)
run: ct install --config ct.yaml --charts helm-charts/secrets-operator

release-helm:
name: Release Helm Chart
needs: test-helm
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v2

- name: Install Helm
uses: azure/setup-helm@v3
with:
version: v3.10.0

- name: Install python
uses: actions/setup-python@v4

- name: Install Cloudsmith CLI
run: pip install --upgrade cloudsmith-cli

- name: Build and push helm package to CloudSmith
run: cd helm-charts && sh upload-k8s-operator-cloudsmith.sh
env:
CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}
104 .github/workflows/release_docker_k8_operator.yaml (vendored)
@@ -1,104 +0,0 @@
name: Release K8 Operator Docker Image
on:
push:
tags:
- "infisical-k8-operator/v*.*.*"

permissions:
contents: write
pull-requests: write

jobs:
release-image:
name: Generate Helm Chart PR
runs-on: ubuntu-latest
outputs:
pr_number: ${{ steps.create-pr.outputs.pull-request-number }}
steps:
- name: Extract version from tag
id: extract_version
run: echo "::set-output name=version::${GITHUB_REF_NAME#infisical-k8-operator/}"

- name: Checkout code
uses: actions/checkout@v2

# Dependency for helm generation
- name: Install Helm
uses: azure/setup-helm@v3
with:
version: v3.10.0

# Dependency for helm generation
- name: Install Go
uses: actions/setup-go@v4
with:
go-version: 1.21

# Install binaries for helm generation
- name: Install dependencies
working-directory: k8-operator
run: |
make helmify
make kustomize
make controller-gen

- name: Generate Helm Chart
working-directory: k8-operator
run: make helm VERSION=${{ steps.extract_version.outputs.version }}

- name: Debug - Check file changes
run: |
echo "Current git status:"
git status
echo ""
echo "Modified files:"
git diff --name-only

# If there is no diff, exit with error. Version should always be changed, so if there is no diff, something is wrong and we should exit.
if [ -z "$(git diff --name-only)" ]; then
echo "No helm changes or version changes. Invalid release detected, Exiting."
exit 1
fi

- name: Create Helm Chart PR
id: create-pr
uses: peter-evans/create-pull-request@v5
with:
token: ${{ secrets.GITHUB_TOKEN }}
commit-message: "Update Helm chart to version ${{ steps.extract_version.outputs.version }}"
committer: GitHub <noreply@github.com>
author: ${{ github.actor }} <${{ github.actor }}@users.noreply.github.com>
branch: helm-update-${{ steps.extract_version.outputs.version }}
delete-branch: true
title: "Update Helm chart to version ${{ steps.extract_version.outputs.version }}"
body: |
This PR updates the Helm chart to version `${{ steps.extract_version.outputs.version }}`.
Additionally the helm chart has been updated to match the latest operator code changes.

Associated Release Workflow: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}

Once you have approved this PR, you can trigger the helm release workflow manually.
base: main

- name: 🔧 Set up QEMU
uses: docker/setup-qemu-action@v1

- name: 🔧 Set up Docker Buildx
uses: docker/setup-buildx-action@v1

- name: 🐋 Login to Docker Hub
uses: docker/login-action@v1
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}

- name: Build and push
id: docker_build
uses: docker/build-push-action@v2
with:
context: k8-operator
push: true
platforms: linux/amd64,linux/arm64
tags: |
infisical/kubernetes-operator:latest
infisical/kubernetes-operator:${{ steps.extract_version.outputs.version }}
5 .github/workflows/run-backend-bdd-tests.yml (vendored)
@@ -47,10 +47,13 @@ jobs:

- name: Output .env file and enable feature flags for BDD tests
run: |
cp .env.example .env
cp .env.dev.example .env
echo "ACME_DEVELOPMENT_MODE=true" >> .env
echo "ACME_DEVELOPMENT_HTTP01_CHALLENGE_HOST_OVERRIDES={\"localhost\": \"host.docker.internal:8087\", \"infisical.com\": \"host.docker.internal:8087\", \"example.com\": \"host.docker.internal:8087\"}" >> .env
echo "BDD_NOCK_API_ENABLED=true" >> .env
# use Technitium DNS server for BDD tests
echo "ACME_DNS_RESOLVE_RESOLVER_SERVERS_HOST_ENABLED=true" >> .env
echo "ACME_DNS_RESOLVER_SERVERS=technitium" >> .env
# Skip upstream validation, otherwise the ACME client for the upstream will try to
# validate the DNS records, which will fail because the DNS records are not actually created.
echo "ACME_SKIP_UPSTREAM_VALIDATION=true" >> .env

@@ -1,38 +0,0 @@
name: Run Helm Chart Tests for Secret Operator
on:
pull_request:
paths:
- "helm-charts/secrets-operator/**"
- ".github/workflows/run-helm-chart-tests-secret-operator.yml"

jobs:
test-helm:
name: Test Helm Chart
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v3
with:
fetch-depth: 0

- name: Set up Helm
uses: azure/setup-helm@v4.2.0
with:
version: v3.17.0

- uses: actions/setup-python@v5.3.0
with:
python-version: "3.x"
check-latest: true

- name: Set up chart-testing
uses: helm/chart-testing-action@v2.7.0

- name: Run chart-testing (lint)
run: ct lint --config ct.yaml --charts helm-charts/secrets-operator

- name: Create kind cluster
uses: helm/kind-action@v1.12.0

- name: Run chart-testing (install)
run: ct install --config ct.yaml --charts helm-charts/secrets-operator
2 .github/workflows/validate-pr-title.yml (vendored)
@@ -16,7 +16,7 @@ jobs:
const title = context.payload.pull_request.title;

// Valid PR types based on pull_request_template.md
const validTypes = ['fix', 'feature', 'improvement', 'breaking', 'docs', 'chore'];
const validTypes = ['fix', 'feature', 'improvement', 'breaking', 'docs', 'chore', 'feat'];

// Regex pattern: type(optional-scope): short description
// - Type must be one of the valid types

@@ -58,4 +58,5 @@ docs/documentation/platform/pki/enrollment-methods/api.mdx:private-key:139
docs/documentation/platform/pki/certificate-syncs/aws-secrets-manager.mdx:private-key:62
docs/documentation/platform/pki/certificate-syncs/chef.mdx:private-key:61
backend/src/services/certificate-request/certificate-request-service.test.ts:private-key:246
backend/src/services/certificate-request/certificate-request-service.test.ts:private-key:248
backend/src/services/certificate-request/certificate-request-service.test.ts:private-key:248
docs/documentation/platform/pki/enrollment-methods/api.mdx:private-key:142
2 Makefile
@@ -14,7 +14,7 @@ up-dev-metrics:
docker compose -f docker-compose.dev.yml --profile metrics up --build

up-prod:
docker-compose -f docker-compose.prod.yml up --build
docker compose -f docker-compose.prod.yml up --build

down:
docker compose -f docker-compose.dev.yml down

@@ -100,13 +100,13 @@ To set up and run Infisical locally, make sure you have Git and Docker installed
Linux/macOS:

```console
git clone https://github.com/Infisical/infisical && cd "$(basename $_ .git)" && cp .env.example .env && docker compose -f docker-compose.prod.yml up
git clone https://github.com/Infisical/infisical && cd "$(basename $_ .git)" && cp .env.dev.example .env && docker compose -f docker-compose.prod.yml up
```

Windows Command Prompt:

```console
git clone https://github.com/Infisical/infisical && cd infisical && copy .env.example .env && docker compose -f docker-compose.prod.yml up
git clone https://github.com/Infisical/infisical && cd infisical && copy .env.dev.example .env && docker compose -f docker-compose.prod.yml up
```

Create an account at `http://localhost:80`

@@ -23,6 +23,9 @@ CERT_CA_ID = os.environ.get("CERT_CA_ID")
CERT_TEMPLATE_ID = os.environ.get("CERT_TEMPLATE_ID")
AUTH_TOKEN = os.environ.get("INFISICAL_TOKEN")
BOOTSTRAP_INFISICAL = int(os.environ.get("BOOTSTRAP_INFISICAL", 0))
TECHNITIUM_URL = os.environ.get("TECHNITIUM_URL", "http://localhost:5380")
TECHNITIUM_USER = os.environ.get("TECHNITIUM_USER", "admin")
TECHNITIUM_PASSWORD = os.environ.get("TECHNITIUM_PASSWORD", "infisical")


# Called mostly from a CI to setup the new Infisical instance to get it ready for BDD tests
@@ -188,6 +191,9 @@ def before_all(context: Context):
base_vars = {
"BASE_URL": BASE_URL,
"PEBBLE_URL": PEBBLE_URL,
"TECHNITIUM_URL": TECHNITIUM_URL,
"TECHNITIUM_USER": TECHNITIUM_USER,
"TECHNITIUM_PASSWORD": TECHNITIUM_PASSWORD,
}
if BOOTSTRAP_INFISICAL:
details = bootstrap_infisical(context)
@@ -206,6 +212,7 @@ def before_all(context: Context):
}
context._initial_vars = vars
context.http_client = httpx.Client(base_url=BASE_URL)
context.technitium_http_client = httpx.Client(base_url=TECHNITIUM_URL)


def before_scenario(context: Context, scenario: typing.Any):

@@ -19,13 +19,17 @@ Feature: Authorization
And the value order.authorizations[0].body with jq ".challenges | map(pick(.type, .status)) | sort_by(.type)" should be equal to json
"""
[
{
"type": "dns-01",
"status": "pending"
},
{
"type": "http-01",
"status": "pending"
}
]
"""
And the value order.authorizations[0].body with jq ".challenges | map(.status) | sort" should be equal to ["pending"]
And the value order.authorizations[0].body with jq ".challenges | map(.status) | sort" should be equal to ["pending", "pending"]
And the value order.authorizations[0].body with jq ".identifier" should be equal to json
"""
{

@@ -1,6 +1,6 @@
Feature: Challenge

Scenario: Validate challenge
Scenario: Validate challenge with HTTP-01
Given I have an ACME cert profile as "acme_profile"
When I have an ACME client connecting to "{BASE_URL}/api/v1/cert-manager/acme/profiles/{acme_profile.id}/directory"
Then I register a new ACME account with email fangpen@infisical.com and EAB key id "{acme_profile.eab_kid}" with secret "{acme_profile.eab_secret}" as acme_account
@@ -22,6 +22,28 @@ Feature: Challenge
And I parse the full-chain certificate from order finalized_order as cert
And the value cert with jq ".subject.common_name" should be equal to "localhost"

Scenario: Validate challenge with DNS-01
Given I have an ACME cert profile as "acme_profile"
When I have an ACME client connecting to "{BASE_URL}/api/v1/cert-manager/acme/profiles/{acme_profile.id}/directory"
Then I register a new ACME account with email fangpen@infisical.com and EAB key id "{acme_profile.eab_kid}" with secret "{acme_profile.eab_secret}" as acme_account
When I create certificate signing request as csr
Then I add names to certificate signing request csr
"""
{
"COMMON_NAME": "example.com"
}
"""
And I create a RSA private key pair as cert_key
And I sign the certificate signing request csr with private key cert_key and output it as csr_pem in PEM format
And I submit the certificate signing request PEM csr_pem certificate order to the ACME server as order
And I select challenge with type dns-01 for domain example.com from order in order as challenge
Then I add domain example.com challenge response DNS records for challenge
And I tell ACME server that challenge is ready to be verified
And I poll and finalize the ACME order order as finalized_order
And the value finalized_order.body with jq ".status" should be equal to "valid"
And I parse the full-chain certificate from order finalized_order as cert
And the value cert with jq ".subject.common_name" should be equal to "example.com"

Scenario: Validate challenge with retry
Given I have an ACME cert profile as "acme_profile"
When I have an ACME client connecting to "{BASE_URL}/api/v1/cert-manager/acme/profiles/{acme_profile.id}/directory"
@@ -192,3 +214,28 @@ Feature: Challenge
And the value response with jq ".status" should be equal to 400
And the value response with jq ".type" should be equal to "urn:ietf:params:acme:error:badCSR"
And the value response with jq ".detail" should be equal to "Invalid CSR: Common name + SANs mismatch with order identifiers"

Scenario: Get certificate without passing challenge when skip DNS ownership verification is enabled
Given I create an ACME profile with config as "acme_profile"
"""
{
"skipDnsOwnershipVerification": true
}
"""
When I have an ACME client connecting to "{BASE_URL}/api/v1/cert-manager/acme/profiles/{acme_profile.id}/directory"
Then I register a new ACME account with email fangpen@infisical.com and EAB key id "{acme_profile.eab_kid}" with secret "{acme_profile.eab_secret}" as acme_account
When I create certificate signing request as csr
Then I add names to certificate signing request csr
"""
{
"COMMON_NAME": "localhost"
}
"""
And I create a RSA private key pair as cert_key
And I sign the certificate signing request csr with private key cert_key and output it as csr_pem in PEM format
And I submit the certificate signing request PEM csr_pem certificate order to the ACME server as order
And the value order.body with jq ".status" should be equal to "ready"
And I poll and finalize the ACME order order as finalized_order
And the value finalized_order.body with jq ".status" should be equal to "valid"
And I parse the full-chain certificate from order finalized_order as cert
And the value cert with jq ".subject.common_name" should be equal to "localhost"

@@ -266,6 +266,44 @@ def step_impl(context: Context, ca_id: str, template_id: str, profile_var: str):
)


@given('I create an ACME profile with config as "{profile_var}"')
def step_impl(context: Context, profile_var: str):
profile_slug = faker.slug()
jwt_token = context.vars["AUTH_TOKEN"]
acme_config = replace_vars(json.loads(context.text), context.vars)
response = context.http_client.post(
"/api/v1/cert-manager/certificate-profiles",
headers=dict(authorization="Bearer {}".format(jwt_token)),
json={
"projectId": context.vars["PROJECT_ID"],
"slug": profile_slug,
"description": "ACME Profile created by BDD test",
"enrollmentType": "acme",
"caId": context.vars["CERT_CA_ID"],
"certificateTemplateId": context.vars["CERT_TEMPLATE_ID"],
"acmeConfig": acme_config,
},
)
response.raise_for_status()
resp_json = response.json()
profile_id = resp_json["certificateProfile"]["id"]
kid = profile_id

response = context.http_client.get(
f"/api/v1/cert-manager/certificate-profiles/{profile_id}/acme/eab-secret/reveal",
headers=dict(authorization="Bearer {}".format(jwt_token)),
)
response.raise_for_status()
resp_json = response.json()
secret = resp_json["eabSecret"]

context.vars[profile_var] = AcmeProfile(
profile_id,
eab_kid=kid,
eab_secret=secret,
)


@given('I have an ACME cert profile with external ACME CA as "{profile_var}"')
def step_impl(context: Context, profile_var: str):
profile_id = context.vars.get("PROFILE_ID")
@@ -990,6 +1028,58 @@ def step_impl(context: Context, var_path: str, hostname: str):
serve_challenges(context=context, challenges=[challenge])


@then("I add domain {domain} challenge response DNS records for {var_path}")
def step_impl(context: Context, domain: str, var_path: str):
client = context.technitium_http_client
challenge = eval_var(context, var_path, as_json=False)

zone = domain
domain = f"{challenge.chall.LABEL}.{domain}"
value = challenge.chall.validation(context.acme_client.net.key)

resp = client.post(
"/api/user/login",
data={
"user": context.vars["TECHNITIUM_USER"],
"pass": context.vars["TECHNITIUM_PASSWORD"],
},
)
resp.raise_for_status()

token = resp.json()["token"]
resp = client.post(
"/api/zones/create",
params=dict(
token=token,
zone=zone,
type="Primary",
),
)
resp.raise_for_status()
error_msg = resp.json().get("errorMessage")
if error_msg is not None and not error_msg.startswith("Zone already exists:"):
raise RuntimeError(f"Unexpected error while creating zone {zone}: {error_msg}")

resp = client.post(
"/api/zones/records/add",
params=dict(
token=token,
zone=zone,
domain=domain,
type="TXT",
text=value,
),
)
resp.raise_for_status()
error_msg = resp.json().get("errorMessage")
if error_msg is not None and not error_msg.startswith(
"Cannot add record: record already exists"
):
raise RuntimeError(
f"Unexpected error while creating TXT record {domain} for zone {zone}: {error_msg}"
)


@then("I tell ACME server that {var_path} is ready to be verified")
def step_impl(context: Context, var_path: str):
challenge = eval_var(context, var_path, as_json=False)

@@ -98,9 +98,11 @@ const main = async () => {
(el) =>
!el.tableName.includes("_migrations") &&
!el.tableName.includes("audit_logs_") &&
!el.tableName.includes("certificate_requests_") &&
!el.tableName.includes("user_notifications_") &&
!el.tableName.includes("active_locks") &&
el.tableName !== "intermediate_audit_logs"
el.tableName !== "intermediate_audit_logs" &&
el.tableName !== "intermediate_certificate_requests"
);

for (let i = 0; i < tables.length; i += 1) {
8 backend/src/@types/knex.d.ts (vendored)
@@ -170,6 +170,9 @@ import {
TIdentityGcpAuths,
TIdentityGcpAuthsInsert,
TIdentityGcpAuthsUpdate,
TIdentityGroupMembership,
TIdentityGroupMembershipInsert,
TIdentityGroupMembershipUpdate,
TIdentityJwtAuths,
TIdentityJwtAuthsInsert,
TIdentityJwtAuthsUpdate,
@@ -857,6 +860,11 @@ declare module "knex/types/tables" {
TUserGroupMembershipInsert,
TUserGroupMembershipUpdate
>;
[TableName.IdentityGroupMembership]: KnexOriginal.CompositeTableType<
TIdentityGroupMembership,
TIdentityGroupMembershipInsert,
TIdentityGroupMembershipUpdate
>;
[TableName.GroupProjectMembership]: KnexOriginal.CompositeTableType<
TGroupProjectMemberships,
TGroupProjectMembershipsInsert,

@@ -0,0 +1,28 @@
import { Knex } from "knex";

import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";

export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasTable(TableName.IdentityGroupMembership))) {
await knex.schema.createTable(TableName.IdentityGroupMembership, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.uuid("identityId").notNullable();
t.foreign("identityId").references("id").inTable(TableName.Identity).onDelete("CASCADE");
t.uuid("groupId").notNullable();
t.foreign("groupId").references("id").inTable(TableName.Groups).onDelete("CASCADE");
t.timestamps(true, true, true);

t.unique(["identityId", "groupId"]);
});
}

await createOnUpdateTrigger(knex, TableName.IdentityGroupMembership);
}

export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.IdentityGroupMembership)) {
await knex.schema.dropTable(TableName.IdentityGroupMembership);
await dropOnUpdateTrigger(knex, TableName.IdentityGroupMembership);
}
}
@@ -0,0 +1,38 @@
import { Knex } from "knex";

import { TableName } from "../schemas";
import { dropConstraintIfExists } from "./utils/dropConstraintIfExists";

const FOREIGN_KEY_CONSTRAINT_NAME = "certificate_requests_acme_order_id_fkey";
const INDEX_NAME = "certificate_requests_acme_order_id_idx";

export async function up(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.CertificateRequests)) {
const hasAcmeOrderId = await knex.schema.hasColumn(TableName.CertificateRequests, "acmeOrderId");

if (!hasAcmeOrderId) {
await knex.schema.alterTable(TableName.CertificateRequests, (t) => {
t.uuid("acmeOrderId").nullable();
t.foreign("acmeOrderId", FOREIGN_KEY_CONSTRAINT_NAME)
.references("id")
.inTable(TableName.PkiAcmeOrder)
.onDelete("SET NULL");
t.index("acmeOrderId", INDEX_NAME);
});
}
}
}

export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.CertificateRequests)) {
const hasAcmeOrderId = await knex.schema.hasColumn(TableName.CertificateRequests, "acmeOrderId");

if (hasAcmeOrderId) {
await dropConstraintIfExists(TableName.CertificateRequests, FOREIGN_KEY_CONSTRAINT_NAME, knex);
await knex.schema.alterTable(TableName.CertificateRequests, (t) => {
t.dropIndex("acmeOrderId", INDEX_NAME);
t.dropColumn("acmeOrderId");
});
}
}
}
@@ -0,0 +1,23 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.PkiAcmeEnrollmentConfig)) {
if (!(await knex.schema.hasColumn(TableName.PkiAcmeEnrollmentConfig, "skipDnsOwnershipVerification"))) {
await knex.schema.alterTable(TableName.PkiAcmeEnrollmentConfig, (t) => {
t.boolean("skipDnsOwnershipVerification").defaultTo(false).notNullable();
});
}
}
}

export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.PkiAcmeEnrollmentConfig)) {
if (await knex.schema.hasColumn(TableName.PkiAcmeEnrollmentConfig, "skipDnsOwnershipVerification")) {
await knex.schema.alterTable(TableName.PkiAcmeEnrollmentConfig, (t) => {
t.dropColumn("skipDnsOwnershipVerification");
});
}
}
}
@@ -0,0 +1,171 @@
/* eslint-disable no-console */
import { Knex } from "knex";

import { TableName } from "../schemas";
import { createOnUpdateTrigger } from "../utils";

const INTERMEDIATE_CERTIFICATE_REQUESTS_TABLE = "intermediate_certificate_requests";

const formatPartitionDate = (date: Date) => {
const year = date.getFullYear();
const month = String(date.getMonth() + 1).padStart(2, "0");
const day = String(date.getDate()).padStart(2, "0");

return `${year}-${month}-${day}`;
};

const createCertificateRequestPartition = async (knex: Knex, startDate: Date, endDate: Date) => {
const startDateStr = formatPartitionDate(startDate);
const endDateStr = formatPartitionDate(endDate);

const partitionName = `${TableName.CertificateRequests}_${startDateStr.replace(/-/g, "")}_${endDateStr.replace(/-/g, "")}`;

await knex.schema.raw(
`CREATE TABLE ${partitionName} PARTITION OF ${TableName.CertificateRequests} FOR VALUES FROM ('${startDateStr}') TO ('${endDateStr}')`
);
};

export async function up(knex: Knex): Promise<void> {
// Check if table is already partitioned by looking for partition information
const partitionInfo: { rows: { schemaname: string; tablename: string }[] } = await knex.raw(
`
SELECT schemaname, tablename
FROM pg_tables
WHERE tablename LIKE '${TableName.CertificateRequests}_%'
AND schemaname = 'public'
`
);

if (partitionInfo.rows.length > 0) {
console.info("Certificate requests table is already partitioned, skipping migration...");
return;
}

if (await knex.schema.hasTable(TableName.CertificateRequests)) {
console.info("Converting existing certificate_requests table to partitioned table...");

// Drop primary key constraint
console.info("Dropping primary key of certificate_requests table...");
await knex.schema.alterTable(TableName.CertificateRequests, (t) => {
t.dropPrimary();
});

// Get all indices of the certificate_requests table and drop them
const indexNames: { rows: { indexname: string }[] } = await knex.raw(
`
SELECT indexname
FROM pg_indexes
WHERE tablename = '${TableName.CertificateRequests}'
`
);

console.log(
"Deleting existing certificate_requests indices:",
indexNames.rows.map((e) => e.indexname)
);

for await (const row of indexNames.rows) {
await knex.raw(`DROP INDEX IF EXISTS ??`, [row.indexname]);
}

// Rename existing table to intermediate name
console.log("Renaming certificate_requests table to intermediate name");
await knex.schema.renameTable(TableName.CertificateRequests, INTERMEDIATE_CERTIFICATE_REQUESTS_TABLE);

// Create new partitioned table with same schema - MUST MATCH EXACTLY the original table
const createTableSql = knex.schema
.createTable(TableName.CertificateRequests, (t) => {
t.uuid("id").defaultTo(knex.fn.uuid());
t.timestamps(true, true, true);
t.string("status").notNullable();
t.string("projectId").notNullable();
t.uuid("profileId").nullable();
t.uuid("caId").nullable();
t.uuid("certificateId").nullable();
t.text("csr").nullable();
t.string("commonName").nullable();
t.text("altNames").nullable();
t.specificType("keyUsages", "text[]").nullable();
t.specificType("extendedKeyUsages", "text[]").nullable();
t.datetime("notBefore").nullable();
t.datetime("notAfter").nullable();
t.string("keyAlgorithm").nullable();
t.string("signatureAlgorithm").nullable();
t.text("errorMessage").nullable();
t.text("metadata").nullable();
t.uuid("acmeOrderId").nullable();

t.primary(["id", "createdAt"]);
})
.toString();

console.info("Creating partitioned certificate_requests table...");
await knex.schema.raw(`${createTableSql} PARTITION BY RANGE ("createdAt")`);

console.log("Adding indices...");
await knex.schema.alterTable(TableName.CertificateRequests, (t) => {
t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
t.foreign("profileId").references("id").inTable(TableName.PkiCertificateProfile).onDelete("SET NULL");
t.foreign("caId").references("id").inTable(TableName.CertificateAuthority).onDelete("SET NULL");
t.foreign("certificateId").references("id").inTable(TableName.Certificate).onDelete("SET NULL");

t.index("status");
t.index(["projectId", "status"]);
t.index(["projectId", "createdAt"]);
t.index("acmeOrderId", "certificate_requests_acme_order_id_idx");
});

// Create default partition
console.log("Creating default partition...");
await knex.schema.raw(
`CREATE TABLE ${TableName.CertificateRequests}_default PARTITION OF ${TableName.CertificateRequests} DEFAULT`
);

const nextDate = new Date();
nextDate.setDate(nextDate.getDate() + 1);
const nextDateStr = formatPartitionDate(nextDate);

console.log("Attaching existing certificate_requests table as a partition...");
await knex.schema.raw(
`
ALTER TABLE ${INTERMEDIATE_CERTIFICATE_REQUESTS_TABLE} ADD CONSTRAINT certificate_requests_old
CHECK ( "createdAt" < DATE '${nextDateStr}' );

ALTER TABLE ${TableName.CertificateRequests} ATTACH PARTITION ${INTERMEDIATE_CERTIFICATE_REQUESTS_TABLE}
FOR VALUES FROM (MINVALUE) TO ('${nextDateStr}' );
`
);

// Create partition from next day until end of month
console.log("Creating certificate_requests partitions ahead of time... next date:", nextDateStr);
await createCertificateRequestPartition(
knex,
nextDate,
new Date(nextDate.getFullYear(), nextDate.getMonth() + 1, 1)
);

// Create partitions 20 years ahead for certificate requests
const partitionMonths = 20 * 12;
const partitionPromises: Promise<void>[] = [];
for (let x = 1; x <= partitionMonths; x += 1) {
partitionPromises.push(
createCertificateRequestPartition(
knex,
new Date(nextDate.getFullYear(), nextDate.getMonth() + x, 1),
new Date(nextDate.getFullYear(), nextDate.getMonth() + (x + 1), 1)
)
);
}

await Promise.all(partitionPromises);

await createOnUpdateTrigger(knex, TableName.CertificateRequests);
console.log("Certificate requests partition migration complete");
} else {
console.log("Certificate requests table does not exist, skipping partitioning migration");
}
}

export async function down(): Promise<void> {
// skip
}
@@ -26,7 +26,8 @@ export const CertificateRequestsSchema = z.object({
keyAlgorithm: z.string().nullable().optional(),
signatureAlgorithm: z.string().nullable().optional(),
errorMessage: z.string().nullable().optional(),
metadata: z.string().nullable().optional()
metadata: z.string().nullable().optional(),
acmeOrderId: z.string().uuid().nullable().optional()
});

export type TCertificateRequests = z.infer<typeof CertificateRequestsSchema>;

22 backend/src/db/schemas/identity-group-membership.ts (Normal file)
@@ -0,0 +1,22 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { TImmutableDBKeys } from "./models";

export const IdentityGroupMembershipSchema = z.object({
id: z.string().uuid(),
identityId: z.string().uuid(),
groupId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date()
});

export type TIdentityGroupMembership = z.infer<typeof IdentityGroupMembershipSchema>;
export type TIdentityGroupMembershipInsert = Omit<z.input<typeof IdentityGroupMembershipSchema>, TImmutableDBKeys>;
export type TIdentityGroupMembershipUpdate = Partial<
Omit<z.input<typeof IdentityGroupMembershipSchema>, TImmutableDBKeys>
>;
@@ -55,6 +55,7 @@ export * from "./identity-alicloud-auths";
export * from "./identity-aws-auths";
export * from "./identity-azure-auths";
export * from "./identity-gcp-auths";
export * from "./identity-group-membership";
export * from "./identity-jwt-auths";
export * from "./identity-kubernetes-auths";
export * from "./identity-metadata";

@@ -42,6 +42,7 @@ export enum TableName {
GroupProjectMembershipRole = "group_project_membership_roles",
ExternalGroupOrgRoleMapping = "external_group_org_role_mappings",
UserGroupMembership = "user_group_membership",
IdentityGroupMembership = "identity_group_membership",
UserAliases = "user_aliases",
UserEncryptionKey = "user_encryption_keys",
AuthTokens = "auth_tokens",

@@ -13,7 +13,8 @@ export const PkiAcmeEnrollmentConfigsSchema = z.object({
id: z.string().uuid(),
encryptedEabSecret: zodBuffer,
createdAt: z.date(),
updatedAt: z.date()
updatedAt: z.date(),
skipDnsOwnershipVerification: z.boolean().default(false)
});

export type TPkiAcmeEnrollmentConfigs = z.infer<typeof PkiAcmeEnrollmentConfigsSchema>;

@@ -1,6 +1,7 @@
import { z } from "zod";

import { DynamicSecretLeasesSchema } from "@app/db/schemas";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { ApiDocsTags, DYNAMIC_SECRET_LEASES } from "@app/lib/api-docs";
import { removeTrailingSlash } from "@app/lib/fn";
import { ms } from "@app/lib/ms";
@@ -48,14 +49,35 @@ export const registerDynamicSecretLeaseRouter = async (server: FastifyZodProvide
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const { data, lease, dynamicSecret } = await server.services.dynamicSecretLease.create({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
name: req.body.dynamicSecretName,
...req.body
const { data, lease, dynamicSecret, projectId, environment, secretPath } =
await server.services.dynamicSecretLease.create({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
name: req.body.dynamicSecretName,
...req.body
});

await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId,
event: {
type: EventType.CREATE_DYNAMIC_SECRET_LEASE,
metadata: {
dynamicSecretName: dynamicSecret.name,
dynamicSecretType: dynamicSecret.type,
dynamicSecretId: dynamicSecret.id,
projectId,
environment,
secretPath,
leaseId: lease.id,
leaseExternalEntityId: lease.externalEntityId,
leaseExpireAt: lease.expireAt
}
}
});

return { lease, data, dynamicSecret };
}
});
@@ -92,14 +114,36 @@ export const registerDynamicSecretLeaseRouter = async (server: FastifyZodProvide
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const lease = await server.services.dynamicSecretLease.revokeLease({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
leaseId: req.params.leaseId,
...req.body
const { lease, dynamicSecret, projectId, environment, secretPath } =
await server.services.dynamicSecretLease.revokeLease({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
leaseId: req.params.leaseId,
...req.body
});

await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId,
event: {
type: EventType.DELETE_DYNAMIC_SECRET_LEASE,
metadata: {
dynamicSecretName: dynamicSecret.name,
dynamicSecretType: dynamicSecret.type,
dynamicSecretId: dynamicSecret.id,
leaseId: lease.id,
leaseExternalEntityId: lease.externalEntityId,
leaseStatus: lease.status,
environment,
secretPath,
projectId,
isForced: req.body.isForced
}
}
});

return { lease };
}
});
@@ -147,14 +191,35 @@ export const registerDynamicSecretLeaseRouter = async (server: FastifyZodProvide
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const lease = await server.services.dynamicSecretLease.renewLease({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
leaseId: req.params.leaseId,
...req.body
const { lease, dynamicSecret, projectId, environment, secretPath } =
await server.services.dynamicSecretLease.renewLease({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
leaseId: req.params.leaseId,
...req.body
});

await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId,
event: {
type: EventType.RENEW_DYNAMIC_SECRET_LEASE,
metadata: {
dynamicSecretName: dynamicSecret.name,
dynamicSecretType: dynamicSecret.type,
dynamicSecretId: dynamicSecret.id,
leaseId: lease.id,
leaseExternalEntityId: lease.externalEntityId,
newLeaseExpireAt: lease.expireAt,
environment,
secretPath,
projectId
}
}
});

return { lease };
}
});
@@ -191,15 +256,41 @@ export const registerDynamicSecretLeaseRouter = async (server: FastifyZodProvide
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const lease = await server.services.dynamicSecretLease.getLeaseDetails({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
leaseId: req.params.leaseId,
...req.query
const { lease, dynamicSecret, projectId, environment, secretPath } =
await server.services.dynamicSecretLease.getLeaseDetails({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
leaseId: req.params.leaseId,
...req.query
});

await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId,
event: {
type: EventType.GET_DYNAMIC_SECRET_LEASE,
metadata: {
dynamicSecretName: dynamicSecret.name,
dynamicSecretId: dynamicSecret.id,
dynamicSecretType: dynamicSecret.type,
leaseId: lease.id,
leaseExternalEntityId: lease.externalEntityId,
leaseExpireAt: lease.expireAt,
environment,
secretPath,
projectId
}
}
});
return { lease };

return {
lease: {
...lease,
dynamicSecret
}
};
}
});
};

@@ -1,6 +1,7 @@
import { z } from "zod";

import { DynamicSecretLeasesSchema } from "@app/db/schemas";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { DynamicSecretProviderSchema } from "@app/ee/services/dynamic-secret/providers/models";
import { ApiDocsTags, DYNAMIC_SECRETS } from "@app/lib/api-docs";
import { removeTrailingSlash } from "@app/lib/fn";
@@ -98,6 +99,27 @@ export const registerDynamicSecretRouter = async (server: FastifyZodProvider) =>
actorOrgId: req.permission.orgId,
...req.body
});

await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: dynamicSecretCfg.projectId,
event: {
type: EventType.CREATE_DYNAMIC_SECRET,
metadata: {
dynamicSecretName: dynamicSecretCfg.name,
dynamicSecretType: dynamicSecretCfg.type,
dynamicSecretId: dynamicSecretCfg.id,
defaultTTL: dynamicSecretCfg.defaultTTL,
maxTTL: dynamicSecretCfg.maxTTL,
gatewayV2Id: dynamicSecretCfg.gatewayV2Id,
usernameTemplate: dynamicSecretCfg.usernameTemplate,
environment: dynamicSecretCfg.environment,
secretPath: dynamicSecretCfg.secretPath,
projectId: dynamicSecretCfg.projectId
}
}
});

return { dynamicSecret: dynamicSecretCfg };
}
});
@@ -160,18 +182,36 @@ export const registerDynamicSecretRouter = async (server: FastifyZodProvider) =>
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const dynamicSecretCfg = await server.services.dynamicSecret.updateByName({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
name: req.params.name,
path: req.body.path,
projectSlug: req.body.projectSlug,
environmentSlug: req.body.environmentSlug,
...req.body.data
const { dynamicSecret, updatedFields, projectId, environment, secretPath } =
await server.services.dynamicSecret.updateByName({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
name: req.params.name,
path: req.body.path,
projectSlug: req.body.projectSlug,
environmentSlug: req.body.environmentSlug,
...req.body.data
});

await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId,
event: {
type: EventType.UPDATE_DYNAMIC_SECRET,
metadata: {
dynamicSecretName: dynamicSecret.name,
dynamicSecretType: dynamicSecret.type,
dynamicSecretId: dynamicSecret.id,
environment,
secretPath,
projectId,
updatedFields
}
}
});
return { dynamicSecret: dynamicSecretCfg };
return { dynamicSecret };
}
});

@@ -209,6 +249,23 @@ export const registerDynamicSecretRouter = async (server: FastifyZodProvider) =>
name: req.params.name,
...req.body
});

await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: dynamicSecretCfg.projectId,
event: {
type: EventType.DELETE_DYNAMIC_SECRET,
metadata: {
dynamicSecretName: dynamicSecretCfg.name,
dynamicSecretType: dynamicSecretCfg.type,
dynamicSecretId: dynamicSecretCfg.id,
environment: dynamicSecretCfg.environment,
secretPath: dynamicSecretCfg.secretPath,
projectId: dynamicSecretCfg.projectId
}
}
});

return { dynamicSecret: dynamicSecretCfg };
}
});
@@ -249,6 +306,22 @@ export const registerDynamicSecretRouter = async (server: FastifyZodProvider) =>
...req.query
});

await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: dynamicSecretCfg.projectId,
event: {
type: EventType.GET_DYNAMIC_SECRET,
metadata: {
dynamicSecretName: dynamicSecretCfg.name,
dynamicSecretType: dynamicSecretCfg.type,
dynamicSecretId: dynamicSecretCfg.id,
environment: dynamicSecretCfg.environment,
secretPath: dynamicSecretCfg.secretPath,
projectId: dynamicSecretCfg.projectId
}
}
});

return { dynamicSecret: dynamicSecretCfg };
}
});
@@ -275,14 +348,29 @@ export const registerDynamicSecretRouter = async (server: FastifyZodProvider) =>
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const dynamicSecretCfgs = await server.services.dynamicSecret.listDynamicSecretsByEnv({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
...req.query
const { dynamicSecrets, environment, secretPath, projectId } =
await server.services.dynamicSecret.listDynamicSecretsByEnv({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
...req.query
});

await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId,
event: {
type: EventType.LIST_DYNAMIC_SECRETS,
metadata: {
environment,
secretPath,
projectId
}
}
});
return { dynamicSecrets: dynamicSecretCfgs };

return { dynamicSecrets };
}
});

@@ -316,14 +404,33 @@ export const registerDynamicSecretRouter = async (server: FastifyZodProvider) =>
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const leases = await server.services.dynamicSecretLease.listLeases({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
name: req.params.name,
...req.query
const { leases, dynamicSecret, projectId, environment, secretPath } =
await server.services.dynamicSecretLease.listLeases({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
name: req.params.name,
...req.query
});

await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId,
event: {
type: EventType.LIST_DYNAMIC_SECRET_LEASES,
metadata: {
dynamicSecretName: dynamicSecret.name,
dynamicSecretType: dynamicSecret.type,
dynamicSecretId: dynamicSecret.id,
environment,
secretPath,
projectId,
leaseCount: leases.length
}
}
});

return { leases };
}
});

@@ -11,6 +11,7 @@ import {
} from "@app/ee/services/external-kms/providers/model";
import { crypto } from "@app/lib/crypto/cryptography";
import { BadRequestError } from "@app/lib/errors";
import { deterministicStringify } from "@app/lib/fn/object";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
@@ -88,9 +89,11 @@ export const registerExternalKmsEndpoints = <
...rest
} = externalKms;

const credentialsToHash = deterministicStringify(configuration.credential);

const credentialsHash = crypto.nativeCrypto
.createHash("sha256")
.update(externalKmsData.encryptedProviderInputs)
.update(Buffer.from(credentialsToHash))
.digest("hex");
return { ...rest, externalKms: { ...externalKmsData, configuration, credentialsHash } };
}
@@ -153,9 +156,12 @@ export const registerExternalKmsEndpoints = <
external: { providerInput: externalKmsConfiguration, ...externalKmsData },
...rest
} = externalKms;

const credentialsToHash = deterministicStringify(externalKmsConfiguration.credential);

const credentialsHash = crypto.nativeCrypto
.createHash("sha256")
.update(externalKmsData.encryptedProviderInputs)
.update(Buffer.from(credentialsToHash))
.digest("hex");
return { ...rest, externalKms: { ...externalKmsData, configuration: externalKmsConfiguration, credentialsHash } };
}
@@ -222,9 +228,12 @@ export const registerExternalKmsEndpoints = <
external: { providerInput: externalKmsConfiguration, ...externalKmsData },
...rest
} = externalKms;

const credentialsToHash = deterministicStringify(externalKmsConfiguration.credential);

const credentialsHash = crypto.nativeCrypto
.createHash("sha256")
.update(externalKmsData.encryptedProviderInputs)
.update(Buffer.from(credentialsToHash))
.digest("hex");
return { ...rest, externalKms: { ...externalKmsData, configuration: externalKmsConfiguration, credentialsHash } };
}
@@ -277,9 +286,12 @@ export const registerExternalKmsEndpoints = <
external: { providerInput: configuration, ...externalKmsData },
...rest
} = externalKms;

const credentialsToHash = deterministicStringify(configuration.credential);

const credentialsHash = crypto.nativeCrypto
.createHash("sha256")
.update(externalKmsData.encryptedProviderInputs)
.update(Buffer.from(credentialsToHash))
.digest("hex");

return { ...rest, externalKms: { ...externalKmsData, configuration, credentialsHash } };

@@ -1,18 +1,27 @@
|
||||
import { z } from "zod";
|
||||
|
||||
import { GroupsSchema, OrgMembershipRole, ProjectsSchema, UsersSchema } from "@app/db/schemas";
|
||||
import { GroupsSchema, IdentitiesSchema, OrgMembershipRole, ProjectsSchema, UsersSchema } from "@app/db/schemas";
|
||||
import {
|
||||
EFilterReturnedProjects,
|
||||
EFilterReturnedUsers,
|
||||
EGroupProjectsOrderBy
|
||||
FilterMemberType,
|
||||
FilterReturnedMachineIdentities,
|
||||
FilterReturnedProjects,
|
||||
FilterReturnedUsers,
|
||||
GroupMembersOrderBy,
|
||||
GroupProjectsOrderBy
|
||||
} from "@app/ee/services/group/group-types";
|
||||
import { ApiDocsTags, GROUPS } from "@app/lib/api-docs";
|
||||
import { OrderByDirection } from "@app/lib/types";
|
||||
import { CharacterType, characterValidator } from "@app/lib/validator/validate-string";
|
||||
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
|
||||
import { slugSchema } from "@app/server/lib/schemas";
|
||||
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||
import { AuthMode } from "@app/services/auth/auth-type";
|
||||
|
||||
const GroupIdentityResponseSchema = IdentitiesSchema.pick({
|
||||
id: true,
|
||||
name: true
|
||||
});
|
||||
|
||||
export const registerGroupRouter = async (server: FastifyZodProvider) => {
|
||||
server.route({
|
||||
url: "/",
|
||||
@@ -190,8 +199,15 @@ export const registerGroupRouter = async (server: FastifyZodProvider) => {
|
||||
offset: z.coerce.number().min(0).default(0).describe(GROUPS.LIST_USERS.offset),
|
||||
limit: z.coerce.number().min(1).max(100).default(10).describe(GROUPS.LIST_USERS.limit),
|
||||
username: z.string().trim().optional().describe(GROUPS.LIST_USERS.username),
|
||||
search: z.string().trim().optional().describe(GROUPS.LIST_USERS.search),
|
||||
filter: z.nativeEnum(EFilterReturnedUsers).optional().describe(GROUPS.LIST_USERS.filterUsers)
|
||||
search: z
|
||||
.string()
|
||||
.trim()
|
||||
.refine((val) => characterValidator([CharacterType.AlphaNumeric, CharacterType.Hyphen])(val), {
|
||||
message: "Invalid pattern: only alphanumeric characters, - are allowed."
|
||||
})
|
||||
.optional()
|
||||
.describe(GROUPS.LIST_USERS.search),
|
||||
filter: z.nativeEnum(FilterReturnedUsers).optional().describe(GROUPS.LIST_USERS.filterUsers)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
@@ -202,12 +218,10 @@ export const registerGroupRouter = async (server: FastifyZodProvider) => {
|
||||
lastName: true,
|
||||
id: true
|
||||
})
|
||||
.merge(
|
||||
z.object({
|
||||
isPartOfGroup: z.boolean(),
|
||||
joinedGroupAt: z.date().nullable()
|
||||
})
|
||||
)
|
||||
.extend({
|
||||
isPartOfGroup: z.boolean(),
|
||||
joinedGroupAt: z.date().nullable()
|
||||
})
|
||||
.array(),
|
||||
totalCount: z.number()
|
||||
})
|
||||
@@ -227,6 +241,134 @@ export const registerGroupRouter = async (server: FastifyZodProvider) => {
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "GET",
|
||||
url: "/:id/machine-identities",
|
||||
config: {
|
||||
rateLimit: readLimit
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
schema: {
|
||||
hide: false,
|
||||
tags: [ApiDocsTags.Groups],
|
||||
params: z.object({
|
||||
id: z.string().trim().describe(GROUPS.LIST_MACHINE_IDENTITIES.id)
|
||||
}),
|
||||
querystring: z.object({
|
||||
offset: z.coerce.number().min(0).default(0).describe(GROUPS.LIST_MACHINE_IDENTITIES.offset),
|
||||
limit: z.coerce.number().min(1).max(100).default(10).describe(GROUPS.LIST_MACHINE_IDENTITIES.limit),
|
||||
search: z
|
||||
.string()
|
||||
.trim()
|
||||
.refine((val) => characterValidator([CharacterType.AlphaNumeric, CharacterType.Hyphen])(val), {
|
||||
message: "Invalid pattern: only alphanumeric characters, - are allowed."
|
||||
})
|
||||
.optional()
|
||||
.describe(GROUPS.LIST_MACHINE_IDENTITIES.search),
|
||||
filter: z
|
||||
.nativeEnum(FilterReturnedMachineIdentities)
|
||||
.optional()
|
||||
.describe(GROUPS.LIST_MACHINE_IDENTITIES.filterMachineIdentities)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
machineIdentities: GroupIdentityResponseSchema.extend({
|
||||
isPartOfGroup: z.boolean(),
|
||||
joinedGroupAt: z.date().nullable()
|
||||
}).array(),
|
||||
totalCount: z.number()
|
||||
})
|
||||
}
|
||||
},
|
||||
handler: async (req) => {
|
||||
const { machineIdentities, totalCount } = await server.services.group.listGroupMachineIdentities({
|
||||
id: req.params.id,
|
||||
actor: req.permission.type,
|
||||
actorId: req.permission.id,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorOrgId: req.permission.orgId,
|
||||
...req.query
|
||||
});
|
||||
|
||||
return { machineIdentities, totalCount };
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "GET",
|
||||
url: "/:id/members",
|
||||
config: {
|
||||
rateLimit: readLimit
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
schema: {
|
||||
hide: false,
|
||||
tags: [ApiDocsTags.Groups],
|
||||
params: z.object({
|
||||
id: z.string().trim().describe(GROUPS.LIST_MEMBERS.id)
|
||||
}),
|
||||
querystring: z.object({
|
||||
offset: z.coerce.number().min(0).default(0).describe(GROUPS.LIST_MEMBERS.offset),
|
||||
limit: z.coerce.number().min(1).max(100).default(10).describe(GROUPS.LIST_MEMBERS.limit),
|
||||
search: z
|
||||
.string()
|
||||
.trim()
|
||||
.refine((val) => characterValidator([CharacterType.AlphaNumeric, CharacterType.Hyphen])(val), {
|
||||
message: "Invalid pattern: only alphanumeric characters, - are allowed."
|
||||
})
|
||||
.optional()
|
||||
.describe(GROUPS.LIST_MEMBERS.search),
|
||||
orderBy: z
|
||||
.nativeEnum(GroupMembersOrderBy)
|
||||
.default(GroupMembersOrderBy.Name)
|
||||
.optional()
|
||||
.describe(GROUPS.LIST_MEMBERS.orderBy),
|
||||
orderDirection: z.nativeEnum(OrderByDirection).optional().describe(GROUPS.LIST_MEMBERS.orderDirection),
|
||||
memberTypeFilter: z
|
||||
.union([z.nativeEnum(FilterMemberType), z.array(z.nativeEnum(FilterMemberType))])
|
||||
.optional()
|
||||
.describe(GROUPS.LIST_MEMBERS.memberTypeFilter)
|
||||
.transform((val) => {
|
||||
if (!val) return undefined;
|
||||
return Array.isArray(val) ? val : [val];
|
||||
})
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
members: z
|
||||
.discriminatedUnion("type", [
|
||||
z.object({
|
||||
id: z.string(),
|
||||
joinedGroupAt: z.date().nullable(),
|
||||
type: z.literal("user"),
|
||||
user: UsersSchema.pick({ id: true, firstName: true, lastName: true, email: true, username: true })
|
||||
}),
|
||||
z.object({
|
||||
id: z.string(),
|
||||
joinedGroupAt: z.date().nullable(),
|
||||
type: z.literal("machineIdentity"),
|
||||
machineIdentity: GroupIdentityResponseSchema
|
||||
})
|
||||
])
|
||||
.array(),
|
||||
totalCount: z.number()
|
||||
})
|
||||
}
|
||||
},
|
||||
handler: async (req) => {
|
||||
const { members, totalCount } = await server.services.group.listGroupMembers({
|
||||
id: req.params.id,
|
||||
actor: req.permission.type,
|
||||
actorId: req.permission.id,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorOrgId: req.permission.orgId,
|
||||
...req.query
|
||||
});
|
||||
|
||||
return { members, totalCount };
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "GET",
|
||||
url: "/:id/projects",
|
||||
@@ -243,11 +385,18 @@ export const registerGroupRouter = async (server: FastifyZodProvider) => {
|
||||
querystring: z.object({
|
||||
offset: z.coerce.number().min(0).default(0).describe(GROUPS.LIST_PROJECTS.offset),
|
||||
limit: z.coerce.number().min(1).max(100).default(10).describe(GROUPS.LIST_PROJECTS.limit),
|
||||
search: z.string().trim().optional().describe(GROUPS.LIST_PROJECTS.search),
|
||||
filter: z.nativeEnum(EFilterReturnedProjects).optional().describe(GROUPS.LIST_PROJECTS.filterProjects),
|
||||
search: z
|
||||
.string()
|
||||
.trim()
|
||||
.refine((val) => characterValidator([CharacterType.AlphaNumeric, CharacterType.Hyphen])(val), {
|
||||
message: "Invalid pattern: only alphanumeric characters, - are allowed."
|
||||
})
|
||||
.optional()
|
||||
.describe(GROUPS.LIST_PROJECTS.search),
|
||||
filter: z.nativeEnum(FilterReturnedProjects).optional().describe(GROUPS.LIST_PROJECTS.filterProjects),
|
||||
orderBy: z
|
||||
.nativeEnum(EGroupProjectsOrderBy)
|
||||
.default(EGroupProjectsOrderBy.Name)
|
||||
.nativeEnum(GroupProjectsOrderBy)
|
||||
.default(GroupProjectsOrderBy.Name)
|
||||
.describe(GROUPS.LIST_PROJECTS.orderBy),
|
||||
orderDirection: z
|
||||
.nativeEnum(OrderByDirection)
|
||||
@@ -263,11 +412,9 @@ export const registerGroupRouter = async (server: FastifyZodProvider) => {
|
||||
description: true,
|
||||
type: true
|
||||
})
|
||||
.merge(
|
||||
z.object({
|
||||
joinedGroupAt: z.date().nullable()
|
||||
})
|
||||
)
|
||||
.extend({
|
||||
joinedGroupAt: z.date().nullable()
|
||||
})
|
||||
.array(),
|
||||
totalCount: z.number()
|
||||
})
|
||||
@@ -325,6 +472,40 @@ export const registerGroupRouter = async (server: FastifyZodProvider) => {
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "POST",
|
||||
url: "/:id/machine-identities/:machineIdentityId",
|
||||
config: {
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
schema: {
|
||||
hide: false,
|
||||
tags: [ApiDocsTags.Groups],
|
||||
params: z.object({
|
||||
id: z.string().trim().describe(GROUPS.ADD_MACHINE_IDENTITY.id),
|
||||
machineIdentityId: z.string().trim().describe(GROUPS.ADD_MACHINE_IDENTITY.machineIdentityId)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
id: z.string()
|
||||
})
|
||||
}
|
||||
},
|
||||
handler: async (req) => {
|
||||
const machineIdentity = await server.services.group.addMachineIdentityToGroup({
|
||||
id: req.params.id,
|
||||
identityId: req.params.machineIdentityId,
|
||||
actor: req.permission.type,
|
||||
actorId: req.permission.id,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorOrgId: req.permission.orgId
|
||||
});
|
||||
|
||||
return machineIdentity;
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "DELETE",
|
||||
url: "/:id/users/:username",
|
||||
@@ -362,4 +543,38 @@ export const registerGroupRouter = async (server: FastifyZodProvider) => {
|
||||
return user;
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "DELETE",
|
||||
url: "/:id/machine-identities/:machineIdentityId",
|
||||
config: {
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
schema: {
|
||||
hide: false,
|
||||
tags: [ApiDocsTags.Groups],
|
||||
params: z.object({
|
||||
id: z.string().trim().describe(GROUPS.DELETE_MACHINE_IDENTITY.id),
|
||||
machineIdentityId: z.string().trim().describe(GROUPS.DELETE_MACHINE_IDENTITY.machineIdentityId)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
id: z.string()
|
||||
})
|
||||
}
|
||||
},
|
||||
handler: async (req) => {
|
||||
const machineIdentity = await server.services.group.removeMachineIdentityFromGroup({
|
||||
id: req.params.id,
|
||||
identityId: req.params.machineIdentityId,
|
||||
actor: req.permission.type,
|
||||
actorId: req.permission.id,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorOrgId: req.permission.orgId
|
||||
});
|
||||
|
||||
return machineIdentity;
|
||||
}
|
||||
});
|
||||
};
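As a quick illustration of the new group machine-identity routes registered above, a hedged client-side sketch follows. The "/api/v1/groups" prefix and bearer-token auth are assumptions; the request and response shapes mirror the schemas in this diff.

// Hypothetical usage of the add/list/remove machine-identity endpoints for a group.
async function manageGroupMachineIdentity(baseUrl: string, token: string, groupId: string, identityId: string) {
  const headers = { Authorization: `Bearer ${token}` };

  // POST /:id/machine-identities/:machineIdentityId -> { id }
  await fetch(`${baseUrl}/api/v1/groups/${groupId}/machine-identities/${identityId}`, { method: "POST", headers });

  // GET /:id/machine-identities supports offset/limit/search/filter query params.
  const listRes = await fetch(`${baseUrl}/api/v1/groups/${groupId}/machine-identities?offset=0&limit=10`, { headers });
  const { machineIdentities, totalCount } = (await listRes.json()) as {
    machineIdentities: Array<{ id: string; name: string; isPartOfGroup: boolean; joinedGroupAt: string | null }>;
    totalCount: number;
  };
  console.log(`Group has ${totalCount} candidate machine identities`, machineIdentities);

  // DELETE /:id/machine-identities/:machineIdentityId -> { id }
  await fetch(`${baseUrl}/api/v1/groups/${groupId}/machine-identities/${identityId}`, { method: "DELETE", headers });
}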
@@ -142,6 +142,7 @@ export const registerUserAdditionalPrivilegeRouter = async (server: FastifyZodPr
|
||||
data: {
|
||||
...req.body,
|
||||
...req.body.type,
|
||||
name: req.body.slug,
|
||||
permissions: req.body.permissions
|
||||
? // eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
||||
// @ts-ignore-error this is valid ts
|
||||
|
||||
@@ -56,7 +56,7 @@ type TSecretApprovalRequestServiceFactoryDep = {
|
||||
TAccessApprovalRequestReviewerDALFactory,
|
||||
"create" | "find" | "findOne" | "transaction" | "delete"
|
||||
>;
|
||||
groupDAL: Pick<TGroupDALFactory, "findAllGroupPossibleMembers">;
|
||||
groupDAL: Pick<TGroupDALFactory, "findAllGroupPossibleUsers">;
|
||||
smtpService: Pick<TSmtpService, "sendMail">;
|
||||
userDAL: Pick<
|
||||
TUserDALFactory,
|
||||
@@ -182,7 +182,7 @@ export const accessApprovalRequestServiceFactory = ({
|
||||
await Promise.all(
|
||||
approverGroupIds.map((groupApproverId) =>
|
||||
groupDAL
|
||||
.findAllGroupPossibleMembers({
|
||||
.findAllGroupPossibleUsers({
|
||||
orgId: actorOrgId,
|
||||
groupId: groupApproverId
|
||||
})
|
||||
|
||||
@@ -49,6 +49,7 @@ import { TWebhookPayloads } from "@app/services/webhook/webhook-types";
|
||||
import { WorkflowIntegration } from "@app/services/workflow-integration/workflow-integration-types";
|
||||
|
||||
import { KmipPermission } from "../kmip/kmip-enum";
|
||||
import { AcmeChallengeType, AcmeIdentifierType } from "../pki-acme/pki-acme-schemas";
|
||||
import { ApprovalStatus } from "../secret-approval-request/secret-approval-request-types";
|
||||
|
||||
export type TListProjectAuditLogDTO = {
|
||||
@@ -78,7 +79,9 @@ export type TCreateAuditLogDTO = {
|
||||
| ScimClientActor
|
||||
| PlatformActor
|
||||
| UnknownUserActor
|
||||
| KmipClientActor;
|
||||
| KmipClientActor
|
||||
| AcmeProfileActor
|
||||
| AcmeAccountActor;
|
||||
orgId?: string;
|
||||
projectId?: string;
|
||||
} & BaseAuthData;
|
||||
@@ -392,6 +395,7 @@ export enum EventType {
|
||||
CREATE_CERTIFICATE_REQUEST = "create-certificate-request",
|
||||
GET_CERTIFICATE_REQUEST = "get-certificate-request",
|
||||
GET_CERTIFICATE_FROM_REQUEST = "get-certificate-from-request",
|
||||
LIST_CERTIFICATE_REQUESTS = "list-certificate-requests",
|
||||
ATTEMPT_CREATE_SLACK_INTEGRATION = "attempt-create-slack-integration",
|
||||
ATTEMPT_REINSTALL_SLACK_INTEGRATION = "attempt-reinstall-slack-integration",
|
||||
GET_PROJECT_SLACK_CONFIG = "get-project-slack-config",
|
||||
@@ -574,7 +578,32 @@ export enum EventType {
|
||||
APPROVAL_REQUEST_CANCEL = "approval-request-cancel",
|
||||
APPROVAL_REQUEST_GRANT_LIST = "approval-request-grant-list",
|
||||
APPROVAL_REQUEST_GRANT_GET = "approval-request-grant-get",
|
||||
APPROVAL_REQUEST_GRANT_REVOKE = "approval-request-grant-revoke"
|
||||
APPROVAL_REQUEST_GRANT_REVOKE = "approval-request-grant-revoke",
|
||||
|
||||
// PKI ACME
|
||||
CREATE_ACME_ACCOUNT = "create-acme-account",
|
||||
RETRIEVE_ACME_ACCOUNT = "retrieve-acme-account",
|
||||
CREATE_ACME_ORDER = "create-acme-order",
|
||||
FINALIZE_ACME_ORDER = "finalize-acme-order",
|
||||
DOWNLOAD_ACME_CERTIFICATE = "download-acme-certificate",
|
||||
RESPOND_TO_ACME_CHALLENGE = "respond-to-acme-challenge",
|
||||
PASS_ACME_CHALLENGE = "pass-acme-challenge",
|
||||
ATTEMPT_ACME_CHALLENGE = "attempt-acme-challenge",
|
||||
FAIL_ACME_CHALLENGE = "fail-acme-challenge",
|
||||
|
||||
// Dynamic Secrets
|
||||
CREATE_DYNAMIC_SECRET = "create-dynamic-secret",
|
||||
UPDATE_DYNAMIC_SECRET = "update-dynamic-secret",
|
||||
DELETE_DYNAMIC_SECRET = "delete-dynamic-secret",
|
||||
GET_DYNAMIC_SECRET = "get-dynamic-secret",
|
||||
LIST_DYNAMIC_SECRETS = "list-dynamic-secrets",
|
||||
|
||||
// Dynamic Secret Leases
|
||||
CREATE_DYNAMIC_SECRET_LEASE = "create-dynamic-secret-lease",
|
||||
DELETE_DYNAMIC_SECRET_LEASE = "delete-dynamic-secret-lease",
|
||||
RENEW_DYNAMIC_SECRET_LEASE = "renew-dynamic-secret-lease",
|
||||
LIST_DYNAMIC_SECRET_LEASES = "list-dynamic-secret-leases",
|
||||
GET_DYNAMIC_SECRET_LEASE = "get-dynamic-secret-lease"
|
||||
}
|
||||
|
||||
export const filterableSecretEvents: EventType[] = [
|
||||
@@ -615,6 +644,15 @@ interface KmipClientActorMetadata {
|
||||
name: string;
|
||||
}
|
||||
|
||||
interface AcmeProfileActorMetadata {
|
||||
profileId: string;
|
||||
}
|
||||
|
||||
interface AcmeAccountActorMetadata {
|
||||
profileId: string;
|
||||
accountId: string;
|
||||
}
|
||||
|
||||
interface UnknownUserActorMetadata {}
|
||||
|
||||
export interface UserActor {
|
||||
@@ -652,7 +690,25 @@ export interface ScimClientActor {
|
||||
metadata: ScimClientActorMetadata;
|
||||
}
|
||||
|
||||
export type Actor = UserActor | ServiceActor | IdentityActor | ScimClientActor | PlatformActor | KmipClientActor;
|
||||
export interface AcmeProfileActor {
|
||||
type: ActorType.ACME_PROFILE;
|
||||
metadata: AcmeProfileActorMetadata;
|
||||
}
|
||||
|
||||
export interface AcmeAccountActor {
|
||||
type: ActorType.ACME_ACCOUNT;
|
||||
metadata: AcmeAccountActorMetadata;
|
||||
}
|
||||
|
||||
export type Actor =
|
||||
| UserActor
|
||||
| ServiceActor
|
||||
| IdentityActor
|
||||
| ScimClientActor
|
||||
| PlatformActor
|
||||
| KmipClientActor
|
||||
| AcmeProfileActor
|
||||
| AcmeAccountActor;
|
||||
|
||||
interface GetSecretsEvent {
|
||||
type: EventType.GET_SECRETS;
|
||||
@@ -4248,6 +4304,18 @@ interface GetCertificateFromRequestEvent {
|
||||
};
|
||||
}
|
||||
|
||||
interface ListCertificateRequestsEvent {
|
||||
type: EventType.LIST_CERTIFICATE_REQUESTS;
|
||||
metadata: {
|
||||
offset: number;
|
||||
limit: number;
|
||||
search?: string;
|
||||
status?: string;
|
||||
count: number;
|
||||
certificateRequestIds: string[];
|
||||
};
|
||||
}
|
||||
|
||||
interface ApprovalPolicyCreateEvent {
|
||||
type: EventType.APPROVAL_POLICY_CREATE;
|
||||
metadata: {
|
||||
@@ -4368,6 +4436,235 @@ interface ApprovalRequestGrantRevokeEvent {
|
||||
};
|
||||
}
|
||||
|
||||
interface CreateAcmeAccountEvent {
|
||||
type: EventType.CREATE_ACME_ACCOUNT;
|
||||
metadata: {
|
||||
accountId: string;
|
||||
publicKeyThumbprint: string;
|
||||
emails?: string[];
|
||||
};
|
||||
}
|
||||
|
||||
interface RetrieveAcmeAccountEvent {
|
||||
type: EventType.RETRIEVE_ACME_ACCOUNT;
|
||||
metadata: {
|
||||
accountId: string;
|
||||
publicKeyThumbprint: string;
|
||||
};
|
||||
}
|
||||
|
||||
interface CreateAcmeOrderEvent {
|
||||
type: EventType.CREATE_ACME_ORDER;
|
||||
metadata: {
|
||||
orderId: string;
|
||||
identifiers: Array<{
|
||||
type: AcmeIdentifierType;
|
||||
value: string;
|
||||
}>;
|
||||
};
|
||||
}
|
||||
|
||||
interface FinalizeAcmeOrderEvent {
|
||||
type: EventType.FINALIZE_ACME_ORDER;
|
||||
metadata: {
|
||||
orderId: string;
|
||||
csr: string;
|
||||
};
|
||||
}
|
||||
|
||||
interface DownloadAcmeCertificateEvent {
|
||||
type: EventType.DOWNLOAD_ACME_CERTIFICATE;
|
||||
metadata: {
|
||||
orderId: string;
|
||||
};
|
||||
}
|
||||
|
||||
interface RespondToAcmeChallengeEvent {
|
||||
type: EventType.RESPOND_TO_ACME_CHALLENGE;
|
||||
metadata: {
|
||||
challengeId: string;
|
||||
type: AcmeChallengeType;
|
||||
};
|
||||
}
|
||||
interface PassedAcmeChallengeEvent {
|
||||
type: EventType.PASS_ACME_CHALLENGE;
|
||||
metadata: {
|
||||
challengeId: string;
|
||||
type: AcmeChallengeType;
|
||||
};
|
||||
}
|
||||
|
||||
interface AttemptAcmeChallengeEvent {
|
||||
type: EventType.ATTEMPT_ACME_CHALLENGE;
|
||||
metadata: {
|
||||
challengeId: string;
|
||||
type: AcmeChallengeType;
|
||||
retryCount: number;
|
||||
errorMessage: string;
|
||||
};
|
||||
}
|
||||
|
||||
interface FailAcmeChallengeEvent {
|
||||
type: EventType.FAIL_ACME_CHALLENGE;
|
||||
metadata: {
|
||||
challengeId: string;
|
||||
type: AcmeChallengeType;
|
||||
retryCount: number;
|
||||
errorMessage: string;
|
||||
};
|
||||
}
|
||||
|
||||
interface GetDynamicSecretLeaseEvent {
|
||||
type: EventType.GET_DYNAMIC_SECRET_LEASE;
|
||||
metadata: {
|
||||
dynamicSecretName: string;
|
||||
dynamicSecretId: string;
|
||||
dynamicSecretType: string;
|
||||
|
||||
leaseId: string;
|
||||
leaseExternalEntityId: string;
|
||||
leaseExpireAt: Date;
|
||||
|
||||
projectId: string;
|
||||
environment: string;
|
||||
secretPath: string;
|
||||
};
|
||||
}
|
||||
|
||||
interface RenewDynamicSecretLeaseEvent {
|
||||
type: EventType.RENEW_DYNAMIC_SECRET_LEASE;
|
||||
metadata: {
|
||||
dynamicSecretName: string;
|
||||
dynamicSecretId: string;
|
||||
dynamicSecretType: string;
|
||||
|
||||
leaseId: string;
|
||||
leaseExternalEntityId: string;
|
||||
newLeaseExpireAt: Date;
|
||||
|
||||
environment: string;
|
||||
secretPath: string;
|
||||
projectId: string;
|
||||
};
|
||||
}
|
||||
|
||||
interface CreateDynamicSecretLeaseEvent {
|
||||
type: EventType.CREATE_DYNAMIC_SECRET_LEASE;
|
||||
metadata: {
|
||||
dynamicSecretName: string;
|
||||
dynamicSecretId: string;
|
||||
dynamicSecretType: string;
|
||||
|
||||
leaseId: string;
|
||||
leaseExternalEntityId: string;
|
||||
leaseExpireAt: Date;
|
||||
|
||||
environment: string;
|
||||
secretPath: string;
|
||||
projectId: string;
|
||||
};
|
||||
}
|
||||
|
||||
interface DeleteDynamicSecretLeaseEvent {
|
||||
type: EventType.DELETE_DYNAMIC_SECRET_LEASE;
|
||||
metadata: {
|
||||
dynamicSecretName: string;
|
||||
dynamicSecretId: string;
|
||||
dynamicSecretType: string;
|
||||
|
||||
leaseId: string;
|
||||
leaseExternalEntityId: string;
|
||||
leaseStatus?: string | null;
|
||||
|
||||
environment: string;
|
||||
secretPath: string;
|
||||
projectId: string;
|
||||
|
||||
isForced: boolean;
|
||||
};
|
||||
}
|
||||
|
||||
interface CreateDynamicSecretEvent {
|
||||
type: EventType.CREATE_DYNAMIC_SECRET;
|
||||
metadata: {
|
||||
dynamicSecretName: string;
|
||||
dynamicSecretType: string;
|
||||
dynamicSecretId: string;
|
||||
defaultTTL: string;
|
||||
maxTTL?: string | null;
|
||||
gatewayV2Id?: string | null;
|
||||
usernameTemplate?: string | null;
|
||||
|
||||
environment: string;
|
||||
secretPath: string;
|
||||
projectId: string;
|
||||
};
|
||||
}
|
||||
|
||||
interface UpdateDynamicSecretEvent {
|
||||
type: EventType.UPDATE_DYNAMIC_SECRET;
|
||||
metadata: {
|
||||
dynamicSecretName: string;
|
||||
dynamicSecretId: string;
|
||||
dynamicSecretType: string;
|
||||
updatedFields: string[];
|
||||
|
||||
environment: string;
|
||||
secretPath: string;
|
||||
projectId: string;
|
||||
};
|
||||
}
|
||||
|
||||
interface DeleteDynamicSecretEvent {
|
||||
type: EventType.DELETE_DYNAMIC_SECRET;
|
||||
metadata: {
|
||||
dynamicSecretName: string;
|
||||
dynamicSecretId: string;
|
||||
dynamicSecretType: string;
|
||||
|
||||
environment: string;
|
||||
secretPath: string;
|
||||
projectId: string;
|
||||
};
|
||||
}
|
||||
|
||||
interface GetDynamicSecretEvent {
|
||||
type: EventType.GET_DYNAMIC_SECRET;
|
||||
metadata: {
|
||||
dynamicSecretName: string;
|
||||
dynamicSecretId: string;
|
||||
dynamicSecretType: string;
|
||||
|
||||
environment: string;
|
||||
secretPath: string;
|
||||
projectId: string;
|
||||
};
|
||||
}
|
||||
|
||||
interface ListDynamicSecretsEvent {
|
||||
type: EventType.LIST_DYNAMIC_SECRETS;
|
||||
metadata: {
|
||||
environment: string;
|
||||
secretPath: string;
|
||||
projectId: string;
|
||||
};
|
||||
}
|
||||
|
||||
interface ListDynamicSecretLeasesEvent {
|
||||
type: EventType.LIST_DYNAMIC_SECRET_LEASES;
|
||||
metadata: {
|
||||
dynamicSecretName: string;
|
||||
dynamicSecretId: string;
|
||||
dynamicSecretType: string;
|
||||
|
||||
environment: string;
|
||||
secretPath: string;
|
||||
projectId: string;
|
||||
|
||||
leaseCount: number;
|
||||
};
|
||||
}
|
||||
|
||||
export type Event =
|
||||
| CreateSubOrganizationEvent
|
||||
| UpdateSubOrganizationEvent
|
||||
@@ -4750,6 +5047,7 @@ export type Event =
|
||||
| CreateCertificateRequestEvent
|
||||
| GetCertificateRequestEvent
|
||||
| GetCertificateFromRequestEvent
|
||||
| ListCertificateRequestsEvent
|
||||
| AutomatedRenewCertificate
|
||||
| AutomatedRenewCertificateFailed
|
||||
| UserLoginEvent
|
||||
@@ -4768,4 +5066,23 @@ export type Event =
|
||||
| ApprovalRequestCancelEvent
|
||||
| ApprovalRequestGrantListEvent
|
||||
| ApprovalRequestGrantGetEvent
|
||||
| ApprovalRequestGrantRevokeEvent;
|
||||
| ApprovalRequestGrantRevokeEvent
|
||||
| CreateAcmeAccountEvent
|
||||
| RetrieveAcmeAccountEvent
|
||||
| CreateAcmeOrderEvent
|
||||
| FinalizeAcmeOrderEvent
|
||||
| DownloadAcmeCertificateEvent
|
||||
| RespondToAcmeChallengeEvent
|
||||
| PassedAcmeChallengeEvent
|
||||
| AttemptAcmeChallengeEvent
|
||||
| FailAcmeChallengeEvent
|
||||
| CreateDynamicSecretEvent
|
||||
| UpdateDynamicSecretEvent
|
||||
| DeleteDynamicSecretEvent
|
||||
| GetDynamicSecretEvent
|
||||
| ListDynamicSecretsEvent
|
||||
| ListDynamicSecretLeasesEvent
|
||||
| CreateDynamicSecretLeaseEvent
|
||||
| DeleteDynamicSecretLeaseEvent
|
||||
| RenewDynamicSecretLeaseEvent
|
||||
| GetDynamicSecretLeaseEvent;
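
To show how the extended Event union is meant to be consumed, here is a short sketch of recording one of the new dynamic-secret lease events. The concrete values are illustrative, and auditLogService / auditLogInfo stand in for whatever the calling service already has in scope.

// Illustrative only: the payload shape follows DeleteDynamicSecretLeaseEvent above.
const leaseDeletedEvent: Event = {
  type: EventType.DELETE_DYNAMIC_SECRET_LEASE,
  metadata: {
    dynamicSecretName: "postgres-readonly",
    dynamicSecretId: "ds_123",
    dynamicSecretType: "sql-database",
    leaseId: "lease_456",
    leaseExternalEntityId: "pg-user-9f2c",
    leaseStatus: null,
    environment: "prod",
    secretPath: "/db",
    projectId: "proj_789",
    isForced: false
  }
};

// auditLogService and auditLogInfo are assumed dependencies of the caller.
await auditLogService.createAuditLog({ ...auditLogInfo, projectId: "proj_789", event: leaseDeletedEvent });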
@@ -179,7 +179,14 @@ export const dynamicSecretLeaseServiceFactory = ({
|
||||
});
|
||||
|
||||
await dynamicSecretQueueService.setLeaseRevocation(dynamicSecretLease.id, dynamicSecretCfg.id, expireAt);
|
||||
return { lease: dynamicSecretLease, dynamicSecret: dynamicSecretCfg, data };
|
||||
return {
|
||||
lease: dynamicSecretLease,
|
||||
dynamicSecret: dynamicSecretCfg,
|
||||
data,
|
||||
projectId,
|
||||
environment: environmentSlug,
|
||||
secretPath: path
|
||||
};
|
||||
};
|
||||
|
||||
const renewLease: TDynamicSecretLeaseServiceFactory["renewLease"] = async ({
|
||||
@@ -277,7 +284,13 @@ export const dynamicSecretLeaseServiceFactory = ({
|
||||
expireAt,
|
||||
externalEntityId: entityId
|
||||
});
|
||||
return updatedDynamicSecretLease;
|
||||
return {
|
||||
lease: updatedDynamicSecretLease,
|
||||
dynamicSecret: dynamicSecretCfg,
|
||||
projectId,
|
||||
environment: environmentSlug,
|
||||
secretPath: path
|
||||
};
|
||||
};
|
||||
|
||||
const revokeLease: TDynamicSecretLeaseServiceFactory["revokeLease"] = async ({
|
||||
@@ -364,12 +377,24 @@ export const dynamicSecretLeaseServiceFactory = ({
|
||||
});
|
||||
// queue a job to retry the revocation at a later time
|
||||
await dynamicSecretQueueService.queueFailedRevocation(dynamicSecretLease.id, dynamicSecretCfg.id);
|
||||
return updatedDynamicSecretLease;
|
||||
return {
|
||||
lease: updatedDynamicSecretLease,
|
||||
dynamicSecret: dynamicSecretCfg,
|
||||
projectId,
|
||||
environment: environmentSlug,
|
||||
secretPath: path
|
||||
};
|
||||
}
|
||||
|
||||
await dynamicSecretQueueService.unsetLeaseRevocation(dynamicSecretLease.id);
|
||||
const deletedDynamicSecretLease = await dynamicSecretLeaseDAL.deleteById(dynamicSecretLease.id);
|
||||
return deletedDynamicSecretLease;
|
||||
return {
|
||||
lease: deletedDynamicSecretLease,
|
||||
dynamicSecret: dynamicSecretCfg,
|
||||
projectId,
|
||||
environment: environmentSlug,
|
||||
secretPath: path
|
||||
};
|
||||
};
|
||||
|
||||
const listLeases: TDynamicSecretLeaseServiceFactory["listLeases"] = async ({
|
||||
@@ -417,7 +442,13 @@ export const dynamicSecretLeaseServiceFactory = ({
|
||||
);
|
||||
|
||||
const dynamicSecretLeases = await dynamicSecretLeaseDAL.find({ dynamicSecretId: dynamicSecretCfg.id });
|
||||
return dynamicSecretLeases;
|
||||
return {
|
||||
leases: dynamicSecretLeases,
|
||||
dynamicSecret: dynamicSecretCfg,
|
||||
projectId,
|
||||
environment: environmentSlug,
|
||||
secretPath: path
|
||||
};
|
||||
};
|
||||
|
||||
const getLeaseDetails: TDynamicSecretLeaseServiceFactory["getLeaseDetails"] = async ({
|
||||
@@ -469,7 +500,13 @@ export const dynamicSecretLeaseServiceFactory = ({
|
||||
})
|
||||
);
|
||||
|
||||
return dynamicSecretLease;
|
||||
return {
|
||||
lease: dynamicSecretLease,
|
||||
dynamicSecret: dynamicSecretCfg,
|
||||
projectId,
|
||||
environment: environmentSlug,
|
||||
secretPath: path
|
||||
};
|
||||
};
|
||||
|
||||
return {
|
||||
|
||||
@@ -55,34 +55,36 @@ export type TDynamicSecretLeaseServiceFactory = {
|
||||
lease: TDynamicSecretLeases;
|
||||
dynamicSecret: TDynamicSecretWithMetadata;
|
||||
data: unknown;
|
||||
projectId: string;
|
||||
environment: string;
|
||||
secretPath: string;
|
||||
}>;
|
||||
listLeases: (arg: TListDynamicSecretLeasesDTO) => Promise<{
|
||||
leases: TDynamicSecretLeases[];
|
||||
dynamicSecret: TDynamicSecretWithMetadata;
|
||||
projectId: string;
|
||||
environment: string;
|
||||
secretPath: string;
|
||||
}>;
|
||||
revokeLease: (arg: TDeleteDynamicSecretLeaseDTO) => Promise<{
|
||||
lease: TDynamicSecretLeases;
|
||||
dynamicSecret: TDynamicSecretWithMetadata;
|
||||
projectId: string;
|
||||
environment: string;
|
||||
secretPath: string;
|
||||
}>;
|
||||
renewLease: (arg: TRenewDynamicSecretLeaseDTO) => Promise<{
|
||||
lease: TDynamicSecretLeases;
|
||||
dynamicSecret: TDynamicSecretWithMetadata;
|
||||
projectId: string;
|
||||
environment: string;
|
||||
secretPath: string;
|
||||
}>;
|
||||
listLeases: (arg: TListDynamicSecretLeasesDTO) => Promise<TDynamicSecretLeases[]>;
|
||||
revokeLease: (arg: TDeleteDynamicSecretLeaseDTO) => Promise<TDynamicSecretLeases>;
|
||||
renewLease: (arg: TRenewDynamicSecretLeaseDTO) => Promise<TDynamicSecretLeases>;
|
||||
getLeaseDetails: (arg: TDetailsDynamicSecretLeaseDTO) => Promise<{
|
||||
dynamicSecret: {
|
||||
id: string;
|
||||
name: string;
|
||||
version: number;
|
||||
type: string;
|
||||
defaultTTL: string;
|
||||
maxTTL: string | null | undefined;
|
||||
encryptedInput: Buffer;
|
||||
folderId: string;
|
||||
status: string | null | undefined;
|
||||
statusDetails: string | null | undefined;
|
||||
createdAt: Date;
|
||||
updatedAt: Date;
|
||||
};
|
||||
version: number;
|
||||
id: string;
|
||||
createdAt: Date;
|
||||
updatedAt: Date;
|
||||
externalEntityId: string;
|
||||
expireAt: Date;
|
||||
dynamicSecretId: string;
|
||||
status?: string | null | undefined;
|
||||
config?: unknown;
|
||||
statusDetails?: string | null | undefined;
|
||||
dynamicSecret: TDynamicSecretWithMetadata;
|
||||
lease: TDynamicSecretLeases;
|
||||
projectId: string;
|
||||
environment: string;
|
||||
secretPath: string;
|
||||
}>;
|
||||
};
|
||||
|
||||
@@ -6,6 +6,8 @@ import { BadRequestError } from "@app/lib/errors";
|
||||
import { isPrivateIp } from "@app/lib/ip/ipRange";
|
||||
import { getDbConnectionHost } from "@app/lib/knex";
|
||||
|
||||
const ERROR_MESSAGE = "Invalid host";
|
||||
|
||||
export const verifyHostInputValidity = async (host: string, isGateway = false) => {
|
||||
const appCfg = getConfig();
|
||||
|
||||
@@ -40,13 +42,13 @@ export const verifyHostInputValidity = async (host: string, isGateway = false) =
|
||||
}
|
||||
}
|
||||
|
||||
const normalizedHost = host.split(":")[0];
|
||||
const normalizedHost = host.split(":")[0].toLowerCase();
|
||||
const inputHostIps: string[] = [];
|
||||
if (net.isIPv4(host)) {
|
||||
inputHostIps.push(host);
|
||||
} else {
|
||||
if (normalizedHost === "localhost" || normalizedHost === "host.docker.internal") {
|
||||
throw new BadRequestError({ message: "Invalid db host" });
|
||||
throw new BadRequestError({ message: ERROR_MESSAGE });
|
||||
}
|
||||
try {
|
||||
const resolvedIps = await dns.resolve4(host);
|
||||
@@ -62,10 +64,10 @@ export const verifyHostInputValidity = async (host: string, isGateway = false) =
|
||||
|
||||
if (!(appCfg.DYNAMIC_SECRET_ALLOW_INTERNAL_IP || appCfg.ALLOW_INTERNAL_IP_CONNECTIONS)) {
|
||||
const isInternalIp = inputHostIps.some((el) => isPrivateIp(el));
|
||||
if (isInternalIp) throw new BadRequestError({ message: "Invalid db host" });
|
||||
if (isInternalIp) throw new BadRequestError({ message: ERROR_MESSAGE });
|
||||
}
|
||||
|
||||
const isAppUsedIps = inputHostIps.some((el) => exclusiveIps.includes(el));
|
||||
if (isAppUsedIps) throw new BadRequestError({ message: "Invalid db host" });
|
||||
if (isAppUsedIps) throw new BadRequestError({ message: ERROR_MESSAGE });
|
||||
return inputHostIps;
|
||||
};
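
A hedged sketch of how the validator above behaves for a few representative inputs, assuming internal-IP connections are disallowed by config and that the hostname shown is a reachable placeholder.

async function demoHostValidation() {
  // Public hostnames are resolved and their IPv4 addresses returned.
  const publicIps = await verifyHostInputValidity("db.example.com");
  console.log(publicIps);

  // localhost aliases and private addresses are rejected with the shared "Invalid host" message.
  for (const blocked of ["localhost", "host.docker.internal", "10.0.0.5"]) {
    try {
      await verifyHostInputValidity(blocked);
    } catch (err) {
      console.log(`${blocked} rejected:`, (err as Error).message);
    }
  }
}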
@@ -9,6 +9,7 @@ import {
|
||||
} from "@app/ee/services/permission/project-permission";
|
||||
import { crypto } from "@app/lib/crypto";
|
||||
import { BadRequestError, NotFoundError } from "@app/lib/errors";
|
||||
import { extractObjectFieldPaths } from "@app/lib/fn";
|
||||
import { OrderByDirection } from "@app/lib/types";
|
||||
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
|
||||
import { KmsDataKey } from "@app/services/kms/kms-types";
|
||||
@@ -44,6 +45,34 @@ type TDynamicSecretServiceFactoryDep = {
|
||||
resourceMetadataDAL: Pick<TResourceMetadataDALFactory, "insertMany" | "delete">;
|
||||
};
|
||||
|
||||
const getUpdatedFieldPaths = (
|
||||
oldData: Record<string, unknown> | null | undefined,
|
||||
newData: Record<string, unknown> | null | undefined
|
||||
): string[] => {
|
||||
const updatedPaths = new Set<string>();
|
||||
|
||||
if (!newData || typeof newData !== "object") {
|
||||
return [];
|
||||
}
|
||||
|
||||
if (!oldData || typeof oldData !== "object") {
|
||||
return [];
|
||||
}
|
||||
|
||||
Object.keys(newData).forEach((key) => {
|
||||
const oldValue = oldData?.[key];
|
||||
const newValue = newData[key];
|
||||
|
||||
if (JSON.stringify(oldValue) !== JSON.stringify(newValue)) {
|
||||
// Extract paths from the new value
|
||||
const paths = extractObjectFieldPaths(newValue, key);
|
||||
paths.forEach((path) => updatedPaths.add(path));
|
||||
}
|
||||
});
|
||||
|
||||
return Array.from(updatedPaths).sort();
|
||||
};
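
// Illustrative only (not part of this change): given an old and a new provider input,
// getUpdatedFieldPaths surfaces paths for keys whose JSON-serialized values differ; the exact
// nesting of the returned paths comes from extractObjectFieldPaths, so the output is indicative.
//
//   getUpdatedFieldPaths(
//     { host: "db.internal", port: 5432, creds: { username: "app" } },
//     { host: "db.internal", port: 5433, creds: { username: "app_v2" } }
//   );
//   // => a sorted list of changed paths, e.g. ["creds.username", "port"]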
export const dynamicSecretServiceFactory = ({
|
||||
dynamicSecretDAL,
|
||||
dynamicSecretLeaseDAL,
|
||||
@@ -191,7 +220,13 @@ export const dynamicSecretServiceFactory = ({
|
||||
return cfg;
|
||||
});
|
||||
|
||||
return { ...dynamicSecretCfg, inputs };
|
||||
return {
|
||||
...dynamicSecretCfg,
|
||||
inputs,
|
||||
projectId: project.id,
|
||||
environment: environmentSlug,
|
||||
secretPath: path
|
||||
};
|
||||
};
|
||||
|
||||
const updateByName: TDynamicSecretServiceFactory["updateByName"] = async ({
|
||||
@@ -278,8 +313,26 @@ export const dynamicSecretServiceFactory = ({
|
||||
secretManagerDecryptor({ cipherTextBlob: dynamicSecretCfg.encryptedInput }).toString()
|
||||
) as object;
|
||||
const newInput = { ...decryptedStoredInput, ...(inputs || {}) };
|
||||
const oldInput = await selectedProvider.validateProviderInputs(decryptedStoredInput, { projectId });
|
||||
const updatedInput = await selectedProvider.validateProviderInputs(newInput, { projectId });
|
||||
|
||||
const updatedFields = getUpdatedFieldPaths(
|
||||
{
|
||||
...(oldInput as object),
|
||||
maxTTL: dynamicSecretCfg.maxTTL,
|
||||
defaultTTL: dynamicSecretCfg.defaultTTL,
|
||||
name: dynamicSecretCfg.name,
|
||||
usernameTemplate
|
||||
},
|
||||
{
|
||||
...(updatedInput as object),
|
||||
maxTTL,
|
||||
defaultTTL,
|
||||
name: newName ?? name,
|
||||
usernameTemplate
|
||||
}
|
||||
);
|
||||
|
||||
let selectedGatewayId: string | null = null;
|
||||
let isGatewayV1 = true;
|
||||
if (updatedInput && typeof updatedInput === "object" && "gatewayId" in updatedInput && updatedInput?.gatewayId) {
|
||||
@@ -357,7 +410,13 @@ export const dynamicSecretServiceFactory = ({
|
||||
return cfg;
|
||||
});
|
||||
|
||||
return { ...updatedDynamicCfg, inputs: updatedInput };
|
||||
return {
|
||||
dynamicSecret: updatedDynamicCfg,
|
||||
updatedFields,
|
||||
projectId: project.id,
|
||||
environment: environmentSlug,
|
||||
secretPath: path
|
||||
};
|
||||
};
|
||||
|
||||
const deleteByName: TDynamicSecretServiceFactory["deleteByName"] = async ({
|
||||
@@ -412,7 +471,12 @@ export const dynamicSecretServiceFactory = ({
|
||||
await Promise.all(leases.map(({ id: leaseId }) => dynamicSecretQueueService.unsetLeaseRevocation(leaseId)));
|
||||
|
||||
const deletedDynamicSecretCfg = await dynamicSecretDAL.deleteById(dynamicSecretCfg.id);
|
||||
return deletedDynamicSecretCfg;
|
||||
return {
|
||||
...deletedDynamicSecretCfg,
|
||||
environment: environmentSlug,
|
||||
secretPath: path,
|
||||
projectId: project.id
|
||||
};
|
||||
}
|
||||
// if leases exist we should flag it as deleting and then remove leases in background
|
||||
// then delete the main one
|
||||
@@ -421,11 +485,21 @@ export const dynamicSecretServiceFactory = ({
|
||||
status: DynamicSecretStatus.Deleting
|
||||
});
|
||||
await dynamicSecretQueueService.pruneDynamicSecret(updatedDynamicSecretCfg.id);
|
||||
return updatedDynamicSecretCfg;
|
||||
return {
|
||||
...updatedDynamicSecretCfg,
|
||||
environment: environmentSlug,
|
||||
secretPath: path,
|
||||
projectId: project.id
|
||||
};
|
||||
}
|
||||
// if no leases just delete the config
|
||||
const deletedDynamicSecretCfg = await dynamicSecretDAL.deleteById(dynamicSecretCfg.id);
|
||||
return deletedDynamicSecretCfg;
|
||||
return {
|
||||
...deletedDynamicSecretCfg,
|
||||
projectId: project.id,
|
||||
environment: environmentSlug,
|
||||
secretPath: path
|
||||
};
|
||||
};
|
||||
|
||||
const getDetails: TDynamicSecretServiceFactory["getDetails"] = async ({
|
||||
@@ -491,7 +565,13 @@ export const dynamicSecretServiceFactory = ({
|
||||
projectId
|
||||
})) as object;
|
||||
|
||||
return { ...dynamicSecretCfg, inputs: providerInputs };
|
||||
return {
|
||||
...dynamicSecretCfg,
|
||||
inputs: providerInputs,
|
||||
projectId: project.id,
|
||||
environment: environmentSlug,
|
||||
secretPath: path
|
||||
};
|
||||
};
|
||||
|
||||
// get unique dynamic secret count across multiple envs
|
||||
@@ -622,16 +702,21 @@ export const dynamicSecretServiceFactory = ({
|
||||
}
|
||||
);
|
||||
|
||||
return dynamicSecretCfg.filter((dynamicSecret) => {
|
||||
return permission.can(
|
||||
ProjectPermissionDynamicSecretActions.ReadRootCredential,
|
||||
subject(ProjectPermissionSub.DynamicSecrets, {
|
||||
environment: environmentSlug,
|
||||
secretPath: path,
|
||||
metadata: dynamicSecret.metadata
|
||||
})
|
||||
);
|
||||
});
|
||||
return {
|
||||
dynamicSecrets: dynamicSecretCfg.filter((dynamicSecret) => {
|
||||
return permission.can(
|
||||
ProjectPermissionDynamicSecretActions.ReadRootCredential,
|
||||
subject(ProjectPermissionSub.DynamicSecrets, {
|
||||
environment: environmentSlug,
|
||||
secretPath: path,
|
||||
metadata: dynamicSecret.metadata
|
||||
})
|
||||
);
|
||||
}),
|
||||
environment: environmentSlug,
|
||||
secretPath: path,
|
||||
projectId
|
||||
};
|
||||
};
|
||||
|
||||
const listDynamicSecretsByFolderIds: TDynamicSecretServiceFactory["listDynamicSecretsByFolderIds"] = async (
|
||||
|
||||
@@ -86,11 +86,28 @@ export type TGetDynamicSecretsCountDTO = Omit<TListDynamicSecretsDTO, "projectSl
|
||||
};
|
||||
|
||||
export type TDynamicSecretServiceFactory = {
|
||||
create: (arg: TCreateDynamicSecretDTO) => Promise<TDynamicSecrets>;
|
||||
updateByName: (arg: TUpdateDynamicSecretDTO) => Promise<TDynamicSecrets>;
|
||||
deleteByName: (arg: TDeleteDynamicSecretDTO) => Promise<TDynamicSecrets>;
|
||||
getDetails: (arg: TDetailsDynamicSecretDTO) => Promise<TDynamicSecretWithMetadata>;
|
||||
listDynamicSecretsByEnv: (arg: TListDynamicSecretsDTO) => Promise<TDynamicSecretWithMetadata[]>;
|
||||
create: (
|
||||
arg: TCreateDynamicSecretDTO
|
||||
) => Promise<TDynamicSecrets & { projectId: string; environment: string; secretPath: string }>;
|
||||
updateByName: (arg: TUpdateDynamicSecretDTO) => Promise<{
|
||||
dynamicSecret: TDynamicSecrets;
|
||||
updatedFields: string[];
|
||||
projectId: string;
|
||||
environment: string;
|
||||
secretPath: string;
|
||||
}>;
|
||||
deleteByName: (
|
||||
arg: TDeleteDynamicSecretDTO
|
||||
) => Promise<TDynamicSecrets & { projectId: string; environment: string; secretPath: string }>;
|
||||
getDetails: (
|
||||
arg: TDetailsDynamicSecretDTO
|
||||
) => Promise<TDynamicSecretWithMetadata & { projectId: string; environment: string; secretPath: string }>;
|
||||
listDynamicSecretsByEnv: (arg: TListDynamicSecretsDTO) => Promise<{
|
||||
dynamicSecrets: Array<TDynamicSecretWithMetadata>;
|
||||
environment: string;
|
||||
secretPath: string;
|
||||
projectId: string;
|
||||
}>;
|
||||
listDynamicSecretsByEnvs: (
|
||||
arg: TListDynamicSecretsMultiEnvDTO
|
||||
) => Promise<Array<TDynamicSecretWithMetadata & { environment: string }>>;
|
||||
|
||||
@@ -380,6 +380,7 @@ export const externalKmsServiceFactory = ({
|
||||
|
||||
const findById = async ({ actor, actorId, actorOrgId, actorAuthMethod, id: kmsId }: TGetExternalKmsByIdDTO) => {
|
||||
const kmsDoc = await kmsDAL.findById(kmsId);
|
||||
if (!kmsDoc) throw new NotFoundError({ message: `Could not find KMS with ID '${kmsId}'` });
|
||||
const { permission } = await permissionService.getOrgPermission({
|
||||
scope: OrganizationActionScope.Any,
|
||||
actor,
|
||||
|
||||
@@ -6,7 +6,14 @@ import { DatabaseError } from "@app/lib/errors";
|
||||
import { buildFindFilter, ormify, selectAllTableCols, TFindFilter, TFindOpt } from "@app/lib/knex";
|
||||
import { OrderByDirection } from "@app/lib/types";
|
||||
|
||||
import { EFilterReturnedProjects, EFilterReturnedUsers, EGroupProjectsOrderBy } from "./group-types";
|
||||
import {
|
||||
FilterMemberType,
|
||||
FilterReturnedMachineIdentities,
|
||||
FilterReturnedProjects,
|
||||
FilterReturnedUsers,
|
||||
GroupMembersOrderBy,
|
||||
GroupProjectsOrderBy
|
||||
} from "./group-types";
|
||||
|
||||
export type TGroupDALFactory = ReturnType<typeof groupDALFactory>;
|
||||
|
||||
@@ -70,7 +77,7 @@ export const groupDALFactory = (db: TDbClient) => {
|
||||
};
|
||||
|
||||
// special query
|
||||
const findAllGroupPossibleMembers = async ({
|
||||
const findAllGroupPossibleUsers = async ({
|
||||
orgId,
|
||||
groupId,
|
||||
offset = 0,
|
||||
@@ -85,7 +92,7 @@ export const groupDALFactory = (db: TDbClient) => {
|
||||
limit?: number;
|
||||
username?: string;
|
||||
search?: string;
|
||||
filter?: EFilterReturnedUsers;
|
||||
filter?: FilterReturnedUsers;
|
||||
}) => {
|
||||
try {
|
||||
const query = db
|
||||
@@ -127,11 +134,11 @@ export const groupDALFactory = (db: TDbClient) => {
|
||||
}
|
||||
|
||||
switch (filter) {
|
||||
case EFilterReturnedUsers.EXISTING_MEMBERS:
|
||||
void query.andWhere(`${TableName.UserGroupMembership}.createdAt`, "is not", null);
|
||||
case FilterReturnedUsers.EXISTING_MEMBERS:
|
||||
void query.whereNotNull(`${TableName.UserGroupMembership}.createdAt`);
|
||||
break;
|
||||
case EFilterReturnedUsers.NON_MEMBERS:
|
||||
void query.andWhere(`${TableName.UserGroupMembership}.createdAt`, "is", null);
|
||||
case FilterReturnedUsers.NON_MEMBERS:
|
||||
void query.whereNull(`${TableName.UserGroupMembership}.createdAt`);
|
||||
break;
|
||||
default:
|
||||
break;
|
||||
@@ -155,7 +162,7 @@ export const groupDALFactory = (db: TDbClient) => {
|
||||
username: memberUsername,
|
||||
firstName,
|
||||
lastName,
|
||||
isPartOfGroup: !!memberGroupId,
|
||||
isPartOfGroup: Boolean(memberGroupId),
|
||||
joinedGroupAt
|
||||
})
|
||||
),
|
||||
@@ -167,6 +174,256 @@ export const groupDALFactory = (db: TDbClient) => {
|
||||
}
|
||||
};
|
||||
|
||||
const findAllGroupPossibleMachineIdentities = async ({
|
||||
orgId,
|
||||
groupId,
|
||||
offset = 0,
|
||||
limit,
|
||||
search,
|
||||
filter
|
||||
}: {
|
||||
orgId: string;
|
||||
groupId: string;
|
||||
offset?: number;
|
||||
limit?: number;
|
||||
search?: string;
|
||||
filter?: FilterReturnedMachineIdentities;
|
||||
}) => {
|
||||
try {
|
||||
const query = db
|
||||
.replicaNode()(TableName.Membership)
|
||||
.where(`${TableName.Membership}.scopeOrgId`, orgId)
|
||||
.where(`${TableName.Membership}.scope`, AccessScope.Organization)
|
||||
.whereNotNull(`${TableName.Membership}.actorIdentityId`)
|
||||
.whereNull(`${TableName.Identity}.projectId`)
|
||||
.join(TableName.Identity, `${TableName.Membership}.actorIdentityId`, `${TableName.Identity}.id`)
|
||||
.leftJoin(TableName.IdentityGroupMembership, (bd) => {
|
||||
bd.on(`${TableName.IdentityGroupMembership}.identityId`, "=", `${TableName.Identity}.id`).andOn(
|
||||
`${TableName.IdentityGroupMembership}.groupId`,
|
||||
"=",
|
||||
db.raw("?", [groupId])
|
||||
);
|
||||
})
|
||||
.select(
|
||||
db.ref("id").withSchema(TableName.Membership),
|
||||
db.ref("groupId").withSchema(TableName.IdentityGroupMembership),
|
||||
db.ref("createdAt").withSchema(TableName.IdentityGroupMembership).as("joinedGroupAt"),
|
||||
db.ref("name").withSchema(TableName.Identity),
|
||||
db.ref("id").withSchema(TableName.Identity).as("identityId"),
|
||||
db.raw(`count(*) OVER() as total_count`)
|
||||
)
|
||||
.offset(offset)
|
||||
.orderBy("name", "asc");
|
||||
|
||||
if (limit) {
|
||||
void query.limit(limit);
|
||||
}
|
||||
|
||||
if (search) {
|
||||
void query.andWhereRaw(`LOWER("${TableName.Identity}"."name") ilike ?`, `%${search}%`);
|
||||
}
|
||||
|
||||
switch (filter) {
|
||||
case FilterReturnedMachineIdentities.ASSIGNED_MACHINE_IDENTITIES:
|
||||
void query.whereNotNull(`${TableName.IdentityGroupMembership}.createdAt`);
|
||||
break;
|
||||
case FilterReturnedMachineIdentities.NON_ASSIGNED_MACHINE_IDENTITIES:
|
||||
void query.whereNull(`${TableName.IdentityGroupMembership}.createdAt`);
|
||||
break;
|
||||
default:
|
||||
break;
|
||||
}
|
||||
|
||||
const machineIdentities = await query;
|
||||
|
||||
return {
|
||||
machineIdentities: machineIdentities.map(({ name, identityId, joinedGroupAt, groupId: identityGroupId }) => ({
|
||||
id: identityId,
|
||||
name,
|
||||
isPartOfGroup: Boolean(identityGroupId),
|
||||
joinedGroupAt
|
||||
})),
|
||||
// @ts-expect-error col select is raw and not strongly typed
|
||||
totalCount: Number(machineIdentities?.[0]?.total_count ?? 0)
|
||||
};
|
||||
} catch (error) {
|
||||
throw new DatabaseError({ error, name: "Find all group identities" });
|
||||
}
|
||||
};
|
||||
|
||||
const findAllGroupPossibleMembers = async ({
|
||||
orgId,
|
||||
groupId,
|
||||
offset = 0,
|
||||
limit,
|
||||
search,
|
||||
orderBy = GroupMembersOrderBy.Name,
|
||||
orderDirection = OrderByDirection.ASC,
|
||||
memberTypeFilter
|
||||
}: {
|
||||
orgId: string;
|
||||
groupId: string;
|
||||
offset?: number;
|
||||
limit?: number;
|
||||
search?: string;
|
||||
orderBy?: GroupMembersOrderBy;
|
||||
orderDirection?: OrderByDirection;
|
||||
memberTypeFilter?: FilterMemberType[];
|
||||
}) => {
|
||||
try {
|
||||
const includeUsers =
|
||||
!memberTypeFilter || memberTypeFilter.length === 0 || memberTypeFilter.includes(FilterMemberType.USERS);
|
||||
const includeMachineIdentities =
|
||||
!memberTypeFilter ||
|
||||
memberTypeFilter.length === 0 ||
|
||||
memberTypeFilter.includes(FilterMemberType.MACHINE_IDENTITIES);
|
||||
|
||||
const query = db
|
||||
.replicaNode()(TableName.Membership)
|
||||
.where(`${TableName.Membership}.scopeOrgId`, orgId)
|
||||
.where(`${TableName.Membership}.scope`, AccessScope.Organization)
|
||||
.leftJoin(TableName.Users, `${TableName.Membership}.actorUserId`, `${TableName.Users}.id`)
|
||||
.leftJoin(TableName.Identity, `${TableName.Membership}.actorIdentityId`, `${TableName.Identity}.id`)
|
||||
.leftJoin(TableName.UserGroupMembership, (bd) => {
|
||||
bd.on(`${TableName.UserGroupMembership}.userId`, "=", `${TableName.Users}.id`).andOn(
|
||||
`${TableName.UserGroupMembership}.groupId`,
|
||||
"=",
|
||||
db.raw("?", [groupId])
|
||||
);
|
||||
})
|
||||
.leftJoin(TableName.IdentityGroupMembership, (bd) => {
|
||||
bd.on(`${TableName.IdentityGroupMembership}.identityId`, "=", `${TableName.Identity}.id`).andOn(
|
||||
`${TableName.IdentityGroupMembership}.groupId`,
|
||||
"=",
|
||||
db.raw("?", [groupId])
|
||||
);
|
||||
})
|
||||
.where((qb) => {
|
||||
void qb
|
||||
.where((innerQb) => {
|
||||
void innerQb
|
||||
.whereNotNull(`${TableName.Membership}.actorUserId`)
|
||||
.whereNotNull(`${TableName.UserGroupMembership}.createdAt`)
|
||||
.where(`${TableName.Users}.isGhost`, false);
|
||||
})
|
||||
.orWhere((innerQb) => {
|
||||
void innerQb
|
||||
.whereNotNull(`${TableName.Membership}.actorIdentityId`)
|
||||
.whereNotNull(`${TableName.IdentityGroupMembership}.createdAt`)
|
||||
.whereNull(`${TableName.Identity}.projectId`);
|
||||
});
|
||||
})
|
||||
.select(
|
||||
db.raw(
|
||||
`CASE WHEN "${TableName.Membership}"."actorUserId" IS NOT NULL THEN "${TableName.UserGroupMembership}"."createdAt" ELSE "${TableName.IdentityGroupMembership}"."createdAt" END as "joinedGroupAt"`
|
||||
),
|
||||
db.ref("email").withSchema(TableName.Users),
|
||||
db.ref("username").withSchema(TableName.Users),
|
||||
db.ref("firstName").withSchema(TableName.Users),
|
||||
db.ref("lastName").withSchema(TableName.Users),
|
||||
db.raw(`"${TableName.Users}"."id"::text as "userId"`),
|
||||
db.raw(`"${TableName.Identity}"."id"::text as "identityId"`),
|
||||
db.ref("name").withSchema(TableName.Identity).as("identityName"),
|
||||
db.raw(
|
||||
`CASE WHEN "${TableName.Membership}"."actorUserId" IS NOT NULL THEN 'user' ELSE 'machineIdentity' END as "member_type"`
|
||||
),
|
||||
db.raw(`count(*) OVER() as total_count`)
|
||||
);
|
||||
|
||||
void query.andWhere((qb) => {
|
||||
if (includeUsers) {
|
||||
void qb.whereNotNull(`${TableName.Membership}.actorUserId`);
|
||||
}
|
||||
|
||||
if (includeMachineIdentities) {
|
||||
void qb[includeUsers ? "orWhere" : "where"]((innerQb) => {
|
||||
void innerQb.whereNotNull(`${TableName.Membership}.actorIdentityId`);
|
||||
});
|
||||
}
|
||||
|
||||
if (!includeUsers && !includeMachineIdentities) {
|
||||
void qb.whereRaw("FALSE");
|
||||
}
|
||||
});
|
||||
|
||||
if (search) {
|
||||
void query.andWhere((qb) => {
|
||||
void qb
|
||||
.whereRaw(
|
||||
`CONCAT_WS(' ', "${TableName.Users}"."firstName", "${TableName.Users}"."lastName", lower("${TableName.Users}"."username")) ilike ?`,
|
||||
[`%${search}%`]
|
||||
)
|
||||
.orWhereRaw(`LOWER("${TableName.Identity}"."name") ilike ?`, [`%${search}%`]);
|
||||
});
|
||||
}
|
||||
|
||||
if (orderBy === GroupMembersOrderBy.Name) {
|
||||
const orderDirectionClause = orderDirection === OrderByDirection.ASC ? "ASC" : "DESC";
|
||||
|
||||
// This order by clause is used to sort the members by name.
|
||||
// It first checks if the full name (first name and last name) is not empty, then the username, then the email, then the identity name. If all of these are empty, it returns null.
|
||||
void query.orderByRaw(
|
||||
`LOWER(COALESCE(NULLIF(TRIM(CONCAT_WS(' ', "${TableName.Users}"."firstName", "${TableName.Users}"."lastName")), ''), "${TableName.Users}"."username", "${TableName.Users}"."email", "${TableName.Identity}"."name")) ${orderDirectionClause}`
|
||||
);
|
||||
}
|
||||
|
||||
if (offset) {
|
||||
void query.offset(offset);
|
||||
}
|
||||
if (limit) {
|
||||
void query.limit(limit);
|
||||
}
|
||||
|
||||
const results = (await query) as unknown as {
|
||||
email: string;
|
||||
username: string;
|
||||
firstName: string;
|
||||
lastName: string;
|
||||
userId: string;
|
||||
identityId: string;
|
||||
identityName: string;
|
||||
member_type: "user" | "machineIdentity";
|
||||
joinedGroupAt: Date;
|
||||
total_count: string;
|
||||
}[];
|
||||
|
||||
const members = results.map(
|
||||
({ email, username, firstName, lastName, userId, identityId, identityName, member_type, joinedGroupAt }) => {
|
||||
if (member_type === "user") {
|
||||
return {
|
||||
id: userId,
|
||||
joinedGroupAt,
|
||||
type: "user" as const,
|
||||
user: {
|
||||
id: userId,
|
||||
email,
|
||||
username,
|
||||
firstName,
|
||||
lastName
|
||||
}
|
||||
};
|
||||
}
|
||||
return {
|
||||
id: identityId,
|
||||
joinedGroupAt,
|
||||
type: "machineIdentity" as const,
|
||||
machineIdentity: {
|
||||
id: identityId,
|
||||
name: identityName
|
||||
}
|
||||
};
|
||||
}
|
||||
);
|
||||
|
||||
return {
|
||||
members,
|
||||
totalCount: Number(results?.[0]?.total_count ?? 0)
|
||||
};
|
||||
} catch (error) {
|
||||
throw new DatabaseError({ error, name: "Find all group possible members" });
|
||||
}
|
||||
};
|
||||
|
||||
const findAllGroupProjects = async ({
|
||||
orgId,
|
||||
groupId,
|
||||
@@ -182,8 +439,8 @@ export const groupDALFactory = (db: TDbClient) => {
|
||||
offset?: number;
|
||||
limit?: number;
|
||||
search?: string;
|
||||
filter?: EFilterReturnedProjects;
|
||||
orderBy?: EGroupProjectsOrderBy;
|
||||
filter?: FilterReturnedProjects;
|
||||
orderBy?: GroupProjectsOrderBy;
|
||||
orderDirection?: OrderByDirection;
|
||||
}) => {
|
||||
try {
|
||||
@@ -225,10 +482,10 @@ export const groupDALFactory = (db: TDbClient) => {
|
||||
}
|
||||
|
||||
switch (filter) {
|
||||
case EFilterReturnedProjects.ASSIGNED_PROJECTS:
|
||||
case FilterReturnedProjects.ASSIGNED_PROJECTS:
|
||||
void query.whereNotNull(`${TableName.Membership}.id`);
|
||||
break;
|
||||
case EFilterReturnedProjects.UNASSIGNED_PROJECTS:
|
||||
case FilterReturnedProjects.UNASSIGNED_PROJECTS:
|
||||
void query.whereNull(`${TableName.Membership}.id`);
|
||||
break;
|
||||
default:
|
||||
@@ -313,6 +570,8 @@ export const groupDALFactory = (db: TDbClient) => {
|
||||
...groupOrm,
|
||||
findGroups,
|
||||
findByOrgId,
|
||||
findAllGroupPossibleUsers,
|
||||
findAllGroupPossibleMachineIdentities,
|
||||
findAllGroupPossibleMembers,
|
||||
findAllGroupProjects,
|
||||
findGroupsByProjectId,
|
||||
|
||||
@@ -5,9 +5,11 @@ import { crypto } from "@app/lib/crypto/cryptography";
|
||||
import { BadRequestError, ForbiddenRequestError, NotFoundError, ScimRequestError } from "@app/lib/errors";
|
||||
|
||||
import {
|
||||
TAddIdentitiesToGroup,
|
||||
TAddUsersToGroup,
|
||||
TAddUsersToGroupByUserIds,
|
||||
TConvertPendingGroupAdditionsToGroupMemberships,
|
||||
TRemoveIdentitiesFromGroup,
|
||||
TRemoveUsersFromGroupByUserIds
|
||||
} from "./group-types";
|
||||
|
||||
@@ -285,6 +287,70 @@ export const addUsersToGroupByUserIds = async ({
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
* Add identities with identity ids [identityIds] to group [group].
|
||||
* @param {group} group - group to add identity(s) to
|
||||
* @param {string[]} identityIds - id(s) of organization scoped identity(s) to add to group
|
||||
* @returns {Promise<{ id: string }[]>} - id(s) of added identity(s)
|
||||
*/
|
||||
export const addIdentitiesToGroup = async ({
|
||||
group,
|
||||
identityIds,
|
||||
identityDAL,
|
||||
identityGroupMembershipDAL,
|
||||
membershipDAL
|
||||
}: TAddIdentitiesToGroup) => {
|
||||
const identityIdsSet = new Set(identityIds);
|
||||
const identityIdsArray = Array.from(identityIdsSet);
|
||||
|
||||
// ensure all identities exist and belong to the org via org scoped membership
|
||||
const foundIdentitiesMemberships = await membershipDAL.find({
|
||||
scope: AccessScope.Organization,
|
||||
scopeOrgId: group.orgId,
|
||||
$in: {
|
||||
actorIdentityId: identityIdsArray
|
||||
}
|
||||
});
|
||||
|
||||
const existingIdentityOrgMembershipsIdentityIdsSet = new Set(
|
||||
foundIdentitiesMemberships.map((u) => u.actorIdentityId as string)
|
||||
);
|
||||
|
||||
identityIdsArray.forEach((identityId) => {
|
||||
if (!existingIdentityOrgMembershipsIdentityIdsSet.has(identityId)) {
|
||||
throw new ForbiddenRequestError({
|
||||
message: `Identity with id ${identityId} is not part of the organization`
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
// check if identity group membership already exists
|
||||
const existingIdentityGroupMemberships = await identityGroupMembershipDAL.find({
|
||||
groupId: group.id,
|
||||
$in: {
|
||||
identityId: identityIdsArray
|
||||
}
|
||||
});
|
||||
|
||||
if (existingIdentityGroupMemberships.length) {
|
||||
throw new BadRequestError({
|
||||
message: `${identityIdsArray.length > 1 ? `Identities are` : `Identity is`} already part of the group ${group.slug}`
|
||||
});
|
||||
}
|
||||
|
||||
return identityDAL.transaction(async (tx) => {
|
||||
await identityGroupMembershipDAL.insertMany(
|
||||
foundIdentitiesMemberships.map((membership) => ({
|
||||
identityId: membership.actorIdentityId as string,
|
||||
groupId: group.id
|
||||
})),
|
||||
tx
|
||||
);
|
||||
|
||||
return identityIdsArray.map((identityId) => ({ id: identityId }));
|
||||
});
|
||||
};
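
// Illustrative usage (not introduced by this change): a service-layer caller wiring up the helper above.
// The group row and DAL instances are assumed to come from the calling service's dependencies.
//
//   const added = await addIdentitiesToGroup({
//     group, // org-scoped group row providing group.id, group.orgId and group.slug
//     identityIds: ["identity-1", "identity-2"],
//     identityDAL,
//     identityGroupMembershipDAL,
//     membershipDAL
//   });
//   // => [{ id: "identity-1" }, { id: "identity-2" }]; throws ForbiddenRequestError if an identity is
//   //    outside the organization, or BadRequestError if it is already a member of the group.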
/**
|
||||
* Remove users with user ids [userIds] from group [group].
|
||||
* - Users may be part of the group (non-pending + pending);
|
||||
@@ -421,6 +487,75 @@ export const removeUsersFromGroupByUserIds = async ({
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
* Remove identities with identity ids [identityIds] from group [group].
|
||||
* @param {group} group - group to remove identity(s) from
|
||||
* @param {string[]} identityIds - id(s) of identity(s) to remove from group
|
||||
* @returns {Promise<{ id: string }[]>} - id(s) of removed identity(s)
|
||||
*/
|
||||
export const removeIdentitiesFromGroup = async ({
|
||||
group,
|
||||
identityIds,
|
||||
identityDAL,
|
||||
membershipDAL,
|
||||
identityGroupMembershipDAL
|
||||
}: TRemoveIdentitiesFromGroup) => {
|
||||
const identityIdsSet = new Set(identityIds);
|
||||
const identityIdsArray = Array.from(identityIdsSet);
|
||||
|
||||
// ensure all identities exist and belong to the org via org scoped membership
|
||||
const foundIdentitiesMemberships = await membershipDAL.find({
|
||||
scope: AccessScope.Organization,
|
||||
scopeOrgId: group.orgId,
|
||||
$in: {
|
||||
actorIdentityId: identityIdsArray
|
||||
}
|
||||
});
|
||||
|
||||
const foundIdentitiesMembershipsIdentityIdsSet = new Set(
|
||||
foundIdentitiesMemberships.map((u) => u.actorIdentityId as string)
|
||||
);
|
||||
|
||||
if (foundIdentitiesMembershipsIdentityIdsSet.size !== identityIdsArray.length) {
|
||||
throw new NotFoundError({
|
||||
message: `Machine identities not found`
|
||||
});
|
||||
}
|
||||
|
||||
// check if identity group membership already exists
|
||||
const existingIdentityGroupMemberships = await identityGroupMembershipDAL.find({
|
||||
groupId: group.id,
|
||||
$in: {
|
||||
identityId: identityIdsArray
|
||||
}
|
||||
});
|
||||
|
||||
const existingIdentityGroupMembershipsIdentityIdsSet = new Set(
|
||||
existingIdentityGroupMemberships.map((u) => u.identityId)
|
||||
);
|
||||
|
||||
identityIdsArray.forEach((identityId) => {
|
||||
if (!existingIdentityGroupMembershipsIdentityIdsSet.has(identityId)) {
|
||||
throw new ForbiddenRequestError({
|
||||
message: `Machine identities are not part of the group ${group.slug}`
|
||||
});
|
||||
}
|
||||
});
|
||||
return identityDAL.transaction(async (tx) => {
|
||||
await identityGroupMembershipDAL.delete(
|
||||
{
|
||||
groupId: group.id,
|
||||
$in: {
|
||||
identityId: identityIdsArray
|
||||
}
|
||||
},
|
||||
tx
|
||||
);
|
||||
|
||||
return identityIdsArray.map((identityId) => ({ id: identityId }));
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
* Convert pending group additions for users with ids [userIds] to group memberships.
|
||||
* @param {string[]} userIds - id(s) of user(s) to try to convert pending group additions to group memberships
|
||||
|
||||
@@ -5,6 +5,8 @@ import { AccessScope, OrganizationActionScope, OrgMembershipRole, TRoles } from
import { TOidcConfigDALFactory } from "@app/ee/services/oidc/oidc-config-dal";
import { BadRequestError, NotFoundError, PermissionBoundaryError, UnauthorizedError } from "@app/lib/errors";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { TIdentityDALFactory } from "@app/services/identity/identity-dal";
import { TMembershipDALFactory } from "@app/services/membership/membership-dal";
import { TMembershipRoleDALFactory } from "@app/services/membership/membership-role-dal";
import { TMembershipGroupDALFactory } from "@app/services/membership-group/membership-group-dal";
import { TOrgDALFactory } from "@app/services/org/org-dal";
@@ -18,33 +20,48 @@ import { OrgPermissionGroupActions, OrgPermissionSubjects } from "../permission/
import { constructPermissionErrorMessage, validatePrivilegeChangeOperation } from "../permission/permission-fns";
import { TPermissionServiceFactory } from "../permission/permission-service-types";
import { TGroupDALFactory } from "./group-dal";
import { addUsersToGroupByUserIds, removeUsersFromGroupByUserIds } from "./group-fns";
import {
addIdentitiesToGroup,
addUsersToGroupByUserIds,
removeIdentitiesFromGroup,
removeUsersFromGroupByUserIds
} from "./group-fns";
import {
TAddMachineIdentityToGroupDTO,
TAddUserToGroupDTO,
TCreateGroupDTO,
TDeleteGroupDTO,
TGetGroupByIdDTO,
TListGroupMachineIdentitiesDTO,
TListGroupMembersDTO,
TListGroupProjectsDTO,
TListGroupUsersDTO,
TRemoveMachineIdentityFromGroupDTO,
TRemoveUserFromGroupDTO,
TUpdateGroupDTO
} from "./group-types";
import { TIdentityGroupMembershipDALFactory } from "./identity-group-membership-dal";
import { TUserGroupMembershipDALFactory } from "./user-group-membership-dal";

type TGroupServiceFactoryDep = {
|
||||
userDAL: Pick<TUserDALFactory, "find" | "findUserEncKeyByUserIdsBatch" | "transaction" | "findUserByUsername">;
|
||||
identityDAL: Pick<TIdentityDALFactory, "findOne" | "find" | "transaction">;
|
||||
identityGroupMembershipDAL: Pick<TIdentityGroupMembershipDALFactory, "find" | "delete" | "insertMany">;
|
||||
groupDAL: Pick<
|
||||
TGroupDALFactory,
|
||||
| "create"
|
||||
| "findOne"
|
||||
| "update"
|
||||
| "delete"
|
||||
| "findAllGroupPossibleUsers"
|
||||
| "findAllGroupPossibleMachineIdentities"
|
||||
| "findAllGroupPossibleMembers"
|
||||
| "findById"
|
||||
| "transaction"
|
||||
| "findAllGroupProjects"
|
||||
>;
|
||||
membershipGroupDAL: Pick<TMembershipGroupDALFactory, "find" | "findOne" | "create">;
|
||||
membershipDAL: Pick<TMembershipDALFactory, "find" | "findOne">;
|
||||
membershipRoleDAL: Pick<TMembershipRoleDALFactory, "create" | "delete">;
|
||||
orgDAL: Pick<TOrgDALFactory, "findMembership" | "countAllOrgMembers" | "findById">;
|
||||
userGroupMembershipDAL: Pick<
|
||||
@@ -65,6 +82,9 @@ type TGroupServiceFactoryDep = {
|
||||
export type TGroupServiceFactory = ReturnType<typeof groupServiceFactory>;
|
||||
|
||||
export const groupServiceFactory = ({
|
||||
identityDAL,
|
||||
membershipDAL,
|
||||
identityGroupMembershipDAL,
|
||||
userDAL,
|
||||
groupDAL,
|
||||
orgDAL,
|
||||
@@ -362,7 +382,7 @@ export const groupServiceFactory = ({
|
||||
message: `Failed to find group with ID ${id}`
|
||||
});
|
||||
|
||||
const { members, totalCount } = await groupDAL.findAllGroupPossibleMembers({
|
||||
const { members, totalCount } = await groupDAL.findAllGroupPossibleUsers({
|
||||
orgId: group.orgId,
|
||||
groupId: group.id,
|
||||
offset,
|
||||
@@ -375,6 +395,100 @@ export const groupServiceFactory = ({
|
||||
return { users: members, totalCount };
|
||||
};
|
||||
|
||||
const listGroupMachineIdentities = async ({
|
||||
id,
|
||||
offset,
|
||||
limit,
|
||||
actor,
|
||||
actorId,
|
||||
actorAuthMethod,
|
||||
actorOrgId,
|
||||
search,
|
||||
filter
|
||||
}: TListGroupMachineIdentitiesDTO) => {
|
||||
if (!actorOrgId) throw new UnauthorizedError({ message: "No organization ID provided in request" });
|
||||
|
||||
const { permission } = await permissionService.getOrgPermission({
|
||||
scope: OrganizationActionScope.Any,
|
||||
actor,
|
||||
actorId,
|
||||
orgId: actorOrgId,
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
});
|
||||
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionGroupActions.Read, OrgPermissionSubjects.Groups);
|
||||
|
||||
const group = await groupDAL.findOne({
|
||||
orgId: actorOrgId,
|
||||
id
|
||||
});
|
||||
|
||||
if (!group)
|
||||
throw new NotFoundError({
|
||||
message: `Failed to find group with ID ${id}`
|
||||
});
|
||||
|
||||
const { machineIdentities, totalCount } = await groupDAL.findAllGroupPossibleMachineIdentities({
|
||||
orgId: group.orgId,
|
||||
groupId: group.id,
|
||||
offset,
|
||||
limit,
|
||||
search,
|
||||
filter
|
||||
});
|
||||
|
||||
return { machineIdentities, totalCount };
|
||||
};
|
||||
|
||||
const listGroupMembers = async ({
|
||||
id,
|
||||
offset,
|
||||
limit,
|
||||
search,
|
||||
orderBy,
|
||||
orderDirection,
|
||||
memberTypeFilter,
|
||||
actor,
|
||||
actorId,
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
}: TListGroupMembersDTO) => {
|
||||
if (!actorOrgId) throw new UnauthorizedError({ message: "No organization ID provided in request" });
|
||||
|
||||
const { permission } = await permissionService.getOrgPermission({
|
||||
scope: OrganizationActionScope.Any,
|
||||
actor,
|
||||
actorId,
|
||||
orgId: actorOrgId,
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
});
|
||||
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionGroupActions.Read, OrgPermissionSubjects.Groups);
|
||||
|
||||
const group = await groupDAL.findOne({
|
||||
orgId: actorOrgId,
|
||||
id
|
||||
});
|
||||
|
||||
if (!group)
|
||||
throw new NotFoundError({
|
||||
message: `Failed to find group with ID ${id}`
|
||||
});
|
||||
|
||||
const { members, totalCount } = await groupDAL.findAllGroupPossibleMembers({
|
||||
orgId: group.orgId,
|
||||
groupId: group.id,
|
||||
offset,
|
||||
limit,
|
||||
search,
|
||||
orderBy,
|
||||
orderDirection,
|
||||
memberTypeFilter
|
||||
});
|
||||
|
||||
return { members, totalCount };
|
||||
};
|
||||
|
||||
const listGroupProjects = async ({
|
||||
id,
|
||||
offset,
|
||||
@@ -504,6 +618,81 @@ export const groupServiceFactory = ({
|
||||
return users[0];
|
||||
};
|
||||
|
||||
const addMachineIdentityToGroup = async ({
|
||||
id,
|
||||
identityId,
|
||||
actor,
|
||||
actorId,
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
}: TAddMachineIdentityToGroupDTO) => {
|
||||
if (!actorOrgId) throw new UnauthorizedError({ message: "No organization ID provided in request" });
|
||||
|
||||
const { permission } = await permissionService.getOrgPermission({
|
||||
scope: OrganizationActionScope.Any,
|
||||
actor,
|
||||
actorId,
|
||||
orgId: actorOrgId,
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
});
|
||||
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionGroupActions.Edit, OrgPermissionSubjects.Groups);
|
||||
|
||||
// check if group with slug exists
|
||||
const group = await groupDAL.findOne({
|
||||
orgId: actorOrgId,
|
||||
id
|
||||
});
|
||||
|
||||
if (!group)
|
||||
throw new NotFoundError({
|
||||
message: `Failed to find group with ID ${id}`
|
||||
});
|
||||
|
||||
const [rolePermissionDetails] = await permissionService.getOrgPermissionByRoles([group.role], actorOrgId);
|
||||
const { shouldUseNewPrivilegeSystem } = await orgDAL.findById(actorOrgId);
|
||||
|
||||
// check if user has broader or equal to privileges than group
|
||||
const permissionBoundary = validatePrivilegeChangeOperation(
|
||||
shouldUseNewPrivilegeSystem,
|
||||
OrgPermissionGroupActions.AddIdentities,
|
||||
OrgPermissionSubjects.Groups,
|
||||
permission,
|
||||
rolePermissionDetails.permission
|
||||
);
|
||||
|
||||
if (!permissionBoundary.isValid)
|
||||
throw new PermissionBoundaryError({
|
||||
message: constructPermissionErrorMessage(
|
||||
"Failed to add identity to more privileged group",
|
||||
shouldUseNewPrivilegeSystem,
|
||||
OrgPermissionGroupActions.AddIdentities,
|
||||
OrgPermissionSubjects.Groups
|
||||
),
|
||||
details: { missingPermissions: permissionBoundary.missingPermissions }
|
||||
});
|
||||
|
||||
const identityMembership = await membershipDAL.findOne({
|
||||
scope: AccessScope.Organization,
|
||||
scopeOrgId: group.orgId,
|
||||
actorIdentityId: identityId
|
||||
});
|
||||
|
||||
if (!identityMembership) {
|
||||
throw new NotFoundError({ message: `Identity with id ${identityId} is not part of the organization` });
|
||||
}
|
||||
|
||||
const identities = await addIdentitiesToGroup({
|
||||
group,
|
||||
identityIds: [identityId],
|
||||
identityDAL,
|
||||
membershipDAL,
|
||||
identityGroupMembershipDAL
|
||||
});
|
||||
|
||||
return identities[0];
|
||||
};
|
||||
|
||||
const removeUserFromGroup = async ({
|
||||
id,
|
||||
username,
|
||||
@@ -587,14 +776,91 @@ export const groupServiceFactory = ({
|
||||
return users[0];
|
||||
};
|
||||
|
||||
const removeMachineIdentityFromGroup = async ({
|
||||
id,
|
||||
identityId,
|
||||
actor,
|
||||
actorId,
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
}: TRemoveMachineIdentityFromGroupDTO) => {
|
||||
if (!actorOrgId) throw new UnauthorizedError({ message: "No organization ID provided in request" });
|
||||
|
||||
const { permission } = await permissionService.getOrgPermission({
|
||||
scope: OrganizationActionScope.Any,
|
||||
actor,
|
||||
actorId,
|
||||
orgId: actorOrgId,
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
});
|
||||
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionGroupActions.Edit, OrgPermissionSubjects.Groups);
|
||||
|
||||
const group = await groupDAL.findOne({
|
||||
orgId: actorOrgId,
|
||||
id
|
||||
});
|
||||
|
||||
if (!group)
|
||||
throw new NotFoundError({
|
||||
message: `Failed to find group with ID ${id}`
|
||||
});
|
||||
|
||||
const [rolePermissionDetails] = await permissionService.getOrgPermissionByRoles([group.role], actorOrgId);
|
||||
const { shouldUseNewPrivilegeSystem } = await orgDAL.findById(actorOrgId);
|
||||
|
||||
// check if user has broader or equal to privileges than group
|
||||
const permissionBoundary = validatePrivilegeChangeOperation(
|
||||
shouldUseNewPrivilegeSystem,
|
||||
OrgPermissionGroupActions.RemoveIdentities,
|
||||
OrgPermissionSubjects.Groups,
|
||||
permission,
|
||||
rolePermissionDetails.permission
|
||||
);
|
||||
if (!permissionBoundary.isValid)
|
||||
throw new PermissionBoundaryError({
|
||||
message: constructPermissionErrorMessage(
|
||||
"Failed to remove identity from more privileged group",
|
||||
shouldUseNewPrivilegeSystem,
|
||||
OrgPermissionGroupActions.RemoveIdentities,
|
||||
OrgPermissionSubjects.Groups
|
||||
),
|
||||
details: { missingPermissions: permissionBoundary.missingPermissions }
|
||||
});
|
||||
|
||||
const identityMembership = await membershipDAL.findOne({
|
||||
scope: AccessScope.Organization,
|
||||
scopeOrgId: group.orgId,
|
||||
actorIdentityId: identityId
|
||||
});
|
||||
|
||||
if (!identityMembership) {
|
||||
throw new NotFoundError({ message: `Identity with id ${identityId} is not part of the organization` });
|
||||
}
|
||||
|
||||
const identities = await removeIdentitiesFromGroup({
|
||||
group,
|
||||
identityIds: [identityId],
|
||||
identityDAL,
|
||||
membershipDAL,
|
||||
identityGroupMembershipDAL
|
||||
});
|
||||
|
||||
return identities[0];
|
||||
};
|
||||
|
||||
return {
|
||||
createGroup,
|
||||
updateGroup,
|
||||
deleteGroup,
|
||||
listGroupUsers,
|
||||
listGroupMachineIdentities,
|
||||
listGroupMembers,
|
||||
listGroupProjects,
|
||||
addUserToGroup,
|
||||
addMachineIdentityToGroup,
|
||||
removeUserFromGroup,
|
||||
removeMachineIdentityFromGroup,
|
||||
getGroupById
|
||||
};
|
||||
};
|
||||
|
||||
@@ -3,6 +3,8 @@ import { Knex } from "knex";
import { TGroups } from "@app/db/schemas";
import { TUserGroupMembershipDALFactory } from "@app/ee/services/group/user-group-membership-dal";
import { OrderByDirection, TGenericPermission } from "@app/lib/types";
import { TIdentityDALFactory } from "@app/services/identity/identity-dal";
import { TMembershipDALFactory } from "@app/services/membership/membership-dal";
import { TMembershipGroupDALFactory } from "@app/services/membership-group/membership-group-dal";
import { TOrgDALFactory } from "@app/services/org/org-dal";
import { TProjectDALFactory } from "@app/services/project/project-dal";
@@ -10,6 +12,8 @@ import { TProjectBotDALFactory } from "@app/services/project-bot/project-bot-dal
import { TProjectKeyDALFactory } from "@app/services/project-key/project-key-dal";
import { TUserDALFactory } from "@app/services/user/user-dal";

import { TIdentityGroupMembershipDALFactory } from "./identity-group-membership-dal";

export type TCreateGroupDTO = {
name: string;
slug?: string;
@@ -39,7 +43,25 @@ export type TListGroupUsersDTO = {
limit: number;
username?: string;
search?: string;
filter?: EFilterReturnedUsers;
filter?: FilterReturnedUsers;
} & TGenericPermission;

export type TListGroupMachineIdentitiesDTO = {
id: string;
offset: number;
limit: number;
search?: string;
filter?: FilterReturnedMachineIdentities;
} & TGenericPermission;

export type TListGroupMembersDTO = {
id: string;
offset: number;
limit: number;
search?: string;
orderBy?: GroupMembersOrderBy;
orderDirection?: OrderByDirection;
memberTypeFilter?: FilterMemberType[];
} & TGenericPermission;

export type TListGroupProjectsDTO = {
@@ -47,8 +69,8 @@ export type TListGroupProjectsDTO = {
offset: number;
limit: number;
search?: string;
filter?: EFilterReturnedProjects;
orderBy?: EGroupProjectsOrderBy;
filter?: FilterReturnedProjects;
orderBy?: GroupProjectsOrderBy;
orderDirection?: OrderByDirection;
} & TGenericPermission;

@@ -61,11 +83,21 @@ export type TAddUserToGroupDTO = {
username: string;
} & TGenericPermission;

export type TAddMachineIdentityToGroupDTO = {
id: string;
identityId: string;
} & TGenericPermission;

export type TRemoveUserFromGroupDTO = {
id: string;
username: string;
} & TGenericPermission;

export type TRemoveMachineIdentityFromGroupDTO = {
id: string;
identityId: string;
} & TGenericPermission;

// group fns types

export type TAddUsersToGroup = {
@@ -93,6 +125,14 @@ export type TAddUsersToGroupByUserIds = {
tx?: Knex;
};

export type TAddIdentitiesToGroup = {
group: TGroups;
identityIds: string[];
identityDAL: Pick<TIdentityDALFactory, "transaction">;
identityGroupMembershipDAL: Pick<TIdentityGroupMembershipDALFactory, "find" | "insertMany">;
membershipDAL: Pick<TMembershipDALFactory, "find">;
};

export type TRemoveUsersFromGroupByUserIds = {
group: TGroups;
userIds: string[];
@@ -103,6 +143,14 @@ export type TRemoveUsersFromGroupByUserIds = {
tx?: Knex;
};

export type TRemoveIdentitiesFromGroup = {
group: TGroups;
identityIds: string[];
identityDAL: Pick<TIdentityDALFactory, "find" | "transaction">;
membershipDAL: Pick<TMembershipDALFactory, "find">;
identityGroupMembershipDAL: Pick<TIdentityGroupMembershipDALFactory, "find" | "delete">;
};

export type TConvertPendingGroupAdditionsToGroupMemberships = {
userIds: string[];
userDAL: Pick<TUserDALFactory, "findUserEncKeyByUserIdsBatch" | "transaction" | "find" | "findById">;
@@ -117,16 +165,30 @@ export type TConvertPendingGroupAdditionsToGroupMemberships = {
tx?: Knex;
};

export enum EFilterReturnedUsers {
export enum FilterReturnedUsers {
EXISTING_MEMBERS = "existingMembers",
NON_MEMBERS = "nonMembers"
}

export enum EFilterReturnedProjects {
export enum FilterReturnedMachineIdentities {
ASSIGNED_MACHINE_IDENTITIES = "assignedMachineIdentities",
NON_ASSIGNED_MACHINE_IDENTITIES = "nonAssignedMachineIdentities"
}

export enum FilterReturnedProjects {
ASSIGNED_PROJECTS = "assignedProjects",
UNASSIGNED_PROJECTS = "unassignedProjects"
}

export enum EGroupProjectsOrderBy {
export enum GroupProjectsOrderBy {
Name = "name"
}

export enum GroupMembersOrderBy {
Name = "name"
}

export enum FilterMemberType {
USERS = "users",
MACHINE_IDENTITIES = "machineIdentities"
}

@@ -0,0 +1,13 @@
import { TDbClient } from "@app/db";
import { TableName } from "@app/db/schemas";
import { ormify } from "@app/lib/knex";

export type TIdentityGroupMembershipDALFactory = ReturnType<typeof identityGroupMembershipDALFactory>;

export const identityGroupMembershipDALFactory = (db: TDbClient) => {
const identityGroupMembershipOrm = ormify(db, TableName.IdentityGroupMembership);

return {
...identityGroupMembershipOrm
};
};
@@ -88,8 +88,10 @@ export enum OrgPermissionGroupActions {
Edit = "edit",
Delete = "delete",
GrantPrivileges = "grant-privileges",
AddIdentities = "add-identities",
AddMembers = "add-members",
RemoveMembers = "remove-members"
RemoveMembers = "remove-members",
RemoveIdentities = "remove-identities"
}

export enum OrgPermissionBillingActions {
@@ -381,8 +383,10 @@ const buildAdminPermission = () => {
can(OrgPermissionGroupActions.Edit, OrgPermissionSubjects.Groups);
can(OrgPermissionGroupActions.Delete, OrgPermissionSubjects.Groups);
can(OrgPermissionGroupActions.GrantPrivileges, OrgPermissionSubjects.Groups);
can(OrgPermissionGroupActions.AddIdentities, OrgPermissionSubjects.Groups);
can(OrgPermissionGroupActions.AddMembers, OrgPermissionSubjects.Groups);
can(OrgPermissionGroupActions.RemoveMembers, OrgPermissionSubjects.Groups);
can(OrgPermissionGroupActions.RemoveIdentities, OrgPermissionSubjects.Groups);

can(OrgPermissionBillingActions.Read, OrgPermissionSubjects.Billing);
can(OrgPermissionBillingActions.ManageBilling, OrgPermissionSubjects.Billing);

@@ -178,6 +178,16 @@ export const permissionDALFactory = (db: TDbClient): TPermissionDALFactory => {
|
||||
.where(`${TableName.UserGroupMembership}.userId`, actorId)
|
||||
.select(db.ref("id").withSchema(TableName.Groups));
|
||||
|
||||
const identityGroupSubquery = (tx || db)(TableName.Groups)
|
||||
.leftJoin(
|
||||
TableName.IdentityGroupMembership,
|
||||
`${TableName.IdentityGroupMembership}.groupId`,
|
||||
`${TableName.Groups}.id`
|
||||
)
|
||||
.where(`${TableName.Groups}.orgId`, scopeData.orgId)
|
||||
.where(`${TableName.IdentityGroupMembership}.identityId`, actorId)
|
||||
.select(db.ref("id").withSchema(TableName.Groups));
|
||||
|
||||
const docs = await (tx || db)
|
||||
.replicaNode()(TableName.Membership)
|
||||
.join(TableName.MembershipRole, `${TableName.Membership}.id`, `${TableName.MembershipRole}.membershipId`)
|
||||
@@ -214,7 +224,9 @@ export const permissionDALFactory = (db: TDbClient): TPermissionDALFactory => {
|
||||
.where(`${TableName.Membership}.actorUserId`, actorId)
|
||||
.orWhereIn(`${TableName.Membership}.actorGroupId`, userGroupSubquery);
|
||||
} else if (actorType === ActorType.IDENTITY) {
|
||||
void qb.where(`${TableName.Membership}.actorIdentityId`, actorId);
|
||||
void qb
|
||||
.where(`${TableName.Membership}.actorIdentityId`, actorId)
|
||||
.orWhereIn(`${TableName.Membership}.actorGroupId`, identityGroupSubquery);
|
||||
}
|
||||
})
|
||||
.where((qb) => {
|
||||
@@ -653,6 +665,15 @@ export const permissionDALFactory = (db: TDbClient): TPermissionDALFactory => {
|
||||
orgId: string
|
||||
) => {
|
||||
try {
|
||||
const identityGroupSubquery = db(TableName.Groups)
|
||||
.leftJoin(
|
||||
TableName.IdentityGroupMembership,
|
||||
`${TableName.IdentityGroupMembership}.groupId`,
|
||||
`${TableName.Groups}.id`
|
||||
)
|
||||
.where(`${TableName.Groups}.orgId`, orgId)
|
||||
.select(db.ref("id").withSchema(TableName.Groups));
|
||||
|
||||
const docs = await db
|
||||
.replicaNode()(TableName.Membership)
|
||||
.join(TableName.MembershipRole, `${TableName.Membership}.id`, `${TableName.MembershipRole}.membershipId`)
|
||||
@@ -668,7 +689,11 @@ export const permissionDALFactory = (db: TDbClient): TPermissionDALFactory => {
|
||||
void queryBuilder.on(`${TableName.Membership}.actorIdentityId`, `${TableName.IdentityMetadata}.identityId`);
|
||||
})
|
||||
.where(`${TableName.Membership}.scopeOrgId`, orgId)
|
||||
.whereNotNull(`${TableName.Membership}.actorIdentityId`)
|
||||
.where((qb) => {
|
||||
void qb
|
||||
.whereNotNull(`${TableName.Membership}.actorIdentityId`)
|
||||
.orWhereIn(`${TableName.Membership}.actorGroupId`, identityGroupSubquery);
|
||||
})
|
||||
.where(`${TableName.Membership}.scope`, AccessScope.Project)
|
||||
.where(`${TableName.Membership}.scopeProjectId`, projectId)
|
||||
.select(selectAllTableCols(TableName.MembershipRole))
|
||||
|
||||
@@ -122,6 +122,11 @@ export const pkiAcmeChallengeDALFactory = (db: TDbClient) => {
|
||||
const result = await (tx || db)(TableName.PkiAcmeChallenge)
|
||||
.join(TableName.PkiAcmeAuth, `${TableName.PkiAcmeChallenge}.authId`, `${TableName.PkiAcmeAuth}.id`)
|
||||
.join(TableName.PkiAcmeAccount, `${TableName.PkiAcmeAuth}.accountId`, `${TableName.PkiAcmeAccount}.id`)
|
||||
.join(
|
||||
TableName.PkiCertificateProfile,
|
||||
`${TableName.PkiAcmeAccount}.profileId`,
|
||||
`${TableName.PkiCertificateProfile}.id`
|
||||
)
|
||||
.select(
|
||||
selectAllTableCols(TableName.PkiAcmeChallenge),
|
||||
db.ref("id").withSchema(TableName.PkiAcmeAuth).as("authId"),
|
||||
@@ -131,7 +136,9 @@ export const pkiAcmeChallengeDALFactory = (db: TDbClient) => {
|
||||
db.ref("identifierValue").withSchema(TableName.PkiAcmeAuth).as("authIdentifierValue"),
|
||||
db.ref("expiresAt").withSchema(TableName.PkiAcmeAuth).as("authExpiresAt"),
|
||||
db.ref("id").withSchema(TableName.PkiAcmeAccount).as("accountId"),
|
||||
db.ref("publicKeyThumbprint").withSchema(TableName.PkiAcmeAccount).as("accountPublicKeyThumbprint")
|
||||
db.ref("publicKeyThumbprint").withSchema(TableName.PkiAcmeAccount).as("accountPublicKeyThumbprint"),
|
||||
db.ref("profileId").withSchema(TableName.PkiAcmeAccount).as("profileId"),
|
||||
db.ref("projectId").withSchema(TableName.PkiCertificateProfile).as("projectId")
|
||||
)
|
||||
// For all challenges, acquire update lock on the auth to avoid race conditions
|
||||
.forUpdate(TableName.PkiAcmeAuth)
|
||||
@@ -149,6 +156,8 @@ export const pkiAcmeChallengeDALFactory = (db: TDbClient) => {
|
||||
authExpiresAt,
|
||||
accountId,
|
||||
accountPublicKeyThumbprint,
|
||||
profileId,
|
||||
projectId,
|
||||
...challenge
|
||||
} = result;
|
||||
return {
|
||||
@@ -161,7 +170,11 @@ export const pkiAcmeChallengeDALFactory = (db: TDbClient) => {
|
||||
expiresAt: authExpiresAt,
|
||||
account: {
|
||||
id: accountId,
|
||||
publicKeyThumbprint: accountPublicKeyThumbprint
|
||||
publicKeyThumbprint: accountPublicKeyThumbprint,
|
||||
project: {
|
||||
id: projectId
|
||||
},
|
||||
profileId
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
@@ -1,11 +1,16 @@
import { resolve4, Resolver } from "node:dns/promises";

import axios, { AxiosError } from "axios";

import { TPkiAcmeChallenges } from "@app/db/schemas/pki-acme-challenges";
import { getConfig } from "@app/lib/config/env";
import { crypto } from "@app/lib/crypto/cryptography";
import { BadRequestError, NotFoundError } from "@app/lib/errors";
import { isPrivateIp } from "@app/lib/ip/ipRange";
import { logger } from "@app/lib/logger";
import { ActorType } from "@app/services/auth/auth-type";

import { EventType, TAuditLogServiceFactory } from "../audit-log/audit-log-types";
import { TPkiAcmeChallengeDALFactory } from "./pki-acme-challenge-dal";
import {
AcmeConnectionError,
@@ -15,6 +20,7 @@
} from "./pki-acme-errors";
import { AcmeAuthStatus, AcmeChallengeStatus, AcmeChallengeType } from "./pki-acme-schemas";
import { TPkiAcmeChallengeServiceFactory } from "./pki-acme-types";
import { isValidIp } from "@app/lib/ip";

type TPkiAcmeChallengeServiceFactoryDep = {
|
||||
acmeChallengeDAL: Pick<
|
||||
@@ -25,12 +31,17 @@ type TPkiAcmeChallengeServiceFactoryDep = {
|
||||
| "markAsInvalidCascadeById"
|
||||
| "updateById"
|
||||
>;
|
||||
auditLogService: Pick<TAuditLogServiceFactory, "createAuditLog">;
|
||||
};
|
||||
|
||||
export const pkiAcmeChallengeServiceFactory = ({
|
||||
acmeChallengeDAL
|
||||
acmeChallengeDAL,
|
||||
auditLogService
|
||||
}: TPkiAcmeChallengeServiceFactoryDep): TPkiAcmeChallengeServiceFactory => {
|
||||
const appCfg = getConfig();
|
||||
|
||||
type ChallengeWithAuth = NonNullable<Awaited<ReturnType<typeof acmeChallengeDAL.findByIdForChallengeValidation>>>;
|
||||
|
||||
const markChallengeAsReady = async (challengeId: string): Promise<TPkiAcmeChallenges> => {
|
||||
return acmeChallengeDAL.transaction(async (tx) => {
|
||||
logger.info({ challengeId }, "Validating ACME challenge response");
|
||||
@@ -51,31 +62,19 @@ export const pkiAcmeChallengeServiceFactory = ({
|
||||
message: `ACME auth status is ${challenge.auth.status} instead of ${AcmeAuthStatus.Pending}`
|
||||
});
|
||||
}
|
||||
|
||||
// TODO: support other challenge types here. Currently only HTTP-01 is supported
|
||||
if (challenge.type !== AcmeChallengeType.HTTP_01) {
|
||||
throw new BadRequestError({ message: "Only HTTP-01 challenges are supported for now" });
|
||||
}
|
||||
const host = challenge.auth.identifierValue;
|
||||
// check if host is a private ip address
|
||||
if (isPrivateIp(host)) {
|
||||
throw new BadRequestError({ message: "Private IP addresses are not allowed" });
|
||||
}
|
||||
if (challenge.type !== AcmeChallengeType.HTTP_01 && challenge.type !== AcmeChallengeType.DNS_01) {
|
||||
throw new BadRequestError({ message: "Only HTTP-01 or DNS-01 challenges are supported for now" });
|
||||
}
|
||||
return acmeChallengeDAL.updateById(challengeId, { status: AcmeChallengeStatus.Processing }, tx);
|
||||
});
|
||||
};
|
||||
|
||||
const validateChallengeResponse = async (challengeId: string, retryCount: number): Promise<void> => {
|
||||
logger.info({ challengeId, retryCount }, "Validating ACME challenge response");
|
||||
const challenge = await acmeChallengeDAL.findByIdForChallengeValidation(challengeId);
|
||||
if (!challenge) {
|
||||
throw new NotFoundError({ message: "ACME challenge not found" });
|
||||
}
|
||||
if (challenge.status !== AcmeChallengeStatus.Processing) {
|
||||
throw new BadRequestError({
|
||||
message: `ACME challenge is ${challenge.status} instead of ${AcmeChallengeStatus.Processing}`
|
||||
});
|
||||
}
|
||||
const validateHttp01Challenge = async (challenge: ChallengeWithAuth): Promise<void> => {
|
||||
let host = challenge.auth.identifierValue;
|
||||
if (appCfg.isAcmeDevelopmentMode && appCfg.ACME_DEVELOPMENT_HTTP01_CHALLENGE_HOST_OVERRIDES[host]) {
|
||||
host = appCfg.ACME_DEVELOPMENT_HTTP01_CHALLENGE_HOST_OVERRIDES[host];
|
||||
@@ -86,42 +85,85 @@ export const pkiAcmeChallengeServiceFactory = ({
|
||||
}
|
||||
const challengeUrl = new URL(`/.well-known/acme-challenge/${challenge.auth.token}`, `http://${host}`);
|
||||
logger.info({ challengeUrl }, "Performing ACME HTTP-01 challenge validation");
|
||||
try {
|
||||
// TODO: read config from the profile to get the timeout instead
|
||||
const timeoutMs = 10 * 1000; // 10 seconds
|
||||
// Notice: well, we are in a transaction, ideally we should not hold transaction and perform
|
||||
// a long running operation for long time. But assuming we are not performing a tons of
|
||||
// challenge validation at the same time, it should be fine.
|
||||
const challengeResponse = await axios.get<string>(challengeUrl.toString(), {
|
||||
// In case if we override the host in the development mode, still provide the original host in the header
|
||||
// to help the upstream server to validate the request
|
||||
headers: { Host: challenge.auth.identifierValue },
|
||||
timeout: timeoutMs,
|
||||
responseType: "text",
|
||||
validateStatus: () => true
|
||||
|
||||
// TODO: read config from the profile to get the timeout instead
|
||||
const timeoutMs = 10 * 1000; // 10 seconds
|
||||
// Notice: well, we are in a transaction, ideally we should not hold transaction and perform
|
||||
// a long running operation for long time. But assuming we are not performing a tons of
|
||||
// challenge validation at the same time, it should be fine.
|
||||
const challengeResponse = await axios.get<string>(challengeUrl.toString(), {
|
||||
// In case if we override the host in the development mode, still provide the original host in the header
|
||||
// to help the upstream server to validate the request
|
||||
headers: { Host: challenge.auth.identifierValue },
|
||||
timeout: timeoutMs,
|
||||
responseType: "text",
|
||||
validateStatus: () => true
|
||||
});
|
||||
|
||||
if (challengeResponse.status !== 200) {
|
||||
throw new AcmeIncorrectResponseError({
|
||||
message: `ACME challenge response is not 200: ${challengeResponse.status}`
|
||||
});
|
||||
if (challengeResponse.status !== 200) {
|
||||
throw new AcmeIncorrectResponseError({
|
||||
message: `ACME challenge response is not 200: ${challengeResponse.status}`
|
||||
});
|
||||
}
|
||||
const challengeResponseBody: string = challengeResponse.data;
|
||||
const thumbprint = challenge.auth.account.publicKeyThumbprint;
|
||||
const expectedChallengeResponseBody = `${challenge.auth.token}.${thumbprint}`;
|
||||
if (challengeResponseBody.trimEnd() !== expectedChallengeResponseBody) {
|
||||
throw new AcmeIncorrectResponseError({ message: "ACME challenge response is not correct" });
|
||||
}
|
||||
logger.info({ challengeId }, "ACME challenge response is correct, marking challenge as valid");
|
||||
await acmeChallengeDAL.markAsValidCascadeById(challengeId);
|
||||
} catch (exp) {
|
||||
if (retryCount >= 2) {
|
||||
logger.error(
|
||||
exp,
|
||||
`Last attempt to validate ACME challenge response failed, marking ${challengeId} challenge as invalid`
|
||||
);
|
||||
// This is the last attempt to validate the challenge response, if it fails, we mark the challenge as invalid
|
||||
await acmeChallengeDAL.markAsInvalidCascadeById(challengeId);
|
||||
}
|
||||
}
|
||||
|
||||
const challengeResponseBody: string = challengeResponse.data;
|
||||
const thumbprint = challenge.auth.account.publicKeyThumbprint;
|
||||
const expectedChallengeResponseBody = `${challenge.auth.token}.${thumbprint}`;
|
||||
|
||||
if (challengeResponseBody.trimEnd() !== expectedChallengeResponseBody) {
|
||||
throw new AcmeIncorrectResponseError({ message: "ACME HTTP-01 challenge response is not correct" });
|
||||
}
|
||||
};
|
||||
|
||||
const validateDns01Challenge = async (challenge: ChallengeWithAuth): Promise<void> => {
|
||||
const resolver = new Resolver();
|
||||
if (appCfg.ACME_DNS_RESOLVER_SERVERS.length > 0) {
|
||||
const servers = appCfg.ACME_DNS_RESOLVE_RESOLVER_SERVERS_HOST_ENABLED
|
||||
? await Promise.all(
|
||||
appCfg.ACME_DNS_RESOLVER_SERVERS.map(async (server) => {
|
||||
if (isValidIp(server)) {
|
||||
return server;
|
||||
}
|
||||
const ips = await resolve4(server);
|
||||
return ips[0];
|
||||
})
|
||||
)
|
||||
: appCfg.ACME_DNS_RESOLVER_SERVERS;
|
||||
resolver.setServers(servers);
|
||||
}
|
||||
|
||||
const recordName = `_acme-challenge.${challenge.auth.identifierValue}`;
|
||||
const records = await resolver.resolveTxt(recordName);
|
||||
const recordValues = records.map((chunks) => chunks.join(""));
|
||||
|
||||
const thumbprint = challenge.auth.account.publicKeyThumbprint;
|
||||
const keyAuthorization = `${challenge.auth.token}.${thumbprint}`;
|
||||
const digest = crypto.nativeCrypto.createHash("sha256").update(keyAuthorization).digest();
|
||||
const expectedChallengeResponseValue = Buffer.from(digest).toString("base64url");
|
||||
|
||||
if (!recordValues.some((recordValue) => recordValue.trim() === expectedChallengeResponseValue)) {
|
||||
throw new AcmeIncorrectResponseError({ message: "ACME DNS-01 challenge response is not correct" });
|
||||
}
|
||||
};
|
||||
|
||||
const handleChallengeValidationError = async (
|
||||
exp: unknown,
|
||||
challenge: ChallengeWithAuth,
|
||||
challengeId: string,
|
||||
retryCount: number
|
||||
): Promise<never> => {
|
||||
let finalAttempt = false;
|
||||
if (retryCount >= 2) {
|
||||
logger.error(
|
||||
exp,
|
||||
`Last attempt to validate ACME challenge response failed, marking ${challengeId} challenge as invalid`
|
||||
);
|
||||
// This is the last attempt to validate the challenge response, if it fails, we mark the challenge as invalid
|
||||
await acmeChallengeDAL.markAsInvalidCascadeById(challengeId);
|
||||
finalAttempt = true;
|
||||
}
|
||||
|
||||
try {
|
||||
// Properly type and inspect the error
|
||||
if (axios.isAxiosError(exp)) {
|
||||
const axiosError = exp as AxiosError;
|
||||
@@ -145,11 +187,80 @@ export const pkiAcmeChallengeServiceFactory = ({
|
||||
throw new AcmeServerInternalError({ message: "Unknown error validating ACME challenge response" });
|
||||
}
|
||||
if (exp instanceof Error) {
|
||||
if ((exp as unknown as { code?: string })?.code === "ENOTFOUND") {
|
||||
throw new AcmeDnsFailureError({ message: "Hostname could not be resolved (DNS failure)" });
|
||||
}
|
||||
logger.error(exp, "Error validating ACME challenge response");
|
||||
throw exp;
|
||||
}
|
||||
logger.error(exp, "Unknown error validating ACME challenge response");
|
||||
throw new AcmeServerInternalError({ message: "Unknown error validating ACME challenge response" });
|
||||
} catch (outterExp) {
|
||||
await auditLogService.createAuditLog({
|
||||
projectId: challenge.auth.account.project.id,
|
||||
actor: {
|
||||
type: ActorType.ACME_ACCOUNT,
|
||||
metadata: {
|
||||
profileId: challenge.auth.account.profileId,
|
||||
accountId: challenge.auth.account.id
|
||||
}
|
||||
},
|
||||
event: {
|
||||
type: finalAttempt ? EventType.FAIL_ACME_CHALLENGE : EventType.ATTEMPT_ACME_CHALLENGE,
|
||||
metadata: {
|
||||
challengeId,
|
||||
type: challenge.type as AcmeChallengeType,
|
||||
retryCount,
|
||||
errorMessage: exp instanceof Error ? exp.message : "Unknown error"
|
||||
}
|
||||
}
|
||||
});
|
||||
throw outterExp;
|
||||
}
|
||||
};
|
||||
|
||||
const validateChallengeResponse = async (challengeId: string, retryCount: number): Promise<void> => {
|
||||
logger.info({ challengeId, retryCount }, "Validating ACME challenge response");
|
||||
const challenge = await acmeChallengeDAL.findByIdForChallengeValidation(challengeId);
|
||||
if (!challenge) {
|
||||
throw new NotFoundError({ message: "ACME challenge not found" });
|
||||
}
|
||||
if (challenge.status !== AcmeChallengeStatus.Processing) {
|
||||
throw new BadRequestError({
|
||||
message: `ACME challenge is ${challenge.status} instead of ${AcmeChallengeStatus.Processing}`
|
||||
});
|
||||
}
|
||||
|
||||
try {
|
||||
if (challenge.type === AcmeChallengeType.HTTP_01) {
|
||||
await validateHttp01Challenge(challenge);
|
||||
} else if (challenge.type === AcmeChallengeType.DNS_01) {
|
||||
await validateDns01Challenge(challenge);
|
||||
} else {
|
||||
throw new BadRequestError({ message: `Unsupported challenge type: ${challenge.type}` });
|
||||
}
|
||||
|
||||
logger.info({ challengeId }, "ACME challenge response is correct, marking challenge as valid");
|
||||
await acmeChallengeDAL.markAsValidCascadeById(challengeId);
|
||||
await auditLogService.createAuditLog({
|
||||
projectId: challenge.auth.account.project.id,
|
||||
actor: {
|
||||
type: ActorType.ACME_ACCOUNT,
|
||||
metadata: {
|
||||
profileId: challenge.auth.account.profileId,
|
||||
accountId: challenge.auth.account.id
|
||||
}
|
||||
},
|
||||
event: {
|
||||
type: EventType.PASS_ACME_CHALLENGE,
|
||||
metadata: {
|
||||
challengeId,
|
||||
type: challenge.type as AcmeChallengeType
|
||||
}
|
||||
}
|
||||
});
|
||||
} catch (exp) {
|
||||
await handleChallengeValidationError(exp, challenge, challengeId, retryCount);
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
@@ -4,6 +4,7 @@ import { TDbClient } from "@app/db";
|
||||
import { TableName } from "@app/db/schemas";
|
||||
import { DatabaseError } from "@app/lib/errors";
|
||||
import { ormify, selectAllTableCols, sqlNestRelationships } from "@app/lib/knex";
|
||||
import { CertificateRequestStatus } from "@app/services/certificate-request/certificate-request-types";
|
||||
|
||||
export type TPkiAcmeOrderDALFactory = ReturnType<typeof pkiAcmeOrderDALFactory>;
|
||||
|
||||
@@ -19,6 +20,43 @@ export const pkiAcmeOrderDALFactory = (db: TDbClient) => {
|
||||
}
|
||||
};
|
||||
|
||||
const findWithCertificateRequestForSync = async (id: string, tx?: Knex) => {
|
||||
try {
|
||||
const order = await (tx || db)(TableName.PkiAcmeOrder)
|
||||
.leftJoin(
|
||||
TableName.CertificateRequests,
|
||||
`${TableName.PkiAcmeOrder}.id`,
|
||||
`${TableName.CertificateRequests}.acmeOrderId`
|
||||
)
|
||||
.select(
|
||||
selectAllTableCols(TableName.PkiAcmeOrder),
|
||||
db.ref("id").withSchema(TableName.CertificateRequests).as("certificateRequestId"),
|
||||
db.ref("status").withSchema(TableName.CertificateRequests).as("certificateRequestStatus"),
|
||||
db.ref("certificateId").withSchema(TableName.CertificateRequests).as("certificateId")
|
||||
)
|
||||
.forUpdate(TableName.PkiAcmeOrder)
|
||||
.where(`${TableName.PkiAcmeOrder}.id`, id)
|
||||
.first();
|
||||
if (!order) {
|
||||
return null;
|
||||
}
|
||||
const { certificateRequestId, certificateRequestStatus, certificateId, ...details } = order;
|
||||
return {
|
||||
...details,
|
||||
certificateRequest:
|
||||
certificateRequestId && certificateRequestStatus
|
||||
? {
|
||||
id: certificateRequestId,
|
||||
status: certificateRequestStatus as CertificateRequestStatus,
|
||||
certificateId
|
||||
}
|
||||
: undefined
|
||||
};
|
||||
} catch (error) {
|
||||
throw new DatabaseError({ error, name: "Find PKI ACME order by id with certificate request" });
|
||||
}
|
||||
};
|
||||
|
||||
const findByAccountAndOrderIdWithAuthorizations = async (accountId: string, orderId: string, tx?: Knex) => {
|
||||
try {
|
||||
const rows = await (tx || db)(TableName.PkiAcmeOrder)
|
||||
@@ -72,6 +110,7 @@ export const pkiAcmeOrderDALFactory = (db: TDbClient) => {
|
||||
return {
|
||||
...pkiAcmeOrderOrm,
|
||||
findByIdForFinalization,
|
||||
findWithCertificateRequestForSync,
|
||||
findByAccountAndOrderIdWithAuthorizations,
|
||||
listByAccountId
|
||||
};
|
||||
|
||||
@@ -6,8 +6,8 @@ export enum AcmeIdentifierType {

export enum AcmeOrderStatus {
Pending = "pending",
Processing = "processing",
Ready = "ready",
Processing = "processing",
Valid = "valid",
Invalid = "invalid"
}

@@ -7,8 +7,10 @@ import {
|
||||
importJWK,
|
||||
JWSHeaderParameters
|
||||
} from "jose";
|
||||
import { Knex } from "knex";
|
||||
import { z, ZodError } from "zod";
|
||||
|
||||
import { TPkiAcmeOrders } from "@app/db/schemas";
|
||||
import { TPkiAcmeAccounts } from "@app/db/schemas/pki-acme-accounts";
|
||||
import { TPkiAcmeAuths } from "@app/db/schemas/pki-acme-auths";
|
||||
import { KeyStorePrefixes, TKeyStoreFactory } from "@app/keystore/keystore";
|
||||
@@ -17,20 +19,15 @@ import { crypto } from "@app/lib/crypto/cryptography";
|
||||
import { BadRequestError, NotFoundError } from "@app/lib/errors";
|
||||
import { isPrivateIp } from "@app/lib/ip/ipRange";
|
||||
import { logger } from "@app/lib/logger";
|
||||
import { TAppConnectionDALFactory } from "@app/services/app-connection/app-connection-dal";
|
||||
import { ActorType } from "@app/services/auth/auth-type";
|
||||
import { TCertificateBodyDALFactory } from "@app/services/certificate/certificate-body-dal";
|
||||
import { TCertificateDALFactory } from "@app/services/certificate/certificate-dal";
|
||||
import { TCertificateSecretDALFactory } from "@app/services/certificate/certificate-secret-dal";
|
||||
import {
|
||||
CertExtendedKeyUsage,
|
||||
CertKeyUsage,
|
||||
CertSubjectAlternativeNameType
|
||||
} from "@app/services/certificate/certificate-types";
|
||||
import { orderCertificate } from "@app/services/certificate-authority/acme/acme-certificate-authority-fns";
|
||||
import { CertSubjectAlternativeNameType } from "@app/services/certificate/certificate-types";
|
||||
import { TCertificateAuthorityDALFactory } from "@app/services/certificate-authority/certificate-authority-dal";
|
||||
import { CaType } from "@app/services/certificate-authority/certificate-authority-enums";
|
||||
import { TExternalCertificateAuthorityDALFactory } from "@app/services/certificate-authority/external-certificate-authority-dal";
|
||||
import {
|
||||
TCertificateIssuanceQueueFactory,
|
||||
TIssueCertificateFromProfileJobData
|
||||
} from "@app/services/certificate-authority/certificate-issuance-queue";
|
||||
import {
|
||||
extractAlgorithmsFromCSR,
|
||||
extractCertificateRequestFromCSR
|
||||
@@ -40,6 +37,8 @@ import {
|
||||
EnrollmentType,
|
||||
TCertificateProfileWithConfigs
|
||||
} from "@app/services/certificate-profile/certificate-profile-types";
|
||||
import { TCertificateRequestServiceFactory } from "@app/services/certificate-request/certificate-request-service";
|
||||
import { CertificateRequestStatus } from "@app/services/certificate-request/certificate-request-types";
|
||||
import { TCertificateTemplateV2DALFactory } from "@app/services/certificate-template-v2/certificate-template-v2-dal";
|
||||
import { TCertificateTemplateV2ServiceFactory } from "@app/services/certificate-template-v2/certificate-template-v2-service";
|
||||
import { TCertificateV3ServiceFactory } from "@app/services/certificate-v3/certificate-v3-service";
|
||||
@@ -47,6 +46,7 @@ import { TKmsServiceFactory } from "@app/services/kms/kms-service";
|
||||
import { TProjectDALFactory } from "@app/services/project/project-dal";
|
||||
import { getProjectKmsCertificateKeyId } from "@app/services/project/project-fns";
|
||||
|
||||
import { EventType, TAuditLogServiceFactory } from "../audit-log/audit-log-types";
|
||||
import { TLicenseServiceFactory } from "../license/license-service";
|
||||
import { TPkiAcmeAccountDALFactory } from "./pki-acme-account-dal";
|
||||
import { TPkiAcmeAuthDALFactory } from "./pki-acme-auth-dal";
|
||||
@@ -99,13 +99,9 @@ import {
|
||||
|
||||
type TPkiAcmeServiceFactoryDep = {
|
||||
projectDAL: Pick<TProjectDALFactory, "findOne" | "updateById" | "transaction" | "findById">;
|
||||
appConnectionDAL: Pick<TAppConnectionDALFactory, "findById">;
|
||||
certificateDAL: Pick<TCertificateDALFactory, "create" | "transaction" | "updateById">;
|
||||
certificateAuthorityDAL: Pick<TCertificateAuthorityDALFactory, "findByIdWithAssociatedCa">;
|
||||
externalCertificateAuthorityDAL: Pick<TExternalCertificateAuthorityDALFactory, "update">;
|
||||
certificateProfileDAL: Pick<TCertificateProfileDALFactory, "findByIdWithOwnerOrgId" | "findByIdWithConfigs">;
|
||||
certificateBodyDAL: Pick<TCertificateBodyDALFactory, "findOne" | "create">;
|
||||
certificateSecretDAL: Pick<TCertificateSecretDALFactory, "findOne" | "create">;
|
||||
certificateTemplateV2DAL: Pick<TCertificateTemplateV2DALFactory, "findById">;
|
||||
acmeAccountDAL: Pick<
|
||||
TPkiAcmeAccountDALFactory,
|
||||
@@ -113,11 +109,13 @@ type TPkiAcmeServiceFactoryDep = {
|
||||
>;
|
||||
acmeOrderDAL: Pick<
|
||||
TPkiAcmeOrderDALFactory,
|
||||
| "findById"
|
||||
| "create"
|
||||
| "transaction"
|
||||
| "updateById"
|
||||
| "findByAccountAndOrderIdWithAuthorizations"
|
||||
| "findByIdForFinalization"
|
||||
| "findWithCertificateRequestForSync"
|
||||
| "listByAccountId"
|
||||
>;
|
||||
acmeAuthDAL: Pick<TPkiAcmeAuthDALFactory, "create" | "findByAccountIdAndAuthIdWithChallenges">;
|
||||
@@ -134,19 +132,18 @@ type TPkiAcmeServiceFactoryDep = {
|
||||
licenseService: Pick<TLicenseServiceFactory, "getPlan">;
|
||||
certificateV3Service: Pick<TCertificateV3ServiceFactory, "signCertificateFromProfile">;
|
||||
certificateTemplateV2Service: Pick<TCertificateTemplateV2ServiceFactory, "validateCertificateRequest">;
|
||||
certificateRequestService: Pick<TCertificateRequestServiceFactory, "createCertificateRequest">;
|
||||
certificateIssuanceQueue: Pick<TCertificateIssuanceQueueFactory, "queueCertificateIssuance">;
|
||||
acmeChallengeService: Pick<TPkiAcmeChallengeServiceFactory, "markChallengeAsReady">;
|
||||
pkiAcmeQueueService: Pick<TPkiAcmeQueueServiceFactory, "queueChallengeValidation">;
|
||||
auditLogService: Pick<TAuditLogServiceFactory, "createAuditLog">;
|
||||
};
|
||||
|
||||
export const pkiAcmeServiceFactory = ({
|
||||
projectDAL,
|
||||
appConnectionDAL,
|
||||
certificateDAL,
|
||||
certificateAuthorityDAL,
|
||||
externalCertificateAuthorityDAL,
|
||||
certificateProfileDAL,
|
||||
certificateBodyDAL,
|
||||
certificateSecretDAL,
|
||||
certificateTemplateV2DAL,
|
||||
acmeAccountDAL,
|
||||
acmeOrderDAL,
|
||||
@@ -158,8 +155,11 @@ export const pkiAcmeServiceFactory = ({
|
||||
licenseService,
|
||||
certificateV3Service,
|
||||
certificateTemplateV2Service,
|
||||
certificateRequestService,
|
||||
certificateIssuanceQueue,
|
||||
acmeChallengeService,
|
||||
pkiAcmeQueueService
|
||||
pkiAcmeQueueService,
|
||||
auditLogService
|
||||
}: TPkiAcmeServiceFactoryDep): TPkiAcmeServiceFactory => {
|
||||
const validateAcmeProfile = async (profileId: string): Promise<TCertificateProfileWithConfigs> => {
|
||||
const profile = await certificateProfileDAL.findByIdWithConfigs(profileId);
|
||||
@@ -364,6 +364,52 @@ export const pkiAcmeServiceFactory = ({
|
||||
};
|
||||
};
|
||||
|
||||
const checkAndSyncAcmeOrderStatus = async ({ orderId }: { orderId: string }): Promise<TPkiAcmeOrders> => {
|
||||
const order = await acmeOrderDAL.findById(orderId);
|
||||
if (!order) {
|
||||
throw new NotFoundError({ message: "ACME order not found" });
|
||||
}
|
||||
if (order.status !== AcmeOrderStatus.Processing) {
|
||||
// We only care about processing orders, as they are the ones that have async certificate requests
|
||||
return order;
|
||||
}
|
||||
return acmeOrderDAL.transaction(async (tx) => {
|
||||
// Lock the order for syncing with async cert request
|
||||
const orderWithCertificateRequest = await acmeOrderDAL.findWithCertificateRequestForSync(orderId, tx);
|
||||
if (!orderWithCertificateRequest) {
|
||||
throw new NotFoundError({ message: "ACME order not found" });
|
||||
}
|
||||
// Check the status again after we have acquired the lock, as things may have changed since we last checked
|
||||
if (
|
||||
orderWithCertificateRequest.status !== AcmeOrderStatus.Processing ||
|
||||
!orderWithCertificateRequest.certificateRequest
|
||||
) {
|
||||
return orderWithCertificateRequest;
|
||||
}
|
||||
let newStatus: AcmeOrderStatus | undefined;
|
||||
let newCertificateId: string | undefined;
|
||||
switch (orderWithCertificateRequest.certificateRequest.status) {
|
||||
case CertificateRequestStatus.PENDING:
|
||||
break;
|
||||
case CertificateRequestStatus.ISSUED:
|
||||
newStatus = AcmeOrderStatus.Valid;
|
||||
newCertificateId = orderWithCertificateRequest.certificateRequest.certificateId ?? undefined;
|
||||
break;
|
||||
case CertificateRequestStatus.FAILED:
|
||||
newStatus = AcmeOrderStatus.Invalid;
|
||||
break;
|
||||
default:
|
||||
throw new AcmeServerInternalError({
|
||||
message: `Invalid certificate request status: ${orderWithCertificateRequest.certificateRequest.status as string}`
|
||||
});
|
||||
}
|
||||
if (newStatus) {
|
||||
return acmeOrderDAL.updateById(orderId, { status: newStatus, certificateId: newCertificateId }, tx);
|
||||
}
|
||||
return orderWithCertificateRequest;
|
||||
});
|
||||
};
|
||||
|
||||
const getAcmeDirectory = async (profileId: string): Promise<TGetAcmeDirectoryResponse> => {
|
||||
const profile = await validateAcmeProfile(profileId);
|
||||
return {
|
||||
@@ -446,6 +492,23 @@ export const pkiAcmeServiceFactory = ({
|
||||
throw new AcmeExternalAccountRequiredError({ message: "External account binding is required" });
|
||||
}
|
||||
if (existingAccount) {
|
||||
await auditLogService.createAuditLog({
|
||||
projectId: profile.projectId,
|
||||
actor: {
|
||||
type: ActorType.ACME_PROFILE,
|
||||
metadata: {
|
||||
profileId: profile.id
|
||||
}
|
||||
},
|
||||
event: {
|
||||
type: EventType.RETRIEVE_ACME_ACCOUNT,
|
||||
metadata: {
|
||||
accountId: existingAccount.id,
|
||||
publicKeyThumbprint
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return {
|
||||
status: 200,
|
||||
body: {
|
||||
@@ -518,7 +581,25 @@ export const pkiAcmeServiceFactory = ({
|
||||
publicKeyThumbprint,
|
||||
emails: contact ?? []
|
||||
});
|
||||
// TODO: create audit log here
|
||||
|
||||
await auditLogService.createAuditLog({
|
||||
projectId: profile.projectId,
|
||||
actor: {
|
||||
type: ActorType.ACME_PROFILE,
|
||||
metadata: {
|
||||
profileId: profile.id
|
||||
}
|
||||
},
|
||||
event: {
|
||||
type: EventType.CREATE_ACME_ACCOUNT,
|
||||
metadata: {
|
||||
accountId: newAccount.id,
|
||||
publicKeyThumbprint: newAccount.publicKeyThumbprint,
|
||||
emails: newAccount.emails
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return {
|
||||
status: 201,
|
||||
body: {
|
||||
@@ -567,6 +648,8 @@ export const pkiAcmeServiceFactory = ({
|
||||
accountId: string;
|
||||
payload: TCreateAcmeOrderPayload;
|
||||
}): Promise<TAcmeResponse<TAcmeOrderResource>> => {
|
||||
const profile = await validateAcmeProfile(profileId);
|
||||
const skipDnsOwnershipVerification = profile.acmeConfig?.skipDnsOwnershipVerification ?? false;
|
||||
// TODO: check and see if we have existing orders for this account that meet the criteria
|
||||
// if we do, return the existing order
|
||||
// TODO: check the identifiers and see if are they even allowed for this profile.
|
||||
@@ -592,7 +675,7 @@ export const pkiAcmeServiceFactory = ({
|
||||
const createdOrder = await acmeOrderDAL.create(
|
||||
{
|
||||
accountId: account.id,
|
||||
status: AcmeOrderStatus.Pending,
|
||||
status: skipDnsOwnershipVerification ? AcmeOrderStatus.Ready : AcmeOrderStatus.Pending,
|
||||
notBefore: payload.notBefore ? new Date(payload.notBefore) : undefined,
|
||||
notAfter: payload.notAfter ? new Date(payload.notAfter) : undefined,
|
||||
// TODO: read config from the profile to get the expiration time instead
|
||||
@@ -611,7 +694,7 @@ export const pkiAcmeServiceFactory = ({
|
||||
const auth = await acmeAuthDAL.create(
|
||||
{
|
||||
accountId: account.id,
|
||||
status: AcmeAuthStatus.Pending,
|
||||
status: skipDnsOwnershipVerification ? AcmeAuthStatus.Valid : AcmeAuthStatus.Pending,
|
||||
identifierType: identifier.type,
|
||||
identifierValue: identifier.value,
|
||||
// RFC 8555 suggests a token with at least 128 bits of entropy
|
||||
@@ -623,15 +706,19 @@ export const pkiAcmeServiceFactory = ({
|
||||
},
|
||||
tx
|
||||
);
|
||||
// TODO: support other challenge types here. Currently only HTTP-01 is supported.
|
||||
await acmeChallengeDAL.create(
|
||||
{
|
||||
authId: auth.id,
|
||||
status: AcmeChallengeStatus.Pending,
|
||||
type: AcmeChallengeType.HTTP_01
|
||||
},
|
||||
tx
|
||||
);
|
||||
if (!skipDnsOwnershipVerification) {
|
||||
for (const challengeType of [AcmeChallengeType.HTTP_01, AcmeChallengeType.DNS_01]) {
|
||||
// eslint-disable-next-line no-await-in-loop
|
||||
await acmeChallengeDAL.create(
|
||||
{
|
||||
authId: auth.id,
|
||||
status: AcmeChallengeStatus.Pending,
|
||||
type: challengeType
|
||||
},
|
||||
tx
|
||||
);
|
||||
}
|
||||
}
|
||||
return auth;
|
||||
})
|
||||
);
|
||||
@@ -643,7 +730,26 @@ export const pkiAcmeServiceFactory = ({
|
||||
})),
|
||||
tx
|
||||
);
|
||||
// TODO: create audit log here
|
||||
await auditLogService.createAuditLog({
|
||||
projectId: profile.projectId,
|
||||
actor: {
|
||||
type: ActorType.ACME_ACCOUNT,
|
||||
metadata: {
|
||||
profileId: account.profileId,
|
||||
accountId: account.id
|
||||
}
|
||||
},
|
||||
event: {
|
||||
type: EventType.CREATE_ACME_ORDER,
|
||||
metadata: {
|
||||
orderId: createdOrder.id,
|
||||
identifiers: authorizations.map((auth) => ({
|
||||
type: auth.identifierType as AcmeIdentifierType,
|
||||
value: auth.identifierValue
|
||||
}))
|
||||
}
|
||||
}
|
||||
});
|
||||
return { ...createdOrder, authorizations, account };
|
||||
});
|
||||
|
||||
@@ -673,9 +779,12 @@ export const pkiAcmeServiceFactory = ({
|
||||
if (!order) {
|
||||
throw new NotFoundError({ message: "ACME order not found" });
|
||||
}
|
||||
// Sync order first in case if there is a certificate request that needs to be processed
|
||||
await checkAndSyncAcmeOrderStatus({ orderId });
|
||||
const updatedOrder = (await acmeOrderDAL.findByAccountAndOrderIdWithAuthorizations(accountId, orderId))!;
|
||||
return {
|
||||
status: 200,
|
||||
body: buildAcmeOrderResource({ profileId, order }),
|
||||
body: buildAcmeOrderResource({ profileId, order: updatedOrder }),
|
||||
headers: {
|
||||
Location: buildUrl(profileId, `/orders/${orderId}`),
|
||||
Link: `<${buildUrl(profileId, "/directory")}>;rel="index"`
|
||||
@@ -683,6 +792,129 @@ export const pkiAcmeServiceFactory = ({
|
||||
};
|
||||
};
|
||||
|
||||
const processCertificateIssuanceForOrder = async ({
|
||||
caType,
|
||||
accountId,
|
||||
actorOrgId,
|
||||
profileId,
|
||||
orderId,
|
||||
csr,
|
||||
finalizingOrder,
|
||||
certificateRequest,
|
||||
profile,
|
||||
ca,
|
||||
tx
|
||||
}: {
|
||||
caType: CaType;
|
||||
accountId: string;
|
||||
actorOrgId: string;
|
||||
profileId: string;
|
||||
orderId: string;
|
||||
csr: string;
|
||||
finalizingOrder: {
|
||||
notBefore?: Date | null;
|
||||
notAfter?: Date | null;
|
||||
};
|
||||
certificateRequest: ReturnType<typeof extractCertificateRequestFromCSR>;
|
||||
profile: TCertificateProfileWithConfigs;
|
||||
ca: Awaited<ReturnType<typeof certificateAuthorityDAL.findByIdWithAssociatedCa>>;
|
||||
tx?: Knex;
|
||||
}): Promise<{ certificateId?: string; certIssuanceJobData?: TIssueCertificateFromProfileJobData }> => {
|
||||
if (caType === CaType.INTERNAL) {
|
||||
const result = await certificateV3Service.signCertificateFromProfile({
|
||||
actor: ActorType.ACME_ACCOUNT,
|
||||
actorId: accountId,
|
||||
actorAuthMethod: null,
|
||||
actorOrgId,
|
||||
profileId,
|
||||
csr,
|
||||
notBefore: finalizingOrder.notBefore ? new Date(finalizingOrder.notBefore) : undefined,
|
||||
notAfter: finalizingOrder.notAfter ? new Date(finalizingOrder.notAfter) : undefined,
|
||||
validity: !finalizingOrder.notAfter
|
||||
? {
|
||||
// 47 days, the default TTL that comes with Let's Encrypt
|
||||
// TODO: read config from the profile to get the expiration time instead
|
||||
ttl: `${47}d`
|
||||
}
|
||||
: // ttl is not used if notAfter is provided
|
||||
({ ttl: "0d" } as const),
|
||||
enrollmentType: EnrollmentType.ACME
|
||||
});
|
||||
return {
|
||||
certificateId: result.certificateId
|
||||
};
|
||||
}
|
||||
|
||||
const { keyAlgorithm: extractedKeyAlgorithm, signatureAlgorithm: extractedSignatureAlgorithm } =
  extractAlgorithmsFromCSR(csr);
const updatedCertificateRequest = {
  ...certificateRequest,
  keyAlgorithm: extractedKeyAlgorithm,
  signatureAlgorithm: extractedSignatureAlgorithm,
  validity: finalizingOrder.notAfter
    ? (() => {
        const notBefore = finalizingOrder.notBefore ? new Date(finalizingOrder.notBefore) : new Date();
        const notAfter = new Date(finalizingOrder.notAfter);
        const diffMs = notAfter.getTime() - notBefore.getTime();
        const diffDays = Math.round(diffMs / (1000 * 60 * 60 * 24));
        return { ttl: `${diffDays}d` };
      })()
    : certificateRequest.validity
};
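// Standalone sketch of the validity calculation above: when the finalizing order carries
// a notAfter, the requested window is converted into a whole-day TTL string. This mirrors
// the inline IIFE; toTtlFromWindow is a hypothetical helper name used only here.
const toTtlFromWindow = (notBefore: Date | null | undefined, notAfter: Date): { ttl: string } => {
  const start = notBefore ? new Date(notBefore) : new Date();
  const diffMs = new Date(notAfter).getTime() - start.getTime();
  const diffDays = Math.round(diffMs / (1000 * 60 * 60 * 24));
  return { ttl: `${diffDays}d` };
};

// e.g. a 90-day window collapses to { ttl: "90d" }
console.log(toTtlFromWindow(new Date("2025-01-01"), new Date("2025-04-01")));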
|
||||
const template = await certificateTemplateV2DAL.findById(profile.certificateTemplateId);
|
||||
if (!template) {
|
||||
throw new NotFoundError({ message: "Certificate template not found" });
|
||||
}
|
||||
const validationResult = await certificateTemplateV2Service.validateCertificateRequest(
|
||||
template.id,
|
||||
updatedCertificateRequest
|
||||
);
|
||||
if (!validationResult.isValid) {
|
||||
throw new AcmeBadCSRError({ message: `Invalid CSR: ${validationResult.errors.join(", ")}` });
|
||||
}
|
||||
|
||||
const certRequest = await certificateRequestService.createCertificateRequest({
|
||||
actor: ActorType.ACME_ACCOUNT,
|
||||
actorId: accountId,
|
||||
actorAuthMethod: null,
|
||||
actorOrgId,
|
||||
projectId: profile.projectId,
|
||||
caId: ca.id,
|
||||
profileId: profile.id,
|
||||
commonName: updatedCertificateRequest.commonName ?? "",
|
||||
keyUsages: updatedCertificateRequest.keyUsages?.map((usage) => usage.toString()) ?? [],
|
||||
extendedKeyUsages: updatedCertificateRequest.extendedKeyUsages?.map((usage) => usage.toString()) ?? [],
|
||||
keyAlgorithm: updatedCertificateRequest.keyAlgorithm || "",
|
||||
signatureAlgorithm: updatedCertificateRequest.signatureAlgorithm || "",
|
||||
altNames: updatedCertificateRequest.subjectAlternativeNames?.map((san) => san.value).join(","),
|
||||
notBefore: updatedCertificateRequest.notBefore,
|
||||
notAfter: updatedCertificateRequest.notAfter,
|
||||
status: CertificateRequestStatus.PENDING,
|
||||
acmeOrderId: orderId,
|
||||
csr,
|
||||
tx
|
||||
});
|
||||
const csrObj = new x509.Pkcs10CertificateRequest(csr);
|
||||
const csrPem = csrObj.toString("pem");
|
||||
return {
|
||||
certIssuanceJobData: {
|
||||
certificateId: orderId,
|
||||
profileId: profile.id,
|
||||
caId: profile.caId || "",
|
||||
ttl: updatedCertificateRequest.validity?.ttl || "1y",
|
||||
signatureAlgorithm: updatedCertificateRequest.signatureAlgorithm || "",
|
||||
keyAlgorithm: updatedCertificateRequest.keyAlgorithm || "",
|
||||
commonName: updatedCertificateRequest.commonName || "",
|
||||
altNames: updatedCertificateRequest.subjectAlternativeNames?.map((san) => san.value) || [],
|
||||
keyUsages: updatedCertificateRequest.keyUsages?.map((usage) => usage.toString()) ?? [],
|
||||
extendedKeyUsages: updatedCertificateRequest.extendedKeyUsages?.map((usage) => usage.toString()) ?? [],
|
||||
certificateRequestId: certRequest.id,
|
||||
csr: csrPem
|
||||
}
|
||||
};
|
||||
};
|
||||
|
||||
const finalizeAcmeOrder = async ({
|
||||
profileId,
|
||||
accountId,
|
||||
@@ -707,7 +939,11 @@ export const pkiAcmeServiceFactory = ({
|
||||
throw new NotFoundError({ message: "ACME order not found" });
|
||||
}
|
||||
if (order.status === AcmeOrderStatus.Ready) {
  const { order: updatedOrder, error } = await acmeOrderDAL.transaction(async (tx) => {
  const {
    order: updatedOrder,
    error,
    certIssuanceJobData
  } = await acmeOrderDAL.transaction(async (tx) => {
    const finalizingOrder = (await acmeOrderDAL.findByIdForFinalization(orderId, tx))!;
    // TODO: ideally, this should be done with onRequest: verifyAuth([AuthMode.ACME_JWS_SIGNATURE]) instead
    const { ownerOrgId: actorOrgId } = (await certificateProfileDAL.findByIdWithOwnerOrgId(profileId, tx))!;
@@ -754,94 +990,31 @@ export const pkiAcmeServiceFactory = ({
|
||||
}
|
||||
const caType = (ca.externalCa?.type as CaType) ?? CaType.INTERNAL;
|
||||
let errorToReturn: Error | undefined;
|
||||
let certIssuanceJobDataToReturn: TIssueCertificateFromProfileJobData | undefined;
|
||||
try {
|
||||
const { certificateId } = await (async () => {
|
||||
if (caType === CaType.INTERNAL) {
|
||||
const result = await certificateV3Service.signCertificateFromProfile({
|
||||
actor: ActorType.ACME_ACCOUNT,
|
||||
actorId: accountId,
|
||||
actorAuthMethod: null,
|
||||
actorOrgId,
|
||||
profileId,
|
||||
csr,
|
||||
notBefore: finalizingOrder.notBefore ? new Date(finalizingOrder.notBefore) : undefined,
|
||||
notAfter: finalizingOrder.notAfter ? new Date(finalizingOrder.notAfter) : undefined,
|
||||
validity: !finalizingOrder.notAfter
|
||||
? {
|
||||
// 47 days, the default TTL that comes with Let's Encrypt
|
||||
// TODO: read config from the profile to get the expiration time instead
|
||||
ttl: `${47}d`
|
||||
}
|
||||
: // ttl is not used if notAfter is provided
|
||||
({ ttl: "0d" } as const),
|
||||
enrollmentType: EnrollmentType.ACME
|
||||
});
|
||||
return { certificateId: result.certificateId };
|
||||
}
|
||||
const { certificateAuthority } = (await certificateProfileDAL.findByIdWithConfigs(profileId, tx))!;
|
||||
const csrObj = new x509.Pkcs10CertificateRequest(csr);
|
||||
const csrPem = csrObj.toString("pem");
|
||||
|
||||
const { keyAlgorithm: extractedKeyAlgorithm, signatureAlgorithm: extractedSignatureAlgorithm } =
|
||||
extractAlgorithmsFromCSR(csr);
|
||||
|
||||
certificateRequest.keyAlgorithm = extractedKeyAlgorithm;
|
||||
certificateRequest.signatureAlgorithm = extractedSignatureAlgorithm;
|
||||
if (finalizingOrder.notAfter) {
|
||||
const notBefore = finalizingOrder.notBefore ? new Date(finalizingOrder.notBefore) : new Date();
|
||||
const notAfter = new Date(finalizingOrder.notAfter);
|
||||
const diffMs = notAfter.getTime() - notBefore.getTime();
|
||||
const diffDays = Math.round(diffMs / (1000 * 60 * 60 * 24));
|
||||
certificateRequest.validity = { ttl: `${diffDays}d` };
|
||||
}
|
||||
|
||||
const template = await certificateTemplateV2DAL.findById(profile.certificateTemplateId);
|
||||
if (!template) {
|
||||
throw new NotFoundError({ message: "Certificate template not found" });
|
||||
}
|
||||
const validationResult = await certificateTemplateV2Service.validateCertificateRequest(
|
||||
template.id,
|
||||
certificateRequest
|
||||
);
|
||||
if (!validationResult.isValid) {
|
||||
throw new AcmeBadCSRError({ message: `Invalid CSR: ${validationResult.errors.join(", ")}` });
|
||||
}
|
||||
// TODO: this is pretty slow and we are holding the transaction open for a long time;
// we should queue the certificate issuance to a background job instead
const cert = await orderCertificate(
  {
    caId: certificateAuthority!.id,
    // It is possible that the CSR does not have a common name, in which case we use an empty string
    // (more likely than not for a CSR from a modern ACME client like certbot, cert-manager, etc.)
    commonName: certificateRequest.commonName ?? "",
    altNames: certificateRequest.subjectAlternativeNames?.map((san) => san.value),
    csr: Buffer.from(csrPem),
    // TODO: not 100% sure what these columns are for, but use the values for common website SSL certs for now
    keyUsages: [CertKeyUsage.DIGITAL_SIGNATURE, CertKeyUsage.KEY_ENCIPHERMENT, CertKeyUsage.KEY_AGREEMENT],
    extendedKeyUsages: [CertExtendedKeyUsage.SERVER_AUTH]
  },
{
|
||||
appConnectionDAL,
|
||||
certificateAuthorityDAL,
|
||||
externalCertificateAuthorityDAL,
|
||||
certificateDAL,
|
||||
certificateBodyDAL,
|
||||
certificateSecretDAL,
|
||||
kmsService,
|
||||
projectDAL
|
||||
}
|
||||
);
|
||||
return { certificateId: cert.id };
|
||||
})();
|
||||
const result = await processCertificateIssuanceForOrder({
|
||||
caType,
|
||||
accountId,
|
||||
actorOrgId,
|
||||
profileId,
|
||||
orderId,
|
||||
csr,
|
||||
finalizingOrder,
|
||||
certificateRequest,
|
||||
profile,
|
||||
ca,
|
||||
tx
|
||||
});
|
||||
await acmeOrderDAL.updateById(
|
||||
orderId,
|
||||
{
|
||||
status: AcmeOrderStatus.Valid,
|
||||
status: result.certificateId ? AcmeOrderStatus.Valid : AcmeOrderStatus.Processing,
|
||||
csr,
|
||||
certificateId
|
||||
certificateId: result.certificateId
|
||||
},
|
||||
tx
|
||||
);
|
||||
certIssuanceJobDataToReturn = result.certIssuanceJobData;
|
||||
} catch (exp) {
|
||||
await acmeOrderDAL.updateById(
|
||||
orderId,
|
||||
@@ -859,18 +1032,43 @@ export const pkiAcmeServiceFactory = ({
|
||||
} else if (exp instanceof AcmeError) {
|
||||
errorToReturn = exp;
|
||||
} else {
|
||||
errorToReturn = new AcmeServerInternalError({ message: "Failed to sign certificate with internal error" });
|
||||
errorToReturn = new AcmeServerInternalError({
|
||||
message: "Failed to sign certificate with internal error"
|
||||
});
|
||||
}
|
||||
}
|
||||
return {
|
||||
order: (await acmeOrderDAL.findByAccountAndOrderIdWithAuthorizations(accountId, orderId, tx))!,
|
||||
error: errorToReturn
|
||||
error: errorToReturn,
|
||||
certIssuanceJobData: certIssuanceJobDataToReturn
|
||||
};
|
||||
});
|
||||
if (error) {
  throw error;
}
if (certIssuanceJobData) {
  // TODO: ideally, this would be done inside the transaction, but the pg-boss queue does not appear to support external transactions.
  // We need to commit the transaction before queuing the job, otherwise the job will fail with a not-found error.
  await certificateIssuanceQueue.queueCertificateIssuance(certIssuanceJobData);
}
order = updatedOrder;
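// Sketch of the ordering constraint described in the comment above: the issuance job must
// only be queued once the surrounding transaction has committed, otherwise the worker may
// not find the freshly written rows. The transaction/queue signatures below are simplified
// stand-ins, not the real pg-boss or Knex APIs.
type IssuanceJob = { certificateRequestId: string };

const finalizeThenQueue = async (
  runInTransaction: <T>(fn: () => Promise<T>) => Promise<T>,
  queueCertificateIssuance: (job: IssuanceJob) => Promise<void>
): Promise<void> => {
  // 1. Do all writes and collect the job payload while inside the transaction...
  const jobData = await runInTransaction(async () => {
    // ...create the certificate request / update the order here...
    return { certificateRequestId: "cert-req-id" };
  });
  // 2. ...and only enqueue the job after the commit has succeeded.
  await queueCertificateIssuance(jobData);
};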
await auditLogService.createAuditLog({
|
||||
projectId: profile.projectId,
|
||||
actor: {
|
||||
type: ActorType.ACME_ACCOUNT,
|
||||
metadata: {
|
||||
profileId,
|
||||
accountId
|
||||
}
|
||||
},
|
||||
event: {
|
||||
type: EventType.FINALIZE_ACME_ORDER,
|
||||
metadata: {
|
||||
orderId: updatedOrder.id,
|
||||
csr: updatedOrder.csr!
|
||||
}
|
||||
}
|
||||
});
|
||||
} else if (order.status !== AcmeOrderStatus.Valid) {
|
||||
throw new AcmeOrderNotReadyError({ message: "ACME order is not ready" });
|
||||
}
|
||||
@@ -898,14 +1096,16 @@ export const pkiAcmeServiceFactory = ({
|
||||
if (!order) {
|
||||
throw new NotFoundError({ message: "ACME order not found" });
|
||||
}
|
||||
if (order.status !== AcmeOrderStatus.Valid) {
// Sync the order first in case there is a certificate request that needs to be processed
const syncedOrder = await checkAndSyncAcmeOrderStatus({ orderId });
if (syncedOrder.status !== AcmeOrderStatus.Valid) {
  throw new AcmeOrderNotReadyError({ message: "ACME order is not valid" });
}
if (!order.certificateId) {
if (!syncedOrder.certificateId) {
  throw new NotFoundError({ message: "The certificate for this ACME order no longer exists" });
}

const certBody = await certificateBodyDAL.findOne({ certId: order.certificateId });
const certBody = await certificateBodyDAL.findOne({ certId: syncedOrder.certificateId });
const certificateManagerKeyId = await getProjectKmsCertificateKeyId({
|
||||
projectId: profile.projectId,
|
||||
projectDAL,
|
||||
@@ -926,6 +1126,24 @@ export const pkiAcmeServiceFactory = ({
|
||||
|
||||
const certLeaf = certObj.toString("pem").trim().replace("\n", "\r\n");
|
||||
const certChain = certificateChain.trim().replace("\n", "\r\n");
|
||||
|
||||
await auditLogService.createAuditLog({
|
||||
projectId: profile.projectId,
|
||||
actor: {
|
||||
type: ActorType.ACME_ACCOUNT,
|
||||
metadata: {
|
||||
profileId,
|
||||
accountId
|
||||
}
|
||||
},
|
||||
event: {
|
||||
type: EventType.DOWNLOAD_ACME_CERTIFICATE,
|
||||
metadata: {
|
||||
orderId
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return {
|
||||
status: 200,
|
||||
body:
|
||||
@@ -1008,6 +1226,7 @@ export const pkiAcmeServiceFactory = ({
|
||||
authzId: string;
|
||||
challengeId: string;
|
||||
}): Promise<TAcmeResponse<TRespondToAcmeChallengeResponse>> => {
|
||||
const profile = await validateAcmeProfile(profileId);
|
||||
const result = await acmeChallengeDAL.findByAccountAuthAndChallengeId(accountId, authzId, challengeId);
|
||||
if (!result) {
|
||||
throw new NotFoundError({ message: "ACME challenge not found" });
|
||||
@@ -1015,6 +1234,23 @@ export const pkiAcmeServiceFactory = ({
|
||||
await acmeChallengeService.markChallengeAsReady(challengeId);
|
||||
await pkiAcmeQueueService.queueChallengeValidation(challengeId);
|
||||
const challenge = (await acmeChallengeDAL.findByIdForChallengeValidation(challengeId))!;
|
||||
await auditLogService.createAuditLog({
|
||||
projectId: profile.projectId,
|
||||
actor: {
|
||||
type: ActorType.ACME_ACCOUNT,
|
||||
metadata: {
|
||||
profileId,
|
||||
accountId
|
||||
}
|
||||
},
|
||||
event: {
|
||||
type: EventType.RESPOND_TO_ACME_CHALLENGE,
|
||||
metadata: {
|
||||
challengeId,
|
||||
type: challenge.type as AcmeChallengeType
|
||||
}
|
||||
}
|
||||
});
|
||||
return {
|
||||
status: 200,
|
||||
body: {
|
||||
|
||||
@@ -72,7 +72,7 @@ type TScimServiceFactoryDep = {
|
||||
TGroupDALFactory,
|
||||
| "create"
|
||||
| "findOne"
|
||||
| "findAllGroupPossibleMembers"
|
||||
| "findAllGroupPossibleUsers"
|
||||
| "delete"
|
||||
| "findGroups"
|
||||
| "transaction"
|
||||
@@ -952,7 +952,7 @@ export const scimServiceFactory = ({
|
||||
}
|
||||
|
||||
const users = await groupDAL
|
||||
.findAllGroupPossibleMembers({
|
||||
.findAllGroupPossibleUsers({
|
||||
orgId: group.orgId,
|
||||
groupId: group.id
|
||||
})
|
||||
|
||||
@@ -77,6 +77,7 @@ export enum ApiDocsTags {
|
||||
OidcSso = "OIDC SSO",
|
||||
SamlSso = "SAML SSO",
|
||||
LdapSso = "LDAP SSO",
|
||||
Scim = "SCIM",
|
||||
Events = "Event Subscriptions"
|
||||
}
|
||||
|
||||
@@ -106,6 +107,25 @@ export const GROUPS = {
|
||||
filterUsers:
|
||||
"Whether to filter the list of returned users. 'existingMembers' will only return existing users in the group, 'nonMembers' will only return users not in the group, undefined will return all users in the organization."
|
||||
},
|
||||
LIST_MACHINE_IDENTITIES: {
|
||||
id: "The ID of the group to list identities for.",
|
||||
offset: "The offset to start from. If you enter 10, it will start from the 10th identity.",
|
||||
limit: "The number of identities to return.",
|
||||
search: "The text string that machine identity name will be filtered by.",
|
||||
filterMachineIdentities:
|
||||
"Whether to filter the list of returned identities. 'assignedMachineIdentities' will only return identities assigned to the group, 'nonAssignedMachineIdentities' will only return identities not assigned to the group, undefined will return all identities in the organization."
|
||||
},
|
||||
LIST_MEMBERS: {
|
||||
id: "The ID of the group to list members for.",
|
||||
offset: "The offset to start from. If you enter 10, it will start from the 10th member.",
|
||||
limit: "The number of members to return.",
|
||||
search:
|
||||
"The text string that member email(in case of users) or name(in case of machine identities) will be filtered by.",
|
||||
orderBy: "The column to order members by.",
|
||||
orderDirection: "The direction to order members in.",
|
||||
memberTypeFilter:
|
||||
"Filter members by type. Can be a single value ('users' or 'machineIdentities') or an array of values. If not specified, both users and machine identities will be returned."
|
||||
},
|
||||
LIST_PROJECTS: {
|
||||
id: "The ID of the group to list projects for.",
|
||||
offset: "The offset to start from. If you enter 10, it will start from the 10th project.",
|
||||
@@ -120,12 +140,20 @@ export const GROUPS = {
|
||||
id: "The ID of the group to add the user to.",
|
||||
username: "The username of the user to add to the group."
|
||||
},
|
||||
ADD_MACHINE_IDENTITY: {
|
||||
id: "The ID of the group to add the machine identity to.",
|
||||
machineIdentityId: "The ID of the machine identity to add to the group."
|
||||
},
|
||||
GET_BY_ID: {
|
||||
id: "The ID of the group to fetch."
|
||||
},
|
||||
DELETE_USER: {
|
||||
id: "The ID of the group to remove the user from.",
|
||||
username: "The username of the user to remove from the group."
|
||||
},
|
||||
DELETE_MACHINE_IDENTITY: {
|
||||
id: "The ID of the group to remove the machine identity from.",
|
||||
machineIdentityId: "The ID of the machine identity to remove from the group."
|
||||
}
|
||||
} as const;
|
||||
|
||||
@@ -2522,6 +2550,10 @@ export const AppConnections = {
|
||||
orgName: "The short name of the Chef organization to connect to.",
|
||||
userName: "The username used to access Chef.",
|
||||
privateKey: "The private key used to access Chef."
|
||||
},
|
||||
OCTOPUS_DEPLOY: {
|
||||
instanceUrl: "The Octopus Deploy instance URL to connect to.",
|
||||
apiKey: "The API key used to authenticate with Octopus Deploy."
|
||||
}
|
||||
}
|
||||
};
|
||||
@@ -2682,6 +2714,14 @@ export const SecretSyncs = {
|
||||
siteId: "The ID of the Laravel Forge site to sync secrets to.",
|
||||
siteName: "The name of the Laravel Forge site to sync secrets to."
|
||||
},
|
||||
OCTOPUS_DEPLOY: {
|
||||
spaceId: "The ID of the Octopus Deploy space to sync secrets to.",
|
||||
spaceName: "The name of the Octopus Deploy space to sync secrets to.",
|
||||
projectId: "The ID of the Octopus Deploy project to sync secrets to.",
|
||||
projectName: "The name of the Octopus Deploy project to sync secrets to.",
|
||||
scope: "The Octopus Deploy scope that secrets should be synced to.",
|
||||
scopeValues: "The Octopus Deploy scope values that secrets should be synced to."
|
||||
},
|
||||
WINDMILL: {
|
||||
workspace: "The Windmill workspace to sync secrets to.",
|
||||
path: "The Windmill workspace path to sync secrets to."
|
||||
@@ -3124,6 +3164,13 @@ export const LdapSso = {
|
||||
}
|
||||
};
|
||||
|
||||
export const Scim = {
|
||||
UPDATE_GROUP_ORG_ROLE_MAPPINGS: {
|
||||
groupName: "The name of the group in the SCIM provider.",
|
||||
roleSlug: "The slug of the role that group members should be assigned when provisioned."
|
||||
}
|
||||
};
|
||||
|
||||
export const EventSubscriptions = {
|
||||
SUBSCRIBE_PROJECT_EVENTS: {
|
||||
projectId: "The ID of the project to subscribe to events for.",
|
||||
|
||||
@@ -119,6 +119,16 @@ const envSchema = z
|
||||
})
|
||||
.default("{}")
|
||||
),
|
||||
ACME_DNS_RESOLVER_SERVERS: zpStr(
|
||||
z
|
||||
.string()
|
||||
.optional()
|
||||
.transform((val) => {
|
||||
if (!val) return [];
|
||||
return val.split(",");
|
||||
})
|
||||
),
|
||||
ACME_DNS_RESOLVE_RESOLVER_SERVERS_HOST_ENABLED: zodStrBool.default("false").optional(),
|
||||
DNS_MADE_EASY_SANDBOX_ENABLED: zodStrBool.default("false").optional(),
|
||||
// smtp options
|
||||
SMTP_HOST: zpStr(z.string().optional()),
|
||||
@@ -229,6 +239,7 @@ const envSchema = z
|
||||
CAPTCHA_SECRET: zpStr(z.string().optional()),
|
||||
CAPTCHA_SITE_KEY: zpStr(z.string().optional()),
|
||||
INTERCOM_ID: zpStr(z.string().optional()),
|
||||
CDN_HOST: zpStr(z.string().optional()),
|
||||
|
||||
// TELEMETRY
|
||||
OTEL_TELEMETRY_COLLECTION_ENABLED: zodStrBool.default("false"),
|
||||
|
||||
@@ -103,3 +103,98 @@ export const deepEqualSkipFields = (obj1: unknown, obj2: unknown, skipFields: st
|
||||
|
||||
return deepEqual(filtered1, filtered2);
|
||||
};
|
||||
|
||||
export const deterministicStringify = (value: unknown): string => {
  if (value === null || value === undefined) {
    return JSON.stringify(value);
  }

  if (typeof value === "string" || typeof value === "number" || typeof value === "boolean") {
    return JSON.stringify(value);
  }

  if (Array.isArray(value)) {
    const items = value.map((item) => deterministicStringify(item));
    return `[${items.join(",")}]`;
  }

  if (typeof value === "object") {
    const sortedKeys = Object.keys(value).sort();
    const sortedObj: Record<string, unknown> = {};
    for (const key of sortedKeys) {
      const val = (value as Record<string, unknown>)[key];
      if (typeof val === "object" && val !== null) {
        sortedObj[key] = JSON.parse(deterministicStringify(val));
      } else {
        sortedObj[key] = val;
      }
    }
    return JSON.stringify(sortedObj);
  }

  return JSON.stringify(value);
};
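// Usage sketch for deterministicStringify above (uses the helper as defined in this file):
// key order no longer affects the output, so structurally equal objects produce identical
// strings, which is useful for hashing or change detection.
const a = deterministicStringify({ b: 1, a: { d: 2, c: 3 } });
const b = deterministicStringify({ a: { c: 3, d: 2 }, b: 1 });
console.log(a); // {"a":{"c":3,"d":2},"b":1}
console.log(a === b); // true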
|
||||
/**
|
||||
* Recursively extracts all field paths from a nested object structure.
|
||||
* Returns an array of dot-notation paths (e.g., ["password", "username", "field.nestedField"])
|
||||
*/
|
||||
export const extractObjectFieldPaths = (obj: unknown, prefix = ""): string[] => {
|
||||
const paths: string[] = [];
|
||||
|
||||
if (obj === null || obj === undefined) {
|
||||
return paths;
|
||||
}
|
||||
|
||||
if (typeof obj !== "object") {
|
||||
// return the path if it exists
|
||||
if (prefix) {
|
||||
paths.push(prefix);
|
||||
}
|
||||
return paths;
|
||||
}
|
||||
|
||||
if (Array.isArray(obj)) {
|
||||
// for arrays, we log the array itself and optionally nested paths
|
||||
if (prefix) {
|
||||
paths.push(prefix);
|
||||
}
|
||||
// we just want to know the array field changed
|
||||
obj.forEach((item, index) => {
|
||||
if (typeof item === "object" && item !== null) {
|
||||
const nestedPaths = extractObjectFieldPaths(item, `${prefix}[${index}]`);
|
||||
paths.push(...nestedPaths);
|
||||
}
|
||||
});
|
||||
return paths;
|
||||
}
|
||||
|
||||
// for objects, extract all keys and recurse
|
||||
const keys = Object.keys(obj);
|
||||
if (keys.length === 0 && prefix) {
|
||||
// empty object with prefix
|
||||
paths.push(prefix);
|
||||
}
|
||||
|
||||
keys.forEach((key) => {
|
||||
const currentPath = prefix ? `${prefix}.${key}` : key;
|
||||
const value = (obj as Record<string, unknown>)[key];
|
||||
|
||||
if (value === null || value === undefined) {
|
||||
paths.push(currentPath);
|
||||
} else if (typeof value === "object") {
|
||||
// recurse into nested objects/arrays
|
||||
const nestedPaths = extractObjectFieldPaths(value, currentPath);
|
||||
if (nestedPaths.length === 0) {
|
||||
// if nested object is empty, add the path itself
|
||||
paths.push(currentPath);
|
||||
} else {
|
||||
paths.push(...nestedPaths);
|
||||
}
|
||||
} else {
|
||||
paths.push(currentPath);
|
||||
}
|
||||
});
|
||||
|
||||
return paths;
|
||||
};
|
||||
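// Usage sketch for extractObjectFieldPaths above (uses the helper as defined in this file):
// nested keys come back as dot-notation paths and array elements are indexed, so callers
// can record which fields changed without logging the values themselves.
const changedFields = extractObjectFieldPaths({
  username: "admin",
  connection: { host: "db", port: 5432 },
  tags: [{ name: "prod" }]
});
console.log(changedFields);
// ["username", "connection.host", "connection.port", "tags", "tags[0].name"]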
|
||||
@@ -33,3 +33,7 @@ export const sanitizeString = (dto: { unsanitizedString: string; tokens: string[
|
||||
});
|
||||
return sanitizedWords.join("");
|
||||
};
|
||||
|
||||
export const sanitizeSqlLikeString = (value: string): string => {
  return String(value).replace(new RE2("[%_\\\\]", "g"), "\\$&");
};
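// Usage sketch for sanitizeSqlLikeString above: LIKE wildcards and backslashes in
// user-supplied search terms are escaped so they match literally, e.g. before building a
// `name LIKE '%' || ? || '%'` filter (exact behavior also depends on the dialect's ESCAPE).
console.log(sanitizeSqlLikeString("50%_off\\sale")); // 50\%\_off\\sale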
|
||||
@@ -1,6 +1,8 @@
|
||||
import fs from "node:fs";
|
||||
import path from "node:path";
|
||||
|
||||
import staticServe from "@fastify/static";
|
||||
import RE2 from "re2";
|
||||
|
||||
import { getConfig, IS_PACKAGED } from "@app/lib/config/env";
|
||||
|
||||
@@ -15,6 +17,9 @@ export const registerServeUI = async (
|
||||
dir: string;
|
||||
}
|
||||
) => {
|
||||
const appCfg = getConfig();
|
||||
const cdnHost = appCfg.CDN_HOST || "";
|
||||
|
||||
// Use this only for frontend runtime, static, non-sensitive configuration that the app
// needs before it loads in standalone mode (e.g. the PostHog DSN key).
// For most other use cases, use the server config instead.
@@ -25,15 +30,26 @@
  hide: true
},
handler: (_req, res) => {
  const appCfg = getConfig();
  void res.type("application/javascript");
  const config = {
    CAPTCHA_SITE_KEY: appCfg.CAPTCHA_SITE_KEY,
    POSTHOG_API_KEY: appCfg.POSTHOG_PROJECT_API_KEY,
    INTERCOM_ID: appCfg.INTERCOM_ID,
    TELEMETRY_CAPTURING_ENABLED: appCfg.TELEMETRY_ENABLED
    TELEMETRY_CAPTURING_ENABLED: appCfg.TELEMETRY_ENABLED,
    CDN_HOST: cdnHost
  };
  const js = `window.__INFISICAL_RUNTIME_ENV__ = Object.freeze(${JSON.stringify(config)});`;
  // Define window.__toCdnUrl for Vite's experimental.renderBuiltUrl runtime support
  // This function is called by dynamically imported chunks to resolve CDN URLs
  const js = `
    window.__INFISICAL_RUNTIME_ENV__ = Object.freeze(${JSON.stringify(config)});
    window.__toCdnUrl = function(filename) {
      var cdnHost = window.__INFISICAL_RUNTIME_ENV__.CDN_HOST || "";
      if (cdnHost && filename.startsWith("assets/")) {
        return cdnHost + "/" + filename;
      }
      return "/" + filename;
    };
  `.trim();
  return res.send(js);
}
});
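// Sketch of how the browser bundle might consume the runtime script served above. The
// window globals are the ones the handler defines; the CDN host value and the chunk file
// name are illustrative.
declare global {
  interface Window {
    __INFISICAL_RUNTIME_ENV__: { CDN_HOST?: string; TELEMETRY_CAPTURING_ENABLED?: boolean };
    __toCdnUrl: (filename: string) => string;
  }
}

// A dynamically imported chunk resolves its own URL through the injected helper:
const chunkUrl = window.__toCdnUrl("assets/chunk-settings.js");
// => "https://cdn.example.com/assets/chunk-settings.js" when CDN_HOST is set,
// => "/assets/chunk-settings.js" otherwise.
console.log(chunkUrl);

export {};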
@@ -41,11 +57,30 @@ export const registerServeUI = async (
|
||||
if (standaloneMode) {
|
||||
const frontendName = IS_PACKAGED ? "frontend" : "frontend-build";
|
||||
const frontendPath = path.join(dir, frontendName);
|
||||
|
||||
const indexHtmlPath = path.join(frontendPath, "index.html");
let indexHtml = fs.readFileSync(indexHtmlPath, "utf-8");

if (cdnHost) {
  // Replace relative asset paths with CDN URLs in script and link tags
  indexHtml = indexHtml
    .replace(/src="\/assets\//g, `src="${cdnHost}/assets/`)
    .replace(/href="\/assets\//g, `href="${cdnHost}/assets/`);

  // Inject CDN host into CSP directives that need it
  const cspDirectives = ["script-src", "style-src", "font-src", "connect-src", "media-src"];
  for (const directive of cspDirectives) {
    const regex = new RE2(`(${directive}\\s+'self')`, "g");
    indexHtml = indexHtml.replace(regex, `$1 ${cdnHost}`);
  }
}

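// Worked example of the rewrite above (illustrative values): asset URLs and the matching
// CSP directives both gain the CDN host, so the browser is allowed to load scripts and
// styles from it; directives outside the list (e.g. default-src) are left untouched.
const before =
  `<script src="/assets/index.js"></script>` +
  ` content="default-src 'self'; script-src 'self'; style-src 'self' 'unsafe-inline'"`;
// After the replacements, assuming cdnHost = "https://cdn.example.com":
const after =
  `<script src="https://cdn.example.com/assets/index.js"></script>` +
  ` content="default-src 'self'; script-src 'self' https://cdn.example.com; style-src 'self' https://cdn.example.com 'unsafe-inline'"`;
console.log(before !== after); // true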
await server.register(staticServe, {
|
||||
root: frontendPath,
|
||||
wildcard: false,
|
||||
maxAge: "30d",
|
||||
immutable: true
|
||||
immutable: true,
|
||||
index: false
|
||||
});
|
||||
|
||||
server.route({
|
||||
@@ -60,12 +95,12 @@ export const registerServeUI = async (
|
||||
return;
|
||||
}
|
||||
|
||||
return reply.sendFile("index.html", {
|
||||
immutable: false,
|
||||
maxAge: 0,
|
||||
lastModified: false,
|
||||
etag: false
|
||||
});
|
||||
return reply
|
||||
.type("text/html")
|
||||
.header("Cache-Control", "no-cache, no-store, must-revalidate")
|
||||
.header("Pragma", "no-cache")
|
||||
.header("Expires", "0")
|
||||
.send(indexHtml);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
@@ -46,6 +46,7 @@ import { githubOrgSyncDALFactory } from "@app/ee/services/github-org-sync/github
|
||||
import { githubOrgSyncServiceFactory } from "@app/ee/services/github-org-sync/github-org-sync-service";
|
||||
import { groupDALFactory } from "@app/ee/services/group/group-dal";
|
||||
import { groupServiceFactory } from "@app/ee/services/group/group-service";
|
||||
import { identityGroupMembershipDALFactory } from "@app/ee/services/group/identity-group-membership-dal";
|
||||
import { userGroupMembershipDALFactory } from "@app/ee/services/group/user-group-membership-dal";
|
||||
import { isHsmActiveAndEnabled } from "@app/ee/services/hsm/hsm-fns";
|
||||
import { THsmServiceFactory } from "@app/ee/services/hsm/hsm-service";
|
||||
@@ -470,6 +471,7 @@ export const registerRoutes = async (
|
||||
const identityMetadataDAL = identityMetadataDALFactory(db);
|
||||
const identityAccessTokenDAL = identityAccessTokenDALFactory(db);
|
||||
const identityOrgMembershipDAL = identityOrgDALFactory(db);
|
||||
const identityGroupMembershipDAL = identityGroupMembershipDALFactory(db);
|
||||
const identityProjectDAL = identityProjectDALFactory(db);
|
||||
const identityAuthTemplateDAL = identityAuthTemplateDALFactory(db);
|
||||
|
||||
@@ -754,6 +756,9 @@ export const registerRoutes = async (
|
||||
membershipGroupDAL
|
||||
});
|
||||
const groupService = groupServiceFactory({
|
||||
identityDAL,
|
||||
membershipDAL,
|
||||
identityGroupMembershipDAL,
|
||||
userDAL,
|
||||
groupDAL,
|
||||
orgDAL,
|
||||
@@ -1356,10 +1361,7 @@ export const registerRoutes = async (
|
||||
permissionService,
|
||||
projectDAL,
|
||||
projectSshConfigDAL,
|
||||
secretDAL,
|
||||
secretV2BridgeDAL,
|
||||
projectQueue: projectQueueService,
|
||||
projectBotService,
|
||||
userDAL,
|
||||
projectEnvDAL,
|
||||
orgDAL,
|
||||
@@ -1386,7 +1388,6 @@ export const registerRoutes = async (
|
||||
microsoftTeamsIntegrationDAL,
|
||||
projectTemplateService,
|
||||
smtpService,
|
||||
reminderService,
|
||||
notificationService,
|
||||
membershipGroupDAL,
|
||||
membershipIdentityDAL,
|
||||
@@ -2303,7 +2304,8 @@ export const registerRoutes = async (
|
||||
});
|
||||
|
||||
const acmeChallengeService = pkiAcmeChallengeServiceFactory({
|
||||
acmeChallengeDAL
|
||||
acmeChallengeDAL,
|
||||
auditLogService
|
||||
});
|
||||
|
||||
const pkiAcmeQueueService = await pkiAcmeQueueServiceFactory({
|
||||
@@ -2313,13 +2315,9 @@ export const registerRoutes = async (
|
||||
|
||||
const pkiAcmeService = pkiAcmeServiceFactory({
|
||||
projectDAL,
|
||||
appConnectionDAL,
|
||||
certificateDAL,
|
||||
certificateAuthorityDAL,
|
||||
externalCertificateAuthorityDAL,
|
||||
certificateProfileDAL,
|
||||
certificateBodyDAL,
|
||||
certificateSecretDAL,
|
||||
certificateTemplateV2DAL,
|
||||
acmeAccountDAL,
|
||||
acmeOrderDAL,
|
||||
@@ -2331,8 +2329,11 @@ export const registerRoutes = async (
|
||||
licenseService,
|
||||
certificateV3Service,
|
||||
certificateTemplateV2Service,
|
||||
certificateRequestService,
|
||||
certificateIssuanceQueue,
|
||||
acmeChallengeService,
|
||||
pkiAcmeQueueService
|
||||
pkiAcmeQueueService,
|
||||
auditLogService
|
||||
});
|
||||
|
||||
const pkiSubscriberService = pkiSubscriberServiceFactory({
|
||||
|
||||
@@ -101,6 +101,10 @@ import {
|
||||
NorthflankConnectionListItemSchema,
|
||||
SanitizedNorthflankConnectionSchema
|
||||
} from "@app/services/app-connection/northflank";
|
||||
import {
|
||||
OctopusDeployConnectionListItemSchema,
|
||||
SanitizedOctopusDeployConnectionSchema
|
||||
} from "@app/services/app-connection/octopus-deploy";
|
||||
import { OktaConnectionListItemSchema, SanitizedOktaConnectionSchema } from "@app/services/app-connection/okta";
|
||||
import {
|
||||
PostgresConnectionListItemSchema,
|
||||
@@ -180,7 +184,8 @@ const SanitizedAppConnectionSchema = z.union([
|
||||
...SanitizedMongoDBConnectionSchema.options,
|
||||
...SanitizedLaravelForgeConnectionSchema.options,
|
||||
...SanitizedChefConnectionSchema.options,
|
||||
...SanitizedDNSMadeEasyConnectionSchema.options
|
||||
...SanitizedDNSMadeEasyConnectionSchema.options,
|
||||
...SanitizedOctopusDeployConnectionSchema.options
|
||||
]);
|
||||
|
||||
const AppConnectionOptionsSchema = z.discriminatedUnion("app", [
|
||||
@@ -227,7 +232,8 @@ const AppConnectionOptionsSchema = z.discriminatedUnion("app", [
|
||||
MongoDBConnectionListItemSchema,
|
||||
LaravelForgeConnectionListItemSchema,
|
||||
ChefConnectionListItemSchema,
|
||||
DNSMadeEasyConnectionListItemSchema
|
||||
DNSMadeEasyConnectionListItemSchema,
|
||||
OctopusDeployConnectionListItemSchema
|
||||
]);
|
||||
|
||||
export const registerAppConnectionRouter = async (server: FastifyZodProvider) => {
|
||||
|
||||
@@ -33,6 +33,7 @@ import { registerMsSqlConnectionRouter } from "./mssql-connection-router";
|
||||
import { registerMySqlConnectionRouter } from "./mysql-connection-router";
|
||||
import { registerNetlifyConnectionRouter } from "./netlify-connection-router";
|
||||
import { registerNorthflankConnectionRouter } from "./northflank-connection-router";
|
||||
import { registerOctopusDeployConnectionRouter } from "./octopus-deploy-connection-router";
|
||||
import { registerOktaConnectionRouter } from "./okta-connection-router";
|
||||
import { registerPostgresConnectionRouter } from "./postgres-connection-router";
|
||||
import { registerRailwayConnectionRouter } from "./railway-connection-router";
|
||||
@@ -92,5 +93,6 @@ export const APP_CONNECTION_REGISTER_ROUTER_MAP: Record<AppConnection, (server:
|
||||
[AppConnection.Okta]: registerOktaConnectionRouter,
|
||||
[AppConnection.Redis]: registerRedisConnectionRouter,
|
||||
[AppConnection.MongoDB]: registerMongoDBConnectionRouter,
|
||||
[AppConnection.Chef]: registerChefConnectionRouter
|
||||
[AppConnection.Chef]: registerChefConnectionRouter,
|
||||
[AppConnection.OctopusDeploy]: registerOctopusDeployConnectionRouter
|
||||
};
|
||||
|
||||
@@ -0,0 +1,168 @@
|
||||
import { z } from "zod";
|
||||
|
||||
import { BadRequestError } from "@app/lib/errors";
|
||||
import { readLimit } from "@app/server/config/rateLimiter";
|
||||
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
|
||||
import {
|
||||
CreateOctopusDeployConnectionSchema,
|
||||
SanitizedOctopusDeployConnectionSchema,
|
||||
UpdateOctopusDeployConnectionSchema
|
||||
} from "@app/services/app-connection/octopus-deploy";
|
||||
import { AuthMode } from "@app/services/auth/auth-type";
|
||||
|
||||
import { registerAppConnectionEndpoints } from "./app-connection-endpoints";
|
||||
|
||||
export const registerOctopusDeployConnectionRouter = async (server: FastifyZodProvider) => {
|
||||
registerAppConnectionEndpoints({
|
||||
app: AppConnection.OctopusDeploy,
|
||||
server,
|
||||
sanitizedResponseSchema: SanitizedOctopusDeployConnectionSchema,
|
||||
createSchema: CreateOctopusDeployConnectionSchema,
|
||||
updateSchema: UpdateOctopusDeployConnectionSchema
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "GET",
|
||||
url: `/:connectionId/spaces`,
|
||||
config: {
|
||||
rateLimit: readLimit
|
||||
},
|
||||
schema: {
|
||||
params: z.object({
|
||||
connectionId: z.string().uuid()
|
||||
}),
|
||||
response: {
|
||||
200: z.array(
|
||||
z.object({
|
||||
id: z.string(),
|
||||
name: z.string(),
|
||||
slug: z.string(),
|
||||
isDefault: z.boolean()
|
||||
})
|
||||
)
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT]),
|
||||
handler: async (req) => {
|
||||
const { connectionId } = req.params;
|
||||
|
||||
const spaces = await server.services.appConnection.octopusDeploy.listSpaces(connectionId, req.permission);
|
||||
|
||||
return spaces;
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "GET",
|
||||
url: `/:connectionId/projects`,
|
||||
config: {
|
||||
rateLimit: readLimit
|
||||
},
|
||||
schema: {
|
||||
params: z.object({
|
||||
connectionId: z.string().uuid()
|
||||
}),
|
||||
querystring: z.object({
|
||||
spaceId: z.string().min(1, "Space ID is required")
|
||||
}),
|
||||
response: {
|
||||
200: z.array(
|
||||
z.object({
|
||||
id: z.string(),
|
||||
name: z.string(),
|
||||
slug: z.string()
|
||||
})
|
||||
)
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT]),
|
||||
handler: async (req) => {
|
||||
const { connectionId } = req.params;
|
||||
const { spaceId } = req.query;
|
||||
|
||||
const projects = await server.services.appConnection.octopusDeploy.listProjects(
|
||||
connectionId,
|
||||
spaceId,
|
||||
req.permission
|
||||
);
|
||||
|
||||
return projects;
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "GET",
|
||||
url: `/:connectionId/scope-values`,
|
||||
config: {
|
||||
rateLimit: readLimit
|
||||
},
|
||||
schema: {
|
||||
params: z.object({
|
||||
connectionId: z.string().uuid()
|
||||
}),
|
||||
querystring: z.object({
|
||||
spaceId: z.string().min(1, "Space ID is required"),
|
||||
projectId: z.string().min(1, "Project ID is required")
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
environments: z
|
||||
.object({
|
||||
id: z.string(),
|
||||
name: z.string()
|
||||
})
|
||||
.array(),
|
||||
roles: z
|
||||
.object({
|
||||
id: z.string(),
|
||||
name: z.string()
|
||||
})
|
||||
.array(),
|
||||
machines: z
|
||||
.object({
|
||||
id: z.string(),
|
||||
name: z.string()
|
||||
})
|
||||
.array(),
|
||||
processes: z
|
||||
.object({
|
||||
id: z.string(),
|
||||
name: z.string()
|
||||
})
|
||||
.array(),
|
||||
actions: z
|
||||
.object({
|
||||
id: z.string(),
|
||||
name: z.string()
|
||||
})
|
||||
.array(),
|
||||
channels: z
|
||||
.object({
|
||||
id: z.string(),
|
||||
name: z.string()
|
||||
})
|
||||
.array()
|
||||
})
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT]),
|
||||
handler: async (req) => {
|
||||
const { connectionId } = req.params;
|
||||
const { spaceId, projectId } = req.query;
|
||||
|
||||
const scopeValues = await server.services.appConnection.octopusDeploy.getScopeValues(
|
||||
connectionId,
|
||||
spaceId,
|
||||
projectId,
|
||||
req.permission
|
||||
);
|
||||
|
||||
if (!scopeValues) {
|
||||
throw new BadRequestError({ message: "Unable to get Octopus Deploy scope values" });
|
||||
}
|
||||
|
||||
return scopeValues;
|
||||
}
|
||||
});
|
||||
};
|
||||
@@ -7,6 +7,7 @@ import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||
import { ApprovalPolicyType } from "@app/services/approval-policy/approval-policy-enums";
|
||||
import {
|
||||
TApprovalPolicy,
|
||||
TApprovalPolicyInputs,
|
||||
TCreatePolicyDTO,
|
||||
TCreateRequestDTO,
|
||||
TUpdatePolicyDTO
|
||||
@@ -21,7 +22,8 @@ export const registerApprovalPolicyEndpoints = <P extends TApprovalPolicy>({
|
||||
policyResponseSchema,
|
||||
createRequestSchema,
|
||||
requestResponseSchema,
|
||||
grantResponseSchema
|
||||
grantResponseSchema,
|
||||
inputsSchema
|
||||
}: {
|
||||
server: FastifyZodProvider;
|
||||
policyType: ApprovalPolicyType;
|
||||
@@ -41,6 +43,7 @@ export const registerApprovalPolicyEndpoints = <P extends TApprovalPolicy>({
|
||||
createRequestSchema: z.ZodType<TCreateRequestDTO>;
|
||||
requestResponseSchema: z.ZodTypeAny;
|
||||
grantResponseSchema: z.ZodTypeAny;
|
||||
inputsSchema: z.ZodType<TApprovalPolicyInputs>;
|
||||
}) => {
|
||||
// Policies
|
||||
server.route({
|
||||
@@ -622,4 +625,31 @@ export const registerApprovalPolicyEndpoints = <P extends TApprovalPolicy>({
|
||||
return { grant };
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
  method: "POST",
  url: "/check-policy-match",
  config: {
    rateLimit: readLimit
  },
  schema: {
    description: "Check if a resource path matches any approval policy and if the user has an active grant",
    body: z.object({
      projectId: z.string().uuid(),
      inputs: inputsSchema
    }),
    response: {
      200: z.object({
        requiresApproval: z.boolean(),
        hasActiveGrant: z.boolean()
      })
    }
  },
  onRequest: verifyAuth([AuthMode.JWT]),
  handler: async (req) => {
    const result = await server.services.approvalPolicy.checkPolicyMatch(policyType, req.body, req.permission);

    return result;
  }
});
};
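// Hypothetical client-side call to the check-policy-match endpoint registered above. The
// URL prefix and the shape of `inputs` depend on the concrete policy type (a PAM access
// policy is assumed here); only requiresApproval/hasActiveGrant come from the response
// schema in this diff.
const checkPolicyMatch = async (jwt: string, projectId: string) => {
  const res = await fetch("/api/v1/pam/approval-policies/check-policy-match", {
    method: "POST",
    headers: { "Content-Type": "application/json", Authorization: `Bearer ${jwt}` },
    body: JSON.stringify({
      projectId,
      inputs: { resourcePath: "/prod/db/primary" } // hypothetical PAM inputs
    })
  });
  return (await res.json()) as { requiresApproval: boolean; hasActiveGrant: boolean };
};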
|
||||
@@ -2,6 +2,7 @@ import { ApprovalPolicyType } from "@app/services/approval-policy/approval-polic
|
||||
import {
|
||||
CreatePamAccessPolicySchema,
|
||||
CreatePamAccessRequestSchema,
|
||||
PamAccessPolicyInputsSchema,
|
||||
PamAccessPolicySchema,
|
||||
PamAccessRequestGrantSchema,
|
||||
PamAccessRequestSchema,
|
||||
@@ -23,7 +24,8 @@ export const APPROVAL_POLICY_REGISTER_ROUTER_MAP: Record<
|
||||
policyResponseSchema: PamAccessPolicySchema,
|
||||
createRequestSchema: CreatePamAccessRequestSchema,
|
||||
requestResponseSchema: PamAccessRequestSchema,
|
||||
grantResponseSchema: PamAccessRequestGrantSchema
|
||||
grantResponseSchema: PamAccessRequestGrantSchema,
|
||||
inputsSchema: PamAccessPolicyInputsSchema
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
@@ -47,7 +47,11 @@ export const registerCertificateProfilesRouter = async (server: FastifyZodProvid
|
||||
renewBeforeDays: z.number().min(1).max(30).optional()
|
||||
})
|
||||
.optional(),
|
||||
acmeConfig: z.object({}).optional(),
|
||||
acmeConfig: z
|
||||
.object({
|
||||
skipDnsOwnershipVerification: z.boolean().optional()
|
||||
})
|
||||
.optional(),
|
||||
externalConfigs: ExternalConfigUnionSchema
|
||||
})
|
||||
.refine(
|
||||
@@ -245,7 +249,8 @@ export const registerCertificateProfilesRouter = async (server: FastifyZodProvid
|
||||
acmeConfig: z
|
||||
.object({
|
||||
id: z.string(),
|
||||
directoryUrl: z.string()
|
||||
directoryUrl: z.string(),
|
||||
skipDnsOwnershipVerification: z.boolean().optional()
|
||||
})
|
||||
.optional(),
|
||||
externalConfigs: ExternalConfigUnionSchema
|
||||
@@ -434,6 +439,11 @@ export const registerCertificateProfilesRouter = async (server: FastifyZodProvid
|
||||
renewBeforeDays: z.number().min(1).max(30).optional()
|
||||
})
|
||||
.optional(),
|
||||
acmeConfig: z
|
||||
.object({
|
||||
skipDnsOwnershipVerification: z.boolean().optional()
|
||||
})
|
||||
.optional(),
|
||||
externalConfigs: ExternalConfigUnionSchema
|
||||
})
|
||||
.refine(
|
||||
|
||||
@@ -25,6 +25,7 @@ import { EnrollmentType } from "@app/services/certificate-profile/certificate-pr
|
||||
import { CertificateRequestStatus } from "@app/services/certificate-request/certificate-request-types";
|
||||
import { validateTemplateRegexField } from "@app/services/certificate-template/certificate-template-validators";
|
||||
import { TCertificateFromProfileResponse } from "@app/services/certificate-v3/certificate-v3-types";
|
||||
import { ProjectFilterType } from "@app/services/project/project-types";
|
||||
|
||||
import { booleanSchema } from "../sanitizedSchemas";
|
||||
|
||||
@@ -353,6 +354,123 @@ export const registerCertificateRouter = async (server: FastifyZodProvider) => {
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "GET",
|
||||
url: "/certificate-requests",
|
||||
config: {
|
||||
rateLimit: readLimit
|
||||
},
|
||||
schema: {
|
||||
hide: false,
|
||||
tags: [ApiDocsTags.PkiCertificates],
|
||||
querystring: z.object({
|
||||
projectSlug: z.string().min(1).trim(),
|
||||
offset: z.coerce.number().min(0).default(0),
|
||||
limit: z.coerce.number().min(1).max(100).default(20),
|
||||
search: z.string().trim().optional(),
|
||||
status: z.nativeEnum(CertificateRequestStatus).optional(),
|
||||
fromDate: z.coerce.date().optional(),
|
||||
toDate: z.coerce.date().optional(),
|
||||
profileIds: z
|
||||
.string()
|
||||
.transform((val) => val.split(",").map((id) => id.trim()))
|
||||
.pipe(z.array(z.string().uuid()))
|
||||
.optional()
|
||||
.describe("Comma-separated list of profile IDs"),
|
||||
sortBy: z.string().trim().optional(),
|
||||
sortOrder: z.enum(["asc", "desc"]).optional()
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
certificateRequests: z.array(
|
||||
z.object({
|
||||
id: z.string(),
|
||||
status: z.nativeEnum(CertificateRequestStatus),
|
||||
commonName: z.string().nullable(),
|
||||
altNames: z.string().nullable(),
|
||||
profileId: z.string().nullable(),
|
||||
profileName: z.string().nullable(),
|
||||
caId: z.string().nullable(),
|
||||
certificateId: z.string().nullable(),
|
||||
errorMessage: z.string().nullable(),
|
||||
createdAt: z.date(),
|
||||
updatedAt: z.date(),
|
||||
certificate: z
|
||||
.object({
|
||||
id: z.string(),
|
||||
serialNumber: z.string(),
|
||||
status: z.string()
|
||||
})
|
||||
.nullable()
|
||||
})
|
||||
),
|
||||
totalCount: z.number()
|
||||
})
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
handler: async (req) => {
|
||||
const project = await server.services.project.getAProject({
|
||||
actor: req.permission.type,
|
||||
actorId: req.permission.id,
|
||||
actorOrgId: req.permission.orgId,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
filter: {
|
||||
type: ProjectFilterType.SLUG,
|
||||
slug: req.query.projectSlug,
|
||||
orgId: req.permission.orgId
|
||||
}
|
||||
});
|
||||
|
||||
const { certificateRequests, totalCount } = await server.services.certificateRequest.listCertificateRequests({
|
||||
actor: req.permission.type,
|
||||
actorId: req.permission.id,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorOrgId: req.permission.orgId,
|
||||
projectId: project.id,
|
||||
offset: req.query.offset,
|
||||
limit: req.query.limit,
|
||||
search: req.query.search,
|
||||
status: req.query.status,
|
||||
fromDate: req.query.fromDate,
|
||||
toDate: req.query.toDate,
|
||||
profileIds: req.query.profileIds,
|
||||
sortBy: req.query.sortBy,
|
||||
sortOrder: req.query.sortOrder
|
||||
});
|
||||
|
||||
await server.services.auditLog.createAuditLog({
|
||||
...req.auditLogInfo,
|
||||
projectId: project.id,
|
||||
event: {
|
||||
type: EventType.LIST_CERTIFICATE_REQUESTS,
|
||||
metadata: {
|
||||
offset: req.query.offset,
|
||||
limit: req.query.limit,
|
||||
search: req.query.search,
|
||||
status: req.query.status,
|
||||
count: certificateRequests.length,
|
||||
certificateRequestIds: certificateRequests.map((certReq) => certReq.id)
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return {
|
||||
certificateRequests: certificateRequests.map((certReq) => ({
|
||||
...certReq,
|
||||
profileId: certReq.profileId ?? null,
|
||||
caId: certReq.caId ?? null,
|
||||
certificateId: certReq.certificateId ?? null,
|
||||
commonName: certReq.commonName ?? null,
|
||||
altNames: certReq.altNames ?? null,
|
||||
errorMessage: certReq.errorMessage ?? null,
|
||||
profileName: certReq.profileName ?? null
|
||||
})),
|
||||
totalCount
|
||||
};
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "POST",
|
||||
url: "/issue-certificate",
|
||||
|
||||
@@ -359,6 +359,21 @@ export const registerDashboardRouter = async (server: FastifyZodProvider) => {
|
||||
// get the count of unique dynamic secret names to properly adjust remaining limit
|
||||
const uniqueDynamicSecretsCount = new Set(dynamicSecrets.map((dynamicSecret) => dynamicSecret.name)).size;
|
||||
|
||||
if (dynamicSecrets.length) {
|
||||
await server.services.auditLog.createAuditLog({
|
||||
...req.auditLogInfo,
|
||||
projectId,
|
||||
event: {
|
||||
type: EventType.LIST_DYNAMIC_SECRETS,
|
||||
metadata: {
|
||||
environment: [...new Set(dynamicSecrets.map((dynamicSecret) => dynamicSecret.environment))].join(","),
|
||||
secretPath,
|
||||
projectId
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
remainingLimit -= uniqueDynamicSecretsCount;
|
||||
adjustedOffset = 0;
|
||||
} else {
|
||||
@@ -738,7 +753,9 @@ export const registerDashboardRouter = async (server: FastifyZodProvider) => {
|
||||
reminder: Awaited<ReturnType<typeof server.services.reminder.getRemindersForDashboard>>[string] | null;
|
||||
})[]
|
||||
| undefined;
|
||||
let dynamicSecrets: Awaited<ReturnType<typeof server.services.dynamicSecret.listDynamicSecretsByEnv>> | undefined;
|
||||
let dynamicSecrets:
|
||||
| Awaited<ReturnType<typeof server.services.dynamicSecret.listDynamicSecretsByEnv>>["dynamicSecrets"]
|
||||
| undefined;
|
||||
let secretRotations:
|
||||
| (Awaited<ReturnType<typeof server.services.secretRotationV2.getDashboardSecretRotations>>[number] & {
|
||||
secrets: (NonNullable<
|
||||
@@ -923,7 +940,7 @@ export const registerDashboardRouter = async (server: FastifyZodProvider) => {
|
||||
});
|
||||
|
||||
if (remainingLimit > 0 && totalDynamicSecretCount > adjustedOffset) {
|
||||
dynamicSecrets = await server.services.dynamicSecret.listDynamicSecretsByEnv({
|
||||
const { dynamicSecrets: dynamicSecretCfgs } = await server.services.dynamicSecret.listDynamicSecretsByEnv({
|
||||
actor: req.permission.type,
|
||||
actorId: req.permission.id,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
@@ -938,6 +955,23 @@ export const registerDashboardRouter = async (server: FastifyZodProvider) => {
|
||||
offset: adjustedOffset
|
||||
});
|
||||
|
||||
if (dynamicSecretCfgs.length) {
|
||||
await server.services.auditLog.createAuditLog({
|
||||
...req.auditLogInfo,
|
||||
projectId,
|
||||
event: {
|
||||
type: EventType.LIST_DYNAMIC_SECRETS,
|
||||
metadata: {
|
||||
environment,
|
||||
secretPath,
|
||||
projectId
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
dynamicSecrets = dynamicSecretCfgs;
|
||||
|
||||
remainingLimit -= dynamicSecrets.length;
|
||||
adjustedOffset = 0;
|
||||
} else {
|
||||
@@ -1263,6 +1297,27 @@ export const registerDashboardRouter = async (server: FastifyZodProvider) => {
|
||||
|
||||
const sliceQuickSearch = <T>(array: T[]) => array.slice(0, 25);
|
||||
|
||||
const filteredDynamicSecrets = sliceQuickSearch(
|
||||
searchPath ? dynamicSecrets.filter((dynamicSecret) => dynamicSecret.path.endsWith(searchPath)) : dynamicSecrets
|
||||
);
|
||||
|
||||
if (filteredDynamicSecrets?.length) {
|
||||
await server.services.auditLog.createAuditLog({
|
||||
projectId,
|
||||
...req.auditLogInfo,
|
||||
event: {
|
||||
type: EventType.LIST_DYNAMIC_SECRETS,
|
||||
metadata: {
|
||||
environment: [...new Set(filteredDynamicSecrets.map((dynamicSecret) => dynamicSecret.environment))].join(
|
||||
","
|
||||
),
|
||||
secretPath: [...new Set(filteredDynamicSecrets.map((dynamicSecret) => dynamicSecret.path))].join(","),
|
||||
projectId
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
return {
|
||||
secrets: sliceQuickSearch(
|
||||
searchPath ? secrets.filter((secret) => secret.secretPath.endsWith(searchPath)) : secrets
|
||||
|
||||
@@ -2,6 +2,7 @@ import { z } from "zod";
|
||||
|
||||
import { ExternalGroupOrgRoleMappingsSchema } from "@app/db/schemas/external-group-org-role-mappings";
|
||||
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
|
||||
import { ApiDocsTags, Scim } from "@app/lib/api-docs";
|
||||
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
|
||||
import { slugSchema } from "@app/server/lib/schemas";
|
||||
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||
@@ -16,6 +17,8 @@ export const registerExternalGroupOrgRoleMappingRouter = async (server: FastifyZ
|
||||
rateLimit: readLimit
|
||||
},
|
||||
schema: {
|
||||
hide: false,
|
||||
tags: [ApiDocsTags.Scim],
|
||||
response: {
|
||||
200: ExternalGroupOrgRoleMappingsSchema.array()
|
||||
}
|
||||
@@ -44,11 +47,13 @@ export const registerExternalGroupOrgRoleMappingRouter = async (server: FastifyZ
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
schema: {
|
||||
hide: false,
|
||||
tags: [ApiDocsTags.Scim],
|
||||
body: z.object({
|
||||
mappings: z
|
||||
.object({
|
||||
groupName: z.string().trim().min(1),
|
||||
roleSlug: slugSchema({ max: 64 })
|
||||
groupName: z.string().trim().min(1).describe(Scim.UPDATE_GROUP_ORG_ROLE_MAPPINGS.groupName),
|
||||
roleSlug: slugSchema({ max: 64 }).describe(Scim.UPDATE_GROUP_ORG_ROLE_MAPPINGS.roleSlug)
|
||||
})
|
||||
.array()
|
||||
}),
|
||||
|
||||
@@ -9,7 +9,7 @@ import {
|
||||
TemporaryPermissionMode,
|
||||
UsersSchema
|
||||
} from "@app/db/schemas";
|
||||
import { EFilterReturnedUsers } from "@app/ee/services/group/group-types";
|
||||
import { FilterReturnedUsers } from "@app/ee/services/group/group-types";
|
||||
import { ApiDocsTags, GROUPS, PROJECTS } from "@app/lib/api-docs";
|
||||
import { ms } from "@app/lib/ms";
|
||||
import { isUuidV4 } from "@app/lib/validator";
|
||||
@@ -355,9 +355,10 @@ export const registerGroupProjectRouter = async (server: FastifyZodProvider) =>
|
||||
rateLimit: readLimit
|
||||
},
|
||||
schema: {
|
||||
hide: false,
|
||||
hide: true,
|
||||
deprecated: true,
|
||||
tags: [ApiDocsTags.ProjectGroups],
|
||||
description: "Return project group users",
|
||||
description: "Return project group users (Deprecated: Use /api/v1/groups/{id}/users instead)",
|
||||
params: z.object({
|
||||
projectId: z.string().trim().describe(GROUPS.LIST_USERS.projectId),
|
||||
groupId: z.string().trim().describe(GROUPS.LIST_USERS.id)
|
||||
@@ -367,7 +368,7 @@ export const registerGroupProjectRouter = async (server: FastifyZodProvider) =>
|
||||
limit: z.coerce.number().min(1).max(100).default(10).describe(GROUPS.LIST_USERS.limit),
|
||||
username: z.string().trim().optional().describe(GROUPS.LIST_USERS.username),
|
||||
search: z.string().trim().optional().describe(GROUPS.LIST_USERS.search),
|
||||
filter: z.nativeEnum(EFilterReturnedUsers).optional().describe(GROUPS.LIST_USERS.filterUsers)
|
||||
filter: z.nativeEnum(FilterReturnedUsers).optional().describe(GROUPS.LIST_USERS.filterUsers)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
|
||||
@@ -236,7 +236,7 @@ export const registerV1Routes = async (server: FastifyZodProvider) => {
|
||||
await server.register(registerUserEngagementRouter, { prefix: "/user-engagement" });
|
||||
await server.register(registerDashboardRouter, { prefix: "/dashboard" });
|
||||
await server.register(registerCmekRouter, { prefix: "/kms" });
|
||||
await server.register(registerExternalGroupOrgRoleMappingRouter, { prefix: "/external-group-mappings" });
|
||||
await server.register(registerExternalGroupOrgRoleMappingRouter, { prefix: "/scim/group-org-role-mappings" });
|
||||
|
||||
await server.register(
|
||||
async (appConnectionRouter) => {
|
||||
|
||||
@@ -1209,7 +1209,16 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
|
||||
.boolean()
|
||||
.default(false)
|
||||
.optional()
|
||||
.describe("Retrieve only certificates available for PKI sync")
|
||||
.describe("Retrieve only certificates available for PKI sync"),
|
||||
search: z.string().trim().optional().describe("Search by SAN, CN, certificate ID, or serial number"),
|
||||
status: z.string().optional().describe("Filter by certificate status"),
|
||||
profileIds: z
|
||||
.union([z.string().uuid(), z.array(z.string().uuid())])
|
||||
.transform((val) => (Array.isArray(val) ? val : [val]))
|
||||
.optional()
|
||||
.describe("Filter by profile IDs"),
|
||||
fromDate: z.coerce.date().optional().describe("Filter certificates created from this date"),
|
||||
toDate: z.coerce.date().optional().describe("Filter certificates created until this date")
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
|
||||
@@ -25,6 +25,7 @@ import { registerHumanitecSyncRouter } from "./humanitec-sync-router";
|
||||
import { registerLaravelForgeSyncRouter } from "./laravel-forge-sync-router";
|
||||
import { registerNetlifySyncRouter } from "./netlify-sync-router";
|
||||
import { registerNorthflankSyncRouter } from "./northflank-sync-router";
|
||||
import { registerOctopusDeploySyncRouter } from "./octopus-deploy-sync-router";
|
||||
import { registerRailwaySyncRouter } from "./railway-sync-router";
|
||||
import { registerRenderSyncRouter } from "./render-sync-router";
|
||||
import { registerSupabaseSyncRouter } from "./supabase-sync-router";
|
||||
@@ -69,5 +70,6 @@ export const SECRET_SYNC_REGISTER_ROUTER_MAP: Record<SecretSync, (server: Fastif
|
||||
[SecretSync.Northflank]: registerNorthflankSyncRouter,
|
||||
[SecretSync.Bitbucket]: registerBitbucketSyncRouter,
|
||||
[SecretSync.LaravelForge]: registerLaravelForgeSyncRouter,
|
||||
[SecretSync.Chef]: registerChefSyncRouter
|
||||
[SecretSync.Chef]: registerChefSyncRouter,
|
||||
[SecretSync.OctopusDeploy]: registerOctopusDeploySyncRouter
|
||||
};
|
||||
|
||||
@@ -0,0 +1,17 @@
|
||||
import {
|
||||
CreateOctopusDeploySyncSchema,
|
||||
OctopusDeploySyncSchema,
|
||||
UpdateOctopusDeploySyncSchema
|
||||
} from "@app/services/secret-sync/octopus-deploy";
|
||||
import { SecretSync } from "@app/services/secret-sync/secret-sync-enums";
|
||||
|
||||
import { registerSyncSecretsEndpoints } from "./secret-sync-endpoints";
|
||||
|
||||
export const registerOctopusDeploySyncRouter = async (server: FastifyZodProvider) =>
|
||||
registerSyncSecretsEndpoints({
|
||||
destination: SecretSync.OctopusDeploy,
|
||||
server,
|
||||
responseSchema: OctopusDeploySyncSchema,
|
||||
createSchema: CreateOctopusDeploySyncSchema,
|
||||
updateSchema: UpdateOctopusDeploySyncSchema
|
||||
});
|
||||
@@ -48,6 +48,7 @@ import { HumanitecSyncListItemSchema, HumanitecSyncSchema } from "@app/services/
import { LaravelForgeSyncListItemSchema, LaravelForgeSyncSchema } from "@app/services/secret-sync/laravel-forge";
import { NetlifySyncListItemSchema, NetlifySyncSchema } from "@app/services/secret-sync/netlify";
import { NorthflankSyncListItemSchema, NorthflankSyncSchema } from "@app/services/secret-sync/northflank";
import { OctopusDeploySyncListItemSchema, OctopusDeploySyncSchema } from "@app/services/secret-sync/octopus-deploy";
import { RailwaySyncListItemSchema, RailwaySyncSchema } from "@app/services/secret-sync/railway/railway-sync-schemas";
import { RenderSyncListItemSchema, RenderSyncSchema } from "@app/services/secret-sync/render/render-sync-schemas";
import { SupabaseSyncListItemSchema, SupabaseSyncSchema } from "@app/services/secret-sync/supabase";
@@ -90,7 +91,8 @@ const SecretSyncSchema = z.discriminatedUnion("destination", [
  NorthflankSyncSchema,
  BitbucketSyncSchema,
  LaravelForgeSyncSchema,
  ChefSyncSchema
  ChefSyncSchema,
  OctopusDeploySyncSchema
]);

const SecretSyncOptionsSchema = z.discriminatedUnion("destination", [
@@ -126,7 +128,8 @@ const SecretSyncOptionsSchema = z.discriminatedUnion("destination", [
  NorthflankSyncListItemSchema,
  BitbucketSyncListItemSchema,
  LaravelForgeSyncListItemSchema,
  ChefSyncListItemSchema
  ChefSyncListItemSchema,
  OctopusDeploySyncListItemSchema
]);

export const registerSecretSyncRouter = async (server: FastifyZodProvider) => {
@@ -9,7 +9,7 @@ import {
  TemporaryPermissionMode,
  UsersSchema
} from "@app/db/schemas";
import { EFilterReturnedUsers } from "@app/ee/services/group/group-types";
import { FilterReturnedUsers } from "@app/ee/services/group/group-types";
import { ApiDocsTags, GROUPS, PROJECTS } from "@app/lib/api-docs";
import { ms } from "@app/lib/ms";
import { isUuidV4 } from "@app/lib/validator";
@@ -367,7 +367,7 @@ export const registerDeprecatedGroupProjectRouter = async (server: FastifyZodPro
        limit: z.coerce.number().min(1).max(100).default(10).describe(GROUPS.LIST_USERS.limit),
        username: z.string().trim().optional().describe(GROUPS.LIST_USERS.username),
        search: z.string().trim().optional().describe(GROUPS.LIST_USERS.search),
        filter: z.nativeEnum(EFilterReturnedUsers).optional().describe(GROUPS.LIST_USERS.filterUsers)
        filter: z.nativeEnum(FilterReturnedUsers).optional().describe(GROUPS.LIST_USERS.filterUsers)
      }),
      response: {
        200: z.object({
@@ -42,7 +42,8 @@ export enum AppConnection {
  MongoDB = "mongodb",
  LaravelForge = "laravel-forge",
  Chef = "chef",
  Northflank = "northflank"
  Northflank = "northflank",
  OctopusDeploy = "octopus-deploy"
}

export enum AWSRegion {
@@ -129,6 +129,11 @@ import {
|
||||
NorthflankConnectionMethod,
|
||||
validateNorthflankConnectionCredentials
|
||||
} from "./northflank";
|
||||
import {
|
||||
getOctopusDeployConnectionListItem,
|
||||
OctopusDeployConnectionMethod,
|
||||
validateOctopusDeployConnectionCredentials
|
||||
} from "./octopus-deploy";
|
||||
import { getOktaConnectionListItem, OktaConnectionMethod, validateOktaConnectionCredentials } from "./okta";
|
||||
import { getPostgresConnectionListItem, PostgresConnectionMethod } from "./postgres";
|
||||
import { getRailwayConnectionListItem, validateRailwayConnectionCredentials } from "./railway";
|
||||
@@ -211,6 +216,7 @@ export const listAppConnectionOptions = (projectType?: ProjectType) => {
|
||||
getHerokuConnectionListItem(),
|
||||
getRenderConnectionListItem(),
|
||||
getLaravelForgeConnectionListItem(),
|
||||
getOctopusDeployConnectionListItem(),
|
||||
getFlyioConnectionListItem(),
|
||||
getGitLabConnectionListItem(),
|
||||
getCloudflareConnectionListItem(),
|
||||
@@ -360,7 +366,8 @@ export const validateAppConnectionCredentials = async (
|
||||
[AppConnection.Okta]: validateOktaConnectionCredentials as TAppConnectionCredentialsValidator,
|
||||
[AppConnection.Chef]: validateChefConnectionCredentials as TAppConnectionCredentialsValidator,
|
||||
[AppConnection.Redis]: validateRedisConnectionCredentials as TAppConnectionCredentialsValidator,
|
||||
[AppConnection.MongoDB]: validateMongoDBConnectionCredentials as TAppConnectionCredentialsValidator
|
||||
[AppConnection.MongoDB]: validateMongoDBConnectionCredentials as TAppConnectionCredentialsValidator,
|
||||
[AppConnection.OctopusDeploy]: validateOctopusDeployConnectionCredentials as TAppConnectionCredentialsValidator
|
||||
};
|
||||
|
||||
return VALIDATE_APP_CONNECTION_CREDENTIALS_MAP[appConnection.app](appConnection, gatewayService, gatewayV2Service);
|
||||
@@ -430,6 +437,7 @@ export const getAppConnectionMethodName = (method: TAppConnection["method"]) =>
|
||||
return "Simple Bind";
|
||||
case RenderConnectionMethod.ApiKey:
|
||||
case ChecklyConnectionMethod.ApiKey:
|
||||
case OctopusDeployConnectionMethod.ApiKey:
|
||||
return "API Key";
|
||||
case ChefConnectionMethod.UserKey:
|
||||
return "User Key";
|
||||
@@ -510,7 +518,8 @@ export const TRANSITION_CONNECTION_CREDENTIALS_TO_PLATFORM: Record<
|
||||
[AppConnection.Redis]: platformManagedCredentialsNotSupported,
|
||||
[AppConnection.MongoDB]: platformManagedCredentialsNotSupported,
|
||||
[AppConnection.LaravelForge]: platformManagedCredentialsNotSupported,
|
||||
[AppConnection.Chef]: platformManagedCredentialsNotSupported
|
||||
[AppConnection.Chef]: platformManagedCredentialsNotSupported,
|
||||
[AppConnection.OctopusDeploy]: platformManagedCredentialsNotSupported
|
||||
};
|
||||
|
||||
export const enterpriseAppCheck = async (
|
||||
|
||||
@@ -44,7 +44,8 @@ export const APP_CONNECTION_NAME_MAP: Record<AppConnection, string> = {
  [AppConnection.Redis]: "Redis",
  [AppConnection.MongoDB]: "MongoDB",
  [AppConnection.Chef]: "Chef",
  [AppConnection.Northflank]: "Northflank"
  [AppConnection.Northflank]: "Northflank",
  [AppConnection.OctopusDeploy]: "Octopus Deploy"
};

export const APP_CONNECTION_PLAN_MAP: Record<AppConnection, AppConnectionPlanType> = {
@@ -91,5 +92,6 @@ export const APP_CONNECTION_PLAN_MAP: Record<AppConnection, AppConnectionPlanTyp
  [AppConnection.Redis]: AppConnectionPlanType.Regular,
  [AppConnection.MongoDB]: AppConnectionPlanType.Regular,
  [AppConnection.Chef]: AppConnectionPlanType.Enterprise,
  [AppConnection.Northflank]: AppConnectionPlanType.Regular
  [AppConnection.Northflank]: AppConnectionPlanType.Regular,
  [AppConnection.OctopusDeploy]: AppConnectionPlanType.Regular
};
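Each of the maps touched above is typed as Record<AppConnection, ...>, which is why adding AppConnection.OctopusDeploy forces an entry into every one of them. A tiny illustration of that exhaustiveness check (generic TypeScript, not repository code):

// Illustration only: Record over a string enum is exhaustive at compile time.
enum ExampleConnection {
  Chef = "chef",
  OctopusDeploy = "octopus-deploy"
}
const exampleNames: Record<ExampleConnection, string> = {
  [ExampleConnection.Chef]: "Chef",
  [ExampleConnection.OctopusDeploy]: "Octopus Deploy" // omitting this key would be a type error
};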
@@ -103,6 +103,8 @@ import { ValidateNetlifyConnectionCredentialsSchema } from "./netlify";
|
||||
import { netlifyConnectionService } from "./netlify/netlify-connection-service";
|
||||
import { ValidateNorthflankConnectionCredentialsSchema } from "./northflank";
|
||||
import { northflankConnectionService } from "./northflank/northflank-connection-service";
|
||||
import { ValidateOctopusDeployConnectionCredentialsSchema } from "./octopus-deploy";
|
||||
import { octopusDeployConnectionService } from "./octopus-deploy/octopus-deploy-connection-service";
|
||||
import { ValidateOktaConnectionCredentialsSchema } from "./okta";
|
||||
import { oktaConnectionService } from "./okta/okta-connection-service";
|
||||
import { ValidatePostgresConnectionCredentialsSchema } from "./postgres";
|
||||
@@ -182,7 +184,8 @@ const VALIDATE_APP_CONNECTION_CREDENTIALS_MAP: Record<AppConnection, TValidateAp
|
||||
[AppConnection.Okta]: ValidateOktaConnectionCredentialsSchema,
|
||||
[AppConnection.Redis]: ValidateRedisConnectionCredentialsSchema,
|
||||
[AppConnection.MongoDB]: ValidateMongoDBConnectionCredentialsSchema,
|
||||
[AppConnection.Chef]: ValidateChefConnectionCredentialsSchema
|
||||
[AppConnection.Chef]: ValidateChefConnectionCredentialsSchema,
|
||||
[AppConnection.OctopusDeploy]: ValidateOctopusDeployConnectionCredentialsSchema
|
||||
};
|
||||
|
||||
export const appConnectionServiceFactory = ({
|
||||
@@ -891,6 +894,7 @@ export const appConnectionServiceFactory = ({
|
||||
northflank: northflankConnectionService(connectAppConnectionById),
|
||||
okta: oktaConnectionService(connectAppConnectionById),
|
||||
laravelForge: laravelForgeConnectionService(connectAppConnectionById),
|
||||
chef: chefConnectionService(connectAppConnectionById, licenseService)
|
||||
chef: chefConnectionService(connectAppConnectionById, licenseService),
|
||||
octopusDeploy: octopusDeployConnectionService(connectAppConnectionById)
|
||||
};
|
||||
};
|
||||
|
||||
@@ -192,6 +192,12 @@ import {
|
||||
TNorthflankConnectionInput,
|
||||
TValidateNorthflankConnectionCredentialsSchema
|
||||
} from "./northflank";
|
||||
import {
|
||||
TOctopusDeployConnection,
|
||||
TOctopusDeployConnectionConfig,
|
||||
TOctopusDeployConnectionInput,
|
||||
TValidateOctopusDeployConnectionCredentialsSchema
|
||||
} from "./octopus-deploy";
|
||||
import {
|
||||
TOktaConnection,
|
||||
TOktaConnectionConfig,
|
||||
@@ -303,6 +309,7 @@ export type TAppConnection = { id: string } & (
|
||||
| TRedisConnection
|
||||
| TMongoDBConnection
|
||||
| TChefConnection
|
||||
| TOctopusDeployConnection
|
||||
);
|
||||
|
||||
export type TAppConnectionRaw = NonNullable<Awaited<ReturnType<TAppConnectionDALFactory["findById"]>>>;
|
||||
@@ -354,6 +361,7 @@ export type TAppConnectionInput = { id: string } & (
|
||||
| TRedisConnectionInput
|
||||
| TMongoDBConnectionInput
|
||||
| TChefConnectionInput
|
||||
| TOctopusDeployConnectionInput
|
||||
);
|
||||
|
||||
export type TSqlConnectionInput =
|
||||
@@ -422,7 +430,8 @@ export type TAppConnectionConfig =
|
||||
| TOktaConnectionConfig
|
||||
| TRedisConnectionConfig
|
||||
| TMongoDBConnectionConfig
|
||||
| TChefConnectionConfig;
|
||||
| TChefConnectionConfig
|
||||
| TOctopusDeployConnectionConfig;
|
||||
|
||||
export type TValidateAppConnectionCredentialsSchema =
|
||||
| TValidateAwsConnectionCredentialsSchema
|
||||
@@ -468,7 +477,8 @@ export type TValidateAppConnectionCredentialsSchema =
|
||||
| TValidateOktaConnectionCredentialsSchema
|
||||
| TValidateRedisConnectionCredentialsSchema
|
||||
| TValidateMongoDBConnectionCredentialsSchema
|
||||
| TValidateChefConnectionCredentialsSchema;
|
||||
| TValidateChefConnectionCredentialsSchema
|
||||
| TValidateOctopusDeployConnectionCredentialsSchema;
|
||||
|
||||
export type TListAwsConnectionKmsKeys = {
|
||||
connectionId: string;
|
||||
|
||||
@@ -0,0 +1,4 @@
export * from "./octopus-deploy-connection-enums";
export * from "./octopus-deploy-connection-fns";
export * from "./octopus-deploy-connection-schemas";
export * from "./octopus-deploy-connection-types";
@@ -0,0 +1,3 @@
export enum OctopusDeployConnectionMethod {
  ApiKey = "api-key"
}
@@ -0,0 +1,204 @@
|
||||
import { AxiosError } from "axios";
|
||||
|
||||
import { request } from "@app/lib/config/request";
|
||||
import { BadRequestError } from "@app/lib/errors";
|
||||
import { removeTrailingSlash } from "@app/lib/fn";
|
||||
import { blockLocalAndPrivateIpAddresses } from "@app/lib/validator";
|
||||
|
||||
import { AppConnection } from "../app-connection-enums";
|
||||
import { OctopusDeployConnectionMethod } from "./octopus-deploy-connection-enums";
|
||||
import {
|
||||
TOctopusDeployConnection,
|
||||
TOctopusDeployConnectionConfig,
|
||||
TOctopusDeployProject,
|
||||
TOctopusDeployProjectResponse,
|
||||
TOctopusDeployScopeValues,
|
||||
TOctopusDeployScopeValuesResponse,
|
||||
TOctopusDeploySpace,
|
||||
TOctopusDeploySpaceResponse
|
||||
} from "./octopus-deploy-connection-types";
|
||||
|
||||
export const getOctopusDeployInstanceUrl = async (config: TOctopusDeployConnectionConfig) => {
|
||||
const instanceUrl = removeTrailingSlash(config.credentials.instanceUrl);
|
||||
|
||||
await blockLocalAndPrivateIpAddresses(instanceUrl);
|
||||
|
||||
return instanceUrl;
|
||||
};
|
||||
|
||||
export const getOctopusDeployConnectionListItem = () => {
|
||||
return {
|
||||
name: "Octopus Deploy" as const,
|
||||
app: AppConnection.OctopusDeploy as const,
|
||||
methods: Object.values(OctopusDeployConnectionMethod) as [OctopusDeployConnectionMethod.ApiKey]
|
||||
};
|
||||
};
|
||||
|
||||
export const validateOctopusDeployConnectionCredentials = async (config: TOctopusDeployConnectionConfig) => {
|
||||
const instanceUrl = await getOctopusDeployInstanceUrl(config);
|
||||
const { apiKey } = config.credentials;
|
||||
try {
|
||||
await request.get(`${instanceUrl}/api/users/me`, {
|
||||
headers: {
|
||||
"X-Octopus-ApiKey": apiKey,
|
||||
"X-NuGet-ApiKey": apiKey,
|
||||
Accept: "application/json"
|
||||
}
|
||||
});
|
||||
} catch (error: unknown) {
|
||||
if (error instanceof AxiosError) {
|
||||
throw new BadRequestError({
|
||||
message: `Failed to validate Octopus Deploy credentials: ${error.message || "Unknown error"}`
|
||||
});
|
||||
}
|
||||
|
||||
throw new BadRequestError({
|
||||
message: `Failed to validate Octopus Deploy credentials - verify API key is correct`
|
||||
});
|
||||
}
|
||||
|
||||
return config.credentials;
|
||||
};
|
||||
|
||||
export const getOctopusDeploySpaces = async (
|
||||
appConnection: TOctopusDeployConnection
|
||||
): Promise<TOctopusDeploySpace[]> => {
|
||||
const instanceUrl = await getOctopusDeployInstanceUrl(appConnection);
|
||||
const { apiKey } = appConnection.credentials;
|
||||
|
||||
try {
|
||||
const { data } = await request.get<TOctopusDeploySpaceResponse[]>(`${instanceUrl}/api/spaces/all`, {
|
||||
headers: {
|
||||
"X-Octopus-ApiKey": apiKey,
|
||||
"X-NuGet-ApiKey": apiKey,
|
||||
Accept: "application/json"
|
||||
}
|
||||
});
|
||||
|
||||
return data.map((space) => ({
|
||||
id: space.Id,
|
||||
name: space.Name,
|
||||
slug: space.Slug,
|
||||
isDefault: space.IsDefault
|
||||
}));
|
||||
} catch (error: unknown) {
|
||||
if (error instanceof AxiosError) {
|
||||
const errorMessage = (error.response?.data as { error: { ErrorMessage: string } })?.error?.ErrorMessage;
|
||||
|
||||
throw new BadRequestError({
|
||||
message: `Failed to list Octopus Deploy spaces: ${errorMessage || "Unknown error"}`,
|
||||
error: error.response?.data
|
||||
});
|
||||
}
|
||||
|
||||
throw new BadRequestError({
|
||||
message: "Unable to list Octopus Deploy spaces",
|
||||
error
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
export const getOctopusDeployProjects = async (
|
||||
appConnection: TOctopusDeployConnection,
|
||||
spaceId: string
|
||||
): Promise<TOctopusDeployProject[]> => {
|
||||
const instanceUrl = await getOctopusDeployInstanceUrl(appConnection);
|
||||
const { apiKey } = appConnection.credentials;
|
||||
|
||||
try {
|
||||
const { data } = await request.get<TOctopusDeployProjectResponse[]>(`${instanceUrl}/api/${spaceId}/projects/all`, {
|
||||
headers: {
|
||||
"X-Octopus-ApiKey": apiKey,
|
||||
"X-NuGet-ApiKey": apiKey,
|
||||
Accept: "application/json"
|
||||
}
|
||||
});
|
||||
|
||||
return data.map((project) => ({
|
||||
id: project.Id,
|
||||
name: project.Name,
|
||||
slug: project.Slug
|
||||
}));
|
||||
} catch (error: unknown) {
|
||||
if (error instanceof AxiosError) {
|
||||
const errorMessage = (error.response?.data as { error: { ErrorMessage: string } })?.error?.ErrorMessage;
|
||||
|
||||
throw new BadRequestError({
|
||||
message: `Failed to list Octopus Deploy projects: ${errorMessage || "Unknown error"}`,
|
||||
error: error.response?.data
|
||||
});
|
||||
}
|
||||
|
||||
throw new BadRequestError({
|
||||
message: "Unable to list Octopus Deploy projects",
|
||||
error
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
export const getOctopusDeployScopeValues = async (
|
||||
appConnection: TOctopusDeployConnection,
|
||||
spaceId: string,
|
||||
projectId: string
|
||||
): Promise<TOctopusDeployScopeValues> => {
|
||||
const instanceUrl = await getOctopusDeployInstanceUrl(appConnection);
|
||||
const { apiKey } = appConnection.credentials;
|
||||
|
||||
try {
|
||||
const { data } = await request.get<TOctopusDeployScopeValuesResponse>(
|
||||
`${instanceUrl}/api/${spaceId}/projects/${projectId}/variables`,
|
||||
{
|
||||
headers: {
|
||||
"X-Octopus-ApiKey": apiKey,
|
||||
"X-NuGet-ApiKey": apiKey,
|
||||
Accept: "application/json"
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
const { ScopeValues } = data;
|
||||
|
||||
const scopeValues: TOctopusDeployScopeValues = {
|
||||
environments: ScopeValues.Environments.map((environment) => ({
|
||||
id: environment.Id,
|
||||
name: environment.Name
|
||||
})),
|
||||
roles: ScopeValues.Roles.map((role) => ({
|
||||
id: role.Id,
|
||||
name: role.Name
|
||||
})),
|
||||
machines: ScopeValues.Machines.map((machine) => ({
|
||||
id: machine.Id,
|
||||
name: machine.Name
|
||||
})),
|
||||
processes: ScopeValues.Processes.map((process) => ({
|
||||
id: process.Id,
|
||||
name: process.Name
|
||||
})),
|
||||
actions: ScopeValues.Actions.map((action) => ({
|
||||
id: action.Id,
|
||||
name: action.Name
|
||||
})),
|
||||
channels: ScopeValues.Channels.map((channel) => ({
|
||||
id: channel.Id,
|
||||
name: channel.Name
|
||||
}))
|
||||
};
|
||||
|
||||
return scopeValues;
|
||||
} catch (error: unknown) {
|
||||
if (error instanceof AxiosError) {
|
||||
const errorMessage = (error.response?.data as { error: { ErrorMessage: string } })?.error?.ErrorMessage;
|
||||
|
||||
throw new BadRequestError({
|
||||
message: `Failed to get Octopus Deploy scope values: ${errorMessage || "Unknown error"}`,
|
||||
error: error.response?.data
|
||||
});
|
||||
}
|
||||
|
||||
throw new BadRequestError({
|
||||
message: "Unable to get Octopus Deploy scope values",
|
||||
error
|
||||
});
|
||||
}
|
||||
};
|
||||
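The three helpers above (spaces, projects, scope values) repeat the same Octopus Deploy auth headers. A small sketch of how that block could be shared; the helper name is hypothetical and not part of this changeset:

// Hypothetical helper, shown only to summarize the repeated header block above.
const buildOctopusDeployHeaders = (apiKey: string) => ({
  "X-Octopus-ApiKey": apiKey,
  "X-NuGet-ApiKey": apiKey,
  Accept: "application/json"
});

// Usage mirroring getOctopusDeploySpaces:
// await request.get(`${instanceUrl}/api/spaces/all`, { headers: buildOctopusDeployHeaders(apiKey) });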
@@ -0,0 +1,72 @@
|
||||
import z from "zod";
|
||||
|
||||
import { AppConnections } from "@app/lib/api-docs";
|
||||
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
|
||||
import {
|
||||
BaseAppConnectionSchema,
|
||||
GenericCreateAppConnectionFieldsSchema,
|
||||
GenericUpdateAppConnectionFieldsSchema
|
||||
} from "@app/services/app-connection/app-connection-schemas";
|
||||
|
||||
import { APP_CONNECTION_NAME_MAP } from "../app-connection-maps";
|
||||
import { OctopusDeployConnectionMethod } from "./octopus-deploy-connection-enums";
|
||||
|
||||
export const OctopusDeployConnectionApiKeyCredentialsSchema = z.object({
|
||||
instanceUrl: z
|
||||
.string()
|
||||
.trim()
|
||||
.url("Invalid Instance URL")
|
||||
.min(1, "Instance URL required")
|
||||
.max(255)
|
||||
.describe(AppConnections.CREDENTIALS.OCTOPUS_DEPLOY.instanceUrl),
|
||||
apiKey: z.string().trim().min(1, "API key required").describe(AppConnections.CREDENTIALS.OCTOPUS_DEPLOY.apiKey)
|
||||
});
|
||||
|
||||
const BaseOctopusDeployConnectionSchema = BaseAppConnectionSchema.extend({
|
||||
app: z.literal(AppConnection.OctopusDeploy)
|
||||
});
|
||||
|
||||
export const OctopusDeployConnectionSchema = z.discriminatedUnion("method", [
|
||||
BaseOctopusDeployConnectionSchema.extend({
|
||||
method: z.literal(OctopusDeployConnectionMethod.ApiKey),
|
||||
credentials: OctopusDeployConnectionApiKeyCredentialsSchema
|
||||
})
|
||||
]);
|
||||
|
||||
export const SanitizedOctopusDeployConnectionSchema = z.discriminatedUnion("method", [
|
||||
BaseOctopusDeployConnectionSchema.extend({
|
||||
method: z.literal(OctopusDeployConnectionMethod.ApiKey),
|
||||
credentials: OctopusDeployConnectionApiKeyCredentialsSchema.pick({ instanceUrl: true })
|
||||
}).describe(JSON.stringify({ title: `${APP_CONNECTION_NAME_MAP[AppConnection.OctopusDeploy]} (API Key)` }))
|
||||
]);
|
||||
|
||||
export const ValidateOctopusDeployConnectionCredentialsSchema = z.discriminatedUnion("method", [
|
||||
z.object({
|
||||
method: z
|
||||
.literal(OctopusDeployConnectionMethod.ApiKey)
|
||||
.describe(AppConnections.CREATE(AppConnection.OctopusDeploy).method),
|
||||
credentials: OctopusDeployConnectionApiKeyCredentialsSchema.describe(
|
||||
AppConnections.CREATE(AppConnection.OctopusDeploy).credentials
|
||||
)
|
||||
})
|
||||
]);
|
||||
|
||||
export const CreateOctopusDeployConnectionSchema = ValidateOctopusDeployConnectionCredentialsSchema.and(
|
||||
GenericCreateAppConnectionFieldsSchema(AppConnection.OctopusDeploy)
|
||||
);
|
||||
|
||||
export const UpdateOctopusDeployConnectionSchema = z
|
||||
.object({
|
||||
credentials: OctopusDeployConnectionApiKeyCredentialsSchema.optional().describe(
|
||||
AppConnections.UPDATE(AppConnection.OctopusDeploy).credentials
|
||||
)
|
||||
})
|
||||
.and(GenericUpdateAppConnectionFieldsSchema(AppConnection.OctopusDeploy));
|
||||
|
||||
export const OctopusDeployConnectionListItemSchema = z
|
||||
.object({
|
||||
name: z.literal("Octopus Deploy"),
|
||||
app: z.literal(AppConnection.OctopusDeploy),
|
||||
methods: z.nativeEnum(OctopusDeployConnectionMethod).array()
|
||||
})
|
||||
.describe(JSON.stringify({ title: APP_CONNECTION_NAME_MAP[AppConnection.OctopusDeploy] }));
|
||||
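For reference, the validate schema above can be exercised directly; a hedged example with placeholder values (not real credentials):

// Illustrative parse against ValidateOctopusDeployConnectionCredentialsSchema defined above.
const exampleCredentials = ValidateOctopusDeployConnectionCredentialsSchema.parse({
  method: OctopusDeployConnectionMethod.ApiKey,
  credentials: {
    instanceUrl: "https://octopus.example.com", // placeholder
    apiKey: "API-XXXXXXXXXXXXXXXX" // placeholder
  }
});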
@@ -0,0 +1,65 @@
|
||||
import { logger } from "@app/lib/logger";
|
||||
import { OrgServiceActor } from "@app/lib/types";
|
||||
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
|
||||
|
||||
import {
|
||||
getOctopusDeployProjects,
|
||||
getOctopusDeployScopeValues,
|
||||
getOctopusDeploySpaces
|
||||
} from "./octopus-deploy-connection-fns";
|
||||
import { TOctopusDeployConnection } from "./octopus-deploy-connection-types";
|
||||
|
||||
type TGetAppConnectionFunc = (
|
||||
app: AppConnection,
|
||||
connectionId: string,
|
||||
actor: OrgServiceActor
|
||||
) => Promise<TOctopusDeployConnection>;
|
||||
|
||||
export const octopusDeployConnectionService = (getAppConnection: TGetAppConnectionFunc) => {
|
||||
const listSpaces = async (connectionId: string, actor: OrgServiceActor) => {
|
||||
const appConnection = await getAppConnection(AppConnection.OctopusDeploy, connectionId, actor);
|
||||
try {
|
||||
const spaces = await getOctopusDeploySpaces(appConnection);
|
||||
|
||||
return spaces;
|
||||
} catch (error) {
|
||||
logger.error({ error, connectionId, actor: actor.type }, "Failed to list Octopus Deploy spaces");
|
||||
return [];
|
||||
}
|
||||
};
|
||||
|
||||
const listProjects = async (connectionId: string, spaceId: string, actor: OrgServiceActor) => {
|
||||
const appConnection = await getAppConnection(AppConnection.OctopusDeploy, connectionId, actor);
|
||||
|
||||
try {
|
||||
const projects = await getOctopusDeployProjects(appConnection, spaceId);
|
||||
|
||||
return projects;
|
||||
} catch (error) {
|
||||
logger.error({ error, connectionId, spaceId, actor: actor.type }, "Failed to list Octopus Deploy projects");
|
||||
return [];
|
||||
}
|
||||
};
|
||||
|
||||
const getScopeValues = async (connectionId: string, spaceId: string, projectId: string, actor: OrgServiceActor) => {
|
||||
const appConnection = await getAppConnection(AppConnection.OctopusDeploy, connectionId, actor);
|
||||
|
||||
try {
|
||||
const scopeValues = await getOctopusDeployScopeValues(appConnection, spaceId, projectId);
|
||||
|
||||
return scopeValues;
|
||||
} catch (error) {
|
||||
logger.error(
|
||||
{ error, connectionId, spaceId, projectId, actor: actor.type },
|
||||
"Failed to get Octopus Deploy scope values"
|
||||
);
|
||||
return null;
|
||||
}
|
||||
};
|
||||
|
||||
return {
|
||||
listSpaces,
|
||||
listProjects,
|
||||
getScopeValues
|
||||
};
|
||||
};
|
||||
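A hedged usage sketch of the service factory above; the stubbed lookup and actor are illustrative assumptions, since the real getAppConnection is injected by appConnectionServiceFactory:

// Sketch only: wiring octopusDeployConnectionService with a stubbed connection lookup.
const stubGetAppConnection = async (_app: AppConnection, _connectionId: string, _actor: OrgServiceActor) =>
  ({} as TOctopusDeployConnection); // a real implementation would load and decrypt the stored connection

const octopusService = octopusDeployConnectionService(stubGetAppConnection);
// const spaces = await octopusService.listSpaces("connection-id", actor); // resolves to [] if the API call fails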
@@ -0,0 +1,69 @@
|
||||
import z from "zod";
|
||||
|
||||
import { DiscriminativePick } from "@app/lib/types";
|
||||
|
||||
import { AppConnection } from "../app-connection-enums";
|
||||
import {
|
||||
CreateOctopusDeployConnectionSchema,
|
||||
OctopusDeployConnectionSchema,
|
||||
ValidateOctopusDeployConnectionCredentialsSchema
|
||||
} from "./octopus-deploy-connection-schemas";
|
||||
|
||||
export type TOctopusDeployConnection = z.infer<typeof OctopusDeployConnectionSchema>;
|
||||
|
||||
export type TOctopusDeployConnectionInput = z.infer<typeof CreateOctopusDeployConnectionSchema> & {
|
||||
app: AppConnection.OctopusDeploy;
|
||||
};
|
||||
|
||||
export type TValidateOctopusDeployConnectionCredentialsSchema = typeof ValidateOctopusDeployConnectionCredentialsSchema;
|
||||
|
||||
export type TOctopusDeployConnectionConfig = DiscriminativePick<
|
||||
TOctopusDeployConnectionInput,
|
||||
"method" | "app" | "credentials"
|
||||
>;
|
||||
|
||||
export type TOctopusDeploySpaceResponse = {
|
||||
Id: string;
|
||||
Name: string;
|
||||
Slug: string;
|
||||
IsDefault: boolean;
|
||||
};
|
||||
|
||||
export type TOctopusDeploySpace = {
|
||||
id: string;
|
||||
name: string;
|
||||
slug: string;
|
||||
isDefault: boolean;
|
||||
};
|
||||
|
||||
export type TOctopusDeployProjectResponse = {
|
||||
Id: string;
|
||||
Name: string;
|
||||
Slug: string;
|
||||
};
|
||||
|
||||
export type TOctopusDeployProject = {
|
||||
id: string;
|
||||
name: string;
|
||||
slug: string;
|
||||
};
|
||||
|
||||
export type TOctopusDeployScopeValuesResponse = {
|
||||
ScopeValues: {
|
||||
Environments: { Id: string; Name: string }[];
|
||||
Roles: { Id: string; Name: string }[];
|
||||
Machines: { Id: string; Name: string }[];
|
||||
Processes: { Id: string; Name: string }[];
|
||||
Actions: { Id: string; Name: string }[];
|
||||
Channels: { Id: string; Name: string }[];
|
||||
};
|
||||
};
|
||||
|
||||
export type TOctopusDeployScopeValues = {
|
||||
environments: { id: string; name: string }[];
|
||||
roles: { id: string; name: string }[];
|
||||
machines: { id: string; name: string }[];
|
||||
processes: { id: string; name: string }[];
|
||||
actions: { id: string; name: string }[];
|
||||
channels: { id: string; name: string }[];
|
||||
};
|
||||
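The response types above are PascalCase because they mirror the Octopus Deploy API, while the exported shapes are camelCase; getOctopusDeployScopeValues performs that mapping. A one-line sketch of it:

// Sketch of the PascalCase -> camelCase mapping used in getOctopusDeployScopeValues.
const toScopeEntry = (item: { Id: string; Name: string }) => ({ id: item.Id, name: item.Name });
// e.g. data.ScopeValues.Environments.map(toScopeEntry)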
@@ -31,6 +31,7 @@ import {
|
||||
import { APPROVAL_POLICY_FACTORY_MAP } from "./approval-policy-factory";
|
||||
import {
|
||||
ApprovalPolicyStep,
|
||||
TApprovalPolicyInputs,
|
||||
TApprovalRequest,
|
||||
TCreatePolicyDTO,
|
||||
TCreateRequestDTO,
|
||||
@@ -819,7 +820,18 @@ export const approvalPolicyServiceFactory = ({
|
||||
);
|
||||
|
||||
const grants = await approvalRequestGrantsDAL.find({ projectId, type: policyType });
|
||||
return { grants };
|
||||
const updatedGrants = grants.map((grant) => {
|
||||
if (
|
||||
grant.status === ApprovalRequestGrantStatus.Active &&
|
||||
grant.expiresAt &&
|
||||
new Date(grant.expiresAt) < new Date()
|
||||
) {
|
||||
return { ...grant, status: ApprovalRequestGrantStatus.Expired };
|
||||
}
|
||||
return grant;
|
||||
});
|
||||
|
||||
return { grants: updatedGrants };
|
||||
};
|
||||
|
||||
const getGrantById = async (grantId: string, actor: OrgServiceActor) => {
|
||||
@@ -842,7 +854,15 @@ export const approvalPolicyServiceFactory = ({
|
||||
ProjectPermissionSub.ApprovalRequestGrants
|
||||
);
|
||||
|
||||
return { grant };
|
||||
let { status } = grant;
|
||||
if (
|
||||
grant.status === ApprovalRequestGrantStatus.Active &&
|
||||
grant.expiresAt &&
|
||||
new Date(grant.expiresAt) < new Date()
|
||||
) {
|
||||
status = ApprovalRequestGrantStatus.Expired;
|
||||
}
|
||||
return { grant: { ...grant, status } };
|
||||
};
|
||||
|
||||
const revokeGrant = async (
|
||||
@@ -883,6 +903,36 @@ export const approvalPolicyServiceFactory = ({
|
||||
return { grant: updatedGrant };
|
||||
};
|
||||
|
||||
const checkPolicyMatch = async (
|
||||
policyType: ApprovalPolicyType,
|
||||
{ projectId, inputs }: { projectId: string; inputs: TApprovalPolicyInputs },
|
||||
actor: OrgServiceActor
|
||||
) => {
|
||||
await permissionService.getProjectPermission({
|
||||
actor: actor.type,
|
||||
actorAuthMethod: actor.authMethod,
|
||||
actorId: actor.id,
|
||||
actorOrgId: actor.orgId,
|
||||
projectId,
|
||||
actionProjectType: ActionProjectType.Any
|
||||
});
|
||||
|
||||
const fac = APPROVAL_POLICY_FACTORY_MAP[policyType](policyType);
|
||||
|
||||
const policy = await fac.matchPolicy(approvalPolicyDAL, projectId, inputs);
|
||||
|
||||
if (!policy) {
|
||||
return { requiresApproval: false, hasActiveGrant: false };
|
||||
}
|
||||
|
||||
const hasActiveGrant = await fac.canAccess(approvalRequestGrantsDAL, projectId, actor.id, inputs);
|
||||
|
||||
return {
|
||||
requiresApproval: !hasActiveGrant,
|
||||
hasActiveGrant
|
||||
};
|
||||
};
|
||||
|
||||
return {
|
||||
create,
|
||||
list,
|
||||
@@ -897,6 +947,7 @@ export const approvalPolicyServiceFactory = ({
|
||||
cancelRequest,
|
||||
listGrants,
|
||||
getGrantById,
|
||||
revokeGrant
|
||||
revokeGrant,
|
||||
checkPolicyMatch
|
||||
};
|
||||
};
|
||||
|
||||
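The same expiry rule is now applied in both listGrants and getGrantById above; a minimal sketch of that rule as a standalone helper (the helper name is hypothetical):

// Hypothetical helper summarizing the expiry check added above.
const resolveGrantStatus = (grant: { status: ApprovalRequestGrantStatus; expiresAt?: Date | string | null }) => {
  const isExpired =
    grant.status === ApprovalRequestGrantStatus.Active && grant.expiresAt && new Date(grant.expiresAt) < new Date();
  return isExpired ? ApprovalRequestGrantStatus.Expired : grant.status;
};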
@@ -26,13 +26,16 @@ export const pamAccessPolicyFactory: TApprovalResourceFactory<
|
||||
|
||||
let bestMatch: { policy: TPamAccessPolicy; wildcardCount: number; pathLength: number } | null = null;
|
||||
|
||||
const normalizedAccountPath = inputs.accountPath.startsWith("/") ? inputs.accountPath.slice(1) : inputs.accountPath;
|
||||
|
||||
for (const policy of policies) {
|
||||
const p = policy as TPamAccessPolicy;
|
||||
for (const c of p.conditions.conditions) {
|
||||
// Find the most specific path pattern
|
||||
// TODO(andrey): Make matching logic more advanced by accounting for wildcard positions
|
||||
for (const pathPattern of c.accountPaths) {
|
||||
if (picomatch(pathPattern)(inputs.accountPath)) {
|
||||
const normalizedPathPattern = pathPattern.startsWith("/") ? pathPattern.slice(1) : pathPattern;
|
||||
if (picomatch(normalizedPathPattern)(normalizedAccountPath)) {
|
||||
const wildcardCount = (pathPattern.match(/\*/g) || []).length;
|
||||
const pathLength = pathPattern.length;
|
||||
|
||||
@@ -65,11 +68,16 @@ export const pamAccessPolicyFactory: TApprovalResourceFactory<
|
||||
revokedAt: null
|
||||
});
|
||||
|
||||
const normalizedAccountPath = inputs.accountPath.startsWith("/") ? inputs.accountPath.slice(1) : inputs.accountPath;
|
||||
|
||||
// TODO(andrey): Move some of this check to be part of SQL query
|
||||
return grants.some((grant) => {
|
||||
const grantAttributes = grant.attributes as TPamAccessPolicyInputs;
|
||||
const isMatch = picomatch(grantAttributes.accountPath);
|
||||
return isMatch(inputs.accountPath) && (!grant.expiresAt || grant.expiresAt > new Date());
|
||||
const normalizedGrantPath = grantAttributes.accountPath.startsWith("/")
|
||||
? grantAttributes.accountPath.slice(1)
|
||||
: grantAttributes.accountPath;
|
||||
const isMatch = picomatch(normalizedGrantPath);
|
||||
return isMatch(normalizedAccountPath) && (!grant.expiresAt || grant.expiresAt > new Date());
|
||||
});
|
||||
};
|
||||
|
||||
|
||||
@@ -41,6 +41,7 @@ export enum ActorType { // would extend to AWS, Azure, ...
  IDENTITY = "identity",
  Machine = "machine",
  SCIM_CLIENT = "scimClient",
  ACME_PROFILE = "acmeProfile",
  ACME_ACCOUNT = "acmeAccount",
  UNKNOWN_USER = "unknownUser"
}
@@ -168,7 +168,11 @@ export const certificateProfileDALFactory = (db: TDbClient) => {
|
||||
db.ref("autoRenew").withSchema(TableName.PkiApiEnrollmentConfig).as("apiConfigAutoRenew"),
|
||||
db.ref("renewBeforeDays").withSchema(TableName.PkiApiEnrollmentConfig).as("apiConfigRenewBeforeDays"),
|
||||
db.ref("id").withSchema(TableName.PkiAcmeEnrollmentConfig).as("acmeConfigId"),
|
||||
db.ref("encryptedEabSecret").withSchema(TableName.PkiAcmeEnrollmentConfig).as("acmeConfigEncryptedEabSecret")
|
||||
db.ref("encryptedEabSecret").withSchema(TableName.PkiAcmeEnrollmentConfig).as("acmeConfigEncryptedEabSecret"),
|
||||
db
|
||||
.ref("skipDnsOwnershipVerification")
|
||||
.withSchema(TableName.PkiAcmeEnrollmentConfig)
|
||||
.as("acmeConfigSkipDnsOwnershipVerification")
|
||||
)
|
||||
.where(`${TableName.PkiCertificateProfile}.id`, id)
|
||||
.first();
|
||||
@@ -198,7 +202,8 @@ export const certificateProfileDALFactory = (db: TDbClient) => {
|
||||
const acmeConfig = result.acmeConfigId
|
||||
? ({
|
||||
id: result.acmeConfigId,
|
||||
encryptedEabSecret: result.acmeConfigEncryptedEabSecret
|
||||
encryptedEabSecret: result.acmeConfigEncryptedEabSecret,
|
||||
skipDnsOwnershipVerification: result.acmeConfigSkipDnsOwnershipVerification ?? false
|
||||
} as TCertificateProfileWithConfigs["acmeConfig"])
|
||||
: undefined;
|
||||
|
||||
@@ -356,7 +361,11 @@ export const certificateProfileDALFactory = (db: TDbClient) => {
|
||||
db.ref("id").withSchema(TableName.PkiApiEnrollmentConfig).as("apiId"),
|
||||
db.ref("autoRenew").withSchema(TableName.PkiApiEnrollmentConfig).as("apiAutoRenew"),
|
||||
db.ref("renewBeforeDays").withSchema(TableName.PkiApiEnrollmentConfig).as("apiRenewBeforeDays"),
|
||||
db.ref("id").withSchema(TableName.PkiAcmeEnrollmentConfig).as("acmeId")
|
||||
db.ref("id").withSchema(TableName.PkiAcmeEnrollmentConfig).as("acmeId"),
|
||||
db
|
||||
.ref("skipDnsOwnershipVerification")
|
||||
.withSchema(TableName.PkiAcmeEnrollmentConfig)
|
||||
.as("acmeSkipDnsOwnershipVerification")
|
||||
);
|
||||
|
||||
if (processedRules) {
|
||||
@@ -393,7 +402,8 @@ export const certificateProfileDALFactory = (db: TDbClient) => {
|
||||
|
||||
const acmeConfig = result.acmeId
|
||||
? {
|
||||
id: result.acmeId as string
|
||||
id: result.acmeId as string,
|
||||
skipDnsOwnershipVerification: !!result.acmeSkipDnsOwnershipVerification
|
||||
}
|
||||
: undefined;
|
||||
|
||||
|
||||
@@ -30,7 +30,11 @@ export const createCertificateProfileSchema = z
        renewBeforeDays: z.number().min(1).max(30).optional()
      })
      .optional(),
    acmeConfig: z.object({}).optional()
    acmeConfig: z
      .object({
        skipDnsOwnershipVerification: z.boolean().optional()
      })
      .optional()
  })
  .refine(
    (data) => {
@@ -155,6 +159,11 @@ export const updateCertificateProfileSchema = z
        autoRenew: z.boolean().default(false),
        renewBeforeDays: z.number().min(1).max(30).optional()
      })
      .optional(),
    acmeConfig: z
      .object({
        skipDnsOwnershipVerification: z.boolean().optional()
      })
      .optional()
  })
  .refine(
@@ -403,7 +403,13 @@ export const certificateProfileServiceFactory = ({
|
||||
apiConfigId = apiConfig.id;
|
||||
} else if (data.enrollmentType === EnrollmentType.ACME && data.acmeConfig) {
|
||||
const { encryptedEabSecret } = await generateAndEncryptAcmeEabSecret(projectId, kmsService, projectDAL);
|
||||
const acmeConfig = await acmeEnrollmentConfigDAL.create({ encryptedEabSecret }, tx);
|
||||
const acmeConfig = await acmeEnrollmentConfigDAL.create(
|
||||
{
|
||||
skipDnsOwnershipVerification: data.acmeConfig.skipDnsOwnershipVerification ?? false,
|
||||
encryptedEabSecret
|
||||
},
|
||||
tx
|
||||
);
|
||||
acmeConfigId = acmeConfig.id;
|
||||
}
|
||||
|
||||
@@ -505,7 +511,7 @@ export const certificateProfileServiceFactory = ({
|
||||
const updatedData =
|
||||
finalIssuerType === IssuerType.SELF_SIGNED && existingProfile.caId ? { ...data, caId: null } : data;
|
||||
|
||||
const { estConfig, apiConfig, ...profileUpdateData } = updatedData;
|
||||
const { estConfig, apiConfig, acmeConfig, ...profileUpdateData } = updatedData;
|
||||
|
||||
const updatedProfile = await certificateProfileDAL.transaction(async (tx) => {
|
||||
if (estConfig && existingProfile.estConfigId) {
|
||||
@@ -547,6 +553,16 @@ export const certificateProfileServiceFactory = ({
|
||||
);
|
||||
}
|
||||
|
||||
if (acmeConfig && existingProfile.acmeConfigId) {
|
||||
await acmeEnrollmentConfigDAL.updateById(
|
||||
existingProfile.acmeConfigId,
|
||||
{
|
||||
skipDnsOwnershipVerification: acmeConfig.skipDnsOwnershipVerification ?? false
|
||||
},
|
||||
tx
|
||||
);
|
||||
}
|
||||
|
||||
const profileResult = await certificateProfileDAL.updateById(profileId, profileUpdateData, tx);
|
||||
return profileResult;
|
||||
});
|
||||
|
||||
@@ -46,7 +46,9 @@ export type TCertificateProfileUpdate = Omit<
    autoRenew?: boolean;
    renewBeforeDays?: number;
  };
  acmeConfig?: unknown;
  acmeConfig?: {
    skipDnsOwnershipVerification?: boolean;
  };
};

export type TCertificateProfileWithConfigs = TCertificateProfile & {
@@ -83,6 +85,7 @@ export type TCertificateProfileWithConfigs = TCertificateProfile & {
    id: string;
    directoryUrl: string;
    encryptedEabSecret?: Buffer;
    skipDnsOwnershipVerification?: boolean;
  };
};
@@ -3,10 +3,23 @@ import { Knex } from "knex";
|
||||
import { TDbClient } from "@app/db";
|
||||
import { TableName, TCertificateRequests, TCertificates } from "@app/db/schemas";
|
||||
import { DatabaseError } from "@app/lib/errors";
|
||||
import { sanitizeSqlLikeString } from "@app/lib/fn/string";
|
||||
import { ormify, selectAllTableCols } from "@app/lib/knex";
|
||||
import {
|
||||
applyProcessedPermissionRulesToQuery,
|
||||
type ProcessedPermissionRules
|
||||
} from "@app/lib/knex/permission-filter-utils";
|
||||
|
||||
type TCertificateRequestWithCertificate = TCertificateRequests & {
|
||||
certificate: TCertificates | null;
|
||||
profileName: string | null;
|
||||
};
|
||||
|
||||
type TCertificateRequestQueryResult = TCertificateRequests & {
|
||||
certId: string | null;
|
||||
certSerialNumber: string | null;
|
||||
certStatus: string | null;
|
||||
profileName: string | null;
|
||||
};
|
||||
|
||||
export type TCertificateRequestDALFactory = ReturnType<typeof certificateRequestDALFactory>;
|
||||
@@ -16,24 +29,41 @@ export const certificateRequestDALFactory = (db: TDbClient) => {
|
||||
|
||||
const findByIdWithCertificate = async (id: string): Promise<TCertificateRequestWithCertificate | null> => {
|
||||
try {
|
||||
const certificateRequest = await certificateRequestOrm.findById(id);
|
||||
if (!certificateRequest) return null;
|
||||
const result = (await db(TableName.CertificateRequests)
|
||||
.leftJoin(
|
||||
TableName.Certificate,
|
||||
`${TableName.CertificateRequests}.certificateId`,
|
||||
`${TableName.Certificate}.id`
|
||||
)
|
||||
.leftJoin(
|
||||
TableName.PkiCertificateProfile,
|
||||
`${TableName.CertificateRequests}.profileId`,
|
||||
`${TableName.PkiCertificateProfile}.id`
|
||||
)
|
||||
.where(`${TableName.CertificateRequests}.id`, id)
|
||||
.select(selectAllTableCols(TableName.CertificateRequests))
|
||||
.select(db.ref("slug").withSchema(TableName.PkiCertificateProfile).as("profileName"))
|
||||
.select(db.ref("id").withSchema(TableName.Certificate).as("certId"))
|
||||
.select(db.ref("serialNumber").withSchema(TableName.Certificate).as("certSerialNumber"))
|
||||
.select(db.ref("status").withSchema(TableName.Certificate).as("certStatus"))
|
||||
.first()) as TCertificateRequestQueryResult | undefined;
|
||||
|
||||
if (!certificateRequest.certificateId) {
|
||||
return {
|
||||
...certificateRequest,
|
||||
certificate: null
|
||||
};
|
||||
}
|
||||
if (!result) return null;
|
||||
|
||||
const certificate = await db(TableName.Certificate)
|
||||
.where("id", certificateRequest.certificateId)
|
||||
.select(selectAllTableCols(TableName.Certificate))
|
||||
.first();
|
||||
const { certId, certSerialNumber, certStatus, profileName, ...certificateRequestData } = result;
|
||||
|
||||
const certificate: TCertificates | null = certId
|
||||
? ({
|
||||
id: certId,
|
||||
serialNumber: certSerialNumber,
|
||||
status: certStatus
|
||||
} as TCertificates)
|
||||
: null;
|
||||
|
||||
return {
|
||||
...certificateRequest,
|
||||
certificate: certificate || null
|
||||
...certificateRequestData,
|
||||
profileName: profileName || null,
|
||||
certificate
|
||||
};
|
||||
} catch (error) {
|
||||
throw new DatabaseError({ error, name: "Find certificate request by ID with certificate" });
|
||||
@@ -82,11 +112,259 @@ export const certificateRequestDALFactory = (db: TDbClient) => {
|
||||
}
|
||||
};
|
||||
|
||||
const findByProjectId = async (
|
||||
projectId: string,
|
||||
options: {
|
||||
offset?: number;
|
||||
limit?: number;
|
||||
search?: string;
|
||||
status?: string;
|
||||
fromDate?: Date;
|
||||
toDate?: Date;
|
||||
profileIds?: string[];
|
||||
} = {},
|
||||
processedRules?: ProcessedPermissionRules,
|
||||
tx?: Knex
|
||||
): Promise<TCertificateRequests[]> => {
|
||||
try {
|
||||
const { offset = 0, limit = 20, search, status, fromDate, toDate, profileIds } = options;
|
||||
|
||||
let query = (tx || db)(TableName.CertificateRequests)
|
||||
.leftJoin(
|
||||
TableName.PkiCertificateProfile,
|
||||
`${TableName.CertificateRequests}.profileId`,
|
||||
`${TableName.PkiCertificateProfile}.id`
|
||||
)
|
||||
.where(`${TableName.CertificateRequests}.projectId`, projectId);
|
||||
|
||||
if (profileIds && profileIds.length > 0) {
|
||||
query = query.whereIn(`${TableName.CertificateRequests}.profileId`, profileIds);
|
||||
}
|
||||
|
||||
if (search) {
|
||||
// eslint-disable-next-line @typescript-eslint/no-unsafe-assignment, @typescript-eslint/no-unsafe-call
|
||||
const sanitizedSearch = sanitizeSqlLikeString(search);
|
||||
query = query.where((builder) => {
|
||||
void builder
|
||||
.whereILike(`${TableName.CertificateRequests}.commonName`, `%${sanitizedSearch}%`)
|
||||
.orWhereILike(`${TableName.CertificateRequests}.altNames`, `%${sanitizedSearch}%`);
|
||||
});
|
||||
}
|
||||
|
||||
if (status) {
|
||||
query = query.where(`${TableName.CertificateRequests}.status`, status);
|
||||
}
|
||||
|
||||
if (fromDate) {
|
||||
query = query.where(`${TableName.CertificateRequests}.createdAt`, ">=", fromDate);
|
||||
}
|
||||
|
||||
if (toDate) {
|
||||
query = query.where(`${TableName.CertificateRequests}.createdAt`, "<=", toDate);
|
||||
}
|
||||
|
||||
query = query
|
||||
.select(selectAllTableCols(TableName.CertificateRequests))
|
||||
.select(db.ref("slug").withSchema(TableName.PkiCertificateProfile).as("profileName"));
|
||||
|
||||
if (processedRules) {
|
||||
query = applyProcessedPermissionRulesToQuery(
|
||||
query,
|
||||
TableName.CertificateRequests,
|
||||
processedRules
|
||||
) as typeof query;
|
||||
}
|
||||
|
||||
const certificateRequests = await query.orderBy("createdAt", "desc").offset(offset).limit(limit);
|
||||
|
||||
return certificateRequests;
|
||||
} catch (error) {
|
||||
throw new DatabaseError({ error, name: "Find certificate requests by project ID" });
|
||||
}
|
||||
};
|
||||
|
||||
const countByProjectId = async (
|
||||
projectId: string,
|
||||
options: {
|
||||
search?: string;
|
||||
status?: string;
|
||||
fromDate?: Date;
|
||||
toDate?: Date;
|
||||
profileIds?: string[];
|
||||
} = {},
|
||||
processedRules?: ProcessedPermissionRules,
|
||||
tx?: Knex
|
||||
): Promise<number> => {
|
||||
try {
|
||||
const { search, status, fromDate, toDate, profileIds } = options;
|
||||
|
||||
let query = (tx || db)(TableName.CertificateRequests)
|
||||
.leftJoin(
|
||||
TableName.PkiCertificateProfile,
|
||||
`${TableName.CertificateRequests}.profileId`,
|
||||
`${TableName.PkiCertificateProfile}.id`
|
||||
)
|
||||
.where(`${TableName.CertificateRequests}.projectId`, projectId);
|
||||
if (profileIds && profileIds.length > 0) {
|
||||
query = query.whereIn(`${TableName.CertificateRequests}.profileId`, profileIds);
|
||||
}
|
||||
|
||||
if (search) {
|
||||
// eslint-disable-next-line @typescript-eslint/no-unsafe-assignment, @typescript-eslint/no-unsafe-call
|
||||
const sanitizedSearch = sanitizeSqlLikeString(search);
|
||||
query = query.where((builder) => {
|
||||
void builder
|
||||
.whereILike(`${TableName.CertificateRequests}.commonName`, `%${sanitizedSearch}%`)
|
||||
.orWhereILike(`${TableName.CertificateRequests}.altNames`, `%${sanitizedSearch}%`);
|
||||
});
|
||||
}
|
||||
|
||||
if (status) {
|
||||
query = query.where(`${TableName.CertificateRequests}.status`, status);
|
||||
}
|
||||
|
||||
if (fromDate) {
|
||||
query = query.where(`${TableName.CertificateRequests}.createdAt`, ">=", fromDate);
|
||||
}
|
||||
|
||||
if (toDate) {
|
||||
query = query.where(`${TableName.CertificateRequests}.createdAt`, "<=", toDate);
|
||||
}
|
||||
|
||||
if (processedRules) {
|
||||
query = applyProcessedPermissionRulesToQuery(
|
||||
query,
|
||||
TableName.CertificateRequests,
|
||||
processedRules
|
||||
) as typeof query;
|
||||
}
|
||||
|
||||
const result = await query.count("*").first();
|
||||
const count = (result as unknown as Record<string, unknown>)?.count;
|
||||
return parseInt(String(count || "0"), 10);
|
||||
} catch (error) {
|
||||
throw new DatabaseError({ error, name: "Count certificate requests by project ID" });
|
||||
}
|
||||
};
|
||||
|
||||
const findByProjectIdWithCertificate = async (
|
||||
projectId: string,
|
||||
options: {
|
||||
offset?: number;
|
||||
limit?: number;
|
||||
search?: string;
|
||||
status?: string;
|
||||
fromDate?: Date;
|
||||
toDate?: Date;
|
||||
profileIds?: string[];
|
||||
sortBy?: string;
|
||||
sortOrder?: "asc" | "desc";
|
||||
} = {},
|
||||
processedRules?: ProcessedPermissionRules,
|
||||
tx?: Knex
|
||||
): Promise<TCertificateRequestWithCertificate[]> => {
|
||||
try {
|
||||
const {
|
||||
offset = 0,
|
||||
limit = 20,
|
||||
search,
|
||||
status,
|
||||
fromDate,
|
||||
toDate,
|
||||
profileIds,
|
||||
sortBy = "createdAt",
|
||||
sortOrder = "desc"
|
||||
} = options;
|
||||
|
||||
let query: Knex.QueryBuilder = (tx || db)(TableName.CertificateRequests)
|
||||
.leftJoin(
|
||||
TableName.Certificate,
|
||||
`${TableName.CertificateRequests}.certificateId`,
|
||||
`${TableName.Certificate}.id`
|
||||
)
|
||||
.leftJoin(
|
||||
TableName.PkiCertificateProfile,
|
||||
`${TableName.CertificateRequests}.profileId`,
|
||||
`${TableName.PkiCertificateProfile}.id`
|
||||
);
|
||||
|
||||
if (profileIds && profileIds.length > 0) {
|
||||
query = query.whereIn(`${TableName.CertificateRequests}.profileId`, profileIds);
|
||||
}
|
||||
|
||||
query = query
|
||||
.select(selectAllTableCols(TableName.CertificateRequests))
|
||||
.select(db.ref("slug").withSchema(TableName.PkiCertificateProfile).as("profileName"))
|
||||
.select(db.ref("id").withSchema(TableName.Certificate).as("certId"))
|
||||
.select(db.ref("serialNumber").withSchema(TableName.Certificate).as("certSerialNumber"))
|
||||
.select(db.ref("status").withSchema(TableName.Certificate).as("certStatus"))
|
||||
.where(`${TableName.CertificateRequests}.projectId`, projectId);
|
||||
|
||||
if (search) {
|
||||
// eslint-disable-next-line @typescript-eslint/no-unsafe-assignment, @typescript-eslint/no-unsafe-call
|
||||
const sanitizedSearch = sanitizeSqlLikeString(search);
|
||||
query = query.where((builder) => {
|
||||
void builder
|
||||
.whereILike(`${TableName.CertificateRequests}.commonName`, `%${sanitizedSearch}%`)
|
||||
.orWhereILike(`${TableName.CertificateRequests}.altNames`, `%${sanitizedSearch}%`);
|
||||
});
|
||||
}
|
||||
|
||||
if (status) {
|
||||
query = query.where(`${TableName.CertificateRequests}.status`, status);
|
||||
}
|
||||
|
||||
if (fromDate) {
|
||||
query = query.where(`${TableName.CertificateRequests}.createdAt`, ">=", fromDate);
|
||||
}
|
||||
|
||||
if (toDate) {
|
||||
query = query.where(`${TableName.CertificateRequests}.createdAt`, "<=", toDate);
|
||||
}
|
||||
|
||||
if (processedRules) {
|
||||
query = applyProcessedPermissionRulesToQuery(query, TableName.CertificateRequests, processedRules);
|
||||
}
|
||||
|
||||
const allowedSortColumns = ["createdAt", "updatedAt", "status", "commonName"];
|
||||
const safeSortBy = allowedSortColumns.includes(sortBy) ? sortBy : "createdAt";
|
||||
const safeSortOrder = sortOrder === "asc" || sortOrder === "desc" ? sortOrder : "desc";
|
||||
|
||||
const results = (await query
|
||||
.orderBy(`${TableName.CertificateRequests}.${safeSortBy}`, safeSortOrder)
|
||||
.offset(offset)
|
||||
.limit(limit)) as TCertificateRequestQueryResult[];
|
||||
|
||||
return results.map((row): TCertificateRequestWithCertificate => {
|
||||
const { certId, certSerialNumber, certStatus, profileName: rowProfileName, ...certificateRequestData } = row;
|
||||
|
||||
const certificate: TCertificates | null = certId
|
||||
? ({
|
||||
id: certId,
|
||||
serialNumber: certSerialNumber,
|
||||
status: certStatus
|
||||
} as TCertificates)
|
||||
: null;
|
||||
|
||||
return {
|
||||
...certificateRequestData,
|
||||
profileName: rowProfileName || null,
|
||||
certificate
|
||||
};
|
||||
});
|
||||
} catch (error) {
|
||||
throw new DatabaseError({ error, name: "Find certificate requests by project ID with certificates" });
|
||||
}
|
||||
};
|
||||
|
||||
return {
|
||||
...certificateRequestOrm,
|
||||
findByIdWithCertificate,
|
||||
findPendingByProjectId,
|
||||
updateStatus,
|
||||
attachCertificate
|
||||
attachCertificate,
|
||||
findByProjectId,
|
||||
countByProjectId,
|
||||
findByProjectIdWithCertificate
|
||||
};
|
||||
};
|
||||
|
||||
@@ -9,6 +9,7 @@ import {
|
||||
ProjectPermissionCertificateProfileActions,
|
||||
ProjectPermissionSub
|
||||
} from "@app/ee/services/permission/project-permission";
|
||||
import { getProcessedPermissionRules } from "@app/lib/casl/permission-filter-utils";
|
||||
import { BadRequestError, NotFoundError } from "@app/lib/errors";
|
||||
import { TCertificateDALFactory } from "@app/services/certificate/certificate-dal";
|
||||
import { TCertificateServiceFactory } from "@app/services/certificate/certificate-service";
|
||||
@@ -21,6 +22,7 @@ import {
|
||||
TCreateCertificateRequestDTO,
|
||||
TGetCertificateFromRequestDTO,
|
||||
TGetCertificateRequestDTO,
|
||||
TListCertificateRequestsDTO,
|
||||
TUpdateCertificateRequestStatusDTO
|
||||
} from "./certificate-request-types";
|
||||
|
||||
@@ -91,6 +93,7 @@ export const certificateRequestServiceFactory = ({
|
||||
permissionService
|
||||
}: TCertificateRequestServiceFactoryDep) => {
|
||||
const createCertificateRequest = async ({
|
||||
acmeOrderId,
|
||||
actor,
|
||||
actorId,
|
||||
actorAuthMethod,
|
||||
@@ -123,6 +126,7 @@ export const certificateRequestServiceFactory = ({
|
||||
{
|
||||
status,
|
||||
projectId,
|
||||
acmeOrderId,
|
||||
...validatedData
|
||||
},
|
||||
tx
|
||||
@@ -283,11 +287,76 @@ export const certificateRequestServiceFactory = ({
|
||||
return certificateRequestDAL.attachCertificate(certificateRequestId, certificateId);
|
||||
};
|
||||
|
||||
const listCertificateRequests = async ({
|
||||
actor,
|
||||
actorId,
|
||||
actorAuthMethod,
|
||||
actorOrgId,
|
||||
projectId,
|
||||
offset = 0,
|
||||
limit = 20,
|
||||
search,
|
||||
status,
|
||||
fromDate,
|
||||
toDate,
|
||||
profileIds,
|
||||
sortBy,
|
||||
sortOrder
|
||||
}: TListCertificateRequestsDTO) => {
|
||||
const { permission } = await permissionService.getProjectPermission({
|
||||
actor,
|
||||
actorId,
|
||||
projectId,
|
||||
actorAuthMethod,
|
||||
actorOrgId,
|
||||
actionProjectType: ActionProjectType.CertificateManager
|
||||
});
|
||||
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
ProjectPermissionCertificateActions.Read,
|
||||
ProjectPermissionSub.Certificates
|
||||
);
|
||||
|
||||
const processedRules = getProcessedPermissionRules(
|
||||
permission,
|
||||
ProjectPermissionCertificateActions.Read,
|
||||
ProjectPermissionSub.Certificates
|
||||
);
|
||||
|
||||
const options: Parameters<typeof certificateRequestDAL.findByProjectIdWithCertificate>[1] = {
|
||||
offset,
|
||||
limit,
|
||||
search,
|
||||
status,
|
||||
fromDate,
|
||||
toDate,
|
||||
profileIds,
|
||||
sortBy,
|
||||
sortOrder
|
||||
};
|
||||
|
||||
const [certificateRequests, totalCount] = await Promise.all([
|
||||
certificateRequestDAL.findByProjectIdWithCertificate(projectId, options, processedRules),
|
||||
certificateRequestDAL.countByProjectId(projectId, options, processedRules)
|
||||
]);
|
||||
|
||||
const mappedCertificateRequests = certificateRequests.map((request) => ({
|
||||
...request,
|
||||
status: request.status as CertificateRequestStatus
|
||||
}));
|
||||
|
||||
return {
|
||||
certificateRequests: mappedCertificateRequests,
|
||||
totalCount
|
||||
};
|
||||
};
|
||||
|
||||
return {
|
||||
createCertificateRequest,
|
||||
getCertificateRequest,
|
||||
getCertificateFromRequest,
|
||||
updateCertificateRequestStatus,
|
||||
attachCertificateToRequest
|
||||
attachCertificateToRequest,
|
||||
listCertificateRequests
|
||||
};
|
||||
};
|
||||
|
||||
@@ -21,6 +21,7 @@ export type TCreateCertificateRequestDTO = TProjectPermission & {
  metadata?: string;
  status: CertificateRequestStatus;
  certificateId?: string;
  acmeOrderId?: string;
};

export type TGetCertificateRequestDTO = TProjectPermission & {
@@ -41,3 +42,15 @@ export type TAttachCertificateToRequestDTO = {
  certificateRequestId: string;
  certificateId: string;
};

export type TListCertificateRequestsDTO = TProjectPermission & {
  offset?: number;
  limit?: number;
  search?: string;
  status?: CertificateRequestStatus;
  fromDate?: Date;
  toDate?: Date;
  profileIds?: string[];
  sortBy?: string;
  sortOrder?: "asc" | "desc";
};
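For illustration, a query-shaped payload matching TListCertificateRequestsDTO as the list service above consumes it; the values are placeholders, and sortBy must be one of the columns allowlisted in the DAL:

// Illustrative options only; the projectId/actor fields from TProjectPermission are omitted here.
const exampleListOptions: Pick<TListCertificateRequestsDTO, "offset" | "limit" | "search" | "sortBy" | "sortOrder"> = {
  offset: 0,
  limit: 20,
  search: "api.example.com",
  sortBy: "createdAt", // allowlisted in findByProjectIdWithCertificate
  sortOrder: "desc"
};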
@@ -1,13 +1,13 @@
|
||||
import RE2 from "re2";
|
||||
|
||||
import { TDbClient } from "@app/db";
|
||||
import { TableName, TCertificates } from "@app/db/schemas";
|
||||
import { DatabaseError } from "@app/lib/errors";
|
||||
import { sanitizeSqlLikeString } from "@app/lib/fn/string";
|
||||
import { ormify, selectAllTableCols } from "@app/lib/knex";
|
||||
import {
|
||||
applyProcessedPermissionRulesToQuery,
|
||||
type ProcessedPermissionRules
|
||||
} from "@app/lib/knex/permission-filter-utils";
|
||||
import { isUuidV4 } from "@app/lib/validator";
|
||||
|
||||
import { CertStatus } from "./certificate-types";
|
||||
|
||||
@@ -48,11 +48,21 @@ export const certificateDALFactory = (db: TDbClient) => {
|
||||
const countCertificatesInProject = async ({
|
||||
projectId,
|
||||
friendlyName,
|
||||
commonName
|
||||
commonName,
|
||||
search,
|
||||
status,
|
||||
profileIds,
|
||||
fromDate,
|
||||
toDate
|
||||
}: {
|
||||
projectId: string;
|
||||
friendlyName?: string;
|
||||
commonName?: string;
|
||||
search?: string;
|
||||
status?: string | string[];
|
||||
profileIds?: string[];
|
||||
fromDate?: Date;
|
||||
toDate?: Date;
|
||||
}) => {
|
||||
try {
|
||||
interface CountResult {
|
||||
@@ -66,15 +76,69 @@ export const certificateDALFactory = (db: TDbClient) => {
        .where(`${TableName.Project}.id`, projectId);

      if (friendlyName) {
        const sanitizedValue = String(friendlyName).replace(new RE2("[%_\\\\]", "g"), "\\$&");
        const sanitizedValue = sanitizeSqlLikeString(friendlyName);
        query = query.andWhere(`${TableName.Certificate}.friendlyName`, "like", `%${sanitizedValue}%`);
      }

      if (commonName) {
        const sanitizedValue = String(commonName).replace(new RE2("[%_\\\\]", "g"), "\\$&");
        const sanitizedValue = sanitizeSqlLikeString(commonName);
        query = query.andWhere(`${TableName.Certificate}.commonName`, "like", `%${sanitizedValue}%`);
      }

      if (search) {
        const sanitizedValue = sanitizeSqlLikeString(search);
        query = query.andWhere((qb) => {
          void qb
            .where(`${TableName.Certificate}.commonName`, "like", `%${sanitizedValue}%`)
            .orWhere(`${TableName.Certificate}.altNames`, "like", `%${sanitizedValue}%`)
            .orWhere(`${TableName.Certificate}.serialNumber`, "like", `%${sanitizedValue}%`)
            .orWhere(`${TableName.Certificate}.friendlyName`, "like", `%${sanitizedValue}%`);

          if (isUuidV4(sanitizedValue)) {
            void qb.orWhere(`${TableName.Certificate}.id`, sanitizedValue);
          }
        });
      }

      if (status) {
        const now = new Date();
        const statuses = Array.isArray(status) ? status : [status];

        query = query.andWhere((qb) => {
          statuses.forEach((statusValue, index) => {
            const whereMethod = index === 0 ? "where" : "orWhere";

            if (statusValue === CertStatus.ACTIVE) {
              void qb[whereMethod]((innerQb) => {
                void innerQb
                  .where(`${TableName.Certificate}.notAfter`, ">", now)
                  .andWhere(`${TableName.Certificate}.status`, "!=", CertStatus.REVOKED);
              });
            } else if (statusValue === CertStatus.EXPIRED) {
              void qb[whereMethod]((innerQb) => {
                void innerQb
                  .where(`${TableName.Certificate}.notAfter`, "<=", now)
                  .andWhere(`${TableName.Certificate}.status`, "!=", CertStatus.REVOKED);
              });
            } else {
              void qb[whereMethod](`${TableName.Certificate}.status`, statusValue);
            }
          });
        });
      }

      if (fromDate) {
        query = query.andWhere(`${TableName.Certificate}.createdAt`, ">=", fromDate);
      }

      if (toDate) {
        query = query.andWhere(`${TableName.Certificate}.createdAt`, "<=", toDate);
      }

      if (profileIds) {
        query = query.whereIn(`${TableName.Certificate}.profileId`, profileIds);
      }

      const count = await query.count("*").first();

      return parseInt((count as unknown as CountResult).count || "0", 10);
@@ -160,7 +224,7 @@ export const certificateDALFactory = (db: TDbClient) => {
      Object.entries(filter).forEach(([key, value]) => {
        if (value !== undefined && value !== null) {
          if (key === "friendlyName" || key === "commonName") {
            const sanitizedValue = String(value).replace(new RE2("[%_\\\\]", "g"), "\\$&");
            const sanitizedValue = sanitizeSqlLikeString(String(value));
            query = query.andWhere(`${TableName.Certificate}.${key}`, "like", `%${sanitizedValue}%`);
          } else {
            query = query.andWhere(`${TableName.Certificate}.${key}`, value);
@@ -213,12 +277,12 @@ export const certificateDALFactory = (db: TDbClient) => {
        .whereNull(`${TableName.Certificate}.renewedByCertificateId`);

      if (friendlyName) {
        const sanitizedValue = String(friendlyName).replace(new RE2("[%_\\\\]", "g"), "\\$&");
        const sanitizedValue = sanitizeSqlLikeString(friendlyName);
        query = query.andWhere(`${TableName.Certificate}.friendlyName`, "like", `%${sanitizedValue}%`);
      }

      if (commonName) {
        const sanitizedValue = String(commonName).replace(new RE2("[%_\\\\]", "g"), "\\$&");
        const sanitizedValue = sanitizeSqlLikeString(commonName);
        query = query.andWhere(`${TableName.Certificate}.commonName`, "like", `%${sanitizedValue}%`);
      }

@@ -275,7 +339,17 @@ export const certificateDALFactory = (db: TDbClient) => {
  };

  const findWithPrivateKeyInfo = async (
    filter: Partial<TCertificates & { friendlyName?: string; commonName?: string }>,
    filter: Partial<
      TCertificates & {
        friendlyName?: string;
        commonName?: string;
        search?: string;
        status?: string | string[];
        profileIds?: string[];
        fromDate?: Date;
        toDate?: Date;
      }
    >,
    options?: { offset?: number; limit?: number; sort?: [string, "asc" | "desc"][] },
    permissionFilters?: ProcessedPermissionRules
  ): Promise<(TCertificates & { hasPrivateKey: boolean })[]> => {
@@ -286,17 +360,78 @@ export const certificateDALFactory = (db: TDbClient) => {
        .select(selectAllTableCols(TableName.Certificate))
        .select(db.ref(`${TableName.CertificateSecret}.certId`).as("privateKeyRef"));

      Object.entries(filter).forEach(([key, value]) => {
      const { friendlyName, commonName, search, status, profileIds, fromDate, toDate, ...regularFilters } = filter;

      Object.entries(regularFilters).forEach(([key, value]) => {
        if (value !== undefined && value !== null) {
          if (key === "friendlyName" || key === "commonName") {
            const sanitizedValue = String(value).replace(new RE2("[%_\\\\]", "g"), "\\$&");
            query = query.andWhere(`${TableName.Certificate}.${key}`, "like", `%${sanitizedValue}%`);
          } else {
            query = query.andWhere(`${TableName.Certificate}.${key}`, value);
          }
          query = query.andWhere(`${TableName.Certificate}.${key}`, value);
        }
      });

      if (friendlyName) {
        const sanitizedValue = sanitizeSqlLikeString(friendlyName);
        query = query.andWhere(`${TableName.Certificate}.friendlyName`, "like", `%${sanitizedValue}%`);
      }

      if (commonName) {
        const sanitizedValue = sanitizeSqlLikeString(commonName);
        query = query.andWhere(`${TableName.Certificate}.commonName`, "like", `%${sanitizedValue}%`);
      }

      if (search) {
        const sanitizedValue = sanitizeSqlLikeString(search);
        query = query.andWhere((qb) => {
          void qb
            .where(`${TableName.Certificate}.commonName`, "like", `%${sanitizedValue}%`)
            .orWhere(`${TableName.Certificate}.altNames`, "like", `%${sanitizedValue}%`)
            .orWhere(`${TableName.Certificate}.serialNumber`, "like", `%${sanitizedValue}%`)
            .orWhere(`${TableName.Certificate}.friendlyName`, "like", `%${sanitizedValue}%`);

          if (isUuidV4(sanitizedValue)) {
            void qb.orWhere(`${TableName.Certificate}.id`, sanitizedValue);
          }
        });
      }

      if (status) {
        const now = new Date();
        const statuses = Array.isArray(status) ? status : [status];

        query = query.andWhere((qb) => {
          statuses.forEach((statusValue, index) => {
            const whereMethod = index === 0 ? "where" : "orWhere";

            if (statusValue === CertStatus.ACTIVE) {
              void qb[whereMethod]((innerQb) => {
                void innerQb
                  .where(`${TableName.Certificate}.notAfter`, ">", now)
                  .andWhere(`${TableName.Certificate}.status`, "!=", CertStatus.REVOKED);
              });
            } else if (statusValue === CertStatus.EXPIRED) {
              void qb[whereMethod]((innerQb) => {
                void innerQb
                  .where(`${TableName.Certificate}.notAfter`, "<=", now)
                  .andWhere(`${TableName.Certificate}.status`, "!=", CertStatus.REVOKED);
              });
            } else {
              void qb[whereMethod](`${TableName.Certificate}.status`, statusValue);
            }
          });
        });
      }

      if (fromDate) {
        query = query.andWhere(`${TableName.Certificate}.createdAt`, ">=", fromDate);
      }

      if (toDate) {
        query = query.andWhere(`${TableName.Certificate}.createdAt`, "<=", toDate);
      }

      if (profileIds) {
        query = query.whereIn(`${TableName.Certificate}.profileId`, profileIds);
      }

      if (permissionFilters) {
        query = applyProcessedPermissionRulesToQuery(query, TableName.Certificate, permissionFilters) as typeof query;
      }

@@ -1,61 +1,13 @@
import { Knex } from "knex";

import { TDbClient } from "@app/db";
import { TableName } from "@app/db/schemas";
import { DatabaseError } from "@app/lib/errors";
import { ormify } from "@app/lib/knex";

import { TAcmeEnrollmentConfigInsert, TAcmeEnrollmentConfigUpdate } from "./enrollment-config-types";

export type TAcmeEnrollmentConfigDALFactory = ReturnType<typeof acmeEnrollmentConfigDALFactory>;

export const acmeEnrollmentConfigDALFactory = (db: TDbClient) => {
  const acmeEnrollmentConfigOrm = ormify(db, TableName.PkiAcmeEnrollmentConfig);

  const create = async (data: TAcmeEnrollmentConfigInsert, tx?: Knex) => {
    try {
      const result = await (tx || db)(TableName.PkiAcmeEnrollmentConfig).insert(data).returning("*");
      const [acmeConfig] = result;

      if (!acmeConfig) {
        throw new Error("Failed to create ACME enrollment config");
      }

      return acmeConfig;
    } catch (error) {
      throw new DatabaseError({ error, name: "Create ACME enrollment config" });
    }
  };

  const updateById = async (id: string, data: TAcmeEnrollmentConfigUpdate, tx?: Knex) => {
    try {
      const result = await (tx || db)(TableName.PkiAcmeEnrollmentConfig).where({ id }).update(data).returning("*");
      const [acmeConfig] = result;

      if (!acmeConfig) {
        return null;
      }

      return acmeConfig;
    } catch (error) {
      throw new DatabaseError({ error, name: "Update ACME enrollment config" });
    }
  };

  const findById = async (id: string, tx?: Knex) => {
    try {
      const acmeConfig = await (tx || db)(TableName.PkiAcmeEnrollmentConfig).where({ id }).first();

      return acmeConfig || null;
    } catch (error) {
      throw new DatabaseError({ error, name: "Find ACME enrollment config by id" });
    }
  };

  return {
    ...acmeEnrollmentConfigOrm,
    create,
    updateById,
    findById
    ...acmeEnrollmentConfigOrm
  };
};

@@ -37,4 +37,6 @@ export interface TApiConfigData {
  renewBeforeDays?: number;
}

export interface TAcmeConfigData {}
export interface TAcmeConfigData {
  skipDnsOwnershipVerification?: boolean;
}

@@ -10,7 +10,7 @@ import { TGroupDALFactory } from "../../ee/services/group/group-dal";
import { TProjectDALFactory } from "../project/project-dal";

type TGroupProjectServiceFactoryDep = {
  groupDAL: Pick<TGroupDALFactory, "findOne" | "findAllGroupPossibleMembers">;
  groupDAL: Pick<TGroupDALFactory, "findOne" | "findAllGroupPossibleUsers">;
  projectDAL: Pick<TProjectDALFactory, "findOne" | "findProjectGhostUser" | "findById">;
  permissionService: Pick<TPermissionServiceFactory, "getProjectPermission" | "getProjectPermissionByRoles">;
};
@@ -51,7 +51,7 @@ export const groupProjectServiceFactory = ({
    });
    ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionGroupActions.Read, ProjectPermissionSub.Groups);

    const { members, totalCount } = await groupDAL.findAllGroupPossibleMembers({
    const { members, totalCount } = await groupDAL.findAllGroupPossibleUsers({
      orgId: project.orgId,
      groupId: id,
      offset,

@@ -32,7 +32,7 @@ export const identityAccessTokenDALFactory = (db: TDbClient) => {
  const removeExpiredTokens = async (tx?: Knex) => {
    logger.info(`${QueueName.DailyResourceCleanUp}: remove expired access token started`);

    const BATCH_SIZE = 10000;
    const BATCH_SIZE = 5000;
    const MAX_RETRY_ON_FAILURE = 3;
    const QUERY_TIMEOUT_MS = 10 * 60 * 1000; // 10 minutes
    const MAX_TTL = 315_360_000; // Maximum TTL value in seconds (10 years)
@@ -101,7 +101,7 @@ export const identityAccessTokenDALFactory = (db: TDbClient) => {
      } finally {
        // eslint-disable-next-line no-await-in-loop
        await new Promise((resolve) => {
          setTimeout(resolve, 10); // time to breathe for db
          setTimeout(resolve, 500); // time to breathe for db
        });
      }
      isRetrying = numberOfRetryOnFailure > 0;

@@ -25,12 +25,27 @@ export const projectDALFactory = (db: TDbClient) => {

  const findIdentityProjects = async (identityId: string, orgId: string, projectType?: ProjectType) => {
    try {
      const identityGroupSubquery = db
        .replicaNode()(TableName.Groups)
        .leftJoin(
          TableName.IdentityGroupMembership,
          `${TableName.IdentityGroupMembership}.groupId`,
          `${TableName.Groups}.id`
        )
        .where(`${TableName.Groups}.orgId`, orgId)
        .where(`${TableName.IdentityGroupMembership}.identityId`, identityId)
        .select(db.ref("id").withSchema(TableName.Groups));

      const workspaces = await db
        .replicaNode()(TableName.Membership)
        .where(`${TableName.Membership}.scope`, AccessScope.Project)
        .where(`${TableName.Membership}.actorIdentityId`, identityId)
        .join(TableName.Project, `${TableName.Membership}.scopeProjectId`, `${TableName.Project}.id`)
        .where(`${TableName.Project}.orgId`, orgId)
        .andWhere((qb) => {
          void qb
            .where(`${TableName.Membership}.actorIdentityId`, identityId)
            .orWhereIn(`${TableName.Membership}.actorGroupId`, identityGroupSubquery);
        })
        .andWhere((qb) => {
          if (projectType) {
            void qb.where(`${TableName.Project}.type`, projectType);
@@ -347,11 +362,25 @@ export const projectDALFactory = (db: TDbClient) => {
        .where(`${TableName.Groups}.orgId`, dto.orgId)
        .where(`${TableName.UserGroupMembership}.userId`, dto.actorId)
        .select(db.ref("id").withSchema(TableName.Groups));

      const identityGroupMembershipSubquery = db
        .replicaNode()(TableName.Groups)
        .leftJoin(
          TableName.IdentityGroupMembership,
          `${TableName.IdentityGroupMembership}.groupId`,
          `${TableName.Groups}.id`
        )
        .where(`${TableName.Groups}.orgId`, dto.orgId)
        .where(`${TableName.IdentityGroupMembership}.identityId`, dto.actorId)
        .select(db.ref("id").withSchema(TableName.Groups));

      const membershipSubQuery = db(TableName.Membership)
        .where(`${TableName.Membership}.scope`, AccessScope.Project)
        .where((qb) => {
          if (dto.actor === ActorType.IDENTITY) {
            void qb.where(`${TableName.Membership}.actorIdentityId`, dto.actorId);
            void qb
              .where(`${TableName.Membership}.actorIdentityId`, dto.actorId)
              .orWhereIn(`${TableName.Membership}.actorGroupId`, identityGroupMembershipSubquery);
          } else {
            void qb
              .where(`${TableName.Membership}.actorUserId`, dto.actorId)

Some files were not shown because too many files have changed in this diff.