Merge remote-tracking branch 'origin' into request-cert-guide

Author: Tuan Dang
Date:   2025-12-15 09:54:02 -08:00
646 changed files with 34889 additions and 14736 deletions

View File

@@ -1,23 +1,25 @@
# Description 📣
## Context
<!-- Please include a summary of the change and which issue is fixed. Please also include relevant motivation and context. List any dependencies that are required for this change. Here's how we expect a pull request to be : https://infisical.com/docs/contributing/getting-started/pull-requests -->
<!-- What problem does this solve? What was the behavior before, and what is it now? Add all relevant context. Link related issues/tickets. -->
## Type ✨
## Screenshots
- [ ] Bug fix
- [ ] New feature
<!-- If UI/UX changes, add screenshots or videos. Delete if not applicable. -->
## Steps to verify the change
## Type
- [ ] Fix
- [ ] Feature
- [ ] Improvement
- [ ] Breaking change
- [ ] Documentation
- [ ] Breaking
- [ ] Docs
- [ ] Chore
# Tests 🛠️
## Checklist
<!-- Please describe the tests that you ran to verify your changes. Provide instructions so we can reproduce. Please also list any relevant details for your test configuration. You may want to add screenshots when relevant and possible -->
```sh
# Here's some code block to paste some code snippets
```
---
- [ ] I have read the [contributing guide](https://infisical.com/docs/contributing/getting-started/overview), agreed and acknowledged the [code of conduct](https://infisical.com/docs/contributing/getting-started/code-of-conduct). 📝
- [ ] Title follows the [conventional commit](https://www.conventionalcommits.org/en/v1.0.0/#summary) format: `type(scope): short description` (scope is optional, e.g., `fix: prevent crash on sync` or `fix(api): handle null response`).
- [ ] Tested locally
- [ ] Updated docs (if needed)
- [ ] Read the [contributing guide](https://infisical.com/docs/contributing/getting-started/overview)

.github/workflows/validate-pr-title.yml (vendored, 55 lines added)
View File

@@ -0,0 +1,55 @@
name: Validate PR Title
on:
pull_request:
types: [opened, edited, synchronize, reopened]
jobs:
validate-pr-title:
name: Validate PR Title Format
runs-on: ubuntu-latest
steps:
- name: Check PR Title Format
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
with:
script: |
const title = context.payload.pull_request.title;
// Valid PR types based on pull_request_template.md
const validTypes = ['fix', 'feature', 'improvement', 'breaking', 'docs', 'chore'];
// Regex pattern: type(optional-scope): short description
// - Type must be one of the valid types
// - Scope is optional, must be in parentheses, lowercase alphanumeric with hyphens
// - Followed by colon, space, and description (must start with lowercase letter)
const pattern = new RegExp(`^(${validTypes.join('|')})(\\([a-z0-9-]+\\))?: [a-z].+$`);
if (!pattern.test(title)) {
const errorMessage = `
❌ **Invalid PR Title Format**
Your PR title: \`${title}\`
**Expected format:** \`type(scope): short description\` (description must start with lowercase)
**Valid types:**
- \`fix\` - Bug fixes
- \`feature\` - New features
- \`improvement\` - Enhancements to existing features
- \`breaking\` - Breaking changes
- \`docs\` - Documentation updates
- \`chore\` - Maintenance tasks
**Scope:** Optional, short identifier in parentheses (e.g., \`(api)\`, \`(auth)\`, \`(ui)\`)
**Examples:**
- \`fix: prevent crash on sync\`
- \`fix(api): handle null response from auth endpoint\`
- \`docs(cli): update installation guide\`
`;
core.setFailed(errorMessage);
} else {
console.log(`✅ PR title is valid: "${title}"`);
}

View File

@@ -57,3 +57,6 @@ docs/documentation/platform/pki/enrollment-methods/api.mdx:generic-api-key:93
docs/documentation/platform/pki/enrollment-methods/api.mdx:private-key:139
docs/documentation/platform/pki/certificate-syncs/aws-secrets-manager.mdx:private-key:62
docs/documentation/platform/pki/certificate-syncs/chef.mdx:private-key:61
backend/src/services/certificate-request/certificate-request-service.test.ts:private-key:246
backend/src/services/certificate-request/certificate-request-service.test.ts:private-key:248
docs/documentation/platform/pki/enrollment-methods/api.mdx:private-key:142

View File

@@ -185,6 +185,9 @@ COPY --from=backend-runner /app /backend
COPY --from=frontend-runner /app ./backend/frontend-build
# Make export-assets script executable for CDN asset extraction
RUN chmod +x /backend/scripts/export-assets.sh
ARG INFISICAL_PLATFORM_VERSION
ENV INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION

View File

@@ -174,6 +174,9 @@ ENV CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY
COPY --from=backend-runner /app /backend
COPY --from=frontend-runner /app ./backend/frontend-build
# Make export-assets script executable for CDN asset extraction
RUN chmod +x /backend/scripts/export-assets.sh
ARG INFISICAL_PLATFORM_VERSION
ENV INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION

View File

@@ -22,6 +22,28 @@ Feature: Challenge
And I parse the full-chain certificate from order finalized_order as cert
And the value cert with jq ".subject.common_name" should be equal to "localhost"
Scenario: Validate challenge with retry
Given I have an ACME cert profile as "acme_profile"
When I have an ACME client connecting to "{BASE_URL}/api/v1/cert-manager/acme/profiles/{acme_profile.id}/directory"
Then I register a new ACME account with email fangpen@infisical.com and EAB key id "{acme_profile.eab_kid}" with secret "{acme_profile.eab_secret}" as acme_account
When I create certificate signing request as csr
Then I add names to certificate signing request csr
"""
{
"COMMON_NAME": "localhost"
}
"""
And I create a RSA private key pair as cert_key
And I sign the certificate signing request csr with private key cert_key and output it as csr_pem in PEM format
And I submit the certificate signing request PEM csr_pem certificate order to the ACME server as order
And I select challenge with type http-01 for domain localhost from order in order as challenge
And I wait 45 seconds and serve challenge response for challenge at localhost
And I tell ACME server that challenge is ready to be verified
And I poll and finalize the ACME order order as finalized_order
And the value finalized_order.body with jq ".status" should be equal to "valid"
And I parse the full-chain certificate from order finalized_order as cert
And the value cert with jq ".subject.common_name" should be equal to "localhost"
Scenario: Validate challenges for multiple domains
Given I have an ACME cert profile as "acme_profile"
When I have an ACME client connecting to "{BASE_URL}/api/v1/cert-manager/acme/profiles/{acme_profile.id}/directory"
@@ -63,13 +85,12 @@ Feature: Challenge
When I create certificate signing request as csr
Then I add names to certificate signing request csr
"""
{
"COMMON_NAME": "localhost"
}
{}
"""
And I add subject alternative name to certificate signing request csr
"""
[
"localhost",
"infisical.com"
]
"""
@@ -82,56 +103,19 @@ Feature: Challenge
# the localhost auth should be valid
And I memorize order with jq ".authorizations | map(select(.body.identifier.value == "localhost")) | first | .uri" as localhost_auth
And I peak and memorize the next nonce as nonce
When I send a raw ACME request to "{localhost_auth}"
"""
{
"protected": {
"alg": "RS256",
"nonce": "{nonce}",
"url": "{localhost_auth}",
"kid": "{acme_account.uri}"
}
}
"""
Then the value response.status_code should be equal to 200
And the value response with jq ".status" should be equal to "valid"
And I wait until the status of authorization localhost_auth becomes valid
# the infisical.com auth should still be pending
And I memorize order with jq ".authorizations | map(select(.body.identifier.value == "infisical.com")) | first | .uri" as infisical_auth
And I memorize response.headers with jq ".["replay-nonce"]" as nonce
When I send a raw ACME request to "{infisical_auth}"
"""
{
"protected": {
"alg": "RS256",
"nonce": "{nonce}",
"url": "{infisical_auth}",
"kid": "{acme_account.uri}"
}
}
"""
Then the value response.status_code should be equal to 200
And the value response with jq ".status" should be equal to "pending"
And I post-as-get {infisical_auth} as infisical_auth_resp
And the value infisical_auth_resp with jq ".status" should be equal to "pending"
# the order should be pending as well
And I memorize response.headers with jq ".["replay-nonce"]" as nonce
When I send a raw ACME request to "{order.uri}"
"""
{
"protected": {
"alg": "RS256",
"nonce": "{nonce}",
"url": "{order.uri}",
"kid": "{acme_account.uri}"
}
}
"""
Then the value response.status_code should be equal to 200
And the value response with jq ".status" should be equal to "pending"
And I post-as-get {order.uri} as order_resp
And the value order_resp with jq ".status" should be equal to "pending"
# finalize should not be allowed when all auths are not valid yet
And I memorize response.headers with jq ".["replay-nonce"]" as nonce
And I get a new-nonce as nonce
When I send a raw ACME request to "{order.body.finalize}"
"""
{
@@ -185,8 +169,10 @@ Feature: Challenge
Then the value response.status_code should be equal to 201
And I memorize response with jq ".finalize" as finalize_url
And I memorize response.headers with jq ".["replay-nonce"]" as nonce
And I memorize response.headers with jq ".["location"]" as order_uri
And I memorize response as order
And I pass all challenges with type http-01 for order in order
And I wait until the status of order order_uri becomes ready
And I encode CSR csr_pem as JOSE Base-64 DER as base64_csr_der
When I send a raw ACME request to "{finalize_url}"
"""
@@ -206,3 +192,28 @@ Feature: Challenge
And the value response with jq ".status" should be equal to 400
And the value response with jq ".type" should be equal to "urn:ietf:params:acme:error:badCSR"
And the value response with jq ".detail" should be equal to "Invalid CSR: Common name + SANs mismatch with order identifiers"
Scenario: Get certificate without passing challenge when skip DNS ownership verification is enabled
Given I create an ACME profile with config as "acme_profile"
"""
{
"skipDnsOwnershipVerification": true
}
"""
When I have an ACME client connecting to "{BASE_URL}/api/v1/cert-manager/acme/profiles/{acme_profile.id}/directory"
Then I register a new ACME account with email fangpen@infisical.com and EAB key id "{acme_profile.eab_kid}" with secret "{acme_profile.eab_secret}" as acme_account
When I create certificate signing request as csr
Then I add names to certificate signing request csr
"""
{
"COMMON_NAME": "localhost"
}
"""
And I create a RSA private key pair as cert_key
And I sign the certificate signing request csr with private key cert_key and output it as csr_pem in PEM format
And I submit the certificate signing request PEM csr_pem certificate order to the ACME server as order
And the value order.body with jq ".status" should be equal to "ready"
And I poll and finalize the ACME order order as finalized_order
And the value finalized_order.body with jq ".status" should be equal to "valid"
And I parse the full-chain certificate from order finalized_order as cert
And the value cert with jq ".subject.common_name" should be equal to "localhost"

View File

@@ -369,3 +369,349 @@ Feature: External CA
| subject |
| {"COMMON_NAME": "localhost"} |
| {} |
Scenario Outline: Issue a certificate with bad CSR names disallowed by the template
Given I create a Cloudflare connection as cloudflare
Then I memorize cloudflare with jq ".appConnection.id" as app_conn_id
Given I create a external ACME CA with the following config as ext_ca
"""
{
"dnsProviderConfig": {
"provider": "cloudflare",
"hostedZoneId": "MOCK_ZONE_ID"
},
"directoryUrl": "{PEBBLE_URL}",
"accountEmail": "fangpen@infisical.com",
"dnsAppConnectionId": "{app_conn_id}",
"eabKid": "",
"eabHmacKey": ""
}
"""
Then I memorize ext_ca with jq ".id" as ext_ca_id
Given I create a certificate template with the following config as cert_template
"""
{
"subject": [
{
"type": "common_name",
"allowed": [
"example.com"
]
}
],
"sans": [
{
"type": "dns_name",
"allowed": [
"infisical.com"
]
}
],
"keyUsages": {
"required": [],
"allowed": [
"digital_signature",
"key_encipherment",
"non_repudiation",
"data_encipherment",
"key_agreement",
"key_cert_sign",
"crl_sign",
"encipher_only",
"decipher_only"
]
},
"extendedKeyUsages": {
"required": [],
"allowed": [
"client_auth",
"server_auth",
"code_signing",
"email_protection",
"ocsp_signing",
"time_stamping"
]
},
"algorithms": {
"signature": [
"SHA256-RSA",
"SHA512-RSA",
"SHA384-ECDSA",
"SHA384-RSA",
"SHA256-ECDSA",
"SHA512-ECDSA"
],
"keyAlgorithm": [
"RSA-2048",
"RSA-4096",
"ECDSA-P384",
"RSA-3072",
"ECDSA-P256",
"ECDSA-P521"
]
},
"validity": {
"max": "365d"
}
}
"""
Then I memorize cert_template with jq ".certificateTemplate.id" as cert_template_id
Given I create an ACME profile with ca {ext_ca_id} and template {cert_template_id} as "acme_profile"
When I have an ACME client connecting to "{BASE_URL}/api/v1/cert-manager/acme/profiles/{acme_profile.id}/directory"
Then I register a new ACME account with email fangpen@infisical.com and EAB key id "{acme_profile.eab_kid}" with secret "{acme_profile.eab_secret}" as acme_account
When I create certificate signing request as csr
Then I add names to certificate signing request csr
"""
<subject>
"""
Then I add subject alternative name to certificate signing request csr
"""
<san>
"""
And I create a RSA private key pair as cert_key
And I sign the certificate signing request csr with private key cert_key and output it as csr_pem in PEM format
And I submit the certificate signing request PEM csr_pem certificate order to the ACME server as order
And I pass all challenges with type http-01 for order in order
Given I intercept outgoing requests
"""
[
{
"scope": "https://api.cloudflare.com:443",
"method": "POST",
"path": "/client/v4/zones/MOCK_ZONE_ID/dns_records",
"status": 200,
"response": {
"result": {
"id": "A2A6347F-88B5-442D-9798-95E408BC7701",
"name": "Mock Account",
"type": "standard",
"settings": {
"enforce_twofactor": false,
"api_access_enabled": null,
"access_approval_expiry": null,
"abuse_contact_email": null,
"user_groups_ui_beta": false
},
"legacy_flags": {
"enterprise_zone_quota": {
"maximum": 0,
"current": 0,
"available": 0
}
},
"created_on": "2013-04-18T00:41:02.215243Z"
},
"success": true,
"errors": [],
"messages": []
},
"responseIsBinary": false
},
{
"scope": "https://api.cloudflare.com:443",
"method": "GET",
"path": {
"regex": "/client/v4/zones/[^/]+/dns_records\\?"
},
"status": 200,
"response": {
"result": [],
"success": true,
"errors": [],
"messages": [],
"result_info": {
"page": 1,
"per_page": 100,
"count": 0,
"total_count": 0,
"total_pages": 1
}
},
"responseIsBinary": false
}
]
"""
Then I poll and finalize the ACME order order as finalized_order
And the value error.typ should be equal to "urn:ietf:params:acme:error:badCSR"
And the value error.detail should be equal to "<err_detail>"
Examples:
| subject | san | err_detail |
| {"COMMON_NAME": "localhost"} | [] | Invalid CSR: common_name value 'localhost' is not in allowed values list |
| {"COMMON_NAME": "localhost"} | ["infisical.com"] | Invalid CSR: common_name value 'localhost' is not in allowed values list |
| {} | ["localhost"] | Invalid CSR: dns_name SAN value 'localhost' is not in allowed values list |
| {} | ["infisical.com", "localhost"] | Invalid CSR: dns_name SAN value 'localhost' is not in allowed values list |
| {"COMMON_NAME": "example.com"} | ["infisical.com", "localhost"] | Invalid CSR: dns_name SAN value 'localhost' is not in allowed values list |
Scenario Outline: Issue a certificate with algorithms disallowed by the template
Given I create a Cloudflare connection as cloudflare
Then I memorize cloudflare with jq ".appConnection.id" as app_conn_id
Given I create a external ACME CA with the following config as ext_ca
"""
{
"dnsProviderConfig": {
"provider": "cloudflare",
"hostedZoneId": "MOCK_ZONE_ID"
},
"directoryUrl": "{PEBBLE_URL}",
"accountEmail": "fangpen@infisical.com",
"dnsAppConnectionId": "{app_conn_id}",
"eabKid": "",
"eabHmacKey": ""
}
"""
Then I memorize ext_ca with jq ".id" as ext_ca_id
Given I create a certificate template with the following config as cert_template
"""
{
"subject": [
{
"type": "common_name",
"allowed": [
"*"
]
}
],
"sans": [
{
"type": "dns_name",
"allowed": [
"*"
]
}
],
"keyUsages": {
"required": [],
"allowed": [
"digital_signature",
"key_encipherment",
"non_repudiation",
"data_encipherment",
"key_agreement",
"key_cert_sign",
"crl_sign",
"encipher_only",
"decipher_only"
]
},
"extendedKeyUsages": {
"required": [],
"allowed": [
"client_auth",
"server_auth",
"code_signing",
"email_protection",
"ocsp_signing",
"time_stamping"
]
},
"algorithms": {
"signature": [
"<allowed_signature>"
],
"keyAlgorithm": [
"<allowed_alg>"
]
},
"validity": {
"max": "365d"
}
}
"""
Then I memorize cert_template with jq ".certificateTemplate.id" as cert_template_id
Given I create an ACME profile with ca {ext_ca_id} and template {cert_template_id} as "acme_profile"
When I have an ACME client connecting to "{BASE_URL}/api/v1/cert-manager/acme/profiles/{acme_profile.id}/directory"
Then I register a new ACME account with email fangpen@infisical.com and EAB key id "{acme_profile.eab_kid}" with secret "{acme_profile.eab_secret}" as acme_account
When I create certificate signing request as csr
Then I add names to certificate signing request csr
"""
{}
"""
Then I add subject alternative name to certificate signing request csr
"""
[
"localhost"
]
"""
And I create a <key_type> private key pair as cert_key
And I sign the certificate signing request csr with "<hash_type>" hash and private key cert_key and output it as csr_pem in PEM format
And I submit the certificate signing request PEM csr_pem certificate order to the ACME server as order
And I pass all challenges with type http-01 for order in order
Given I intercept outgoing requests
"""
[
{
"scope": "https://api.cloudflare.com:443",
"method": "POST",
"path": "/client/v4/zones/MOCK_ZONE_ID/dns_records",
"status": 200,
"response": {
"result": {
"id": "A2A6347F-88B5-442D-9798-95E408BC7701",
"name": "Mock Account",
"type": "standard",
"settings": {
"enforce_twofactor": false,
"api_access_enabled": null,
"access_approval_expiry": null,
"abuse_contact_email": null,
"user_groups_ui_beta": false
},
"legacy_flags": {
"enterprise_zone_quota": {
"maximum": 0,
"current": 0,
"available": 0
}
},
"created_on": "2013-04-18T00:41:02.215243Z"
},
"success": true,
"errors": [],
"messages": []
},
"responseIsBinary": false
},
{
"scope": "https://api.cloudflare.com:443",
"method": "GET",
"path": {
"regex": "/client/v4/zones/[^/]+/dns_records\\?"
},
"status": 200,
"response": {
"result": [],
"success": true,
"errors": [],
"messages": [],
"result_info": {
"page": 1,
"per_page": 100,
"count": 0,
"total_count": 0,
"total_pages": 1
}
},
"responseIsBinary": false
}
]
"""
Then I poll and finalize the ACME order order as finalized_order
And the value error.typ should be equal to "urn:ietf:params:acme:error:badCSR"
And the value error.detail should be equal to "<err_detail>"
Examples:
| allowed_alg | allowed_signature | key_type | hash_type | err_detail |
| RSA-4096 | SHA512-RSA | RSA-2048 | SHA512 | Invalid CSR: Key algorithm 'RSA_2048' is not allowed by template policy |
| RSA-4096 | SHA512-RSA | RSA-3072 | SHA512 | Invalid CSR: Key algorithm 'RSA_3072' is not allowed by template policy |
| RSA-4096 | ECDSA-SHA512 | ECDSA-P256 | SHA512 | Invalid CSR: Key algorithm 'EC_prime256v1' is not allowed by template policy |
| RSA-4096 | ECDSA-SHA512 | ECDSA-P384 | SHA512 | Invalid CSR: Key algorithm 'EC_secp384r1' is not allowed by template policy |
| RSA-4096 | ECDSA-SHA512 | ECDSA-P521 | SHA512 | Invalid CSR: Key algorithm 'EC_secp521r1' is not allowed by template policy |
| RSA-2048 | SHA512-RSA | RSA-2048 | SHA384 | Invalid CSR: Signature algorithm 'RSA-SHA384' is not allowed by template policy |
| RSA-2048 | SHA512-RSA | RSA-2048 | SHA256 | Invalid CSR: Signature algorithm 'RSA-SHA256' is not allowed by template policy |
| ECDSA-P256 | SHA512-RSA | ECDSA-P256 | SHA256 | Invalid CSR: Signature algorithm 'ECDSA-SHA256' is not allowed by template policy |
| ECDSA-P384 | SHA512-RSA | ECDSA-P384 | SHA256 | Invalid CSR: Signature algorithm 'ECDSA-SHA256' is not allowed by template policy |
| ECDSA-P521 | SHA512-RSA | ECDSA-P521 | SHA256 | Invalid CSR: Signature algorithm 'ECDSA-SHA256' is not allowed by template policy |
| RSA-2048 | SHA512-RSA | RSA-2048 | SHA256 | Invalid CSR: Signature algorithm 'RSA-SHA256' is not allowed by template policy |
| RSA-2048 | SHA512-RSA | RSA-4096 | SHA256 | Invalid CSR: Signature algorithm 'RSA-SHA256' is not allowed by template policy, Key algorithm 'RSA_4096' is not allowed by template policy |

View File

@@ -2,6 +2,8 @@ import json
import logging
import re
import urllib.parse
import time
import threading
import acme.client
import jq
@@ -18,6 +20,10 @@ from josepy.jwk import JWKRSA
from josepy import json_util
from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.asymmetric import rsa
from cryptography.hazmat.primitives.asymmetric import ec
from cryptography.hazmat.primitives.asymmetric.types import (
CertificateIssuerPrivateKeyTypes,
)
from cryptography import x509
from cryptography.x509.oid import NameOID
from cryptography.hazmat.primitives import hashes
@@ -260,6 +266,46 @@ def step_impl(context: Context, ca_id: str, template_id: str, profile_var: str):
)
@given(
'I create an ACME profile with config as "{profile_var}"'
)
def step_impl(context: Context, profile_var: str):
profile_slug = faker.slug()
jwt_token = context.vars["AUTH_TOKEN"]
acme_config = replace_vars(json.loads(context.text), context.vars)
response = context.http_client.post(
"/api/v1/cert-manager/certificate-profiles",
headers=dict(authorization="Bearer {}".format(jwt_token)),
json={
"projectId": context.vars["PROJECT_ID"],
"slug": profile_slug,
"description": "ACME Profile created by BDD test",
"enrollmentType": "acme",
"caId": context.vars["CERT_CA_ID"],
"certificateTemplateId": context.vars["CERT_TEMPLATE_ID"],
"acmeConfig": acme_config,
},
)
response.raise_for_status()
resp_json = response.json()
profile_id = resp_json["certificateProfile"]["id"]
kid = profile_id
response = context.http_client.get(
f"/api/v1/cert-manager/certificate-profiles/{profile_id}/acme/eab-secret/reveal",
headers=dict(authorization="Bearer {}".format(jwt_token)),
)
response.raise_for_status()
resp_json = response.json()
secret = resp_json["eabSecret"]
context.vars[profile_var] = AcmeProfile(
profile_id,
eab_kid=kid,
eab_secret=secret,
)
@given('I have an ACME cert profile with external ACME CA as "{profile_var}"')
def step_impl(context: Context, profile_var: str):
profile_id = context.vars.get("PROFILE_ID")
@@ -595,23 +641,67 @@ def step_impl(context: Context, csr_var: str):
)
@then("I create a RSA private key pair as {rsa_key_var}")
def step_impl(context: Context, rsa_key_var: str):
context.vars[rsa_key_var] = rsa.generate_private_key(
# TODO: make them configurable if we need to
def gen_private_key(key_type: str):
if key_type == "RSA-2048" or key_type == "RSA":
return rsa.generate_private_key(
public_exponent=65537,
key_size=2048,
)
elif key_type == "RSA-3072":
return rsa.generate_private_key(
public_exponent=65537,
key_size=3072,
)
elif key_type == "RSA-4096":
return rsa.generate_private_key(
public_exponent=65537,
key_size=4096,
)
elif key_type == "ECDSA-P256":
return ec.generate_private_key(curve=ec.SECP256R1())
elif key_type == "ECDSA-P384":
return ec.generate_private_key(curve=ec.SECP384R1())
elif key_type == "ECDSA-P521":
return ec.generate_private_key(curve=ec.SECP521R1())
else:
raise Exception(f"Unknown key type {key_type}")
@then("I create a {key_type} private key pair as {rsa_key_var}")
def step_impl(context: Context, key_type: str, rsa_key_var: str):
context.vars[rsa_key_var] = gen_private_key(key_type)
def sign_csr(
pem: x509.CertificateSigningRequestBuilder,
pk: CertificateIssuerPrivateKeyTypes,
hash_type: str = "SHA256",
):
return pem.sign(pk, getattr(hashes, hash_type)()).public_bytes(
serialization.Encoding.PEM
)
@then(
'I sign the certificate signing request {csr_var} with "{hash_type}" hash and private key {pk_var} and output it as {pem_var} in PEM format'
)
def step_impl(
context: Context, csr_var: str, hash_type: str, pk_var: str, pem_var: str
):
context.vars[pem_var] = sign_csr(
pem=context.vars[csr_var],
pk=context.vars[pk_var],
hash_type=hash_type,
)
@then(
"I sign the certificate signing request {csr_var} with private key {pk_var} and output it as {pem_var} in PEM format"
)
def step_impl(context: Context, csr_var: str, pk_var: str, pem_var: str):
context.vars[pem_var] = (
context.vars[csr_var]
.sign(context.vars[pk_var], hashes.SHA256())
.public_bytes(serialization.Encoding.PEM)
context.vars[pem_var] = sign_csr(
pem=context.vars[csr_var],
pk=context.vars[pk_var],
)
@@ -724,6 +814,15 @@ def step_impl(context: Context, var_path: str, jq_query, var_name: str):
context.vars[var_name] = value
@then("I get a new-nonce as {var_name}")
def step_impl(context: Context, var_name: str):
acme_client = context.acme_client
nonce = acme_client.net._get_nonce(
url=None, new_nonce_url=acme_client.directory.newNonce
)
context.vars[var_name] = json_util.encode_b64jose(nonce)
@then("I peak and memorize the next nonce as {var_name}")
def step_impl(context: Context, var_name: str):
acme_client = context.acme_client
@@ -797,22 +896,39 @@ def select_challenge(
return challenges[0]
def serve_challenge(
def serve_challenges(
context: Context,
challenge: messages.ChallengeBody,
challenges: list[messages.ChallengeBody],
wait_time: int | None = None,
):
if hasattr(context, "web_server"):
context.web_server.shutdown_and_server_close()
resources = set()
for challenge in challenges:
response, validation = challenge.response_and_validation(
context.acme_client.net.key
)
resource = standalone.HTTP01RequestHandler.HTTP01Resource(
resources.add(
standalone.HTTP01RequestHandler.HTTP01Resource(
chall=challenge.chall, response=response, validation=validation
)
)
# TODO: make port configurable
servers = standalone.HTTP01DualNetworkedServers(("0.0.0.0", 8087), {resource})
servers = standalone.HTTP01DualNetworkedServers(("0.0.0.0", 8087), resources)
if wait_time is None:
servers.serve_forever()
else:
def wait_and_start():
logger.info("Waiting %s seconds before we start serving.", wait_time)
time.sleep(wait_time)
logger.info("Start server now")
servers.serve_forever()
thread = threading.Thread(target=wait_and_start)
thread.daemon = True
thread.start()
context.web_server = servers
@@ -865,6 +981,7 @@ def step_impl(
f"Expected OrderResource but got {type(order)!r} at {order_var_path!r}"
)
challenges = {}
for domain in order.body.identifiers:
logger.info(
"Selecting challenge for domain %s with type %s ...",
@@ -889,18 +1006,28 @@ def step_impl(
domain.value,
challenge_type,
)
serve_challenge(context=context, challenge=challenge)
challenges[domain] = challenge
serve_challenges(context=context, challenges=list(challenges.values()))
for domain, challenge in challenges.items():
logger.info(
"Notifying challenge for domain %s with type %s ...", domain, challenge_type
)
notify_challenge_ready(context=context, challenge=challenge)
@then(
"I wait {wait_time} seconds and serve challenge response for {var_path} at {hostname}"
)
def step_impl(context: Context, wait_time: str, var_path: str, hostname: str):
challenge = eval_var(context, var_path, as_json=False)
serve_challenges(context=context, challenges=[challenge], wait_time=int(wait_time))
@then("I serve challenge response for {var_path} at {hostname}")
def step_impl(context: Context, var_path: str, hostname: str):
challenge = eval_var(context, var_path, as_json=False)
serve_challenge(context=context, challenge=challenge)
serve_challenges(context=context, challenges=[challenge])
@then("I tell ACME server that {var_path} is ready to be verified")
@@ -909,12 +1036,57 @@ def step_impl(context: Context, var_path: str):
notify_challenge_ready(context=context, challenge=challenge)
@then("I wait until the status of order {order_var} becomes {status}")
def step_impl(context: Context, order_var: str, status: str):
acme_client = context.acme_client
attempt_count = 6
while attempt_count:
order = eval_var(context, order_var, as_json=False)
response = acme_client._post_as_get(
order.uri if isinstance(order, messages.OrderResource) else order
)
order = messages.Order.from_json(response.json())
if order.status.name == status:
return
attempt_count -= 1
time.sleep(10)
raise TimeoutError(f"The status of order doesn't become {status} before timeout")
@then("I wait until the status of authorization {auth_var} becomes {status}")
def step_impl(context: Context, auth_var: str, status: str):
acme_client = context.acme_client
attempt_count = 6
while attempt_count:
auth = eval_var(context, auth_var, as_json=False)
response = acme_client._post_as_get(
auth.uri if isinstance(auth, messages.Authorization) else auth
)
auth = messages.Authorization.from_json(response.json())
if auth.status.name == status:
return
attempt_count -= 1
time.sleep(10)
raise TimeoutError(f"The status of auth doesn't become {status} before timeout")
@then("I post-as-get {uri} as {resp_var}")
def step_impl(context: Context, uri: str, resp_var: str):
acme_client = context.acme_client
response = acme_client._post_as_get(replace_vars(uri, vars=context.vars))
context.vars[resp_var] = response.json()
@then("I poll and finalize the ACME order {var_path} as {finalized_var}")
def step_impl(context: Context, var_path: str, finalized_var: str):
order = eval_var(context, var_path, as_json=False)
acme_client = context.acme_client
try:
finalized_order = acme_client.poll_and_finalize(order)
context.vars[finalized_var] = finalized_order
except Exception as exp:
logger.error(f"Failed to finalize order: {exp}", exc_info=True)
context.vars["error"] = exp
@then("I parse the full-chain certificate from order {order_var_path} as {cert_var}")

backend/package-lock.json (generated, 3912 lines changed)

File diff suppressed because it is too large.

View File

@@ -25,6 +25,7 @@
"outputPath": "binary"
},
"scripts": {
"assets:export": "./scripts/export-assets.sh",
"binary:build": "npm run binary:clean && npm run build:frontend && npm run build && npm run binary:babel-frontend && npm run binary:babel-backend && npm run binary:rename-imports",
"binary:package": "pkg --no-bytecode --public-packages \"*\" --public --target host .",
"binary:babel-backend": " babel ./dist -d ./dist",
@@ -90,7 +91,7 @@
"@babel/plugin-syntax-import-attributes": "^7.24.7",
"@babel/preset-env": "^7.18.10",
"@babel/preset-react": "^7.24.7",
"@react-email/preview-server": "^4.3.0",
"@react-email/preview-server": "^5.0.6",
"@smithy/types": "^4.3.1",
"@types/bcrypt": "^5.0.2",
"@types/jmespath": "^0.15.2",
@@ -128,7 +129,7 @@
"nodemon": "^3.0.2",
"pino-pretty": "^10.2.3",
"prompt-sync": "^4.2.0",
"react-email": "^4.3.0",
"react-email": "^5.0.6",
"rimraf": "^5.0.5",
"ts-node": "^10.9.2",
"tsc-alias": "^1.8.8",
@@ -183,7 +184,7 @@
"@opentelemetry/semantic-conventions": "^1.27.0",
"@peculiar/asn1-schema": "^2.3.8",
"@peculiar/x509": "^1.12.1",
"@react-email/components": "0.0.36",
"@react-email/components": "^1.0.1",
"@serdnam/pino-cloudwatch-transport": "^1.0.4",
"@sindresorhus/slugify": "1.1.0",
"@slack/oauth": "^3.0.2",

View File

@@ -0,0 +1,75 @@
#!/bin/sh
# Export frontend static assets for CDN deployment
# Usage:
# npm run assets:export - Output tar to stdout (pipe to file or aws s3)
# npm run assets:export /path - Extract assets to specified directory
# npm run assets:export -- --help - Show usage
set -e
ASSETS_PATH="/backend/frontend-build/assets"
show_help() {
cat << 'EOF'
Export frontend static assets for CDN deployment.
USAGE:
docker run --rm infisical/infisical npm run --silent assets:export [-- OPTIONS] [PATH]
OPTIONS:
--help, -h Show this help message
ARGUMENTS:
PATH Directory to export assets to. If not provided, outputs
a tar archive to stdout.
NOTE:
Use --silent flag to suppress npm output when piping to stdout.
EXAMPLES:
# Export as tar to local file
docker run --rm infisical/infisical npm run --silent assets:export > assets.tar
# Extract to local directory
docker run --rm -v $(pwd)/cdn-assets:/output infisical/infisical npm run --silent assets:export /output
EOF
exit 0
}
# Check for help flag
case "${1:-}" in
--help|-h)
show_help
;;
esac
# Verify assets exist
if [ ! -d "$ASSETS_PATH" ]; then
echo "Error: Assets directory not found at $ASSETS_PATH" >&2
echo "Make sure the frontend is built and included in the image." >&2
exit 1
fi
ASSET_COUNT=$(find "$ASSETS_PATH" -type f | wc -l | tr -d ' ')
if [ $# -eq 0 ]; then
# No path provided - output tar to stdout
echo "Exporting $ASSET_COUNT assets as tar archive to stdout..." >&2
tar -cf - -C "$(dirname "$ASSETS_PATH")" "$(basename "$ASSETS_PATH")"
else
# Path provided - extract to directory
OUTPUT_PATH="$1"
if [ ! -d "$OUTPUT_PATH" ]; then
echo "Creating output directory: $OUTPUT_PATH" >&2
mkdir -p "$OUTPUT_PATH"
fi
echo "Exporting $ASSET_COUNT assets to $OUTPUT_PATH..." >&2
cp -r "$ASSETS_PATH"/* "$OUTPUT_PATH/"
echo "✅ Assets exported successfully!" >&2
echo " Path: $OUTPUT_PATH" >&2
echo " Files: $ASSET_COUNT assets" >&2
fi
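The usage header above mentions piping the archive to S3 but only shows the file and directory cases; a hedged sketch of that flow (the bucket name, object key, and image tag are placeholders, and it relies on the AWS CLI's support for streaming uploads from stdin via `-`):

```sh
# Hypothetical: stream the exported asset tarball straight to an S3 bucket for CDN hosting.
# "my-cdn-bucket" and the image tag are illustrative placeholders, not values from this PR.
docker run --rm infisical/infisical:latest npm run --silent assets:export \
  | aws s3 cp - s3://my-cdn-bucket/infisical-assets.tar
```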

View File

@@ -55,6 +55,7 @@ import { TAuthMode } from "@app/server/plugins/auth/inject-identity";
import { TAdditionalPrivilegeServiceFactory } from "@app/services/additional-privilege/additional-privilege-service";
import { TApiKeyServiceFactory } from "@app/services/api-key/api-key-service";
import { TAppConnectionServiceFactory } from "@app/services/app-connection/app-connection-service";
import { TApprovalPolicyServiceFactory } from "@app/services/approval-policy/approval-policy-service";
import { TAuthLoginFactory } from "@app/services/auth/auth-login-service";
import { TAuthPasswordFactory } from "@app/services/auth/auth-password-service";
import { TAuthSignupFactory } from "@app/services/auth/auth-signup-service";
@@ -65,6 +66,7 @@ import { TCertificateAuthorityServiceFactory } from "@app/services/certificate-a
import { TInternalCertificateAuthorityServiceFactory } from "@app/services/certificate-authority/internal/internal-certificate-authority-service";
import { TCertificateEstV3ServiceFactory } from "@app/services/certificate-est-v3/certificate-est-v3-service";
import { TCertificateProfileServiceFactory } from "@app/services/certificate-profile/certificate-profile-service";
import { TCertificateRequestServiceFactory } from "@app/services/certificate-request/certificate-request-service";
import { TCertificateTemplateServiceFactory } from "@app/services/certificate-template/certificate-template-service";
import { TCertificateTemplateV2ServiceFactory } from "@app/services/certificate-template-v2/certificate-template-v2-service";
import { TCertificateV3ServiceFactory } from "@app/services/certificate-v3/certificate-v3-service";
@@ -288,6 +290,7 @@ declare module "fastify" {
auditLogStream: TAuditLogStreamServiceFactory;
certificate: TCertificateServiceFactory;
certificateV3: TCertificateV3ServiceFactory;
certificateRequest: TCertificateRequestServiceFactory;
certificateTemplate: TCertificateTemplateServiceFactory;
certificateTemplateV2: TCertificateTemplateV2ServiceFactory;
certificateProfile: TCertificateProfileServiceFactory;
@@ -359,6 +362,7 @@ declare module "fastify" {
convertor: TConvertorServiceFactory;
subOrganization: TSubOrgServiceFactory;
pkiAlertV2: TPkiAlertV2ServiceFactory;
approvalPolicy: TApprovalPolicyServiceFactory;
};
// this is exclusive use for middlewares in which we need to inject data
// everywhere else access using service layer

View File

@@ -26,6 +26,30 @@ import {
TAppConnections,
TAppConnectionsInsert,
TAppConnectionsUpdate,
TApprovalPolicies,
TApprovalPoliciesInsert,
TApprovalPoliciesUpdate,
TApprovalPolicyStepApprovers,
TApprovalPolicyStepApproversInsert,
TApprovalPolicyStepApproversUpdate,
TApprovalPolicySteps,
TApprovalPolicyStepsInsert,
TApprovalPolicyStepsUpdate,
TApprovalRequestApprovals,
TApprovalRequestApprovalsInsert,
TApprovalRequestApprovalsUpdate,
TApprovalRequestGrants,
TApprovalRequestGrantsInsert,
TApprovalRequestGrantsUpdate,
TApprovalRequests,
TApprovalRequestsInsert,
TApprovalRequestStepEligibleApprovers,
TApprovalRequestStepEligibleApproversInsert,
TApprovalRequestStepEligibleApproversUpdate,
TApprovalRequestSteps,
TApprovalRequestStepsInsert,
TApprovalRequestStepsUpdate,
TApprovalRequestsUpdate,
TAuditLogs,
TAuditLogsInsert,
TAuditLogStreams,
@@ -578,6 +602,11 @@ import {
TAccessApprovalPoliciesEnvironmentsInsert,
TAccessApprovalPoliciesEnvironmentsUpdate
} from "@app/db/schemas/access-approval-policies-environments";
import {
TCertificateRequests,
TCertificateRequestsInsert,
TCertificateRequestsUpdate
} from "@app/db/schemas/certificate-requests";
import {
TIdentityAuthTemplates,
TIdentityAuthTemplatesInsert,
@@ -714,6 +743,11 @@ declare module "knex/types/tables" {
TExternalCertificateAuthoritiesUpdate
>;
[TableName.Certificate]: KnexOriginal.CompositeTableType<TCertificates, TCertificatesInsert, TCertificatesUpdate>;
[TableName.CertificateRequests]: KnexOriginal.CompositeTableType<
TCertificateRequests,
TCertificateRequestsInsert,
TCertificateRequestsUpdate
>;
[TableName.CertificateTemplate]: KnexOriginal.CompositeTableType<
TCertificateTemplates,
TCertificateTemplatesInsert,
@@ -1465,5 +1499,45 @@ declare module "knex/types/tables" {
TVaultExternalMigrationConfigsInsert,
TVaultExternalMigrationConfigsUpdate
>;
[TableName.ApprovalPolicies]: KnexOriginal.CompositeTableType<
TApprovalPolicies,
TApprovalPoliciesInsert,
TApprovalPoliciesUpdate
>;
[TableName.ApprovalPolicyStepApprovers]: KnexOriginal.CompositeTableType<
TApprovalPolicyStepApprovers,
TApprovalPolicyStepApproversInsert,
TApprovalPolicyStepApproversUpdate
>;
[TableName.ApprovalPolicySteps]: KnexOriginal.CompositeTableType<
TApprovalPolicySteps,
TApprovalPolicyStepsInsert,
TApprovalPolicyStepsUpdate
>;
[TableName.ApprovalRequestApprovals]: KnexOriginal.CompositeTableType<
TApprovalRequestApprovals,
TApprovalRequestApprovalsInsert,
TApprovalRequestApprovalsUpdate
>;
[TableName.ApprovalRequestGrants]: KnexOriginal.CompositeTableType<
TApprovalRequestGrants,
TApprovalRequestGrantsInsert,
TApprovalRequestGrantsUpdate
>;
[TableName.ApprovalRequestStepEligibleApprovers]: KnexOriginal.CompositeTableType<
TApprovalRequestStepEligibleApprovers,
TApprovalRequestStepEligibleApproversInsert,
TApprovalRequestStepEligibleApproversUpdate
>;
[TableName.ApprovalRequestSteps]: KnexOriginal.CompositeTableType<
TApprovalRequestSteps,
TApprovalRequestStepsInsert,
TApprovalRequestStepsUpdate
>;
[TableName.ApprovalRequests]: KnexOriginal.CompositeTableType<
TApprovalRequests,
TApprovalRequestsInsert,
TApprovalRequestsUpdate
>;
}
}

View File

@@ -0,0 +1,47 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasTable(TableName.CertificateRequests))) {
await knex.schema.createTable(TableName.CertificateRequests, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.timestamps(true, true, true);
t.string("status").notNullable();
t.string("projectId").notNullable();
t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
t.uuid("profileId").nullable();
t.foreign("profileId").references("id").inTable(TableName.PkiCertificateProfile).onDelete("SET NULL");
t.uuid("caId").nullable();
t.foreign("caId").references("id").inTable(TableName.CertificateAuthority).onDelete("SET NULL");
t.uuid("certificateId").nullable();
t.foreign("certificateId").references("id").inTable(TableName.Certificate).onDelete("SET NULL");
t.text("csr").nullable();
t.string("commonName").nullable();
t.text("altNames").nullable();
t.specificType("keyUsages", "text[]").nullable();
t.specificType("extendedKeyUsages", "text[]").nullable();
t.datetime("notBefore").nullable();
t.datetime("notAfter").nullable();
t.string("keyAlgorithm").nullable();
t.string("signatureAlgorithm").nullable();
t.text("errorMessage").nullable();
t.text("metadata").nullable();
t.index(["projectId"]);
t.index(["status"]);
t.index(["profileId"]);
t.index(["caId"]);
t.index(["certificateId"]);
t.index(["createdAt"]);
});
}
await createOnUpdateTrigger(knex, TableName.CertificateRequests);
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.dropTableIfExists(TableName.CertificateRequests);
await dropOnUpdateTrigger(knex, TableName.CertificateRequests);
}

View File

@@ -0,0 +1,22 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasSubOrganizationIdColumn = await knex.schema.hasColumn(TableName.IdentityAccessToken, "subOrganizationId");
if (!hasSubOrganizationIdColumn) {
await knex.schema.alterTable(TableName.IdentityAccessToken, (t) => {
t.uuid("subOrganizationId").nullable();
t.foreign("subOrganizationId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasSubOrganizationIdColumn = await knex.schema.hasColumn(TableName.IdentityAccessToken, "subOrganizationId");
if (hasSubOrganizationIdColumn) {
await knex.schema.alterTable(TableName.IdentityAccessToken, (t) => {
t.dropColumn("subOrganizationId");
});
}
}

View File

@@ -0,0 +1,21 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasExternalConfigs = await knex.schema.hasColumn(TableName.PkiCertificateProfile, "externalConfigs");
if (!hasExternalConfigs) {
await knex.schema.alterTable(TableName.PkiCertificateProfile, (t) => {
t.text("externalConfigs").nullable();
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasExternalConfigs = await knex.schema.hasColumn(TableName.PkiCertificateProfile, "externalConfigs");
if (hasExternalConfigs) {
await knex.schema.alterTable(TableName.PkiCertificateProfile, (t) => {
t.dropColumn("externalConfigs");
});
}
}

View File

@@ -0,0 +1,194 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasTable(TableName.ApprovalPolicies))) {
await knex.schema.createTable(TableName.ApprovalPolicies, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.string("projectId").notNullable().index();
t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
t.uuid("organizationId").notNullable().index();
t.foreign("organizationId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
t.string("type").notNullable().index();
t.string("name").notNullable();
t.boolean("isActive").defaultTo(true);
t.string("maxRequestTtl").nullable(); // 1hour, 30seconds, etc
t.jsonb("conditions").notNullable();
t.jsonb("constraints").notNullable();
t.timestamps(true, true, true);
});
await createOnUpdateTrigger(knex, TableName.ApprovalPolicies);
}
if (!(await knex.schema.hasTable(TableName.ApprovalPolicySteps))) {
await knex.schema.createTable(TableName.ApprovalPolicySteps, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.uuid("policyId").notNullable().index();
t.foreign("policyId").references("id").inTable(TableName.ApprovalPolicies).onDelete("CASCADE");
t.string("name").nullable();
t.integer("stepNumber").notNullable();
t.integer("requiredApprovals").notNullable();
t.boolean("notifyApprovers").defaultTo(false);
});
}
if (!(await knex.schema.hasTable(TableName.ApprovalPolicyStepApprovers))) {
await knex.schema.createTable(TableName.ApprovalPolicyStepApprovers, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.uuid("policyStepId").notNullable().index();
t.foreign("policyStepId").references("id").inTable(TableName.ApprovalPolicySteps).onDelete("CASCADE");
t.uuid("userId").nullable().index();
t.foreign("userId").references("id").inTable(TableName.Users).onDelete("CASCADE");
t.uuid("groupId").nullable().index();
t.foreign("groupId").references("id").inTable(TableName.Groups).onDelete("CASCADE");
t.check('("userId" IS NOT NULL AND "groupId" IS NULL) OR ("userId" IS NULL AND "groupId" IS NOT NULL)');
});
}
if (!(await knex.schema.hasTable(TableName.ApprovalRequests))) {
await knex.schema.createTable(TableName.ApprovalRequests, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.string("projectId").notNullable().index();
t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
t.uuid("organizationId").notNullable().index();
t.foreign("organizationId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
t.uuid("policyId").nullable().index();
t.foreign("policyId").references("id").inTable(TableName.ApprovalPolicies).onDelete("SET NULL");
t.uuid("requesterId").nullable().index();
t.foreign("requesterId").references("id").inTable(TableName.Users).onDelete("SET NULL");
// To be used in the event of requester deletion
t.string("requesterName").notNullable();
t.string("requesterEmail").notNullable();
t.string("type").notNullable().index();
t.string("status").notNullable().index();
t.text("justification").nullable();
t.integer("currentStep").notNullable();
t.jsonb("requestData").notNullable();
t.timestamp("expiresAt").nullable();
t.timestamps(true, true, true);
});
await createOnUpdateTrigger(knex, TableName.ApprovalRequests);
}
if (!(await knex.schema.hasTable(TableName.ApprovalRequestSteps))) {
await knex.schema.createTable(TableName.ApprovalRequestSteps, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.uuid("requestId").notNullable().index();
t.foreign("requestId").references("id").inTable(TableName.ApprovalRequests).onDelete("CASCADE");
t.integer("stepNumber").notNullable();
t.string("name").nullable();
t.string("status").notNullable().index();
t.integer("requiredApprovals").notNullable();
t.boolean("notifyApprovers").defaultTo(false);
t.timestamp("startedAt").nullable();
t.timestamp("completedAt").nullable();
});
}
if (!(await knex.schema.hasTable(TableName.ApprovalRequestStepEligibleApprovers))) {
await knex.schema.createTable(TableName.ApprovalRequestStepEligibleApprovers, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.uuid("stepId").notNullable().index();
t.foreign("stepId").references("id").inTable(TableName.ApprovalRequestSteps).onDelete("CASCADE");
t.uuid("userId").nullable().index();
t.foreign("userId").references("id").inTable(TableName.Users).onDelete("CASCADE");
t.uuid("groupId").nullable().index();
t.foreign("groupId").references("id").inTable(TableName.Groups).onDelete("CASCADE");
t.check('("userId" IS NOT NULL AND "groupId" IS NULL) OR ("userId" IS NULL AND "groupId" IS NOT NULL)');
});
}
if (!(await knex.schema.hasTable(TableName.ApprovalRequestApprovals))) {
await knex.schema.createTable(TableName.ApprovalRequestApprovals, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.uuid("stepId").notNullable().index();
t.foreign("stepId").references("id").inTable(TableName.ApprovalRequestSteps).onDelete("CASCADE");
t.uuid("approverUserId").notNullable().index();
t.foreign("approverUserId").references("id").inTable(TableName.Users).onDelete("CASCADE");
t.string("decision").notNullable();
t.text("comment").nullable();
t.timestamp("createdAt").defaultTo(knex.fn.now());
});
}
if (!(await knex.schema.hasTable(TableName.ApprovalRequestGrants))) {
await knex.schema.createTable(TableName.ApprovalRequestGrants, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.string("projectId").notNullable().index();
t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
t.uuid("requestId").nullable().index();
t.foreign("requestId").references("id").inTable(TableName.ApprovalRequests).onDelete("SET NULL");
t.uuid("granteeUserId").nullable().index();
t.foreign("granteeUserId").references("id").inTable(TableName.Users).onDelete("SET NULL");
t.uuid("revokedByUserId").nullable().index();
t.foreign("revokedByUserId").references("id").inTable(TableName.Users).onDelete("SET NULL");
t.text("revocationReason").nullable();
t.string("status").notNullable().index();
t.string("type").notNullable().index();
t.jsonb("attributes").notNullable();
t.timestamp("createdAt").defaultTo(knex.fn.now());
t.timestamp("expiresAt").nullable();
t.timestamp("revokedAt").nullable();
});
}
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.dropTableIfExists(TableName.ApprovalRequestGrants);
await knex.schema.dropTableIfExists(TableName.ApprovalRequestApprovals);
await knex.schema.dropTableIfExists(TableName.ApprovalRequestStepEligibleApprovers);
await knex.schema.dropTableIfExists(TableName.ApprovalRequestSteps);
await knex.schema.dropTableIfExists(TableName.ApprovalRequests);
await knex.schema.dropTableIfExists(TableName.ApprovalPolicyStepApprovers);
await knex.schema.dropTableIfExists(TableName.ApprovalPolicySteps);
await knex.schema.dropTableIfExists(TableName.ApprovalPolicies);
await dropOnUpdateTrigger(knex, TableName.ApprovalRequests);
await dropOnUpdateTrigger(knex, TableName.ApprovalPolicies);
}

View File

@@ -0,0 +1,21 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasGatewayId = await knex.schema.hasColumn(TableName.PamResource, "gatewayId");
if (hasGatewayId) {
await knex.schema.alterTable(TableName.PamResource, (t) => {
t.uuid("gatewayId").nullable().alter();
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasGatewayId = await knex.schema.hasColumn(TableName.PamResource, "gatewayId");
if (hasGatewayId) {
await knex.schema.alterTable(TableName.PamResource, (t) => {
t.uuid("gatewayId").notNullable().alter();
});
}
}

View File

@@ -0,0 +1,38 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
import { dropConstraintIfExists } from "./utils/dropConstraintIfExists";
const FOREIGN_KEY_CONSTRAINT_NAME = "certificate_requests_acme_order_id_fkey";
const INDEX_NAME = "certificate_requests_acme_order_id_idx";
export async function up(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.CertificateRequests)) {
const hasAcmeOrderId = await knex.schema.hasColumn(TableName.CertificateRequests, "acmeOrderId");
if (!hasAcmeOrderId) {
await knex.schema.alterTable(TableName.CertificateRequests, (t) => {
t.uuid("acmeOrderId").nullable();
t.foreign("acmeOrderId", FOREIGN_KEY_CONSTRAINT_NAME)
.references("id")
.inTable(TableName.PkiAcmeOrder)
.onDelete("SET NULL");
t.index("acmeOrderId", INDEX_NAME);
});
}
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.CertificateRequests)) {
const hasAcmeOrderId = await knex.schema.hasColumn(TableName.CertificateRequests, "acmeOrderId");
if (hasAcmeOrderId) {
await dropConstraintIfExists(TableName.CertificateRequests, FOREIGN_KEY_CONSTRAINT_NAME, knex);
await knex.schema.alterTable(TableName.CertificateRequests, (t) => {
t.dropIndex("acmeOrderId", INDEX_NAME);
t.dropColumn("acmeOrderId");
});
}
}
}

View File

@@ -0,0 +1,23 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.PkiAcmeEnrollmentConfig)) {
if (!(await knex.schema.hasColumn(TableName.PkiAcmeEnrollmentConfig, "skipDnsOwnershipVerification"))) {
await knex.schema.alterTable(TableName.PkiAcmeEnrollmentConfig, (t) => {
t.boolean("skipDnsOwnershipVerification").defaultTo(false).notNullable();
});
}
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.PkiAcmeEnrollmentConfig)) {
if (await knex.schema.hasColumn(TableName.PkiAcmeEnrollmentConfig, "skipDnsOwnershipVerification")) {
await knex.schema.alterTable(TableName.PkiAcmeEnrollmentConfig, (t) => {
t.dropColumn("skipDnsOwnershipVerification");
});
}
}
}

View File

@@ -0,0 +1,26 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const ApprovalPoliciesSchema = z.object({
id: z.string().uuid(),
projectId: z.string(),
organizationId: z.string().uuid(),
type: z.string(),
name: z.string(),
isActive: z.boolean().default(true).nullable().optional(),
maxRequestTtl: z.string().nullable().optional(),
conditions: z.unknown(),
constraints: z.unknown(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TApprovalPolicies = z.infer<typeof ApprovalPoliciesSchema>;
export type TApprovalPoliciesInsert = Omit<z.input<typeof ApprovalPoliciesSchema>, TImmutableDBKeys>;
export type TApprovalPoliciesUpdate = Partial<Omit<z.input<typeof ApprovalPoliciesSchema>, TImmutableDBKeys>>;

View File

@@ -0,0 +1,24 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const ApprovalPolicyStepApproversSchema = z.object({
id: z.string().uuid(),
policyStepId: z.string().uuid(),
userId: z.string().uuid().nullable().optional(),
groupId: z.string().uuid().nullable().optional()
});
export type TApprovalPolicyStepApprovers = z.infer<typeof ApprovalPolicyStepApproversSchema>;
export type TApprovalPolicyStepApproversInsert = Omit<
z.input<typeof ApprovalPolicyStepApproversSchema>,
TImmutableDBKeys
>;
export type TApprovalPolicyStepApproversUpdate = Partial<
Omit<z.input<typeof ApprovalPolicyStepApproversSchema>, TImmutableDBKeys>
>;

View File

@@ -0,0 +1,21 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const ApprovalPolicyStepsSchema = z.object({
id: z.string().uuid(),
policyId: z.string().uuid(),
name: z.string().nullable().optional(),
stepNumber: z.number(),
requiredApprovals: z.number(),
notifyApprovers: z.boolean().default(false).nullable().optional()
});
export type TApprovalPolicySteps = z.infer<typeof ApprovalPolicyStepsSchema>;
export type TApprovalPolicyStepsInsert = Omit<z.input<typeof ApprovalPolicyStepsSchema>, TImmutableDBKeys>;
export type TApprovalPolicyStepsUpdate = Partial<Omit<z.input<typeof ApprovalPolicyStepsSchema>, TImmutableDBKeys>>;

View File

@@ -0,0 +1,23 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const ApprovalRequestApprovalsSchema = z.object({
id: z.string().uuid(),
stepId: z.string().uuid(),
approverUserId: z.string().uuid(),
decision: z.string(),
comment: z.string().nullable().optional(),
createdAt: z.date().nullable().optional()
});
export type TApprovalRequestApprovals = z.infer<typeof ApprovalRequestApprovalsSchema>;
export type TApprovalRequestApprovalsInsert = Omit<z.input<typeof ApprovalRequestApprovalsSchema>, TImmutableDBKeys>;
export type TApprovalRequestApprovalsUpdate = Partial<
Omit<z.input<typeof ApprovalRequestApprovalsSchema>, TImmutableDBKeys>
>;

View File

@@ -0,0 +1,27 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const ApprovalRequestGrantsSchema = z.object({
id: z.string().uuid(),
projectId: z.string(),
requestId: z.string().uuid().nullable().optional(),
granteeUserId: z.string().uuid().nullable().optional(),
revokedByUserId: z.string().uuid().nullable().optional(),
revocationReason: z.string().nullable().optional(),
status: z.string(),
type: z.string(),
attributes: z.unknown(),
createdAt: z.date().nullable().optional(),
expiresAt: z.date().nullable().optional(),
revokedAt: z.date().nullable().optional()
});
export type TApprovalRequestGrants = z.infer<typeof ApprovalRequestGrantsSchema>;
export type TApprovalRequestGrantsInsert = Omit<z.input<typeof ApprovalRequestGrantsSchema>, TImmutableDBKeys>;
export type TApprovalRequestGrantsUpdate = Partial<Omit<z.input<typeof ApprovalRequestGrantsSchema>, TImmutableDBKeys>>;

View File

@@ -0,0 +1,24 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const ApprovalRequestStepEligibleApproversSchema = z.object({
id: z.string().uuid(),
stepId: z.string().uuid(),
userId: z.string().uuid().nullable().optional(),
groupId: z.string().uuid().nullable().optional()
});
export type TApprovalRequestStepEligibleApprovers = z.infer<typeof ApprovalRequestStepEligibleApproversSchema>;
export type TApprovalRequestStepEligibleApproversInsert = Omit<
z.input<typeof ApprovalRequestStepEligibleApproversSchema>,
TImmutableDBKeys
>;
export type TApprovalRequestStepEligibleApproversUpdate = Partial<
Omit<z.input<typeof ApprovalRequestStepEligibleApproversSchema>, TImmutableDBKeys>
>;

View File

@@ -0,0 +1,24 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const ApprovalRequestStepsSchema = z.object({
id: z.string().uuid(),
requestId: z.string().uuid(),
stepNumber: z.number(),
name: z.string().nullable().optional(),
status: z.string(),
requiredApprovals: z.number(),
notifyApprovers: z.boolean().default(false).nullable().optional(),
startedAt: z.date().nullable().optional(),
completedAt: z.date().nullable().optional()
});
export type TApprovalRequestSteps = z.infer<typeof ApprovalRequestStepsSchema>;
export type TApprovalRequestStepsInsert = Omit<z.input<typeof ApprovalRequestStepsSchema>, TImmutableDBKeys>;
export type TApprovalRequestStepsUpdate = Partial<Omit<z.input<typeof ApprovalRequestStepsSchema>, TImmutableDBKeys>>;

View File

@@ -0,0 +1,30 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const ApprovalRequestsSchema = z.object({
id: z.string().uuid(),
projectId: z.string(),
organizationId: z.string().uuid(),
policyId: z.string().uuid().nullable().optional(),
requesterId: z.string().uuid().nullable().optional(),
requesterName: z.string(),
requesterEmail: z.string(),
type: z.string(),
status: z.string(),
justification: z.string().nullable().optional(),
currentStep: z.number(),
requestData: z.unknown(),
expiresAt: z.date().nullable().optional(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TApprovalRequests = z.infer<typeof ApprovalRequestsSchema>;
export type TApprovalRequestsInsert = Omit<z.input<typeof ApprovalRequestsSchema>, TImmutableDBKeys>;
export type TApprovalRequestsUpdate = Partial<Omit<z.input<typeof ApprovalRequestsSchema>, TImmutableDBKeys>>;

View File

@@ -0,0 +1,35 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const CertificateRequestsSchema = z.object({
id: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date(),
status: z.string(),
projectId: z.string(),
profileId: z.string().uuid().nullable().optional(),
caId: z.string().uuid().nullable().optional(),
certificateId: z.string().uuid().nullable().optional(),
csr: z.string().nullable().optional(),
commonName: z.string().nullable().optional(),
altNames: z.string().nullable().optional(),
keyUsages: z.string().array().nullable().optional(),
extendedKeyUsages: z.string().array().nullable().optional(),
notBefore: z.date().nullable().optional(),
notAfter: z.date().nullable().optional(),
keyAlgorithm: z.string().nullable().optional(),
signatureAlgorithm: z.string().nullable().optional(),
errorMessage: z.string().nullable().optional(),
metadata: z.string().nullable().optional(),
acmeOrderId: z.string().uuid().nullable().optional()
});
export type TCertificateRequests = z.infer<typeof CertificateRequestsSchema>;
export type TCertificateRequestsInsert = Omit<z.input<typeof CertificateRequestsSchema>, TImmutableDBKeys>;
export type TCertificateRequestsUpdate = Partial<Omit<z.input<typeof CertificateRequestsSchema>, TImmutableDBKeys>>;

View File

@@ -22,7 +22,8 @@ export const IdentityAccessTokensSchema = z.object({
updatedAt: z.date(),
name: z.string().nullable().optional(),
authMethod: z.string(),
accessTokenPeriod: z.coerce.number().default(0)
accessTokenPeriod: z.coerce.number().default(0),
subOrganizationId: z.string().uuid().nullable().optional()
});
export type TIdentityAccessTokens = z.infer<typeof IdentityAccessTokensSchema>;

View File

@@ -6,6 +6,14 @@ export * from "./access-approval-requests-reviewers";
export * from "./additional-privileges";
export * from "./api-keys";
export * from "./app-connections";
export * from "./approval-policies";
export * from "./approval-policy-step-approvers";
export * from "./approval-policy-steps";
export * from "./approval-request-approvals";
export * from "./approval-request-grants";
export * from "./approval-request-step-eligible-approvers";
export * from "./approval-request-steps";
export * from "./approval-requests";
export * from "./audit-log-streams";
export * from "./audit-logs";
export * from "./auth-token-sessions";
@@ -16,6 +24,7 @@ export * from "./certificate-authority-certs";
export * from "./certificate-authority-crl";
export * from "./certificate-authority-secret";
export * from "./certificate-bodies";
export * from "./certificate-requests";
export * from "./certificate-secrets";
export * from "./certificate-syncs";
export * from "./certificate-template-est-configs";

View File

@@ -21,6 +21,7 @@ export enum TableName {
CertificateAuthorityCrl = "certificate_authority_crl",
Certificate = "certificates",
CertificateBody = "certificate_bodies",
CertificateRequests = "certificate_requests",
CertificateSecret = "certificate_secrets",
CertificateTemplate = "certificate_templates",
PkiCertificateTemplateV2 = "pki_certificate_templates_v2",
@@ -222,7 +223,17 @@ export enum TableName {
PkiAcmeOrder = "pki_acme_orders",
PkiAcmeOrderAuth = "pki_acme_order_auths",
PkiAcmeAuth = "pki_acme_auths",
PkiAcmeChallenge = "pki_acme_challenges"
PkiAcmeChallenge = "pki_acme_challenges",
// Approval Policies
ApprovalPolicies = "approval_policies",
ApprovalPolicySteps = "approval_policy_steps",
ApprovalPolicyStepApprovers = "approval_policy_step_approvers",
ApprovalRequests = "approval_requests",
ApprovalRequestSteps = "approval_request_steps",
ApprovalRequestStepEligibleApprovers = "approval_request_step_eligible_approvers",
ApprovalRequestApprovals = "approval_request_approvals",
ApprovalRequestGrants = "approval_request_grants"
}
export type TImmutableDBKeys = "id" | "createdAt" | "updatedAt" | "commitId";

View File

@@ -13,7 +13,7 @@ export const PamResourcesSchema = z.object({
id: z.string().uuid(),
projectId: z.string(),
name: z.string(),
gatewayId: z.string().uuid(),
gatewayId: z.string().uuid().nullable().optional(),
resourceType: z.string(),
encryptedConnectionDetails: zodBuffer,
createdAt: z.date(),

View File

@@ -13,7 +13,8 @@ export const PkiAcmeEnrollmentConfigsSchema = z.object({
id: z.string().uuid(),
encryptedEabSecret: zodBuffer,
createdAt: z.date(),
updatedAt: z.date()
updatedAt: z.date(),
skipDnsOwnershipVerification: z.boolean().default(false)
});
export type TPkiAcmeEnrollmentConfigs = z.infer<typeof PkiAcmeEnrollmentConfigsSchema>;

View File

@@ -20,7 +20,8 @@ export const PkiCertificateProfilesSchema = z.object({
createdAt: z.date(),
updatedAt: z.date(),
acmeConfigId: z.string().uuid().nullable().optional(),
issuerType: z.string().default("ca")
issuerType: z.string().default("ca"),
externalConfigs: z.string().nullable().optional()
});
export type TPkiCertificateProfiles = z.infer<typeof PkiCertificateProfilesSchema>;

View File

@@ -4,15 +4,10 @@ import { ExternalKmsSchema, KmsKeysSchema } from "@app/db/schemas";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import {
ExternalKmsAwsSchema,
ExternalKmsGcpCredentialSchema,
ExternalKmsGcpSchema,
ExternalKmsInputSchema,
ExternalKmsInputUpdateSchema,
KmsGcpKeyFetchAuthType,
KmsProviders,
TExternalKmsGcpCredentialSchema
ExternalKmsInputUpdateSchema
} from "@app/ee/services/external-kms/providers/model";
import { NotFoundError } from "@app/lib/errors";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
@@ -293,67 +288,4 @@ export const registerExternalKmsRouter = async (server: FastifyZodProvider) => {
return { externalKms };
}
});
server.route({
method: "POST",
url: "/gcp/keys",
config: {
rateLimit: writeLimit
},
schema: {
body: z.discriminatedUnion("authMethod", [
z.object({
authMethod: z.literal(KmsGcpKeyFetchAuthType.Credential),
region: z.string().trim().min(1),
credential: ExternalKmsGcpCredentialSchema
}),
z.object({
authMethod: z.literal(KmsGcpKeyFetchAuthType.Kms),
region: z.string().trim().min(1),
kmsId: z.string().trim().min(1)
})
]),
response: {
200: z.object({
keys: z.string().array()
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const { region, authMethod } = req.body;
let credentialJson: TExternalKmsGcpCredentialSchema | undefined;
if (authMethod === KmsGcpKeyFetchAuthType.Credential) {
credentialJson = req.body.credential;
} else if (authMethod === KmsGcpKeyFetchAuthType.Kms) {
const externalKms = await server.services.externalKms.findById({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
id: req.body.kmsId
});
if (!externalKms || externalKms.external.provider !== KmsProviders.Gcp) {
throw new NotFoundError({ message: "KMS not found or not of type GCP" });
}
credentialJson = externalKms.external.providerInput.credential as TExternalKmsGcpCredentialSchema;
}
if (!credentialJson) {
throw new NotFoundError({
message: "Something went wrong while fetching the GCP credential, please check inputs and try again"
});
}
const results = await server.services.externalKms.fetchGcpKeys({
credential: credentialJson,
gcpRegion: region
});
return results;
}
});
};

View File

@@ -0,0 +1,12 @@
import { ExternalKmsAwsSchema, KmsProviders } from "@app/ee/services/external-kms/providers/model";
import { registerExternalKmsEndpoints } from "./external-kms-endpoints";
export const registerAwsKmsRouter = async (server: FastifyZodProvider) => {
registerExternalKmsEndpoints({
server,
provider: KmsProviders.Aws,
createSchema: ExternalKmsAwsSchema,
updateSchema: ExternalKmsAwsSchema.partial()
});
};

View File

@@ -0,0 +1,300 @@
import { z } from "zod";
import { ExternalKmsSchema, KmsKeysSchema } from "@app/db/schemas";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import {
KmsProviders,
SanitizedExternalKmsAwsSchema,
SanitizedExternalKmsGcpSchema,
TExternalKmsInputSchema,
TExternalKmsInputUpdateSchema
} from "@app/ee/services/external-kms/providers/model";
import { crypto } from "@app/lib/crypto/cryptography";
import { BadRequestError } from "@app/lib/errors";
import { deterministicStringify } from "@app/lib/fn/object";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
const sanitizedExternalSchema = KmsKeysSchema.extend({
externalKms: ExternalKmsSchema.pick({
id: true,
status: true,
statusDetails: true,
provider: true
}).extend({
configuration: z.union([SanitizedExternalKmsAwsSchema, SanitizedExternalKmsGcpSchema]),
credentialsHash: z.string().optional()
})
});
export const registerExternalKmsEndpoints = <
T extends { type: KmsProviders; inputs: TExternalKmsInputSchema["inputs"] }
>({
server,
provider,
createSchema,
updateSchema
}: {
server: FastifyZodProvider;
provider: T["type"];
createSchema: z.ZodType<T["inputs"]>;
updateSchema: z.ZodType<Partial<T["inputs"]>>;
}) => {
server.route({
method: "GET",
url: "/:id",
config: {
rateLimit: readLimit
},
schema: {
params: z.object({
id: z.string().trim().min(1)
}),
response: {
200: sanitizedExternalSchema
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const externalKms = await server.services.externalKms.findById({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
id: req.params.id
});
// Validate that the KMS is of the expected provider type
if (externalKms.external.provider !== provider) {
throw new BadRequestError({
message: `KMS provider mismatch. Expected ${provider}, got ${externalKms.external.provider}`
});
}
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
orgId: req.permission.orgId,
event: {
type: EventType.GET_KMS,
metadata: {
kmsId: externalKms.id,
name: externalKms.name
}
}
});
const {
external: { providerInput: configuration, ...externalKmsData },
...rest
} = externalKms;
const credentialsToHash = deterministicStringify(configuration.credential);
const credentialsHash = crypto.nativeCrypto
.createHash("sha256")
.update(Buffer.from(credentialsToHash))
.digest("hex");
return { ...rest, externalKms: { ...externalKmsData, configuration, credentialsHash } };
}
});
server.route({
method: "POST",
url: "/",
config: {
rateLimit: writeLimit
},
schema: {
body: z.object({
name: z.string().min(1).trim().toLowerCase(),
description: z.string().trim().optional(),
configuration: createSchema
}),
response: {
200: sanitizedExternalSchema
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const { name, description, configuration } = req.body as {
name: string;
description?: string;
configuration: T["inputs"];
};
const providerInput = {
type: provider,
inputs: configuration
} as TExternalKmsInputSchema;
const externalKms = await server.services.externalKms.create({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
name,
provider: providerInput,
description
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
orgId: req.permission.orgId,
event: {
type: EventType.CREATE_KMS,
metadata: {
kmsId: externalKms.id,
provider,
name,
description
}
}
});
const {
external: { providerInput: externalKmsConfiguration, ...externalKmsData },
...rest
} = externalKms;
const credentialsToHash = deterministicStringify(externalKmsConfiguration.credential);
const credentialsHash = crypto.nativeCrypto
.createHash("sha256")
.update(Buffer.from(credentialsToHash))
.digest("hex");
return { ...rest, externalKms: { ...externalKmsData, configuration: externalKmsConfiguration, credentialsHash } };
}
});
server.route({
method: "PATCH",
url: "/:id",
config: {
rateLimit: writeLimit
},
schema: {
params: z.object({
id: z.string().trim().min(1)
}),
body: z.object({
name: z.string().min(1).trim().toLowerCase().optional(),
description: z.string().trim().optional(),
configuration: updateSchema.optional()
}),
response: {
200: sanitizedExternalSchema
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const { name, description, configuration } = req.body as {
name?: string;
description?: string;
configuration: Partial<T["inputs"]>;
};
const providerInput = {
type: provider,
inputs: configuration
} as TExternalKmsInputUpdateSchema;
const externalKms = await server.services.externalKms.updateById({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
name,
provider: providerInput,
description,
id: req.params.id
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
orgId: req.permission.orgId,
event: {
type: EventType.UPDATE_KMS,
metadata: {
kmsId: externalKms.id,
provider,
name,
description
}
}
});
const {
external: { providerInput: externalKmsConfiguration, ...externalKmsData },
...rest
} = externalKms;
const credentialsToHash = deterministicStringify(externalKmsConfiguration.credential);
const credentialsHash = crypto.nativeCrypto
.createHash("sha256")
.update(Buffer.from(credentialsToHash))
.digest("hex");
return { ...rest, externalKms: { ...externalKmsData, configuration: externalKmsConfiguration, credentialsHash } };
}
});
server.route({
method: "DELETE",
url: "/:id",
config: {
rateLimit: writeLimit
},
schema: {
params: z.object({
id: z.string().trim().min(1)
}),
response: {
200: sanitizedExternalSchema
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const externalKms = await server.services.externalKms.deleteById({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
id: req.params.id
});
// Validate that the KMS is of the expected provider type
if (externalKms.external.provider !== provider) {
throw new BadRequestError({
message: `KMS provider mismatch. Expected ${provider}, got ${externalKms.external.provider}`
});
}
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
orgId: req.permission.orgId,
event: {
type: EventType.DELETE_KMS,
metadata: {
kmsId: externalKms.id,
name: externalKms.name
}
}
});
const {
external: { providerInput: configuration, ...externalKmsData },
...rest
} = externalKms;
const credentialsToHash = deterministicStringify(configuration.credential);
const credentialsHash = crypto.nativeCrypto
.createHash("sha256")
.update(Buffer.from(credentialsToHash))
.digest("hex");
return { ...rest, externalKms: { ...externalKmsData, configuration, credentialsHash } };
}
});
};
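The credentialsHash returned by each handler above lets clients detect whether stored credentials changed without ever receiving them. A minimal sketch of the idea, using plain node:crypto instead of the project's crypto wrapper and a stand-in for deterministicStringify (whose implementation is not shown in this change):
import { createHash } from "node:crypto";

// Stand-in for deterministicStringify: recursively sort object keys so logically
// equal credential objects always serialize (and therefore hash) the same way.
const canonicalize = (value: unknown): unknown =>
  value && typeof value === "object" && !Array.isArray(value)
    ? Object.fromEntries(
        Object.entries(value as Record<string, unknown>)
          .sort(([a], [b]) => a.localeCompare(b))
          .map(([key, val]) => [key, canonicalize(val)])
      )
    : value;

const hashCredentials = (credential: Record<string, unknown>): string =>
  createHash("sha256").update(Buffer.from(JSON.stringify(canonicalize(credential)))).digest("hex");

// Same credential, different key order -> identical hash, so a client can compare
// hashes across responses to tell whether the secret material actually changed.
console.log(
  hashCredentials({ accessKey: "AKIA...", secretKey: "..." }) ===
    hashCredentials({ secretKey: "...", accessKey: "AKIA..." })
); // true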

View File

@@ -0,0 +1,88 @@
import { z } from "zod";
import {
ExternalKmsGcpCredentialSchema,
ExternalKmsGcpSchema,
KmsGcpKeyFetchAuthType,
KmsProviders,
TExternalKmsGcpCredentialSchema
} from "@app/ee/services/external-kms/providers/model";
import { NotFoundError } from "@app/lib/errors";
import { writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
import { registerExternalKmsEndpoints } from "./external-kms-endpoints";
export const registerGcpKmsRouter = async (server: FastifyZodProvider) => {
registerExternalKmsEndpoints({
server,
provider: KmsProviders.Gcp,
createSchema: ExternalKmsGcpSchema,
updateSchema: ExternalKmsGcpSchema.partial()
});
server.route({
method: "POST",
url: "/keys",
config: {
rateLimit: writeLimit
},
schema: {
body: z.discriminatedUnion("authMethod", [
z.object({
authMethod: z.literal(KmsGcpKeyFetchAuthType.Credential),
region: z.string().trim().min(1),
credential: ExternalKmsGcpCredentialSchema
}),
z.object({
authMethod: z.literal(KmsGcpKeyFetchAuthType.Kms),
region: z.string().trim().min(1),
kmsId: z.string().trim().min(1)
})
]),
response: {
200: z.object({
keys: z.string().array()
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const { region, authMethod } = req.body;
let credentialJson: TExternalKmsGcpCredentialSchema | undefined;
if (authMethod === KmsGcpKeyFetchAuthType.Credential && "credential" in req.body) {
credentialJson = req.body.credential;
} else if (authMethod === KmsGcpKeyFetchAuthType.Kms && "kmsId" in req.body) {
const externalKms = await server.services.externalKms.findById({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
id: req.body.kmsId
});
if (!externalKms || externalKms.external.provider !== KmsProviders.Gcp) {
throw new NotFoundError({ message: "KMS not found or not of type GCP" });
}
const providerInput = externalKms.external.providerInput as { credential: TExternalKmsGcpCredentialSchema };
credentialJson = providerInput.credential;
}
if (!credentialJson) {
throw new NotFoundError({
message: "Something went wrong while fetching the GCP credential, please check inputs and try again"
});
}
const results = await server.services.externalKms.fetchGcpKeys({
credential: credentialJson,
gcpRegion: region
});
return results;
}
});
};
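The /keys endpoint above accepts one of two body shapes, discriminated on authMethod. A sketch of each (the region and placeholder values are illustrative, and the full credential shape comes from ExternalKmsGcpCredentialSchema):
import { KmsGcpKeyFetchAuthType } from "@app/ee/services/external-kms/providers/model";

// Option 1: pass a GCP service-account credential inline.
const byCredential = {
  authMethod: KmsGcpKeyFetchAuthType.Credential,
  region: "us-central1",
  credential: {
    // fields per ExternalKmsGcpCredentialSchema (service account JSON), omitted here
  }
};

// Option 2: reuse the credential already stored on an existing GCP external KMS.
const byStoredKms = {
  authMethod: KmsGcpKeyFetchAuthType.Kms,
  region: "us-central1",
  kmsId: "<existing-external-kms-id>"
};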

View File

@@ -0,0 +1,9 @@
import { KmsProviders } from "@app/ee/services/external-kms/providers/model";
import { registerAwsKmsRouter } from "./aws-kms-router";
import { registerGcpKmsRouter } from "./gcp-kms-router";
export const EXTERNAL_KMS_REGISTER_ROUTER_MAP: Record<KmsProviders, (server: FastifyZodProvider) => Promise<void>> = {
[KmsProviders.Aws]: registerAwsKmsRouter,
[KmsProviders.Gcp]: registerGcpKmsRouter
};

View File

@@ -12,6 +12,7 @@ import { registerDynamicSecretLeaseRouter } from "./dynamic-secret-lease-router"
import { registerKubernetesDynamicSecretLeaseRouter } from "./dynamic-secret-lease-routers/kubernetes-lease-router";
import { registerDynamicSecretRouter } from "./dynamic-secret-router";
import { registerExternalKmsRouter } from "./external-kms-router";
import { EXTERNAL_KMS_REGISTER_ROUTER_MAP } from "./external-kms-routers";
import { registerGatewayRouter } from "./gateway-router";
import { registerGithubOrgSyncRouter } from "./github-org-sync-router";
import { registerGroupRouter } from "./group-router";
@@ -162,9 +163,19 @@ export const registerV1EERoutes = async (server: FastifyZodProvider) => {
{ prefix: "/additional-privilege" }
);
await server.register(registerExternalKmsRouter, {
prefix: "/external-kms"
});
await server.register(
async (externalKmsRouter) => {
await externalKmsRouter.register(registerExternalKmsRouter);
// Provider-specific endpoints
await Promise.all(
Object.entries(EXTERNAL_KMS_REGISTER_ROUTER_MAP).map(([provider, router]) =>
externalKmsRouter.register(router, { prefix: `/${provider}` })
)
);
},
{ prefix: "/external-kms" }
);
await server.register(registerIdentityTemplateRouter, { prefix: "/identity-templates" });
await server.register(registerProjectTemplateRouter, { prefix: "/project-templates" });
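With this nested registration, the generic external KMS routes stay under /external-kms while each provider's endpoints mount under a provider-specific prefix. A sketch of the resulting paths relative to these EE routes, assuming KmsProviders.Aws and KmsProviders.Gcp serialize to "aws" and "gcp" (their values are not shown in this hunk):
// GET   /external-kms/aws/:id        GET   /external-kms/gcp/:id
// POST  /external-kms/aws            POST  /external-kms/gcp
// PATCH /external-kms/aws/:id        POST  /external-kms/gcp/keys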

View File

@@ -58,7 +58,8 @@ export const registerLicenseRouter = async (server: FastifyZodProvider) => {
const plan = await server.services.license.getOrgPlan({
actorId: req.permission.id,
actor: req.permission.type,
actorOrgId: req.permission.rootOrgId,
actorOrgId: req.permission.orgId,
rootOrgId: req.permission.rootOrgId,
actorAuthMethod: req.permission.authMethod,
orgId: req.params.organizationId,
refreshCache: req.query.refreshCache
@@ -87,7 +88,8 @@ export const registerLicenseRouter = async (server: FastifyZodProvider) => {
actor: req.permission.type,
actorOrgId: req.permission.orgId,
actorAuthMethod: req.permission.authMethod,
orgId: req.params.organizationId
orgId: req.params.organizationId,
rootOrgId: req.permission.rootOrgId
});
return data;
}

View File

@@ -1,3 +1,13 @@
import {
CreateAwsIamAccountSchema,
SanitizedAwsIamAccountWithResourceSchema,
UpdateAwsIamAccountSchema
} from "@app/ee/services/pam-resource/aws-iam/aws-iam-resource-schemas";
import {
CreateKubernetesAccountSchema,
SanitizedKubernetesAccountWithResourceSchema,
UpdateKubernetesAccountSchema
} from "@app/ee/services/pam-resource/kubernetes/kubernetes-resource-schemas";
import {
CreateMySQLAccountSchema,
SanitizedMySQLAccountWithResourceSchema,
@@ -44,5 +54,23 @@ export const PAM_ACCOUNT_REGISTER_ROUTER_MAP: Record<PamResource, (server: Fasti
createAccountSchema: CreateSSHAccountSchema,
updateAccountSchema: UpdateSSHAccountSchema
});
},
[PamResource.Kubernetes]: async (server: FastifyZodProvider) => {
registerPamResourceEndpoints({
server,
resourceType: PamResource.Kubernetes,
accountResponseSchema: SanitizedKubernetesAccountWithResourceSchema,
createAccountSchema: CreateKubernetesAccountSchema,
updateAccountSchema: UpdateKubernetesAccountSchema
});
},
[PamResource.AwsIam]: async (server: FastifyZodProvider) => {
registerPamResourceEndpoints({
server,
resourceType: PamResource.AwsIam,
accountResponseSchema: SanitizedAwsIamAccountWithResourceSchema,
createAccountSchema: CreateAwsIamAccountSchema,
updateAccountSchema: UpdateAwsIamAccountSchema
});
}
};

View File

@@ -22,7 +22,7 @@ export const registerPamResourceEndpoints = <C extends TPamAccount>({
folderId?: C["folderId"];
name: C["name"];
description?: C["description"];
rotationEnabled: C["rotationEnabled"];
rotationEnabled?: C["rotationEnabled"];
rotationIntervalSeconds?: C["rotationIntervalSeconds"];
}>;
updateAccountSchema: z.ZodType<{
@@ -65,7 +65,7 @@ export const registerPamResourceEndpoints = <C extends TPamAccount>({
folderId: req.body.folderId,
name: req.body.name,
description: req.body.description,
rotationEnabled: req.body.rotationEnabled,
rotationEnabled: req.body.rotationEnabled ?? false,
rotationIntervalSeconds: req.body.rotationIntervalSeconds
}
}

View File

@@ -3,8 +3,11 @@ import { z } from "zod";
import { PamFoldersSchema } from "@app/db/schemas";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { PamAccountOrderBy, PamAccountView } from "@app/ee/services/pam-account/pam-account-enums";
import { SanitizedAwsIamAccountWithResourceSchema } from "@app/ee/services/pam-resource/aws-iam/aws-iam-resource-schemas";
import { SanitizedKubernetesAccountWithResourceSchema } from "@app/ee/services/pam-resource/kubernetes/kubernetes-resource-schemas";
import { SanitizedMySQLAccountWithResourceSchema } from "@app/ee/services/pam-resource/mysql/mysql-resource-schemas";
import { PamResource } from "@app/ee/services/pam-resource/pam-resource-enums";
import { GatewayAccessResponseSchema } from "@app/ee/services/pam-resource/pam-resource-schemas";
import { SanitizedPostgresAccountWithResourceSchema } from "@app/ee/services/pam-resource/postgres/postgres-resource-schemas";
import { SanitizedSSHAccountWithResourceSchema } from "@app/ee/services/pam-resource/ssh/ssh-resource-schemas";
import { BadRequestError } from "@app/lib/errors";
@@ -18,9 +21,19 @@ import { AuthMode } from "@app/services/auth/auth-type";
const SanitizedAccountSchema = z.union([
SanitizedSSHAccountWithResourceSchema, // ORDER MATTERS
SanitizedPostgresAccountWithResourceSchema,
SanitizedMySQLAccountWithResourceSchema
SanitizedMySQLAccountWithResourceSchema,
SanitizedKubernetesAccountWithResourceSchema,
SanitizedAwsIamAccountWithResourceSchema
]);
const ListPamAccountsResponseSchema = z.object({
accounts: SanitizedAccountSchema.array(),
folders: PamFoldersSchema.array(),
totalCount: z.number().default(0),
folderId: z.string().optional(),
folderPaths: z.record(z.string(), z.string())
});
export const registerPamAccountRouter = async (server: FastifyZodProvider) => {
server.route({
method: "GET",
@@ -50,13 +63,7 @@ export const registerPamAccountRouter = async (server: FastifyZodProvider) => {
.optional()
}),
response: {
200: z.object({
accounts: SanitizedAccountSchema.array(),
folders: PamFoldersSchema.array(),
totalCount: z.number().default(0),
folderId: z.string().optional(),
folderPaths: z.record(z.string(), z.string())
})
200: ListPamAccountsResponseSchema
}
},
onRequest: verifyAuth([AuthMode.JWT]),
@@ -93,7 +100,7 @@ export const registerPamAccountRouter = async (server: FastifyZodProvider) => {
}
});
return { accounts, folders, totalCount, folderId, folderPaths };
return { accounts, folders, totalCount, folderId, folderPaths } as z.infer<typeof ListPamAccountsResponseSchema>;
}
});
@@ -106,7 +113,8 @@ export const registerPamAccountRouter = async (server: FastifyZodProvider) => {
schema: {
description: "Access PAM account",
body: z.object({
accountId: z.string().uuid(),
accountPath: z.string().trim(),
projectId: z.string().uuid(),
duration: z
.string()
.min(1)
@@ -124,18 +132,20 @@ export const registerPamAccountRouter = async (server: FastifyZodProvider) => {
})
}),
response: {
200: z.object({
200: z.discriminatedUnion("resourceType", [
// Gateway-based resources (Postgres, MySQL, SSH, Kubernetes)
GatewayAccessResponseSchema.extend({ resourceType: z.literal(PamResource.Postgres) }),
GatewayAccessResponseSchema.extend({ resourceType: z.literal(PamResource.MySQL) }),
GatewayAccessResponseSchema.extend({ resourceType: z.literal(PamResource.SSH) }),
GatewayAccessResponseSchema.extend({ resourceType: z.literal(PamResource.Kubernetes) }),
// AWS IAM (no gateway, returns console URL)
z.object({
sessionId: z.string(),
resourceType: z.nativeEnum(PamResource),
relayClientCertificate: z.string(),
relayClientPrivateKey: z.string(),
relayServerCertificateChain: z.string(),
gatewayClientCertificate: z.string(),
gatewayClientPrivateKey: z.string(),
gatewayServerCertificateChain: z.string(),
relayHost: z.string(),
resourceType: z.literal(PamResource.AwsIam),
consoleUrl: z.string().url(),
metadata: z.record(z.string(), z.string().optional()).optional()
})
])
}
},
onRequest: verifyAuth([AuthMode.JWT]),
@@ -151,7 +161,9 @@ export const registerPamAccountRouter = async (server: FastifyZodProvider) => {
actorIp: req.realIp,
actorName: `${req.auth.user.firstName ?? ""} ${req.auth.user.lastName ?? ""}`.trim(),
actorUserAgent: req.auditLogInfo.userAgent ?? "",
...req.body
accountPath: req.body.accountPath,
projectId: req.body.projectId,
duration: req.body.duration
},
req.permission
);
@@ -159,11 +171,12 @@ export const registerPamAccountRouter = async (server: FastifyZodProvider) => {
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
orgId: req.permission.orgId,
projectId: response.projectId,
projectId: req.body.projectId,
event: {
type: EventType.PAM_ACCOUNT_ACCESS,
metadata: {
accountId: req.body.accountId,
accountId: response.account.id,
accountPath: req.body.accountPath,
accountName: response.account.name,
duration: req.body.duration ? new Date(req.body.duration).toISOString() : undefined
}
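Because the access response is now a discriminated union on resourceType, callers can branch without extra flags. A minimal client-side sketch; relayHost stands in for the gateway connection fields defined by GatewayAccessResponseSchema, which is not reproduced in this hunk:
import { PamResource } from "@app/ee/services/pam-resource/pam-resource-enums";

// Gateway-backed resources return relay/gateway connection details; AWS IAM returns
// a federated console URL instead.
type GatewayAccess = {
  resourceType: PamResource.Postgres | PamResource.MySQL | PamResource.SSH | PamResource.Kubernetes;
  relayHost: string; // assumed field, carried over from the previous inline response schema
};
type AwsIamAccess = { resourceType: PamResource.AwsIam; consoleUrl: string };
type AccessResponse = GatewayAccess | AwsIamAccess;

const openSession = (res: AccessResponse) => {
  switch (res.resourceType) {
    case PamResource.AwsIam:
      // No tunnel to establish; redirect the user to the console URL.
      return { redirectTo: res.consoleUrl };
    default:
      // Establish the relay/gateway tunnel before starting the session.
      return { connectTo: res.relayHost };
  }
};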

View File

@@ -1,3 +1,13 @@
import {
CreateAwsIamResourceSchema,
SanitizedAwsIamResourceSchema,
UpdateAwsIamResourceSchema
} from "@app/ee/services/pam-resource/aws-iam/aws-iam-resource-schemas";
import {
CreateKubernetesResourceSchema,
SanitizedKubernetesResourceSchema,
UpdateKubernetesResourceSchema
} from "@app/ee/services/pam-resource/kubernetes/kubernetes-resource-schemas";
import {
CreateMySQLResourceSchema,
MySQLResourceSchema,
@@ -44,5 +54,23 @@ export const PAM_RESOURCE_REGISTER_ROUTER_MAP: Record<PamResource, (server: Fast
createResourceSchema: CreateSSHResourceSchema,
updateResourceSchema: UpdateSSHResourceSchema
});
},
[PamResource.Kubernetes]: async (server: FastifyZodProvider) => {
registerPamResourceEndpoints({
server,
resourceType: PamResource.Kubernetes,
resourceResponseSchema: SanitizedKubernetesResourceSchema,
createResourceSchema: CreateKubernetesResourceSchema,
updateResourceSchema: UpdateKubernetesResourceSchema
});
},
[PamResource.AwsIam]: async (server: FastifyZodProvider) => {
registerPamResourceEndpoints({
server,
resourceType: PamResource.AwsIam,
resourceResponseSchema: SanitizedAwsIamResourceSchema,
createResourceSchema: CreateAwsIamResourceSchema,
updateResourceSchema: UpdateAwsIamResourceSchema
});
}
};

View File

@@ -19,7 +19,7 @@ export const registerPamResourceEndpoints = <T extends TPamResource>({
createResourceSchema: z.ZodType<{
projectId: T["projectId"];
connectionDetails: T["connectionDetails"];
gatewayId: T["gatewayId"];
gatewayId?: T["gatewayId"];
name: T["name"];
rotationAccountCredentials?: T["rotationAccountCredentials"];
}>;
@@ -103,7 +103,7 @@ export const registerPamResourceEndpoints = <T extends TPamResource>({
type: EventType.PAM_RESOURCE_CREATE,
metadata: {
resourceType,
gatewayId: req.body.gatewayId,
...(req.body.gatewayId && { gatewayId: req.body.gatewayId }),
name: req.body.name
}
}
@@ -150,8 +150,8 @@ export const registerPamResourceEndpoints = <T extends TPamResource>({
metadata: {
resourceId: req.params.resourceId,
resourceType,
gatewayId: req.body.gatewayId,
name: req.body.name
...(req.body.gatewayId && { gatewayId: req.body.gatewayId }),
...(req.body.name && { name: req.body.name })
}
}
});

View File

@@ -1,6 +1,14 @@
import { z } from "zod";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import {
AwsIamResourceListItemSchema,
SanitizedAwsIamResourceSchema
} from "@app/ee/services/pam-resource/aws-iam/aws-iam-resource-schemas";
import {
KubernetesResourceListItemSchema,
SanitizedKubernetesResourceSchema
} from "@app/ee/services/pam-resource/kubernetes/kubernetes-resource-schemas";
import {
MySQLResourceListItemSchema,
SanitizedMySQLResourceSchema
@@ -22,13 +30,17 @@ import { AuthMode } from "@app/services/auth/auth-type";
const SanitizedResourceSchema = z.union([
SanitizedPostgresResourceSchema,
SanitizedMySQLResourceSchema,
SanitizedSSHResourceSchema
SanitizedSSHResourceSchema,
SanitizedKubernetesResourceSchema,
SanitizedAwsIamResourceSchema
]);
const ResourceOptionsSchema = z.discriminatedUnion("resource", [
PostgresResourceListItemSchema,
MySQLResourceListItemSchema,
SSHResourceListItemSchema
SSHResourceListItemSchema,
KubernetesResourceListItemSchema,
AwsIamResourceListItemSchema
]);
export const registerPamResourceRouter = async (server: FastifyZodProvider) => {

View File

@@ -2,10 +2,12 @@ import { z } from "zod";
import { PamSessionsSchema } from "@app/db/schemas";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { KubernetesSessionCredentialsSchema } from "@app/ee/services/pam-resource/kubernetes/kubernetes-resource-schemas";
import { MySQLSessionCredentialsSchema } from "@app/ee/services/pam-resource/mysql/mysql-resource-schemas";
import { PostgresSessionCredentialsSchema } from "@app/ee/services/pam-resource/postgres/postgres-resource-schemas";
import { SSHSessionCredentialsSchema } from "@app/ee/services/pam-resource/ssh/ssh-resource-schemas";
import {
HttpEventSchema,
PamSessionCommandLogSchema,
SanitizedSessionSchema,
TerminalEventSchema
@@ -17,7 +19,8 @@ import { AuthMode } from "@app/services/auth/auth-type";
const SessionCredentialsSchema = z.union([
SSHSessionCredentialsSchema,
PostgresSessionCredentialsSchema,
MySQLSessionCredentialsSchema
MySQLSessionCredentialsSchema,
KubernetesSessionCredentialsSchema
]);
export const registerPamSessionRouter = async (server: FastifyZodProvider) => {
@@ -89,7 +92,7 @@ export const registerPamSessionRouter = async (server: FastifyZodProvider) => {
sessionId: z.string().uuid()
}),
body: z.object({
logs: z.array(z.union([PamSessionCommandLogSchema, TerminalEventSchema]))
logs: z.array(z.union([PamSessionCommandLogSchema, TerminalEventSchema, HttpEventSchema]))
}),
response: {
200: z.object({

View File

@@ -315,6 +315,8 @@ export const registerProjectRoleRouter = async (server: FastifyZodProvider) => {
memberships: z
.object({
id: z.string(),
actorGroupId: z.string().nullish(),
actorUserId: z.string().nullish(),
roles: z
.object({
role: z.string()

View File

@@ -57,7 +57,7 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
body: z.object({
organizationId: z.string().trim(),
description: z.string().trim().default(""),
ttlDays: z.number().min(0).default(0)
ttlDays: z.number().min(0).max(730).default(0)
}),
response: {
200: z.object({

View File

@@ -142,6 +142,7 @@ export const registerUserAdditionalPrivilegeRouter = async (server: FastifyZodPr
data: {
...req.body,
...req.body.type,
name: req.body.slug,
permissions: req.body.permissions
? // eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore-error this is valid ts

View File

@@ -84,7 +84,6 @@ export const registerIdentityProjectAdditionalPrivilegeRouter = async (server: F
privilege: {
...privilege,
identityId: req.body.identityId,
projectMembershipId: req.body.projectId,
projectId: req.body.projectId,
slug: privilege.name
}
@@ -168,7 +167,6 @@ export const registerIdentityProjectAdditionalPrivilegeRouter = async (server: F
privilege: {
...privilege,
identityId: privilegeDoc.actorIdentityId as string,
projectMembershipId: privilegeDoc.projectId as string,
projectId: privilegeDoc.projectId as string,
slug: privilege.name
}
@@ -222,7 +220,6 @@ export const registerIdentityProjectAdditionalPrivilegeRouter = async (server: F
privilege: {
...privilege,
identityId: privilegeDoc.actorIdentityId as string,
projectMembershipId: privilegeDoc.projectId as string,
projectId: privilegeDoc.projectId as string,
slug: privilege.name
}
@@ -276,7 +273,6 @@ export const registerIdentityProjectAdditionalPrivilegeRouter = async (server: F
privilege: {
...privilege,
identityId: privilegeDoc.actorIdentityId as string,
projectMembershipId: privilegeDoc.projectId as string,
projectId: privilegeDoc.projectId as string,
slug: privilege.name
}
@@ -339,7 +335,6 @@ export const registerIdentityProjectAdditionalPrivilegeRouter = async (server: F
privilege: {
...privilege,
identityId: req.query.identityId,
projectMembershipId: privilege.projectId as string,
projectId,
slug: privilege.name
}
@@ -391,7 +386,6 @@ export const registerIdentityProjectAdditionalPrivilegeRouter = async (server: F
privileges: privileges.map((privilege) => ({
...privilege,
identityId: req.query.identityId,
projectMembershipId: privilege.projectId as string,
projectId: req.query.projectId,
slug: privilege.name
}))

View File

@@ -4,6 +4,7 @@ import { registerAuth0ClientSecretRotationRouter } from "./auth0-client-secret-r
import { registerAwsIamUserSecretRotationRouter } from "./aws-iam-user-secret-rotation-router";
import { registerAzureClientSecretRotationRouter } from "./azure-client-secret-rotation-router";
import { registerLdapPasswordRotationRouter } from "./ldap-password-rotation-router";
import { registerMongoDBCredentialsRotationRouter } from "./mongodb-credentials-rotation-router";
import { registerMsSqlCredentialsRotationRouter } from "./mssql-credentials-rotation-router";
import { registerMySqlCredentialsRotationRouter } from "./mysql-credentials-rotation-router";
import { registerOktaClientSecretRotationRouter } from "./okta-client-secret-rotation-router";
@@ -26,5 +27,6 @@ export const SECRET_ROTATION_REGISTER_ROUTER_MAP: Record<
[SecretRotation.AwsIamUserSecret]: registerAwsIamUserSecretRotationRouter,
[SecretRotation.LdapPassword]: registerLdapPasswordRotationRouter,
[SecretRotation.OktaClientSecret]: registerOktaClientSecretRotationRouter,
[SecretRotation.RedisCredentials]: registerRedisCredentialsRotationRouter
[SecretRotation.RedisCredentials]: registerRedisCredentialsRotationRouter,
[SecretRotation.MongoDBCredentials]: registerMongoDBCredentialsRotationRouter
};

View File

@@ -0,0 +1,19 @@
import {
CreateMongoDBCredentialsRotationSchema,
MongoDBCredentialsRotationGeneratedCredentialsSchema,
MongoDBCredentialsRotationSchema,
UpdateMongoDBCredentialsRotationSchema
} from "@app/ee/services/secret-rotation-v2/mongodb-credentials";
import { SecretRotation } from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-enums";
import { registerSecretRotationEndpoints } from "./secret-rotation-v2-endpoints";
export const registerMongoDBCredentialsRotationRouter = async (server: FastifyZodProvider) =>
registerSecretRotationEndpoints({
type: SecretRotation.MongoDBCredentials,
server,
responseSchema: MongoDBCredentialsRotationSchema,
createSchema: CreateMongoDBCredentialsRotationSchema,
updateSchema: UpdateMongoDBCredentialsRotationSchema,
generatedCredentialsSchema: MongoDBCredentialsRotationGeneratedCredentialsSchema
});

View File

@@ -5,6 +5,7 @@ import { Auth0ClientSecretRotationListItemSchema } from "@app/ee/services/secret
import { AwsIamUserSecretRotationListItemSchema } from "@app/ee/services/secret-rotation-v2/aws-iam-user-secret";
import { AzureClientSecretRotationListItemSchema } from "@app/ee/services/secret-rotation-v2/azure-client-secret";
import { LdapPasswordRotationListItemSchema } from "@app/ee/services/secret-rotation-v2/ldap-password";
import { MongoDBCredentialsRotationListItemSchema } from "@app/ee/services/secret-rotation-v2/mongodb-credentials";
import { MsSqlCredentialsRotationListItemSchema } from "@app/ee/services/secret-rotation-v2/mssql-credentials";
import { MySqlCredentialsRotationListItemSchema } from "@app/ee/services/secret-rotation-v2/mysql-credentials";
import { OktaClientSecretRotationListItemSchema } from "@app/ee/services/secret-rotation-v2/okta-client-secret";
@@ -27,7 +28,8 @@ const SecretRotationV2OptionsSchema = z.discriminatedUnion("type", [
AwsIamUserSecretRotationListItemSchema,
LdapPasswordRotationListItemSchema,
OktaClientSecretRotationListItemSchema,
RedisCredentialsRotationListItemSchema
RedisCredentialsRotationListItemSchema,
MongoDBCredentialsRotationListItemSchema
]);
export const registerSecretRotationV2Router = async (server: FastifyZodProvider) => {

View File

@@ -49,6 +49,7 @@ import { TWebhookPayloads } from "@app/services/webhook/webhook-types";
import { WorkflowIntegration } from "@app/services/workflow-integration/workflow-integration-types";
import { KmipPermission } from "../kmip/kmip-enum";
import { AcmeChallengeType, AcmeIdentifierType } from "../pki-acme/pki-acme-schemas";
import { ApprovalStatus } from "../secret-approval-request/secret-approval-request-types";
export type TListProjectAuditLogDTO = {
@@ -78,7 +79,9 @@ export type TCreateAuditLogDTO = {
| ScimClientActor
| PlatformActor
| UnknownUserActor
| KmipClientActor;
| KmipClientActor
| AcmeProfileActor
| AcmeAccountActor;
orgId?: string;
projectId?: string;
} & BaseAuthData;
@@ -368,6 +371,7 @@ export enum EventType {
ORG_ADMIN_BYPASS_SSO = "org-admin-bypassed-sso",
USER_LOGIN = "user-login",
SELECT_ORGANIZATION = "select-organization",
SELECT_SUB_ORGANIZATION = "select-sub-organization",
CREATE_CERTIFICATE_TEMPLATE = "create-certificate-template",
UPDATE_CERTIFICATE_TEMPLATE = "update-certificate-template",
DELETE_CERTIFICATE_TEMPLATE = "delete-certificate-template",
@@ -388,6 +392,9 @@ export enum EventType {
GET_CERTIFICATE_PROFILE_LATEST_ACTIVE_BUNDLE = "get-certificate-profile-latest-active-bundle",
UPDATE_CERTIFICATE_RENEWAL_CONFIG = "update-certificate-renewal-config",
DISABLE_CERTIFICATE_RENEWAL_CONFIG = "disable-certificate-renewal-config",
CREATE_CERTIFICATE_REQUEST = "create-certificate-request",
GET_CERTIFICATE_REQUEST = "get-certificate-request",
GET_CERTIFICATE_FROM_REQUEST = "get-certificate-from-request",
ATTEMPT_CREATE_SLACK_INTEGRATION = "attempt-create-slack-integration",
ATTEMPT_REINSTALL_SLACK_INTEGRATION = "attempt-reinstall-slack-integration",
GET_PROJECT_SLACK_CONFIG = "get-project-slack-config",
@@ -556,7 +563,32 @@ export enum EventType {
PAM_RESOURCE_GET = "pam-resource-get",
PAM_RESOURCE_CREATE = "pam-resource-create",
PAM_RESOURCE_UPDATE = "pam-resource-update",
PAM_RESOURCE_DELETE = "pam-resource-delete"
PAM_RESOURCE_DELETE = "pam-resource-delete",
APPROVAL_POLICY_CREATE = "approval-policy-create",
APPROVAL_POLICY_UPDATE = "approval-policy-update",
APPROVAL_POLICY_DELETE = "approval-policy-delete",
APPROVAL_POLICY_LIST = "approval-policy-list",
APPROVAL_POLICY_GET = "approval-policy-get",
APPROVAL_REQUEST_GET = "approval-request-get",
APPROVAL_REQUEST_LIST = "approval-request-list",
APPROVAL_REQUEST_CREATE = "approval-request-create",
APPROVAL_REQUEST_APPROVE = "approval-request-approve",
APPROVAL_REQUEST_REJECT = "approval-request-reject",
APPROVAL_REQUEST_CANCEL = "approval-request-cancel",
APPROVAL_REQUEST_GRANT_LIST = "approval-request-grant-list",
APPROVAL_REQUEST_GRANT_GET = "approval-request-grant-get",
APPROVAL_REQUEST_GRANT_REVOKE = "approval-request-grant-revoke",
// PKI ACME
CREATE_ACME_ACCOUNT = "create-acme-account",
RETRIEVE_ACME_ACCOUNT = "retrieve-acme-account",
CREATE_ACME_ORDER = "create-acme-order",
FINALIZE_ACME_ORDER = "finalize-acme-order",
DOWNLOAD_ACME_CERTIFICATE = "download-acme-certificate",
RESPOND_TO_ACME_CHALLENGE = "respond-to-acme-challenge",
PASS_ACME_CHALLENGE = "pass-acme-challenge",
ATTEMPT_ACME_CHALLENGE = "attempt-acme-challenge",
FAIL_ACME_CHALLENGE = "fail-acme-challenge"
}
export const filterableSecretEvents: EventType[] = [
@@ -597,6 +629,15 @@ interface KmipClientActorMetadata {
name: string;
}
interface AcmeProfileActorMetadata {
profileId: string;
}
interface AcmeAccountActorMetadata {
profileId: string;
accountId: string;
}
interface UnknownUserActorMetadata {}
export interface UserActor {
@@ -634,7 +675,25 @@ export interface ScimClientActor {
metadata: ScimClientActorMetadata;
}
export type Actor = UserActor | ServiceActor | IdentityActor | ScimClientActor | PlatformActor | KmipClientActor;
export interface AcmeProfileActor {
type: ActorType.ACME_PROFILE;
metadata: AcmeProfileActorMetadata;
}
export interface AcmeAccountActor {
type: ActorType.ACME_ACCOUNT;
metadata: AcmeAccountActorMetadata;
}
export type Actor =
| UserActor
| ServiceActor
| IdentityActor
| ScimClientActor
| PlatformActor
| KmipClientActor
| AcmeProfileActor
| AcmeAccountActor;
interface GetSecretsEvent {
type: EventType.GET_SECRETS;
@@ -2687,6 +2746,15 @@ interface SelectOrganizationEvent {
};
}
interface SelectSubOrganizationEvent {
type: EventType.SELECT_SUB_ORGANIZATION;
metadata: {
organizationId: string;
organizationName: string;
rootOrganizationId: string;
};
}
interface CreateCertificateTemplateEstConfig {
type: EventType.CREATE_CERTIFICATE_TEMPLATE_EST_CONFIG;
metadata: {
@@ -2846,7 +2914,6 @@ interface OrderCertificateFromProfile {
type: EventType.ORDER_CERTIFICATE_FROM_PROFILE;
metadata: {
certificateProfileId: string;
orderId: string;
profileName: string;
};
}
@@ -4074,6 +4141,7 @@ interface PamAccountAccessEvent {
type: EventType.PAM_ACCOUNT_ACCESS;
metadata: {
accountId: string;
accountPath: string;
accountName: string;
duration?: string;
};
@@ -4156,7 +4224,7 @@ interface PamResourceCreateEvent {
type: EventType.PAM_RESOURCE_CREATE;
metadata: {
resourceType: string;
gatewayId: string;
gatewayId?: string;
name: string;
};
}
@@ -4196,6 +4264,229 @@ interface DisableCertificateRenewalConfigEvent {
};
}
interface CreateCertificateRequestEvent {
type: EventType.CREATE_CERTIFICATE_REQUEST;
metadata: {
certificateRequestId: string;
profileId?: string;
caId?: string;
commonName?: string;
};
}
interface GetCertificateRequestEvent {
type: EventType.GET_CERTIFICATE_REQUEST;
metadata: {
certificateRequestId: string;
};
}
interface GetCertificateFromRequestEvent {
type: EventType.GET_CERTIFICATE_FROM_REQUEST;
metadata: {
certificateRequestId: string;
certificateId?: string;
};
}
interface ApprovalPolicyCreateEvent {
type: EventType.APPROVAL_POLICY_CREATE;
metadata: {
policyType: string;
name: string;
};
}
interface ApprovalPolicyUpdateEvent {
type: EventType.APPROVAL_POLICY_UPDATE;
metadata: {
policyType: string;
policyId: string;
name: string;
};
}
interface ApprovalPolicyDeleteEvent {
type: EventType.APPROVAL_POLICY_DELETE;
metadata: {
policyType: string;
policyId: string;
};
}
interface ApprovalPolicyListEvent {
type: EventType.APPROVAL_POLICY_LIST;
metadata: {
policyType: string;
count: number;
};
}
interface ApprovalPolicyGetEvent {
type: EventType.APPROVAL_POLICY_GET;
metadata: {
policyType: string;
policyId: string;
name: string;
};
}
interface ApprovalRequestGetEvent {
type: EventType.APPROVAL_REQUEST_GET;
metadata: {
policyType: string;
requestId: string;
status: string;
};
}
interface ApprovalRequestListEvent {
type: EventType.APPROVAL_REQUEST_LIST;
metadata: {
policyType: string;
count: number;
};
}
interface ApprovalRequestCreateEvent {
type: EventType.APPROVAL_REQUEST_CREATE;
metadata: {
policyType: string;
justification?: string;
requestDuration: string;
};
}
interface ApprovalRequestApproveEvent {
type: EventType.APPROVAL_REQUEST_APPROVE;
metadata: {
policyType: string;
requestId: string;
comment?: string;
};
}
interface ApprovalRequestRejectEvent {
type: EventType.APPROVAL_REQUEST_REJECT;
metadata: {
policyType: string;
requestId: string;
comment?: string;
};
}
interface ApprovalRequestCancelEvent {
type: EventType.APPROVAL_REQUEST_CANCEL;
metadata: {
policyType: string;
requestId: string;
};
}
interface ApprovalRequestGrantListEvent {
type: EventType.APPROVAL_REQUEST_GRANT_LIST;
metadata: {
policyType: string;
count: number;
};
}
interface ApprovalRequestGrantGetEvent {
type: EventType.APPROVAL_REQUEST_GRANT_GET;
metadata: {
policyType: string;
grantId: string;
status: string;
};
}
interface ApprovalRequestGrantRevokeEvent {
type: EventType.APPROVAL_REQUEST_GRANT_REVOKE;
metadata: {
policyType: string;
grantId: string;
revocationReason?: string;
};
}
interface CreateAcmeAccountEvent {
type: EventType.CREATE_ACME_ACCOUNT;
metadata: {
accountId: string;
publicKeyThumbprint: string;
emails?: string[];
};
}
interface RetrieveAcmeAccountEvent {
type: EventType.RETRIEVE_ACME_ACCOUNT;
metadata: {
accountId: string;
publicKeyThumbprint: string;
};
}
interface CreateAcmeOrderEvent {
type: EventType.CREATE_ACME_ORDER;
metadata: {
orderId: string;
identifiers: Array<{
type: AcmeIdentifierType;
value: string;
}>;
};
}
interface FinalizeAcmeOrderEvent {
type: EventType.FINALIZE_ACME_ORDER;
metadata: {
orderId: string;
csr: string;
};
}
interface DownloadAcmeCertificateEvent {
type: EventType.DOWNLOAD_ACME_CERTIFICATE;
metadata: {
orderId: string;
};
}
interface RespondToAcmeChallengeEvent {
type: EventType.RESPOND_TO_ACME_CHALLENGE;
metadata: {
challengeId: string;
type: AcmeChallengeType;
};
}
interface PassedAcmeChallengeEvent {
type: EventType.PASS_ACME_CHALLENGE;
metadata: {
challengeId: string;
type: AcmeChallengeType;
};
}
interface AttemptAcmeChallengeEvent {
type: EventType.ATTEMPT_ACME_CHALLENGE;
metadata: {
challengeId: string;
type: AcmeChallengeType;
retryCount: number;
errorMessage: string;
};
}
interface FailAcmeChallengeEvent {
type: EventType.FAIL_ACME_CHALLENGE;
metadata: {
challengeId: string;
type: AcmeChallengeType;
retryCount: number;
errorMessage: string;
};
}
export type Event =
| CreateSubOrganizationEvent
| UpdateSubOrganizationEvent
@@ -4575,7 +4866,34 @@ export type Event =
| PamResourceDeleteEvent
| UpdateCertificateRenewalConfigEvent
| DisableCertificateRenewalConfigEvent
| CreateCertificateRequestEvent
| GetCertificateRequestEvent
| GetCertificateFromRequestEvent
| AutomatedRenewCertificate
| AutomatedRenewCertificateFailed
| UserLoginEvent
| SelectOrganizationEvent;
| SelectOrganizationEvent
| SelectSubOrganizationEvent
| ApprovalPolicyCreateEvent
| ApprovalPolicyUpdateEvent
| ApprovalPolicyDeleteEvent
| ApprovalPolicyListEvent
| ApprovalPolicyGetEvent
| ApprovalRequestGetEvent
| ApprovalRequestListEvent
| ApprovalRequestCreateEvent
| ApprovalRequestApproveEvent
| ApprovalRequestRejectEvent
| ApprovalRequestCancelEvent
| ApprovalRequestGrantListEvent
| ApprovalRequestGrantGetEvent
| ApprovalRequestGrantRevokeEvent
| CreateAcmeAccountEvent
| RetrieveAcmeAccountEvent
| CreateAcmeOrderEvent
| FinalizeAcmeOrderEvent
| DownloadAcmeCertificateEvent
| RespondToAcmeChallengeEvent
| PassedAcmeChallengeEvent
| AttemptAcmeChallengeEvent
| FailAcmeChallengeEvent;
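A small sketch of constructing one of the new approval events; the metadata keys mirror ApprovalRequestApproveEvent above, and the policyType value is illustrative:
import { EventType, type Event } from "@app/ee/services/audit-log/audit-log-types";

// Build the payload passed as `event` to auditLog.createAuditLog (see the router
// handlers earlier in this change for the surrounding call).
const buildApprovalApprovedEvent = (requestId: string, comment?: string): Event => ({
  type: EventType.APPROVAL_REQUEST_APPROVE,
  metadata: {
    policyType: "access", // illustrative policy type string
    requestId,
    comment
  }
});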

View File

@@ -4,7 +4,10 @@ import * as x509 from "@peculiar/x509";
import { ActionProjectType } from "@app/db/schemas";
import { TCertificateAuthorityCrlDALFactory } from "@app/ee/services/certificate-authority-crl/certificate-authority-crl-dal";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service-types";
import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
import {
ProjectPermissionCertificateAuthorityActions,
ProjectPermissionSub
} from "@app/ee/services/permission/project-permission";
import { NotFoundError } from "@app/lib/errors";
import { TCertificateAuthorityDALFactory } from "@app/services/certificate-authority/certificate-authority-dal";
import { expandInternalCa } from "@app/services/certificate-authority/certificate-authority-fns";
@@ -83,7 +86,7 @@ export const certificateAuthorityCrlServiceFactory = ({
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Read,
ProjectPermissionCertificateAuthorityActions.Read,
ProjectPermissionSub.CertificateAuthorities
);

View File

@@ -24,7 +24,13 @@ import {
} from "./external-kms-types";
import { AwsKmsProviderFactory } from "./providers/aws-kms";
import { GcpKmsProviderFactory } from "./providers/gcp-kms";
import { ExternalKmsAwsSchema, ExternalKmsGcpSchema, KmsProviders, TExternalKmsGcpSchema } from "./providers/model";
import {
ExternalKmsAwsSchema,
ExternalKmsGcpSchema,
KmsProviders,
TExternalKmsAwsSchema,
TExternalKmsGcpSchema
} from "./providers/model";
type TExternalKmsServiceFactoryDep = {
externalKmsDAL: TExternalKmsDALFactory;
@@ -72,6 +78,7 @@ export const externalKmsServiceFactory = ({
const kmsName = name ? slugify(name) : slugify(alphaNumericNanoId(8).toLowerCase());
let sanitizedProviderInput = "";
let sanitizedProviderInputObject: TExternalKmsAwsSchema | TExternalKmsGcpSchema;
switch (provider.type) {
case KmsProviders.Aws:
{
@@ -88,9 +95,18 @@ export const externalKmsServiceFactory = ({
try {
// if missing kms key this generate a new kms key id and returns new provider input
const newProviderInput = await externalKms.generateInputKmsKey();
sanitizedProviderInputObject = newProviderInput;
sanitizedProviderInput = JSON.stringify(newProviderInput);
await externalKms.validateConnection();
} catch (error) {
if (error instanceof BadRequestError) {
throw error;
}
throw new BadRequestError({
message: error instanceof Error ? `AWS error: ${error.message}` : "Failed to validate AWS connection"
});
} finally {
await externalKms.cleanup();
}
@@ -101,7 +117,16 @@ export const externalKmsServiceFactory = ({
const externalKms = await GcpKmsProviderFactory({ inputs: provider.inputs });
try {
await externalKms.validateConnection();
sanitizedProviderInputObject = provider.inputs;
sanitizedProviderInput = JSON.stringify(provider.inputs);
} catch (error) {
if (error instanceof BadRequestError) {
throw error;
}
throw new BadRequestError({
message: error instanceof Error ? `GCP error: ${error.message}` : "Failed to validate GCP connection"
});
} finally {
await externalKms.cleanup();
}
@@ -139,7 +164,10 @@ export const externalKmsServiceFactory = ({
},
tx
);
return { ...kms, external: externalKmsCfg };
return {
...kms,
external: { ...externalKmsCfg, providerInput: sanitizedProviderInputObject }
};
});
return externalKms;
@@ -179,6 +207,7 @@ export const externalKmsServiceFactory = ({
if (!externalKmsDoc) throw new NotFoundError({ message: `External KMS with ID '${kmsId}' not found` });
let sanitizedProviderInput = "";
let sanitizedProviderInputObject: TExternalKmsAwsSchema | TExternalKmsGcpSchema;
const { encryptor: orgDataKeyEncryptor, decryptor: orgDataKeyDecryptor } =
await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.Organization,
@@ -199,7 +228,16 @@ export const externalKmsServiceFactory = ({
const externalKms = await AwsKmsProviderFactory({ inputs: updatedProviderInput });
try {
await externalKms.validateConnection();
sanitizedProviderInputObject = updatedProviderInput;
sanitizedProviderInput = JSON.stringify(updatedProviderInput);
} catch (error) {
if (error instanceof BadRequestError) {
throw error;
}
throw new BadRequestError({
message: error instanceof Error ? `AWS error: ${error.message}` : "Failed to validate AWS connection"
});
} finally {
await externalKms.cleanup();
}
@@ -214,7 +252,16 @@ export const externalKmsServiceFactory = ({
const externalKms = await GcpKmsProviderFactory({ inputs: updatedProviderInput });
try {
await externalKms.validateConnection();
sanitizedProviderInputObject = updatedProviderInput;
sanitizedProviderInput = JSON.stringify(updatedProviderInput);
} catch (error) {
if (error instanceof BadRequestError) {
throw error;
}
throw new BadRequestError({
message: error instanceof Error ? `GCP error: ${error.message}` : "Failed to validate GCP connection"
});
} finally {
await externalKms.cleanup();
}
@@ -234,7 +281,9 @@ export const externalKmsServiceFactory = ({
}
const externalKms = await externalKmsDAL.transaction(async (tx) => {
const kms = await kmsDAL.updateById(
let kms = kmsDoc;
if (kmsName || description) {
kms = await kmsDAL.updateById(
kmsDoc.id,
{
description,
@@ -242,6 +291,7 @@ export const externalKmsServiceFactory = ({
},
tx
);
}
if (encryptedProviderInputs) {
const externalKmsCfg = await externalKmsDAL.updateById(
externalKmsDoc.id,
@@ -250,9 +300,9 @@ export const externalKmsServiceFactory = ({
},
tx
);
return { ...kms, external: externalKmsCfg };
return { ...kms, external: { ...externalKmsCfg, providerInput: sanitizedProviderInputObject } };
}
return { ...kms, external: externalKmsDoc };
return { ...kms, external: { ...externalKmsDoc, providerInput: sanitizedProviderInputObject } };
});
return externalKms;
@@ -273,9 +323,40 @@ export const externalKmsServiceFactory = ({
const externalKmsDoc = await externalKmsDAL.findOne({ kmsKeyId: kmsDoc.id });
if (!externalKmsDoc) throw new NotFoundError({ message: `External KMS with ID '${kmsId}' not found` });
let decryptedProviderInputObject: TExternalKmsAwsSchema | TExternalKmsGcpSchema;
const { decryptor: orgDataKeyDecryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.Organization,
orgId: actorOrgId
});
const decryptedProviderInputBlob = orgDataKeyDecryptor({
cipherTextBlob: externalKmsDoc.encryptedProviderInputs
});
switch (externalKmsDoc.provider) {
case KmsProviders.Aws: {
const decryptedProviderInput = await ExternalKmsAwsSchema.parseAsync(
JSON.parse(decryptedProviderInputBlob.toString())
);
decryptedProviderInputObject = decryptedProviderInput;
break;
}
case KmsProviders.Gcp: {
const decryptedProviderInput = await ExternalKmsGcpSchema.parseAsync(
JSON.parse(decryptedProviderInputBlob.toString())
);
decryptedProviderInputObject = decryptedProviderInput;
break;
}
default:
break;
}
const externalKms = await externalKmsDAL.transaction(async (tx) => {
const kms = await kmsDAL.deleteById(kmsDoc.id, tx);
return { ...kms, external: externalKmsDoc };
return { ...kms, external: { ...externalKmsDoc, providerInput: decryptedProviderInputObject } };
});
return externalKms;
@@ -299,6 +380,7 @@ export const externalKmsServiceFactory = ({
const findById = async ({ actor, actorId, actorOrgId, actorAuthMethod, id: kmsId }: TGetExternalKmsByIdDTO) => {
const kmsDoc = await kmsDAL.findById(kmsId);
if (!kmsDoc) throw new NotFoundError({ message: `Could not find KMS with ID '${kmsId}'` });
const { permission } = await permissionService.getOrgPermission({
scope: OrganizationActionScope.Any,
actor,
@@ -393,6 +475,14 @@ export const externalKmsServiceFactory = ({
const externalKms = await GcpKmsProviderFactory({ inputs: { credential, gcpRegion, keyName: "" } });
try {
return await externalKms.getKeysList();
} catch (error) {
if (error instanceof BadRequestError) {
throw error;
}
throw new BadRequestError({
message: error instanceof Error ? `GCP error: ${error.message}` : "Failed to fetch GCP keys"
});
} finally {
await externalKms.cleanup();
}

View File

@@ -3,6 +3,7 @@ import { AssumeRoleCommand, STSClient } from "@aws-sdk/client-sts";
import { CustomAWSHasher } from "@app/lib/aws/hashing";
import { crypto } from "@app/lib/crypto/cryptography";
import { BadRequestError } from "@app/lib/errors";
import { ExternalKmsAwsSchema, KmsAwsCredentialType, TExternalKmsAwsSchema, TExternalKmsProviderFns } from "./model";
@@ -22,7 +23,7 @@ const getAwsKmsClient = async (providerInputs: TExternalKmsAwsSchema) => {
});
const response = await stsClient.send(command);
if (!response.Credentials?.AccessKeyId || !response.Credentials?.SecretAccessKey)
throw new Error("Failed to assume role");
throw new BadRequestError({ message: "Failed to assume role" });
const kmsClient = new KMSClient({
region: providerInputs.awsRegion,
@@ -67,7 +68,7 @@ export const AwsKmsProviderFactory = async ({ inputs }: AwsKmsProviderArgs): Pro
const command = new CreateKeyCommand({ Tags: [{ TagKey: "author", TagValue: "infisical" }] });
const kmsKey = await awsClient.send(command);
if (!kmsKey.KeyMetadata?.KeyId) throw new Error("Failed to generate kms key");
if (!kmsKey.KeyMetadata?.KeyId) throw new BadRequestError({ message: "Failed to generate kms key" });
const updatedProviderInputs = await ExternalKmsAwsSchema.parseAsync({
...providerInputs,

View File

@@ -19,27 +19,31 @@ export enum KmsGcpKeyFetchAuthType {
Kms = "kmsId"
}
export const ExternalKmsAwsSchema = z.object({
credential: z
.discriminatedUnion("type", [
z.object({
type: z.literal(KmsAwsCredentialType.AccessKey),
data: z.object({
accessKey: z.string().trim().min(1).describe("AWS user account access key"),
secretKey: z.string().trim().min(1).describe("AWS user account secret key")
})
}),
z.object({
type: z.literal(KmsAwsCredentialType.AssumeRole),
data: z.object({
const AwsConnectionAssumeRoleCredentialsSchema = z.object({
assumeRoleArn: z.string().trim().min(1).describe("AWS user role to be assumed by infisical"),
externalId: z
.string()
.trim()
.min(1)
.optional()
.describe("AWS assume role external id for furthur security in authentication")
})
.describe("AWS assume role external id for further security in authentication")
});
const AwsConnectionAccessTokenCredentialsSchema = z.object({
accessKey: z.string().trim().min(1).describe("AWS user account access key"),
secretKey: z.string().trim().min(1).describe("AWS user account secret key")
});
export const ExternalKmsAwsSchema = z.object({
credential: z
.discriminatedUnion("type", [
z.object({
type: z.literal(KmsAwsCredentialType.AccessKey),
data: AwsConnectionAccessTokenCredentialsSchema
}),
z.object({
type: z.literal(KmsAwsCredentialType.AssumeRole),
data: AwsConnectionAssumeRoleCredentialsSchema
})
])
.describe("AWS credential information to connect"),
@@ -52,6 +56,22 @@ export const ExternalKmsAwsSchema = z.object({
});
export type TExternalKmsAwsSchema = z.infer<typeof ExternalKmsAwsSchema>;
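// Sanitized variant returned to clients: it keeps the identifying fields (accessKey, assumeRoleArn, externalId)
// and omits the AWS secretKey, so provider inputs can be echoed back without exposing secrets.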
export const SanitizedExternalKmsAwsSchema = ExternalKmsAwsSchema.extend({
credential: z.discriminatedUnion("type", [
z.object({
type: z.literal(KmsAwsCredentialType.AccessKey),
data: AwsConnectionAccessTokenCredentialsSchema.pick({ accessKey: true })
}),
z.object({
type: z.literal(KmsAwsCredentialType.AssumeRole),
data: AwsConnectionAssumeRoleCredentialsSchema.pick({
assumeRoleArn: true,
externalId: true
})
})
])
});
export const ExternalKmsGcpCredentialSchema = z.object({
type: z.literal(KmsGcpCredentialType.ServiceAccount),
project_id: z.string().min(1),
@@ -75,6 +95,8 @@ export const ExternalKmsGcpSchema = z.object({
});
export type TExternalKmsGcpSchema = z.infer<typeof ExternalKmsGcpSchema>;
export const SanitizedExternalKmsGcpSchema = ExternalKmsGcpSchema.pick({ gcpRegion: true, keyName: true });
const ExternalKmsGcpClientSchema = ExternalKmsGcpSchema.pick({ gcpRegion: true }).extend({
credential: ExternalKmsGcpCredentialSchema
});

View File

@@ -350,6 +350,7 @@ export const licenseServiceFactory = ({
actor,
actorId,
actorOrgId,
rootOrgId,
actorAuthMethod,
projectId,
refreshCache
@@ -360,12 +361,12 @@ export const licenseServiceFactory = ({
orgId,
actorOrgId,
actorAuthMethod,
scope: OrganizationActionScope.ParentOrganization
scope: OrganizationActionScope.Any
});
if (refreshCache) {
await refreshPlan(orgId);
await refreshPlan(rootOrgId);
}
const plan = await getPlan(orgId, projectId);
const plan = await getPlan(rootOrgId, projectId);
return plan;
};

View File

@@ -102,6 +102,7 @@ export type TOrgPlansTableDTO = {
export type TOrgPlanDTO = {
projectId?: string;
refreshCache?: boolean;
rootOrgId: string;
} & TOrgPermission;
export type TStartOrgTrialDTO = {

View File

@@ -72,17 +72,24 @@ export const decryptAccount = async <
account: T,
projectId: string,
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">
): Promise<T & { credentials: TPamAccountCredentials; lastRotationMessage: string | null }> => {
): Promise<
Omit<T, "encryptedCredentials" | "encryptedLastRotationMessage"> & {
credentials: TPamAccountCredentials;
lastRotationMessage: string | null;
}
> => {
const { encryptedCredentials, encryptedLastRotationMessage, ...rest } = account;
return {
...account,
...rest,
credentials: await decryptAccountCredentials({
encryptedCredentials: account.encryptedCredentials,
encryptedCredentials,
projectId,
kmsService
}),
lastRotationMessage: account.encryptedLastRotationMessage
lastRotationMessage: encryptedLastRotationMessage
? await decryptAccountMessage({
encryptedMessage: account.encryptedLastRotationMessage,
encryptedMessage: encryptedLastRotationMessage,
projectId,
kmsService
})

View File

@@ -1,6 +1,13 @@
import path from "node:path";
import { ForbiddenError, subject } from "@casl/ability";
import { ActionProjectType, OrganizationActionScope, TPamAccounts, TPamFolders, TPamResources } from "@app/db/schemas";
import {
extractAwsAccountIdFromArn,
generateConsoleFederationUrl,
TAwsIamAccountCredentials
} from "@app/ee/services/pam-resource/aws-iam";
import { PAM_RESOURCE_FACTORY_MAP } from "@app/ee/services/pam-resource/pam-resource-factory";
import { decryptResource, decryptResourceConnectionDetails } from "@app/ee/services/pam-resource/pam-resource-fns";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service-types";
@@ -10,12 +17,23 @@ import {
ProjectPermissionSub
} from "@app/ee/services/permission/project-permission";
import { DatabaseErrorCode } from "@app/lib/error-codes";
import { BadRequestError, DatabaseError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
import {
BadRequestError,
DatabaseError,
ForbiddenRequestError,
NotFoundError,
PolicyViolationError
} from "@app/lib/errors";
import { logger } from "@app/lib/logger";
import { OrgServiceActor } from "@app/lib/types";
import { TApprovalPolicyDALFactory } from "@app/services/approval-policy/approval-policy-dal";
import { ApprovalPolicyType } from "@app/services/approval-policy/approval-policy-enums";
import { APPROVAL_POLICY_FACTORY_MAP } from "@app/services/approval-policy/approval-policy-factory";
import { TApprovalRequestGrantsDALFactory } from "@app/services/approval-policy/approval-request-dal";
import { ActorType } from "@app/services/auth/auth-type";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { TPamSessionExpirationServiceFactory } from "@app/services/pam-session-expiration/pam-session-expiration-queue";
import { TProjectDALFactory } from "@app/services/project/project-dal";
import { TUserDALFactory } from "@app/services/user/user-dal";
@@ -27,7 +45,8 @@ import { getFullPamFolderPath } from "../pam-folder/pam-folder-fns";
import { TPamResourceDALFactory } from "../pam-resource/pam-resource-dal";
import { PamResource } from "../pam-resource/pam-resource-enums";
import { TPamAccountCredentials } from "../pam-resource/pam-resource-types";
import { TSqlResourceConnectionDetails } from "../pam-resource/shared/sql/sql-resource-types";
import { TSqlAccountCredentials, TSqlResourceConnectionDetails } from "../pam-resource/shared/sql/sql-resource-types";
import { TSSHAccountCredentials } from "../pam-resource/ssh/ssh-resource-types";
import { TPamSessionDALFactory } from "../pam-session/pam-session-dal";
import { PamSessionStatus } from "../pam-session/pam-session-enums";
import { OrgPermissionGatewayActions, OrgPermissionSubjects } from "../permission/org-permission";
@@ -51,6 +70,9 @@ type TPamAccountServiceFactoryDep = {
>;
userDAL: TUserDALFactory;
auditLogService: Pick<TAuditLogServiceFactory, "createAuditLog">;
approvalPolicyDAL: TApprovalPolicyDALFactory;
approvalRequestGrantsDAL: TApprovalRequestGrantsDALFactory;
pamSessionExpirationService: Pick<TPamSessionExpirationServiceFactory, "scheduleSessionExpiration">;
};
export type TPamAccountServiceFactory = ReturnType<typeof pamAccountServiceFactory>;
@@ -67,7 +89,10 @@ export const pamAccountServiceFactory = ({
licenseService,
kmsService,
gatewayV2Service,
auditLogService
auditLogService,
approvalPolicyDAL,
approvalRequestGrantsDAL,
pamSessionExpirationService
}: TPamAccountServiceFactoryDep) => {
const create = async (
{
@@ -135,7 +160,8 @@ export const pamAccountServiceFactory = ({
resource.resourceType as PamResource,
connectionDetails,
resource.gatewayId,
gatewayV2Service
gatewayV2Service,
resource.projectId
);
const validatedCredentials = await factory.validateAccountCredentials(credentials);
@@ -250,7 +276,8 @@ export const pamAccountServiceFactory = ({
resource.resourceType as PamResource,
connectionDetails,
resource.gatewayId,
gatewayV2Service
gatewayV2Service,
account.projectId
);
const decryptedCredentials = await decryptAccountCredentials({
@@ -279,6 +306,7 @@ export const pamAccountServiceFactory = ({
return decryptAccount(account, account.projectId, kmsService);
}
try {
const updatedAccount = await pamAccountDAL.updateById(accountId, updateDoc);
return {
@@ -290,6 +318,15 @@ export const pamAccountServiceFactory = ({
rotationCredentialsConfigured: !!resource.encryptedRotationAccountCredentials
}
};
} catch (err) {
if (err instanceof DatabaseError && (err.error as { code: string })?.code === DatabaseErrorCode.UniqueViolation) {
throw new BadRequestError({
message: `Account with name '${name}' already exists for this path`
});
}
throw err;
}
};
const deleteById = async (id: string, actor: OrgServiceActor) => {
@@ -428,7 +465,7 @@ export const pamAccountServiceFactory = ({
const totalCount = totalFolderCount + totalAccountCount;
const decryptedAndPermittedAccounts: Array<
TPamAccounts & {
Omit<TPamAccounts, "encryptedCredentials" | "encryptedLastRotationMessage"> & {
resource: Pick<TPamResources, "id" | "name" | "resourceType"> & { rotationCredentialsConfigured: boolean };
credentials: TPamAccountCredentials;
lastRotationMessage: string | null;
@@ -487,7 +524,7 @@ export const pamAccountServiceFactory = ({
};
const access = async (
{ accountId, actorEmail, actorIp, actorName, actorUserAgent, duration }: TAccessAccountDTO,
{ accountPath, projectId, actorEmail, actorIp, actorName, actorUserAgent, duration }: TAccessAccountDTO,
actor: OrgServiceActor
) => {
const orgLicensePlan = await licenseService.getPlan(actor.orgId);
@@ -497,12 +534,65 @@ export const pamAccountServiceFactory = ({
});
}
const account = await pamAccountDAL.findById(accountId);
if (!account) throw new NotFoundError({ message: `Account with ID '${accountId}' not found` });
const pathSegments: string[] = accountPath.split("/").filter(Boolean);
if (pathSegments.length === 0) {
throw new BadRequestError({ message: "Invalid accountPath. Path must contain at least the account name." });
}
const accountName: string = pathSegments[pathSegments.length - 1] ?? "";
const folderPathSegments: string[] = pathSegments.slice(0, -1);
const folderPath: string = folderPathSegments.length > 0 ? `/${folderPathSegments.join("/")}` : "/";
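// e.g. an accountPath of "team-a/prod/db-admin" yields accountName "db-admin" and folderPath "/team-a/prod",
// while a bare account name resolves against the root folder ("/")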
let folderId: string | null = null;
if (folderPath !== "/") {
const folder = await pamFolderDAL.findByPath(projectId, folderPath);
if (!folder) {
throw new NotFoundError({ message: `Folder at path '${folderPath}' not found` });
}
folderId = folder.id;
}
const account = await pamAccountDAL.findOne({
projectId,
folderId,
name: accountName
});
if (!account) {
throw new NotFoundError({
message: `Account with name '${accountName}' not found at path '${accountPath}'`
});
}
const resource = await pamResourceDAL.findById(account.resourceId);
if (!resource) throw new NotFoundError({ message: `Resource with ID '${account.resourceId}' not found` });
const fac = APPROVAL_POLICY_FACTORY_MAP[ApprovalPolicyType.PamAccess](ApprovalPolicyType.PamAccess);
const inputs = {
resourceId: resource.id,
accountPath: path.join(folderPath, account.name)
};
const canAccess = await fac.canAccess(approvalRequestGrantsDAL, resource.projectId, actor.id, inputs);
// Grant does not exist, check policy and fall back to permission check
if (!canAccess) {
const policy = await fac.matchPolicy(approvalPolicyDAL, resource.projectId, inputs);
if (policy) {
throw new PolicyViolationError({
message: "A policy is in place for this resource",
details: {
policyId: policy.id,
policyName: policy.name,
policyType: policy.type
}
});
}
// If there isn't a policy in place, continue with checking permission
const { permission } = await permissionService.getProjectPermission({
actor: actor.type,
actorAuthMethod: actor.authMethod,
@@ -512,35 +602,15 @@ export const pamAccountServiceFactory = ({
actionProjectType: ActionProjectType.PAM
});
const accountPath = await getFullPamFolderPath({
pamFolderDAL,
folderId: account.folderId,
projectId: account.projectId
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionPamAccountActions.Access,
subject(ProjectPermissionSub.PamAccounts, {
resourceName: resource.name,
accountName: account.name,
accountPath
accountPath: folderPath
})
);
const session = await pamSessionDAL.create({
accountName: account.name,
actorEmail,
actorIp,
actorName,
actorUserAgent,
projectId: account.projectId,
resourceName: resource.name,
resourceType: resource.resourceType,
status: PamSessionStatus.Starting,
accountId: account.id,
userId: actor.id,
expiresAt: new Date(Date.now() + duration)
});
}
const { connectionDetails, gatewayId, resourceType } = await decryptResource(
resource,
@@ -551,13 +621,98 @@ export const pamAccountServiceFactory = ({
const user = await userDAL.findById(actor.id);
if (!user) throw new NotFoundError({ message: `User with ID '${actor.id}' not found` });
if (resourceType === PamResource.AwsIam) {
const awsCredentials = (await decryptAccountCredentials({
encryptedCredentials: account.encryptedCredentials,
kmsService,
projectId: account.projectId
})) as TAwsIamAccountCredentials;
const { consoleUrl, expiresAt } = await generateConsoleFederationUrl({
connectionDetails,
targetRoleArn: awsCredentials.targetRoleArn,
roleSessionName: actorEmail,
projectId: account.projectId, // Use project ID as External ID for security
sessionDuration: awsCredentials.defaultSessionDuration
});
const session = await pamSessionDAL.create({
accountName: account.name,
actorEmail,
actorIp,
actorName,
actorUserAgent,
projectId: account.projectId,
resourceName: resource.name,
resourceType: resource.resourceType,
status: PamSessionStatus.Active, // AWS IAM sessions are immediately active
accountId: account.id,
userId: actor.id,
expiresAt,
startedAt: new Date()
});
// Schedule session expiration job to run at expiresAt
await pamSessionExpirationService.scheduleSessionExpiration(session.id, expiresAt);
return {
sessionId: session.id,
resourceType,
account,
consoleUrl,
metadata: {
awsAccountId: extractAwsAccountIdFromArn(connectionDetails.roleArn),
targetRoleArn: awsCredentials.targetRoleArn,
federatedUsername: actorEmail,
expiresAt: expiresAt.toISOString()
}
};
}
// For gateway-based resources (Postgres, MySQL, SSH, Kubernetes), create the session first
const session = await pamSessionDAL.create({
accountName: account.name,
actorEmail,
actorIp,
actorName,
actorUserAgent,
projectId,
resourceName: resource.name,
resourceType: resource.resourceType,
status: PamSessionStatus.Starting,
accountId: account.id,
userId: actor.id,
expiresAt: new Date(Date.now() + duration)
});
if (!gatewayId) {
throw new BadRequestError({ message: "Gateway ID is required for this resource type" });
}
const { host, port } =
resourceType !== PamResource.Kubernetes
? connectionDetails
: (() => {
const url = new URL(connectionDetails.url);
let portNumber: number | undefined;
if (url.port) {
portNumber = Number(url.port);
} else {
portNumber = url.protocol === "https:" ? 443 : 80;
}
return {
host: url.hostname,
port: portNumber
};
})();
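// For Kubernetes (hypothetical example), a url of "https://10.0.0.5:6443" yields host "10.0.0.5" and port 6443,
// and "https://k8s.internal" falls back to port 443; other resource types use the host and port stored in connectionDetails.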
const gatewayConnectionDetails = await gatewayV2Service.getPAMConnectionDetails({
gatewayId,
duration,
sessionId: session.id,
resourceType: resource.resourceType as PamResource,
host: connectionDetails.host,
port: connectionDetails.port,
host,
port,
actorMetadata: {
id: actor.id,
type: actor.type,
@@ -578,36 +733,43 @@ export const pamAccountServiceFactory = ({
const connectionCredentials = (await decryptResourceConnectionDetails({
encryptedConnectionDetails: resource.encryptedConnectionDetails,
kmsService,
projectId: account.projectId
projectId
})) as TSqlResourceConnectionDetails;
const credentials = await decryptAccountCredentials({
const credentials = (await decryptAccountCredentials({
encryptedCredentials: account.encryptedCredentials,
kmsService,
projectId: account.projectId
});
projectId
})) as TSqlAccountCredentials;
metadata = {
username: credentials.username,
database: connectionCredentials.database,
accountName: account.name,
accountPath
accountPath: folderPath
};
}
break;
case PamResource.SSH:
{
const credentials = await decryptAccountCredentials({
const credentials = (await decryptAccountCredentials({
encryptedCredentials: account.encryptedCredentials,
kmsService,
projectId: account.projectId
});
projectId
})) as TSSHAccountCredentials;
metadata = {
username: credentials.username
};
}
break;
case PamResource.Kubernetes:
metadata = {
resourceName: resource.name,
accountName: account.name,
accountPath
};
break;
default:
break;
}
@@ -622,7 +784,7 @@ export const pamAccountServiceFactory = ({
gatewayClientPrivateKey: gatewayConnectionDetails.gateway.clientPrivateKey,
gatewayServerCertificateChain: gatewayConnectionDetails.gateway.serverCertificateChain,
relayHost: gatewayConnectionDetails.relayHost,
projectId: account.projectId,
projectId,
account,
metadata
};
@@ -674,7 +836,7 @@ export const pamAccountServiceFactory = ({
const resource = await pamResourceDAL.findById(account.resourceId);
if (!resource) throw new NotFoundError({ message: `Resource with ID '${account.resourceId}' not found` });
if (resource.gatewayIdentityId !== actor.id) {
if (resource.gatewayId && resource.gatewayIdentityId !== actor.id) {
throw new ForbiddenRequestError({
message: "Identity does not have access to fetch the PAM session credentials"
});
@@ -738,7 +900,8 @@ export const pamAccountServiceFactory = ({
resourceType as PamResource,
connectionDetails,
gatewayId,
gatewayV2Service
gatewayV2Service,
account.projectId
);
const newCredentials = await factory.rotateAccountCredentials(

View File

@@ -6,15 +6,18 @@ import { PamAccountOrderBy, PamAccountView } from "./pam-account-enums";
// DTOs
export type TCreateAccountDTO = Pick<
TPamAccount,
"name" | "description" | "credentials" | "folderId" | "resourceId" | "rotationEnabled" | "rotationIntervalSeconds"
>;
"name" | "description" | "credentials" | "folderId" | "resourceId" | "rotationIntervalSeconds"
> & {
rotationEnabled?: boolean;
};
export type TUpdateAccountDTO = Partial<Omit<TCreateAccountDTO, "folderId" | "resourceId">> & {
accountId: string;
};
export type TAccessAccountDTO = {
accountId: string;
accountPath: string;
projectId: string;
actorEmail: string;
actorIp: string;
actorName: string;

View File

@@ -71,23 +71,29 @@ export const pamFolderDALFactory = (db: TDbClient) => {
const findByPath = async (projectId: string, path: string, tx?: Knex) => {
try {
const dbInstance = tx || db.replicaNode();
const pathSegments = path.split("/").filter(Boolean);
let parentId: string | null = null;
let currentFolder: Awaited<ReturnType<typeof orm.findOne>> | undefined;
for await (const segment of pathSegments) {
const query = dbInstance(TableName.PamFolder)
const folders = await dbInstance(TableName.PamFolder)
.where(`${TableName.PamFolder}.projectId`, projectId)
.where(`${TableName.PamFolder}.name`, segment);
.select(selectAllTableCols(TableName.PamFolder));
if (parentId) {
void query.where(`${TableName.PamFolder}.parentId`, parentId);
} else {
void query.whereNull(`${TableName.PamFolder}.parentId`);
const pathSegments = path.split("/").filter(Boolean);
if (pathSegments.length === 0) {
return undefined;
}
currentFolder = await query.first();
const foldersByParentId = new Map<string | null, typeof folders>();
for (const folder of folders) {
const children = foldersByParentId.get(folder.parentId ?? null) ?? [];
children.push(folder);
foldersByParentId.set(folder.parentId ?? null, children);
}
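// Resolve the path segment by segment in memory: each segment is looked up among its parent's children,
// so e.g. "/team-a/prod" finds "team-a" under the root (parentId null) and then "prod" under that folder,
// without issuing one query per segment.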
let parentId: string | null = null;
let currentFolder: (typeof folders)[0] | undefined;
for (const segment of pathSegments) {
const childFolders: typeof folders = foldersByParentId.get(parentId) || [];
currentFolder = childFolders.find((folder) => folder.name === segment);
if (!currentFolder) {
return undefined;

View File

@@ -0,0 +1,245 @@
import { AssumeRoleCommand, Credentials, STSClient, STSClientConfig } from "@aws-sdk/client-sts";
import { CustomAWSHasher } from "@app/lib/aws/hashing";
import { getConfig } from "@app/lib/config/env";
import { request } from "@app/lib/config/request";
import { crypto } from "@app/lib/crypto/cryptography";
import { BadRequestError, InternalServerError } from "@app/lib/errors";
import { TAwsIamResourceConnectionDetails } from "./aws-iam-resource-types";
const AWS_STS_MIN_DURATION_SECONDS = 900;
// We hardcode us-east-1 because:
// 1. IAM is global - roles can be assumed from any STS regional endpoint
// 2. The temporary credentials returned work globally across all AWS regions
// 3. The target account's resources can be in any region - it doesn't affect STS calls
const AWS_STS_DEFAULT_REGION = "us-east-1";
const createStsClient = (credentials?: Credentials): STSClient => {
const appCfg = getConfig();
const config: STSClientConfig = {
region: AWS_STS_DEFAULT_REGION,
useFipsEndpoint: crypto.isFipsModeEnabled(),
sha256: CustomAWSHasher
};
if (credentials) {
// Use provided credentials (for role chaining)
config.credentials = {
accessKeyId: credentials.AccessKeyId!,
secretAccessKey: credentials.SecretAccessKey!,
sessionToken: credentials.SessionToken
};
} else if (appCfg.PAM_AWS_ACCESS_KEY_ID && appCfg.PAM_AWS_SECRET_ACCESS_KEY) {
// Use configured static credentials
config.credentials = {
accessKeyId: appCfg.PAM_AWS_ACCESS_KEY_ID,
secretAccessKey: appCfg.PAM_AWS_SECRET_ACCESS_KEY
};
}
// Otherwise uses instance profile if hosting on AWS
return new STSClient(config);
};
/**
* Assumes the PAM role and returns the credentials.
* Returns null if assumption fails (for validation) or throws if throwOnError is true.
*/
const assumePamRole = async ({
connectionDetails,
projectId,
sessionDuration = AWS_STS_MIN_DURATION_SECONDS,
sessionNameSuffix = "validation",
throwOnError = false
}: {
connectionDetails: TAwsIamResourceConnectionDetails;
projectId: string;
sessionDuration?: number;
sessionNameSuffix?: string;
throwOnError?: boolean;
}): Promise<Credentials | null> => {
const stsClient = createStsClient();
try {
const result = await stsClient.send(
new AssumeRoleCommand({
RoleArn: connectionDetails.roleArn,
RoleSessionName: `infisical-pam-${sessionNameSuffix}-${Date.now()}`,
DurationSeconds: sessionDuration,
ExternalId: projectId
})
);
if (!result.Credentials) {
if (throwOnError) {
throw new InternalServerError({
message: "Failed to assume PAM role - AWS STS did not return credentials"
});
}
return null;
}
return result.Credentials;
} catch (error) {
if (throwOnError) {
throw new InternalServerError({
message: `Failed to assume PAM role: ${error instanceof Error ? error.message : "Unknown error"}`
});
}
return null;
}
};
/**
* Assumes a target role using PAM role credentials (role chaining).
* Returns null if assumption fails (for validation) or throws if throwOnError is true.
*/
const assumeTargetRole = async ({
pamCredentials,
targetRoleArn,
projectId,
roleSessionName,
sessionDuration = AWS_STS_MIN_DURATION_SECONDS,
throwOnError = false
}: {
pamCredentials: Credentials;
targetRoleArn: string;
projectId: string;
roleSessionName: string;
sessionDuration?: number;
throwOnError?: boolean;
}): Promise<Credentials | null> => {
const chainedStsClient = createStsClient(pamCredentials);
try {
const result = await chainedStsClient.send(
new AssumeRoleCommand({
RoleArn: targetRoleArn,
RoleSessionName: roleSessionName,
DurationSeconds: sessionDuration,
ExternalId: projectId
})
);
if (!result.Credentials) {
if (throwOnError) {
throw new BadRequestError({
message: "Failed to assume target role - verify the target role trust policy allows the PAM role to assume it"
});
}
return null;
}
return result.Credentials;
} catch (error) {
if (throwOnError) {
throw new InternalServerError({
message: `Failed to assume target role: ${error instanceof Error ? error.message : "Unknown error"}`
});
}
return null;
}
};
export const validatePamRoleConnection = async (
connectionDetails: TAwsIamResourceConnectionDetails,
projectId: string
): Promise<boolean> => {
try {
const credentials = await assumePamRole({ connectionDetails, projectId });
return credentials !== null;
} catch {
return false;
}
};
export const validateTargetRoleAssumption = async ({
connectionDetails,
targetRoleArn,
projectId
}: {
connectionDetails: TAwsIamResourceConnectionDetails;
targetRoleArn: string;
projectId: string;
}): Promise<boolean> => {
try {
const pamCredentials = await assumePamRole({ connectionDetails, projectId });
if (!pamCredentials) return false;
const targetCredentials = await assumeTargetRole({
pamCredentials,
targetRoleArn,
projectId,
roleSessionName: `infisical-pam-target-validation-${Date.now()}`
});
return targetCredentials !== null;
} catch {
return false;
}
};
/**
* Assumes the target role and generates a federated console sign-in URL.
*/
export const generateConsoleFederationUrl = async ({
connectionDetails,
targetRoleArn,
roleSessionName,
projectId,
sessionDuration
}: {
connectionDetails: TAwsIamResourceConnectionDetails;
targetRoleArn: string;
roleSessionName: string;
projectId: string;
sessionDuration: number;
}): Promise<{ consoleUrl: string; expiresAt: Date }> => {
const pamCredentials = await assumePamRole({
connectionDetails,
projectId,
sessionDuration,
sessionNameSuffix: "session",
throwOnError: true
});
const targetCredentials = await assumeTargetRole({
pamCredentials: pamCredentials!,
targetRoleArn,
projectId,
roleSessionName,
sessionDuration,
throwOnError: true
});
const { AccessKeyId, SecretAccessKey, SessionToken, Expiration } = targetCredentials!;
// Generate federation URL
const sessionJson = JSON.stringify({
sessionId: AccessKeyId,
sessionKey: SecretAccessKey,
sessionToken: SessionToken
});
const federationEndpoint = "https://signin.aws.amazon.com/federation";
const signinTokenUrl = `${federationEndpoint}?Action=getSigninToken&Session=${encodeURIComponent(sessionJson)}`;
const tokenResponse = await request.get<{ SigninToken?: string }>(signinTokenUrl);
if (!tokenResponse.data.SigninToken) {
throw new InternalServerError({
message: `AWS federation endpoint did not return a SigninToken: ${JSON.stringify(tokenResponse.data).substring(0, 200)}`
});
}
const consoleDestination = `https://console.aws.amazon.com/`;
const consoleUrl = `${federationEndpoint}?Action=login&SigninToken=${encodeURIComponent(tokenResponse.data.SigninToken)}&Destination=${encodeURIComponent(consoleDestination)}`;
return {
consoleUrl,
expiresAt: Expiration ?? new Date(Date.now() + sessionDuration * 1000)
};
};
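// Minimal usage sketch (hypothetical values): resolves the chained credentials and returns a one-time
// AWS console sign-in link for the target role.
// const { consoleUrl, expiresAt } = await generateConsoleFederationUrl({
//   connectionDetails: { roleArn: "arn:aws:iam::123456789012:role/InfisicalPamRole" },
//   targetRoleArn: "arn:aws:iam::210987654321:role/ReadOnlyAccess",
//   roleSessionName: "jane@example.com",
//   projectId: "hypothetical-project-id", // used as the STS ExternalId
//   sessionDuration: 900
// });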

View File

@@ -0,0 +1,110 @@
import { BadRequestError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";
import { PamResource } from "../pam-resource-enums";
import {
TPamResourceFactory,
TPamResourceFactoryRotateAccountCredentials,
TPamResourceFactoryValidateAccountCredentials
} from "../pam-resource-types";
import { validatePamRoleConnection, validateTargetRoleAssumption } from "./aws-iam-federation";
import { TAwsIamAccountCredentials, TAwsIamResourceConnectionDetails } from "./aws-iam-resource-types";
export const awsIamResourceFactory: TPamResourceFactory<TAwsIamResourceConnectionDetails, TAwsIamAccountCredentials> = (
resourceType: PamResource,
connectionDetails: TAwsIamResourceConnectionDetails,
// AWS IAM doesn't use gateway
// eslint-disable-next-line @typescript-eslint/no-unused-vars
_gatewayId,
// eslint-disable-next-line @typescript-eslint/no-unused-vars
_gatewayV2Service,
projectId
) => {
const validateConnection = async () => {
try {
const isValid = await validatePamRoleConnection(connectionDetails, projectId ?? "");
if (!isValid) {
throw new BadRequestError({
message:
"Unable to assume the PAM role. Verify the role ARN and ensure the trust policy allows Infisical to assume the role."
});
}
logger.info(
{ roleArn: connectionDetails.roleArn },
"[AWS IAM Resource Factory] PAM role connection validated successfully"
);
return connectionDetails;
} catch (error) {
if (error instanceof BadRequestError) {
throw error;
}
logger.error(error, "[AWS IAM Resource Factory] Failed to validate PAM role connection");
throw new BadRequestError({
message: `Unable to validate connection to ${resourceType}: ${(error as Error).message || String(error)}`
});
}
};
const validateAccountCredentials: TPamResourceFactoryValidateAccountCredentials<TAwsIamAccountCredentials> = async (
credentials
) => {
try {
const isValid = await validateTargetRoleAssumption({
connectionDetails,
targetRoleArn: credentials.targetRoleArn,
projectId: projectId ?? ""
});
if (!isValid) {
throw new BadRequestError({
message: `Unable to assume the target role. Verify the target role ARN and ensure the PAM role (ARN: ${connectionDetails.roleArn}) has permission to assume it.`
});
}
logger.info(
{ targetRoleArn: credentials.targetRoleArn },
"[AWS IAM Resource Factory] Target role credentials validated successfully"
);
return credentials;
} catch (error) {
if (error instanceof BadRequestError) {
throw error;
}
logger.error(error, "[AWS IAM Resource Factory] Failed to validate target role credentials");
throw new BadRequestError({
message: `Unable to validate account credentials for ${resourceType}: ${(error as Error).message || String(error)}`
});
}
};
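// AWS IAM access is granted through STS role assumption, so there are no long-lived secrets to rotate;
// rotation returns the stored target role ARN and session duration unchanged.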
const rotateAccountCredentials: TPamResourceFactoryRotateAccountCredentials<TAwsIamAccountCredentials> = async (
_rotationAccountCredentials,
currentCredentials
) => {
return currentCredentials;
};
const handleOverwritePreventionForCensoredValues = async (
updatedAccountCredentials: TAwsIamAccountCredentials,
// AWS IAM has no censored credential values - role ARNs are not secrets
// eslint-disable-next-line @typescript-eslint/no-unused-vars
_currentCredentials: TAwsIamAccountCredentials
) => {
return updatedAccountCredentials;
};
return {
validateConnection,
validateAccountCredentials,
rotateAccountCredentials,
handleOverwritePreventionForCensoredValues
};
};

View File

@@ -0,0 +1,24 @@
import RE2 from "re2";
import { BadRequestError } from "@app/lib/errors";
import { AwsIamResourceListItemSchema } from "./aws-iam-resource-schemas";
export const getAwsIamResourceListItem = () => {
return {
name: AwsIamResourceListItemSchema.shape.name.value,
resource: AwsIamResourceListItemSchema.shape.resource.value
};
};
/**
* Extract the AWS Account ID from an IAM Role ARN
* ARN format: arn:aws:iam::123456789012:role/RoleName
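* Example: extractAwsAccountIdFromArn("arn:aws:iam::123456789012:role/Admin") returns "123456789012"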
*/
export const extractAwsAccountIdFromArn = (roleArn: string): string => {
const match = roleArn.match(new RE2("^arn:aws:iam::(\\d{12}):role/"));
if (!match) {
throw new BadRequestError({ message: "Invalid IAM Role ARN format" });
}
return match[1];
};

View File

@@ -0,0 +1,81 @@
import { z } from "zod";
import { PamResource } from "../pam-resource-enums";
import {
BaseCreatePamAccountSchema,
BaseCreatePamResourceSchema,
BasePamAccountSchema,
BasePamAccountSchemaWithResource,
BasePamResourceSchema,
BaseUpdatePamAccountSchema,
BaseUpdatePamResourceSchema
} from "../pam-resource-schemas";
// AWS STS session duration limits (in seconds)
// Role chaining (Infisical → PAM role → target role) limits max session to 1 hour
// @see https://docs.aws.amazon.com/STS/latest/APIReference/API_AssumeRole.html
const AWS_STS_MIN_SESSION_DURATION = 900; // 15 minutes
const AWS_STS_MAX_SESSION_DURATION_ROLE_CHAINING = 3600; // 1 hour
export const AwsIamResourceConnectionDetailsSchema = z.object({
roleArn: z.string().trim().min(1)
});
export const AwsIamAccountCredentialsSchema = z.object({
targetRoleArn: z.string().trim().min(1).max(2048),
defaultSessionDuration: z.coerce
.number()
.min(AWS_STS_MIN_SESSION_DURATION)
.max(AWS_STS_MAX_SESSION_DURATION_ROLE_CHAINING)
});
const BaseAwsIamResourceSchema = BasePamResourceSchema.extend({
resourceType: z.literal(PamResource.AwsIam),
gatewayId: z.string().uuid().nullable().optional()
});
export const AwsIamResourceSchema = BaseAwsIamResourceSchema.extend({
connectionDetails: AwsIamResourceConnectionDetailsSchema,
rotationAccountCredentials: AwsIamAccountCredentialsSchema.nullable().optional()
});
export const SanitizedAwsIamResourceSchema = BaseAwsIamResourceSchema.extend({
connectionDetails: AwsIamResourceConnectionDetailsSchema,
rotationAccountCredentials: AwsIamAccountCredentialsSchema.nullable().optional()
});
export const AwsIamResourceListItemSchema = z.object({
name: z.literal("AWS IAM"),
resource: z.literal(PamResource.AwsIam)
});
export const CreateAwsIamResourceSchema = BaseCreatePamResourceSchema.extend({
connectionDetails: AwsIamResourceConnectionDetailsSchema,
rotationAccountCredentials: AwsIamAccountCredentialsSchema.nullable().optional()
});
export const UpdateAwsIamResourceSchema = BaseUpdatePamResourceSchema.extend({
connectionDetails: AwsIamResourceConnectionDetailsSchema.optional(),
rotationAccountCredentials: AwsIamAccountCredentialsSchema.nullable().optional()
});
export const AwsIamAccountSchema = BasePamAccountSchema.extend({
credentials: AwsIamAccountCredentialsSchema
});
export const CreateAwsIamAccountSchema = BaseCreatePamAccountSchema.extend({
credentials: AwsIamAccountCredentialsSchema,
// AWS IAM accounts don't support credential rotation - they use role assumption
rotationEnabled: z.boolean().default(false)
});
export const UpdateAwsIamAccountSchema = BaseUpdatePamAccountSchema.extend({
credentials: AwsIamAccountCredentialsSchema.optional()
});
export const SanitizedAwsIamAccountWithResourceSchema = BasePamAccountSchemaWithResource.extend({
credentials: AwsIamAccountCredentialsSchema.pick({
targetRoleArn: true,
defaultSessionDuration: true
})
});

View File

@@ -0,0 +1,16 @@
import { z } from "zod";
import {
AwsIamAccountCredentialsSchema,
AwsIamAccountSchema,
AwsIamResourceConnectionDetailsSchema,
AwsIamResourceSchema
} from "./aws-iam-resource-schemas";
// Resources
export type TAwsIamResource = z.infer<typeof AwsIamResourceSchema>;
export type TAwsIamResourceConnectionDetails = z.infer<typeof AwsIamResourceConnectionDetailsSchema>;
// Accounts
export type TAwsIamAccount = z.infer<typeof AwsIamAccountSchema>;
export type TAwsIamAccountCredentials = z.infer<typeof AwsIamAccountCredentialsSchema>;

View File

@@ -0,0 +1,5 @@
export * from "./aws-iam-federation";
export * from "./aws-iam-resource-factory";
export * from "./aws-iam-resource-fns";
export * from "./aws-iam-resource-schemas";
export * from "./aws-iam-resource-types";

View File

@@ -0,0 +1,3 @@
export enum KubernetesAuthMethod {
ServiceAccountToken = "service-account-token"
}

View File

@@ -0,0 +1,225 @@
import axios, { AxiosError } from "axios";
import https from "https";
import { BadRequestError } from "@app/lib/errors";
import { GatewayProxyProtocol } from "@app/lib/gateway/types";
import { withGatewayV2Proxy } from "@app/lib/gateway-v2/gateway-v2";
import { logger } from "@app/lib/logger";
import { verifyHostInputValidity } from "../../dynamic-secret/dynamic-secret-fns";
import { TGatewayV2ServiceFactory } from "../../gateway-v2/gateway-v2-service";
import { PamResource } from "../pam-resource-enums";
import {
TPamResourceFactory,
TPamResourceFactoryRotateAccountCredentials,
TPamResourceFactoryValidateAccountCredentials
} from "../pam-resource-types";
import { KubernetesAuthMethod } from "./kubernetes-resource-enums";
import { TKubernetesAccountCredentials, TKubernetesResourceConnectionDetails } from "./kubernetes-resource-types";
const EXTERNAL_REQUEST_TIMEOUT = 10 * 1000;
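// Opens a gateway V2 TCP proxy to the cluster's API server and runs the supplied operation against a
// localhost base URL; TLS verification uses the resource's CA certificate and rejectUnauthorized settings,
// with SNI set to the verified target host.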
export const executeWithGateway = async <T>(
config: {
connectionDetails: TKubernetesResourceConnectionDetails;
resourceType: PamResource;
gatewayId: string;
},
gatewayV2Service: Pick<TGatewayV2ServiceFactory, "getPlatformConnectionDetailsByGatewayId">,
operation: (baseUrl: string, httpsAgent: https.Agent) => Promise<T>
): Promise<T> => {
const { connectionDetails, gatewayId } = config;
const url = new URL(connectionDetails.url);
const [targetHost] = await verifyHostInputValidity(url.hostname, true);
let targetPort: number;
if (url.port) {
targetPort = Number(url.port);
} else if (url.protocol === "https:") {
targetPort = 443;
} else {
targetPort = 80;
}
const platformConnectionDetails = await gatewayV2Service.getPlatformConnectionDetailsByGatewayId({
gatewayId,
targetHost,
targetPort
});
if (!platformConnectionDetails) {
throw new BadRequestError({ message: "Unable to connect to gateway, no platform connection details found" });
}
const httpsAgent = new https.Agent({
ca: connectionDetails.sslCertificate,
rejectUnauthorized: connectionDetails.sslRejectUnauthorized,
servername: targetHost
});
return withGatewayV2Proxy(
async (proxyPort) => {
const protocol = url.protocol === "https:" ? "https" : "http";
const baseUrl = `${protocol}://localhost:${proxyPort}`;
return operation(baseUrl, httpsAgent);
},
{
protocol: GatewayProxyProtocol.Tcp,
relayHost: platformConnectionDetails.relayHost,
gateway: platformConnectionDetails.gateway,
relay: platformConnectionDetails.relay,
httpsAgent
}
);
};
export const kubernetesResourceFactory: TPamResourceFactory<
TKubernetesResourceConnectionDetails,
TKubernetesAccountCredentials
> = (resourceType, connectionDetails, gatewayId, gatewayV2Service) => {
const validateConnection = async () => {
if (!gatewayId) {
throw new BadRequestError({ message: "Gateway ID is required" });
}
try {
await executeWithGateway(
{ connectionDetails, gatewayId, resourceType },
gatewayV2Service,
async (baseUrl, httpsAgent) => {
// Validate connection by checking API server version
try {
await axios.get(`${baseUrl}/version`, {
...(httpsAgent ? { httpsAgent } : {}),
signal: AbortSignal.timeout(EXTERNAL_REQUEST_TIMEOUT),
timeout: EXTERNAL_REQUEST_TIMEOUT
});
} catch (error) {
if (error instanceof AxiosError) {
// If we get a 401/403, it means we reached the API server but need auth - that's fine for connection validation
if (error.response?.status === 401 || error.response?.status === 403) {
logger.info(
{ status: error.response.status },
"[Kubernetes Resource Factory] Kubernetes connection validation succeeded (auth required)"
);
return connectionDetails;
}
throw new BadRequestError({
message: `Unable to connect to Kubernetes API server: ${error.response?.statusText || error.message}`
});
}
throw error;
}
logger.info("[Kubernetes Resource Factory] Kubernetes connection validation succeeded");
return connectionDetails;
}
);
return connectionDetails;
} catch (error) {
throw new BadRequestError({
message: `Unable to validate connection to ${resourceType}: ${(error as Error).message || String(error)}`
});
}
};
const validateAccountCredentials: TPamResourceFactoryValidateAccountCredentials<
TKubernetesAccountCredentials
> = async (credentials) => {
if (!gatewayId) {
throw new BadRequestError({ message: "Gateway ID is required" });
}
try {
await executeWithGateway(
{ connectionDetails, gatewayId, resourceType },
gatewayV2Service,
async (baseUrl, httpsAgent) => {
const { authMethod } = credentials;
if (authMethod === KubernetesAuthMethod.ServiceAccountToken) {
// Validate service account token using SelfSubjectReview API (whoami)
// This endpoint doesn't require any special permissions from the service account
try {
await axios.post(
`${baseUrl}/apis/authentication.k8s.io/v1/selfsubjectreviews`,
{
apiVersion: "authentication.k8s.io/v1",
kind: "SelfSubjectReview"
},
{
headers: {
"Content-Type": "application/json",
Authorization: `Bearer ${credentials.serviceAccountToken}`
},
...(httpsAgent ? { httpsAgent } : {}),
signal: AbortSignal.timeout(EXTERNAL_REQUEST_TIMEOUT),
timeout: EXTERNAL_REQUEST_TIMEOUT
}
);
logger.info("[Kubernetes Resource Factory] Kubernetes service account token authentication successful");
} catch (error) {
if (error instanceof AxiosError) {
if (error.response?.status === 401 || error.response?.status === 403) {
throw new BadRequestError({
message:
"Account credentials invalid. Service account token is not valid or does not have required permissions."
});
}
throw new BadRequestError({
message: `Unable to validate account credentials: ${error.response?.statusText || error.message}`
});
}
throw error;
}
} else {
throw new BadRequestError({
message: `Unsupported Kubernetes auth method: ${authMethod as string}`
});
}
}
);
return credentials;
} catch (error) {
if (error instanceof BadRequestError) {
throw error;
}
throw new BadRequestError({
message: `Unable to validate account credentials for ${resourceType}: ${(error as Error).message || String(error)}`
});
}
};
const rotateAccountCredentials: TPamResourceFactoryRotateAccountCredentials<
TKubernetesAccountCredentials
> = async () => {
throw new BadRequestError({
message: `Unable to rotate account credentials for ${resourceType}: not implemented`
});
};
const handleOverwritePreventionForCensoredValues = async (
updatedAccountCredentials: TKubernetesAccountCredentials,
currentCredentials: TKubernetesAccountCredentials
) => {
if (updatedAccountCredentials.authMethod !== currentCredentials.authMethod) {
return updatedAccountCredentials;
}
if (
updatedAccountCredentials.authMethod === KubernetesAuthMethod.ServiceAccountToken &&
currentCredentials.authMethod === KubernetesAuthMethod.ServiceAccountToken
) {
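// "__INFISICAL_UNCHANGED__" is the placeholder sent when the censored token field is left untouched,
// so the previously stored service account token is preserved rather than overwritten.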
if (updatedAccountCredentials.serviceAccountToken === "__INFISICAL_UNCHANGED__") {
return {
...updatedAccountCredentials,
serviceAccountToken: currentCredentials.serviceAccountToken
};
}
}
return updatedAccountCredentials;
};
return {
validateConnection,
validateAccountCredentials,
rotateAccountCredentials,
handleOverwritePreventionForCensoredValues
};
};

View File

@@ -0,0 +1,8 @@
import { KubernetesResourceListItemSchema } from "./kubernetes-resource-schemas";
export const getKubernetesResourceListItem = () => {
return {
name: KubernetesResourceListItemSchema.shape.name.value,
resource: KubernetesResourceListItemSchema.shape.resource.value
};
};

View File

@@ -0,0 +1,94 @@
import { z } from "zod";
import { PamResource } from "../pam-resource-enums";
import {
BaseCreateGatewayPamResourceSchema,
BaseCreatePamAccountSchema,
BasePamAccountSchema,
BasePamAccountSchemaWithResource,
BasePamResourceSchema,
BaseUpdateGatewayPamResourceSchema,
BaseUpdatePamAccountSchema
} from "../pam-resource-schemas";
import { KubernetesAuthMethod } from "./kubernetes-resource-enums";
export const BaseKubernetesResourceSchema = BasePamResourceSchema.extend({
resourceType: z.literal(PamResource.Kubernetes)
});
export const KubernetesResourceListItemSchema = z.object({
name: z.literal("Kubernetes"),
resource: z.literal(PamResource.Kubernetes)
});
export const KubernetesResourceConnectionDetailsSchema = z.object({
url: z.string().url().trim().max(500),
sslRejectUnauthorized: z.boolean(),
sslCertificate: z
.string()
.trim()
.transform((value) => value || undefined)
.optional()
});
export const KubernetesServiceAccountTokenCredentialsSchema = z.object({
authMethod: z.literal(KubernetesAuthMethod.ServiceAccountToken),
serviceAccountToken: z.string().trim().max(10000)
});
export const KubernetesAccountCredentialsSchema = z.discriminatedUnion("authMethod", [
KubernetesServiceAccountTokenCredentialsSchema
]);
export const KubernetesResourceSchema = BaseKubernetesResourceSchema.extend({
connectionDetails: KubernetesResourceConnectionDetailsSchema,
rotationAccountCredentials: KubernetesAccountCredentialsSchema.nullable().optional()
});
export const SanitizedKubernetesResourceSchema = BaseKubernetesResourceSchema.extend({
connectionDetails: KubernetesResourceConnectionDetailsSchema,
rotationAccountCredentials: z
.discriminatedUnion("authMethod", [
z.object({
authMethod: z.literal(KubernetesAuthMethod.ServiceAccountToken)
})
])
.nullable()
.optional()
});
export const CreateKubernetesResourceSchema = BaseCreateGatewayPamResourceSchema.extend({
connectionDetails: KubernetesResourceConnectionDetailsSchema,
rotationAccountCredentials: KubernetesAccountCredentialsSchema.nullable().optional()
});
export const UpdateKubernetesResourceSchema = BaseUpdateGatewayPamResourceSchema.extend({
connectionDetails: KubernetesResourceConnectionDetailsSchema.optional(),
rotationAccountCredentials: KubernetesAccountCredentialsSchema.nullable().optional()
});
// Accounts
export const KubernetesAccountSchema = BasePamAccountSchema.extend({
credentials: KubernetesAccountCredentialsSchema
});
export const CreateKubernetesAccountSchema = BaseCreatePamAccountSchema.extend({
credentials: KubernetesAccountCredentialsSchema
});
export const UpdateKubernetesAccountSchema = BaseUpdatePamAccountSchema.extend({
credentials: KubernetesAccountCredentialsSchema.optional()
});
export const SanitizedKubernetesAccountWithResourceSchema = BasePamAccountSchemaWithResource.extend({
credentials: z.discriminatedUnion("authMethod", [
z.object({
authMethod: z.literal(KubernetesAuthMethod.ServiceAccountToken)
})
])
});
// Sessions
export const KubernetesSessionCredentialsSchema = KubernetesResourceConnectionDetailsSchema.and(
KubernetesAccountCredentialsSchema
);

View File

@@ -0,0 +1,16 @@
import { z } from "zod";
import {
KubernetesAccountCredentialsSchema,
KubernetesAccountSchema,
KubernetesResourceConnectionDetailsSchema,
KubernetesResourceSchema
} from "./kubernetes-resource-schemas";
// Resources
export type TKubernetesResource = z.infer<typeof KubernetesResourceSchema>;
export type TKubernetesResourceConnectionDetails = z.infer<typeof KubernetesResourceConnectionDetailsSchema>;
// Accounts
export type TKubernetesAccount = z.infer<typeof KubernetesAccountSchema>;
export type TKubernetesAccountCredentials = z.infer<typeof KubernetesAccountCredentialsSchema>;

View File

@@ -2,13 +2,13 @@ import { z } from "zod";
import { PamResource } from "../pam-resource-enums";
import {
BaseCreateGatewayPamResourceSchema,
BaseCreatePamAccountSchema,
BaseCreatePamResourceSchema,
BasePamAccountSchema,
BasePamAccountSchemaWithResource,
BasePamResourceSchema,
BaseUpdatePamAccountSchema,
BaseUpdatePamResourceSchema
BaseUpdateGatewayPamResourceSchema,
BaseUpdatePamAccountSchema
} from "../pam-resource-schemas";
import {
BaseSqlAccountCredentialsSchema,
@@ -43,12 +43,12 @@ export const MySQLResourceListItemSchema = z.object({
resource: z.literal(PamResource.MySQL)
});
export const CreateMySQLResourceSchema = BaseCreatePamResourceSchema.extend({
export const CreateMySQLResourceSchema = BaseCreateGatewayPamResourceSchema.extend({
connectionDetails: MySQLResourceConnectionDetailsSchema,
rotationAccountCredentials: MySQLAccountCredentialsSchema.nullable().optional()
});
export const UpdateMySQLResourceSchema = BaseUpdatePamResourceSchema.extend({
export const UpdateMySQLResourceSchema = BaseUpdateGatewayPamResourceSchema.extend({
connectionDetails: MySQLResourceConnectionDetailsSchema.optional(),
rotationAccountCredentials: MySQLAccountCredentialsSchema.nullable().optional()
});

View File

@@ -14,7 +14,7 @@ export const pamResourceDALFactory = (db: TDbClient) => {
const findById = async (id: string, tx?: Knex) => {
const doc = await (tx || db.replicaNode())(TableName.PamResource)
.join(TableName.GatewayV2, `${TableName.PamResource}.gatewayId`, `${TableName.GatewayV2}.id`)
.leftJoin(TableName.GatewayV2, `${TableName.PamResource}.gatewayId`, `${TableName.GatewayV2}.id`)
.select(selectAllTableCols(TableName.PamResource))
.select(db.ref("name").withSchema(TableName.GatewayV2).as("gatewayName"))
.select(db.ref("identityId").withSchema(TableName.GatewayV2).as("gatewayIdentityId"))

View File

@@ -1,7 +1,9 @@
export enum PamResource {
Postgres = "postgres",
MySQL = "mysql",
SSH = "ssh"
SSH = "ssh",
Kubernetes = "kubernetes",
AwsIam = "aws-iam"
}
export enum PamResourceOrderBy {

View File

@@ -1,3 +1,5 @@
import { awsIamResourceFactory } from "./aws-iam/aws-iam-resource-factory";
import { kubernetesResourceFactory } from "./kubernetes/kubernetes-resource-factory";
import { PamResource } from "./pam-resource-enums";
import { TPamAccountCredentials, TPamResourceConnectionDetails, TPamResourceFactory } from "./pam-resource-types";
import { sqlResourceFactory } from "./shared/sql/sql-resource-factory";
@@ -8,5 +10,7 @@ type TPamResourceFactoryImplementation = TPamResourceFactory<TPamResourceConnect
export const PAM_RESOURCE_FACTORY_MAP: Record<PamResource, TPamResourceFactoryImplementation> = {
[PamResource.Postgres]: sqlResourceFactory as TPamResourceFactoryImplementation,
[PamResource.MySQL]: sqlResourceFactory as TPamResourceFactoryImplementation,
[PamResource.SSH]: sshResourceFactory as TPamResourceFactoryImplementation
[PamResource.SSH]: sshResourceFactory as TPamResourceFactoryImplementation,
[PamResource.Kubernetes]: kubernetesResourceFactory as TPamResourceFactoryImplementation,
[PamResource.AwsIam]: awsIamResourceFactory as TPamResourceFactoryImplementation
};

View File

@@ -3,12 +3,19 @@ import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { decryptAccountCredentials } from "../pam-account/pam-account-fns";
import { getAwsIamResourceListItem } from "./aws-iam/aws-iam-resource-fns";
import { getKubernetesResourceListItem } from "./kubernetes/kubernetes-resource-fns";
import { getMySQLResourceListItem } from "./mysql/mysql-resource-fns";
import { TPamResource, TPamResourceConnectionDetails } from "./pam-resource-types";
import { getPostgresResourceListItem } from "./postgres/postgres-resource-fns";
export const listResourceOptions = () => {
return [getPostgresResourceListItem(), getMySQLResourceListItem()].sort((a, b) => a.name.localeCompare(b.name));
return [
getPostgresResourceListItem(),
getMySQLResourceListItem(),
getAwsIamResourceListItem(),
getKubernetesResourceListItem()
].sort((a, b) => a.name.localeCompare(b.name));
};
// Resource

View File

@@ -3,6 +3,18 @@ import { z } from "zod";
import { PamAccountsSchema, PamResourcesSchema } from "@app/db/schemas";
import { slugSchema } from "@app/server/lib/schemas";
export const GatewayAccessResponseSchema = z.object({
sessionId: z.string(),
relayClientCertificate: z.string(),
relayClientPrivateKey: z.string(),
relayServerCertificateChain: z.string(),
gatewayClientCertificate: z.string(),
gatewayClientPrivateKey: z.string(),
gatewayServerCertificateChain: z.string(),
relayHost: z.string(),
metadata: z.record(z.string(), z.string().optional()).optional()
});
// Resources
export const BasePamResourceSchema = PamResourcesSchema.omit({
encryptedConnectionDetails: true,
@@ -10,17 +22,27 @@ export const BasePamResourceSchema = PamResourcesSchema.omit({
resourceType: true
});
export const BaseCreatePamResourceSchema = z.object({
const CoreCreatePamResourceSchema = z.object({
projectId: z.string().uuid(),
gatewayId: z.string().uuid(),
name: slugSchema({ field: "name" })
});
export const BaseUpdatePamResourceSchema = z.object({
gatewayId: z.string().uuid().optional(),
export const BaseCreateGatewayPamResourceSchema = CoreCreatePamResourceSchema.extend({
gatewayId: z.string().uuid()
});
export const BaseCreatePamResourceSchema = CoreCreatePamResourceSchema;
const CoreUpdatePamResourceSchema = z.object({
name: slugSchema({ field: "name" }).optional()
});
export const BaseUpdateGatewayPamResourceSchema = CoreUpdatePamResourceSchema.extend({
gatewayId: z.string().uuid().optional()
});
export const BaseUpdatePamResourceSchema = CoreUpdatePamResourceSchema;
// Accounts
export const BasePamAccountSchema = PamAccountsSchema.omit({
encryptedCredentials: true

View File

@@ -92,7 +92,8 @@ export const pamResourceServiceFactory = ({
resourceType,
connectionDetails,
gatewayId,
gatewayV2Service
gatewayV2Service,
projectId
);
const validatedConnectionDetails = await factory.validateConnection();
@@ -162,7 +163,8 @@ export const pamResourceServiceFactory = ({
resource.resourceType as PamResource,
connectionDetails,
resource.gatewayId,
gatewayV2Service
gatewayV2Service,
resource.projectId
);
const validatedConnectionDetails = await factory.validateConnection();
const encryptedConnectionDetails = await encryptResourceConnectionDetails({
@@ -189,7 +191,8 @@ export const pamResourceServiceFactory = ({
resource.resourceType as PamResource,
decryptedConnectionDetails,
resource.gatewayId,
gatewayV2Service
gatewayV2Service,
resource.projectId
);
let finalCredentials = { ...rotationAccountCredentials };

View File

@@ -1,6 +1,18 @@
import { OrderByDirection, TProjectPermission } from "@app/lib/types";
import { TGatewayV2ServiceFactory } from "../gateway-v2/gateway-v2-service";
import {
TAwsIamAccount,
TAwsIamAccountCredentials,
TAwsIamResource,
TAwsIamResourceConnectionDetails
} from "./aws-iam/aws-iam-resource-types";
import {
TKubernetesAccount,
TKubernetesAccountCredentials,
TKubernetesResource,
TKubernetesResourceConnectionDetails
} from "./kubernetes/kubernetes-resource-types";
import {
TMySQLAccount,
TMySQLAccountCredentials,
@@ -22,22 +34,30 @@ import {
} from "./ssh/ssh-resource-types";
// Resource types
export type TPamResource = TPostgresResource | TMySQLResource | TSSHResource;
export type TPamResource = TPostgresResource | TMySQLResource | TSSHResource | TAwsIamResource | TKubernetesResource;
export type TPamResourceConnectionDetails =
| TPostgresResourceConnectionDetails
| TMySQLResourceConnectionDetails
| TSSHResourceConnectionDetails;
| TSSHResourceConnectionDetails
| TKubernetesResourceConnectionDetails
| TAwsIamResourceConnectionDetails;
// Account types
export type TPamAccount = TPostgresAccount | TMySQLAccount | TSSHAccount;
export type TPamAccount = TPostgresAccount | TMySQLAccount | TSSHAccount | TAwsIamAccount | TKubernetesAccount;
// eslint-disable-next-line @typescript-eslint/no-duplicate-type-constituents
export type TPamAccountCredentials = TPostgresAccountCredentials | TMySQLAccountCredentials | TSSHAccountCredentials;
export type TPamAccountCredentials =
| TPostgresAccountCredentials
| TMySQLAccountCredentials
| TSSHAccountCredentials
| TKubernetesAccountCredentials
| TAwsIamAccountCredentials;
// Resource DTOs
export type TCreateResourceDTO = Pick<
TPamResource,
"name" | "connectionDetails" | "resourceType" | "gatewayId" | "projectId" | "rotationAccountCredentials"
>;
export type TCreateResourceDTO = Pick<TPamResource, "name" | "connectionDetails" | "resourceType" | "projectId"> & {
gatewayId?: string | null;
rotationAccountCredentials?: TPamAccountCredentials | null;
};
export type TUpdateResourceDTO = Partial<Omit<TCreateResourceDTO, "resourceType" | "projectId">> & {
resourceId: string;
@@ -65,8 +85,9 @@ export type TPamResourceFactoryRotateAccountCredentials<C extends TPamAccountCre
export type TPamResourceFactory<T extends TPamResourceConnectionDetails, C extends TPamAccountCredentials> = (
resourceType: PamResource,
connectionDetails: T,
gatewayId: string,
gatewayV2Service: Pick<TGatewayV2ServiceFactory, "getPlatformConnectionDetailsByGatewayId">
gatewayId: string | null | undefined,
gatewayV2Service: Pick<TGatewayV2ServiceFactory, "getPlatformConnectionDetailsByGatewayId">,
projectId: string | null | undefined
) => {
validateConnection: TPamResourceFactoryValidateConnection<T>;
validateAccountCredentials: TPamResourceFactoryValidateAccountCredentials<C>;
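An illustrative, heavily trimmed factory for a gateway-less resource under the widened signature. Only the two members visible in this hunk are sketched; the connection-details shape, credential shape and internal logic are placeholders rather than the AWS IAM implementation added by this commit, and the error class path is assumed to be @app/lib/errors as elsewhere in this diff.

import { BadRequestError } from "@app/lib/errors";
import { PamResource } from "./pam-resource-enums";

const exampleGatewaylessFactory = (
  resourceType: PamResource,
  connectionDetails: { region: string }, // placeholder shape
  gatewayId: string | null | undefined,
  gatewayV2Service: unknown, // unused: no gateway hop for this resource type
  projectId: string | null | undefined
) => {
  const validateConnection = async () => {
    // Gateway-less resources skip the "Gateway ID is required" guard that the
    // SQL and SSH factories add below, but still need a project to scope against.
    if (!projectId) {
      throw new BadRequestError({ message: "Project ID is required" });
    }
    return connectionDetails;
  };

  const validateAccountCredentials = async (credentials: { accessKeyId: string }) => credentials; // placeholder

  return { validateConnection, validateAccountCredentials /* remaining members elided */ };
};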

View File

@@ -2,13 +2,13 @@ import { z } from "zod";
import { PamResource } from "../pam-resource-enums";
import {
BaseCreateGatewayPamResourceSchema,
BaseCreatePamAccountSchema,
BaseCreatePamResourceSchema,
BasePamAccountSchema,
BasePamAccountSchemaWithResource,
BasePamResourceSchema,
BaseUpdatePamAccountSchema,
BaseUpdatePamResourceSchema
BaseUpdateGatewayPamResourceSchema,
BaseUpdatePamAccountSchema
} from "../pam-resource-schemas";
import {
BaseSqlAccountCredentialsSchema,
@@ -40,12 +40,12 @@ export const PostgresResourceListItemSchema = z.object({
resource: z.literal(PamResource.Postgres)
});
export const CreatePostgresResourceSchema = BaseCreatePamResourceSchema.extend({
export const CreatePostgresResourceSchema = BaseCreateGatewayPamResourceSchema.extend({
connectionDetails: PostgresResourceConnectionDetailsSchema,
rotationAccountCredentials: PostgresAccountCredentialsSchema.nullable().optional()
});
export const UpdatePostgresResourceSchema = BaseUpdatePamResourceSchema.extend({
export const UpdatePostgresResourceSchema = BaseUpdateGatewayPamResourceSchema.extend({
connectionDetails: PostgresResourceConnectionDetailsSchema.optional(),
rotationAccountCredentials: PostgresAccountCredentialsSchema.nullable().optional()
});

View File

@@ -233,6 +233,10 @@ export const sqlResourceFactory: TPamResourceFactory<TSqlResourceConnectionDetai
gatewayV2Service
) => {
const validateConnection = async () => {
if (!gatewayId) {
throw new BadRequestError({ message: "Gateway ID is required" });
}
try {
await executeWithGateway({ connectionDetails, gatewayId, resourceType }, gatewayV2Service, async (client) => {
await client.validate(true);
@@ -255,6 +259,10 @@ export const sqlResourceFactory: TPamResourceFactory<TSqlResourceConnectionDetai
credentials
) => {
try {
if (!gatewayId) {
throw new BadRequestError({ message: "Gateway ID is required" });
}
await executeWithGateway(
{
connectionDetails,
@@ -296,6 +304,10 @@ export const sqlResourceFactory: TPamResourceFactory<TSqlResourceConnectionDetai
currentCredentials
) => {
const newPassword = alphaNumericNanoId(32);
if (!gatewayId) {
throw new BadRequestError({ message: "Gateway ID is required" });
}
try {
return await executeWithGateway(
{
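The same missing-gateway guard now appears in the SQL factory's connection validation, credential validation and rotation paths, and again in the SSH factory below. A tiny assertion helper along these lines, purely illustrative, would centralise the check and also narrow gatewayId for TypeScript; BadRequestError is imported from @app/lib/errors as elsewhere in this diff.

import { BadRequestError } from "@app/lib/errors";

function assertGatewayId(gatewayId: string | null | undefined): asserts gatewayId is string {
  if (!gatewayId) {
    throw new BadRequestError({ message: "Gateway ID is required" });
  }
}

// usage inside a factory callback:
//   assertGatewayId(gatewayId);
//   await executeWithGateway({ connectionDetails, gatewayId, resourceType }, gatewayV2Service, ...);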

View File

@@ -60,6 +60,10 @@ export const sshResourceFactory: TPamResourceFactory<TSSHResourceConnectionDetai
) => {
const validateConnection = async () => {
try {
if (!gatewayId) {
throw new BadRequestError({ message: "Gateway ID is required" });
}
await executeWithGateway({ connectionDetails, gatewayId, resourceType }, gatewayV2Service, async (proxyPort) => {
return new Promise<void>((resolve, reject) => {
const client = new Client();
@@ -131,6 +135,10 @@ export const sshResourceFactory: TPamResourceFactory<TSSHResourceConnectionDetai
credentials
) => {
try {
if (!gatewayId) {
throw new BadRequestError({ message: "Gateway ID is required" });
}
await executeWithGateway({ connectionDetails, gatewayId, resourceType }, gatewayV2Service, async (proxyPort) => {
return new Promise<void>((resolve, reject) => {
const client = new Client();

View File

@@ -2,13 +2,13 @@ import { z } from "zod";
import { PamResource } from "../pam-resource-enums";
import {
BaseCreateGatewayPamResourceSchema,
BaseCreatePamAccountSchema,
BaseCreatePamResourceSchema,
BasePamAccountSchema,
BasePamAccountSchemaWithResource,
BasePamResourceSchema,
BaseUpdatePamAccountSchema,
BaseUpdatePamResourceSchema
BaseUpdateGatewayPamResourceSchema,
BaseUpdatePamAccountSchema
} from "../pam-resource-schemas";
import { SSHAuthMethod } from "./ssh-resource-enums";
@@ -73,12 +73,12 @@ export const SanitizedSSHResourceSchema = BaseSSHResourceSchema.extend({
.optional()
});
export const CreateSSHResourceSchema = BaseCreatePamResourceSchema.extend({
export const CreateSSHResourceSchema = BaseCreateGatewayPamResourceSchema.extend({
connectionDetails: SSHResourceConnectionDetailsSchema,
rotationAccountCredentials: SSHAccountCredentialsSchema.nullable().optional()
});
export const UpdateSSHResourceSchema = BaseUpdatePamResourceSchema.extend({
export const UpdateSSHResourceSchema = BaseUpdateGatewayPamResourceSchema.extend({
connectionDetails: SSHResourceConnectionDetailsSchema.optional(),
rotationAccountCredentials: SSHAccountCredentialsSchema.nullable().optional()
});

View File

@@ -4,6 +4,8 @@ import { TDbClient } from "@app/db";
import { TableName } from "@app/db/schemas";
import { ormify, selectAllTableCols } from "@app/lib/knex";
import { PamSessionStatus } from "./pam-session-enums";
export type TPamSessionDALFactory = ReturnType<typeof pamSessionDALFactory>;
export const pamSessionDALFactory = (db: TDbClient) => {
const orm = ormify(db, TableName.PamSession);
@@ -22,5 +24,19 @@ export const pamSessionDALFactory = (db: TDbClient) => {
return session;
};
return { ...orm, findById };
const expireSessionById = async (sessionId: string, tx?: Knex) => {
const now = new Date();
const updatedCount = await (tx || db)(TableName.PamSession)
.where("id", sessionId)
.whereIn("status", [PamSessionStatus.Active, PamSessionStatus.Starting])
.update({
status: PamSessionStatus.Ended,
endedAt: now
});
return updatedCount;
};
return { ...orm, findById, expireSessionById };
};
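An illustrative call site for the new DAL method; the session object and the shared pino logger are assumed. Because of the whereIn guard the update is idempotent: sessions that are already ended or terminated are skipped and the count comes back as 0.

const endedCount = await pamSessionDAL.expireSessionById(session.id);
if (endedCount === 0) {
  logger.info({ sessionId: session.id }, "Session already ended or terminated; nothing to expire");
}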

View File

@@ -1,6 +1,6 @@
export enum PamSessionStatus {
Starting = "starting", // Starting, user connecting to resource
Active = "active", // Active, user is connected to resource
Ended = "ended", // Ended by user
Ended = "ended", // Ended by user or automatically expired after expiresAt timestamp
Terminated = "terminated" // Terminated by an admin
}

View File

@@ -11,6 +11,8 @@ export const PamSessionCommandLogSchema = z.object({
// SSH Terminal Event schemas
export const TerminalEventTypeSchema = z.enum(["input", "output", "resize", "error"]);
export const HttpEventTypeSchema = z.enum(["request", "response"]);
export const TerminalEventSchema = z.object({
timestamp: z.coerce.date(),
eventType: TerminalEventTypeSchema,
@@ -18,8 +20,29 @@ export const TerminalEventSchema = z.object({
elapsedTime: z.number() // Seconds since session start (for replay)
});
export const HttpBaseEventSchema = z.object({
timestamp: z.coerce.date(),
requestId: z.string(),
eventType: TerminalEventTypeSchema,
headers: z.record(z.string(), z.array(z.string())),
body: z.string().optional()
});
export const HttpRequestEventSchema = HttpBaseEventSchema.extend({
eventType: z.literal(HttpEventTypeSchema.Values.request),
method: z.string(),
url: z.string()
});
export const HttpResponseEventSchema = HttpBaseEventSchema.extend({
eventType: z.literal(HttpEventTypeSchema.Values.response),
status: z.string()
});
export const HttpEventSchema = z.discriminatedUnion("eventType", [HttpRequestEventSchema, HttpResponseEventSchema]);
export const SanitizedSessionSchema = PamSessionsSchema.omit({
encryptedLogsBlob: true
}).extend({
logs: z.array(z.union([PamSessionCommandLogSchema, TerminalEventSchema]))
logs: z.array(z.union([PamSessionCommandLogSchema, HttpEventSchema, TerminalEventSchema]))
});
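An illustrative parse against the new discriminated union: eventType selects the request or response variant, z.coerce.date() accepts the ISO timestamp string, and the union has to be narrowed before the request-only fields are typed. The URL and IDs are placeholders.

const event = HttpEventSchema.parse({
  timestamp: "2025-12-15T09:54:02.000Z",
  requestId: "req_123",
  eventType: "request",
  method: "GET",
  url: "https://pam-target.internal/healthz", // placeholder URL
  headers: { accept: ["application/json"] }
});

if (event.eventType === "request") {
  // narrowed to the request variant, so method and url are available here
  console.log(event.method, event.url);
}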

View File

@@ -34,9 +34,40 @@ export const pamSessionServiceFactory = ({
licenseService,
kmsService
}: TPamSessionServiceFactoryDep) => {
// Helper to check and mark expired sessions when viewing session details (a safety net in addition to the scheduled expiry job)
// Only applies to non-gateway sessions (e.g., AWS IAM) - gateway sessions are managed by the gateway
// This is intentionally only called in getById (session details view), not in list
const checkAndExpireSessionIfNeeded = async <
T extends { id: string; status: string; expiresAt: Date | null; gatewayIdentityId?: string | null }
>(
session: T
): Promise<T> => {
// Skip gateway-based sessions - they have their own lifecycle managed by the gateway
if (session.gatewayIdentityId) {
return session;
}
const isActive = session.status === PamSessionStatus.Active || session.status === PamSessionStatus.Starting;
const isExpired = session.expiresAt && new Date(session.expiresAt) <= new Date();
if (isActive && isExpired) {
// eslint-disable-next-line @typescript-eslint/no-unsafe-assignment
const updatedSession = await pamSessionDAL.updateById(session.id, {
status: PamSessionStatus.Ended,
endedAt: new Date()
});
// eslint-disable-next-line @typescript-eslint/no-unsafe-argument
return { ...session, ...updatedSession };
}
return session;
};
const getById = async (sessionId: string, actor: OrgServiceActor) => {
const session = await pamSessionDAL.findById(sessionId);
if (!session) throw new NotFoundError({ message: `Session with ID '${sessionId}' not found` });
const sessionFromDb = await pamSessionDAL.findById(sessionId);
if (!sessionFromDb) throw new NotFoundError({ message: `Session with ID '${sessionId}' not found` });
const session = await checkAndExpireSessionIfNeeded(sessionFromDb);
const { permission } = await permissionService.getProjectPermission({
actor: actor.type,
@@ -116,7 +147,7 @@ export const pamSessionServiceFactory = ({
OrgPermissionSubjects.Gateway
);
if (session.gatewayIdentityId !== actor.id) {
if (session.gatewayIdentityId && session.gatewayIdentityId !== actor.id) {
throw new ForbiddenRequestError({ message: "Identity does not have access to update logs for this session" });
}
@@ -158,7 +189,7 @@ export const pamSessionServiceFactory = ({
OrgPermissionSubjects.Gateway
);
if (session.gatewayIdentityId !== actor.id) {
if (session.gatewayIdentityId && session.gatewayIdentityId !== actor.id) {
throw new ForbiddenRequestError({ message: "Identity does not have access to end this session" });
}
} else if (actor.type === ActorType.USER) {
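Illustrative behaviour of the new fallback, written as if the helper were callable directly (it is a closure inside the factory): a non-gateway session whose expiresAt has passed is flipped to ended on read, while a session with gatewayIdentityId set would be returned unchanged because the gateway owns its lifecycle. IDs and timestamps are placeholders.

const staleSession = {
  id: "ses_123", // placeholder ID
  status: PamSessionStatus.Active,
  expiresAt: new Date(Date.now() - 60_000),
  gatewayIdentityId: null
};

const refreshed = await checkAndExpireSessionIfNeeded(staleSession);
// refreshed.status === PamSessionStatus.Ended and pamSessionDAL.updateById was called once;
// a gateway-managed session would come back untouched.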

View File

@@ -1,13 +1,19 @@
import { z } from "zod";
import { PamSessionCommandLogSchema, SanitizedSessionSchema, TerminalEventSchema } from "./pam-session-schemas";
import {
HttpEventSchema,
PamSessionCommandLogSchema,
SanitizedSessionSchema,
TerminalEventSchema
} from "./pam-session-schemas";
export type TPamSessionCommandLog = z.infer<typeof PamSessionCommandLogSchema>;
export type TTerminalEvent = z.infer<typeof TerminalEventSchema>;
export type THttpEvent = z.infer<typeof HttpEventSchema>;
export type TPamSanitizedSession = z.infer<typeof SanitizedSessionSchema>;
// DTOs
export type TUpdateSessionLogsDTO = {
sessionId: string;
logs: (TPamSessionCommandLog | TTerminalEvent)[];
logs: (TPamSessionCommandLog | TTerminalEvent | THttpEvent)[];
};

View File

@@ -3,8 +3,11 @@ import { AbilityBuilder, createMongoAbility, MongoAbility } from "@casl/ability"
import {
ProjectPermissionActions,
ProjectPermissionAppConnectionActions,
ProjectPermissionApprovalRequestActions,
ProjectPermissionApprovalRequestGrantActions,
ProjectPermissionAuditLogsActions,
ProjectPermissionCertificateActions,
ProjectPermissionCertificateAuthorityActions,
ProjectPermissionCertificateProfileActions,
ProjectPermissionCmekActions,
ProjectPermissionCommitsActions,
@@ -44,9 +47,7 @@ const buildAdminPermissionRules = () => {
ProjectPermissionSub.Settings,
ProjectPermissionSub.Environments,
ProjectPermissionSub.Tags,
ProjectPermissionSub.AuditLogs,
ProjectPermissionSub.IpAllowList,
ProjectPermissionSub.CertificateAuthorities,
ProjectPermissionSub.PkiAlerts,
ProjectPermissionSub.PkiCollections,
ProjectPermissionSub.SshCertificateAuthorities,
@@ -67,6 +68,20 @@ const buildAdminPermissionRules = () => {
);
});
can([ProjectPermissionAuditLogsActions.Read], ProjectPermissionSub.AuditLogs);
can(
[
ProjectPermissionCertificateAuthorityActions.Read,
ProjectPermissionCertificateAuthorityActions.Create,
ProjectPermissionCertificateAuthorityActions.Edit,
ProjectPermissionCertificateAuthorityActions.Delete,
ProjectPermissionCertificateAuthorityActions.Renew,
ProjectPermissionCertificateAuthorityActions.SignIntermediate
],
ProjectPermissionSub.CertificateAuthorities
);
can(
[
ProjectPermissionPkiTemplateActions.Read,
@@ -95,7 +110,8 @@ const buildAdminPermissionRules = () => {
ProjectPermissionCertificateActions.Edit,
ProjectPermissionCertificateActions.Create,
ProjectPermissionCertificateActions.Delete,
ProjectPermissionCertificateActions.ReadPrivateKey
ProjectPermissionCertificateActions.ReadPrivateKey,
ProjectPermissionCertificateActions.Import
],
ProjectPermissionSub.Certificates
);
@@ -325,6 +341,16 @@ const buildAdminPermissionRules = () => {
can([ProjectPermissionPamSessionActions.Read], ProjectPermissionSub.PamSessions);
can(
[ProjectPermissionApprovalRequestActions.Read, ProjectPermissionApprovalRequestActions.Create],
ProjectPermissionSub.ApprovalRequests
);
can(
[ProjectPermissionApprovalRequestGrantActions.Read, ProjectPermissionApprovalRequestGrantActions.Revoke],
ProjectPermissionSub.ApprovalRequestGrants
);
return rules;
};
@@ -460,7 +486,7 @@ const buildMemberPermissionRules = () => {
can([ProjectPermissionActions.Read], ProjectPermissionSub.IpAllowList);
// double check if all CRUD are needed for CA and Certificates
can([ProjectPermissionActions.Read], ProjectPermissionSub.CertificateAuthorities);
can([ProjectPermissionCertificateAuthorityActions.Read], ProjectPermissionSub.CertificateAuthorities);
can([ProjectPermissionPkiTemplateActions.Read], ProjectPermissionSub.CertificateTemplates);
can(
@@ -468,7 +494,8 @@ const buildMemberPermissionRules = () => {
ProjectPermissionCertificateActions.Read,
ProjectPermissionCertificateActions.Edit,
ProjectPermissionCertificateActions.Create,
ProjectPermissionCertificateActions.Delete
ProjectPermissionCertificateActions.Delete,
ProjectPermissionCertificateActions.Import
],
ProjectPermissionSub.Certificates
);
@@ -571,6 +598,8 @@ const buildMemberPermissionRules = () => {
ProjectPermissionSub.PamAccounts
);
can([ProjectPermissionApprovalRequestActions.Create], ProjectPermissionSub.ApprovalRequests);
return rules;
};
@@ -599,7 +628,7 @@ const buildViewerPermissionRules = () => {
can(ProjectPermissionActions.Read, ProjectPermissionSub.Tags);
can(ProjectPermissionAuditLogsActions.Read, ProjectPermissionSub.AuditLogs);
can(ProjectPermissionActions.Read, ProjectPermissionSub.IpAllowList);
can(ProjectPermissionActions.Read, ProjectPermissionSub.CertificateAuthorities);
can(ProjectPermissionCertificateAuthorityActions.Read, ProjectPermissionSub.CertificateAuthorities);
can(ProjectPermissionCertificateActions.Read, ProjectPermissionSub.Certificates);
can(ProjectPermissionPkiTemplateActions.Read, ProjectPermissionSub.CertificateTemplates);
can(ProjectPermissionCmekActions.Read, ProjectPermissionSub.Cmek);
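With certificate authorities moved off the generic action enum, enforcement sites can gate the new granular actions individually. An illustrative check using @casl/ability's ForbiddenError, assuming permission is the project ability produced by permissionService:

import { ForbiddenError, MongoAbility } from "@casl/ability";

declare const permission: MongoAbility; // placeholder for the project ability

ForbiddenError.from(permission).throwUnlessCan(
  ProjectPermissionCertificateAuthorityActions.SignIntermediate,
  ProjectPermissionSub.CertificateAuthorities
);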

View File

@@ -23,12 +23,22 @@ export enum ProjectPermissionCommitsActions {
PerformRollback = "perform-rollback"
}
export enum ProjectPermissionCertificateAuthorityActions {
Read = "read",
Create = "create",
Edit = "edit",
Delete = "delete",
Renew = "renew",
SignIntermediate = "sign-intermediate"
}
export enum ProjectPermissionCertificateActions {
Read = "read",
Create = "create",
Edit = "edit",
Delete = "delete",
ReadPrivateKey = "read-private-key"
ReadPrivateKey = "read-private-key",
Import = "import"
}
export enum ProjectPermissionSecretActions {
@@ -214,6 +224,16 @@ export enum ProjectPermissionPamSessionActions {
// Terminate = "terminate"
}
export enum ProjectPermissionApprovalRequestActions {
Read = "read",
Create = "create"
}
export enum ProjectPermissionApprovalRequestGrantActions {
Read = "read",
Revoke = "revoke"
}
export const isCustomProjectRole = (slug: string) =>
!Object.values(ProjectMembershipRole).includes(slug as ProjectMembershipRole);
@@ -264,7 +284,9 @@ export enum ProjectPermissionSub {
PamResources = "pam-resources",
PamAccounts = "pam-accounts",
PamSessions = "pam-sessions",
CertificateProfiles = "certificate-profiles"
CertificateProfiles = "certificate-profiles",
ApprovalRequests = "approval-requests",
ApprovalRequestGrants = "approval-request-grants"
}
export type SecretSubjectFields = {
@@ -292,7 +314,8 @@ export type SecretSyncSubjectFields = {
};
export type PkiSyncSubjectFields = {
subscriberName: string;
subscriberName?: string;
name: string;
};
export type DynamicSecretSubjectFields = {
@@ -332,6 +355,26 @@ export type PkiSubscriberSubjectFields = {
// (dangtony98): consider adding [commonName] as a subject field in the future
};
export type CertificateAuthoritySubjectFields = {
name: string;
};
export type CertificateSubjectFields = {
commonName?: string;
altNames?: string;
serialNumber?: string;
friendlyName?: string;
status?: string;
};
export type CertificateProfileSubjectFields = {
slug: string;
};
export type CertificateTemplateV2SubjectFields = {
name: string;
};
export type AppConnectionSubjectFields = {
connectionId: string;
};
@@ -399,8 +442,17 @@ export type ProjectPermissionSet =
ProjectPermissionIdentityActions,
ProjectPermissionSub.Identity | (ForcedSubject<ProjectPermissionSub.Identity> & IdentityManagementSubjectFields)
]
| [ProjectPermissionActions, ProjectPermissionSub.CertificateAuthorities]
| [ProjectPermissionCertificateActions, ProjectPermissionSub.Certificates]
| [
ProjectPermissionCertificateAuthorityActions,
(
| ProjectPermissionSub.CertificateAuthorities
| (ForcedSubject<ProjectPermissionSub.CertificateAuthorities> & CertificateAuthoritySubjectFields)
)
]
| [
ProjectPermissionCertificateActions,
ProjectPermissionSub.Certificates | (ForcedSubject<ProjectPermissionSub.Certificates> & CertificateSubjectFields)
]
| [
ProjectPermissionPkiTemplateActions,
(
@@ -454,7 +506,15 @@ export type ProjectPermissionSet =
ProjectPermissionSub.PamAccounts | (ForcedSubject<ProjectPermissionSub.PamAccounts> & PamAccountSubjectFields)
]
| [ProjectPermissionPamSessionActions, ProjectPermissionSub.PamSessions]
| [ProjectPermissionCertificateProfileActions, ProjectPermissionSub.CertificateProfiles];
| [
ProjectPermissionCertificateProfileActions,
(
| ProjectPermissionSub.CertificateProfiles
| (ForcedSubject<ProjectPermissionSub.CertificateProfiles> & CertificateProfileSubjectFields)
)
]
| [ProjectPermissionApprovalRequestActions, ProjectPermissionSub.ApprovalRequests]
| [ProjectPermissionApprovalRequestGrantActions, ProjectPermissionSub.ApprovalRequestGrants];
const SECRET_PATH_MISSING_SLASH_ERR_MSG = "Invalid Secret Path; it must start with a '/'";
const SECRET_PATH_PERMISSION_OPERATOR_SCHEMA = z.union([
@@ -572,6 +632,17 @@ const SecretSyncConditionV2Schema = z
const PkiSyncConditionSchema = z
.object({
name: z.union([
z.string(),
z
.object({
[PermissionConditionOperators.$EQ]: PermissionConditionSchema[PermissionConditionOperators.$EQ],
[PermissionConditionOperators.$NEQ]: PermissionConditionSchema[PermissionConditionOperators.$NEQ],
[PermissionConditionOperators.$IN]: PermissionConditionSchema[PermissionConditionOperators.$IN],
[PermissionConditionOperators.$GLOB]: PermissionConditionSchema[PermissionConditionOperators.$GLOB]
})
.partial()
]),
subscriberName: z.union([
z.string(),
z
@@ -698,6 +769,7 @@ const PkiTemplateConditionSchema = z
z
.object({
[PermissionConditionOperators.$EQ]: PermissionConditionSchema[PermissionConditionOperators.$EQ],
[PermissionConditionOperators.$NEQ]: PermissionConditionSchema[PermissionConditionOperators.$NEQ],
[PermissionConditionOperators.$GLOB]: PermissionConditionSchema[PermissionConditionOperators.$GLOB],
[PermissionConditionOperators.$IN]: PermissionConditionSchema[PermissionConditionOperators.$IN]
})
@@ -749,6 +821,98 @@ const PamAccountConditionSchema = z
})
.partial();
const CertificateAuthorityConditionSchema = z
.object({
name: z.union([
z.string(),
z
.object({
[PermissionConditionOperators.$EQ]: PermissionConditionSchema[PermissionConditionOperators.$EQ],
[PermissionConditionOperators.$NEQ]: PermissionConditionSchema[PermissionConditionOperators.$NEQ],
[PermissionConditionOperators.$IN]: PermissionConditionSchema[PermissionConditionOperators.$IN],
[PermissionConditionOperators.$GLOB]: PermissionConditionSchema[PermissionConditionOperators.$GLOB]
})
.partial()
])
})
.partial();
const CertificateConditionSchema = z
.object({
commonName: z.union([
z.string(),
z
.object({
[PermissionConditionOperators.$EQ]: PermissionConditionSchema[PermissionConditionOperators.$EQ],
[PermissionConditionOperators.$NEQ]: PermissionConditionSchema[PermissionConditionOperators.$NEQ],
[PermissionConditionOperators.$IN]: PermissionConditionSchema[PermissionConditionOperators.$IN],
[PermissionConditionOperators.$GLOB]: PermissionConditionSchema[PermissionConditionOperators.$GLOB]
})
.partial()
]),
altNames: z.union([
z.string(),
z
.object({
[PermissionConditionOperators.$EQ]: PermissionConditionSchema[PermissionConditionOperators.$EQ],
[PermissionConditionOperators.$NEQ]: PermissionConditionSchema[PermissionConditionOperators.$NEQ],
[PermissionConditionOperators.$IN]: PermissionConditionSchema[PermissionConditionOperators.$IN],
[PermissionConditionOperators.$GLOB]: PermissionConditionSchema[PermissionConditionOperators.$GLOB]
})
.partial()
]),
serialNumber: z.union([
z.string(),
z
.object({
[PermissionConditionOperators.$EQ]: PermissionConditionSchema[PermissionConditionOperators.$EQ],
[PermissionConditionOperators.$NEQ]: PermissionConditionSchema[PermissionConditionOperators.$NEQ],
[PermissionConditionOperators.$IN]: PermissionConditionSchema[PermissionConditionOperators.$IN],
[PermissionConditionOperators.$GLOB]: PermissionConditionSchema[PermissionConditionOperators.$GLOB]
})
.partial()
]),
friendlyName: z.union([
z.string(),
z
.object({
[PermissionConditionOperators.$EQ]: PermissionConditionSchema[PermissionConditionOperators.$EQ],
[PermissionConditionOperators.$NEQ]: PermissionConditionSchema[PermissionConditionOperators.$NEQ],
[PermissionConditionOperators.$IN]: PermissionConditionSchema[PermissionConditionOperators.$IN],
[PermissionConditionOperators.$GLOB]: PermissionConditionSchema[PermissionConditionOperators.$GLOB]
})
.partial()
]),
status: z.union([
z.string(),
z
.object({
[PermissionConditionOperators.$EQ]: PermissionConditionSchema[PermissionConditionOperators.$EQ],
[PermissionConditionOperators.$NEQ]: PermissionConditionSchema[PermissionConditionOperators.$NEQ],
[PermissionConditionOperators.$IN]: PermissionConditionSchema[PermissionConditionOperators.$IN],
[PermissionConditionOperators.$GLOB]: PermissionConditionSchema[PermissionConditionOperators.$GLOB]
})
.partial()
])
})
.partial();
const CertificateProfileConditionSchema = z
.object({
slug: z.union([
z.string(),
z
.object({
[PermissionConditionOperators.$EQ]: PermissionConditionSchema[PermissionConditionOperators.$EQ],
[PermissionConditionOperators.$NEQ]: PermissionConditionSchema[PermissionConditionOperators.$NEQ],
[PermissionConditionOperators.$IN]: PermissionConditionSchema[PermissionConditionOperators.$IN],
[PermissionConditionOperators.$GLOB]: PermissionConditionSchema[PermissionConditionOperators.$GLOB]
})
.partial()
])
})
.partial();
const GeneralPermissionSchema = [
z.object({
subject: z.literal(ProjectPermissionSub.SecretApproval).describe("The entity this permission pertains to."),
@@ -828,18 +992,6 @@ const GeneralPermissionSchema = [
"Describe what action an entity can take."
)
}),
z.object({
subject: z.literal(ProjectPermissionSub.CertificateAuthorities).describe("The entity this permission pertains to."),
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionActions).describe(
"Describe what action an entity can take."
)
}),
z.object({
subject: z.literal(ProjectPermissionSub.Certificates).describe("The entity this permission pertains to."),
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionCertificateActions).describe(
"Describe what action an entity can take."
)
}),
z.object({
subject: z
.literal(ProjectPermissionSub.SshCertificateAuthorities)
@@ -967,6 +1119,18 @@ const GeneralPermissionSchema = [
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionPamSessionActions).describe(
"Describe what action an entity can take."
)
}),
z.object({
subject: z.literal(ProjectPermissionSub.ApprovalRequests).describe("The entity this permission pertains to."),
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionApprovalRequestActions).describe(
"Describe what action an entity can take."
)
}),
z.object({
subject: z.literal(ProjectPermissionSub.ApprovalRequestGrants).describe("The entity this permission pertains to."),
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionApprovalRequestGrantActions).describe(
"Describe what action an entity can take."
)
})
];
@@ -1130,7 +1294,30 @@ export const ProjectPermissionV2Schema = z.discriminatedUnion("subject", [
inverted: z.boolean().optional().describe("Whether rule allows or forbids."),
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionCertificateProfileActions).describe(
"Describe what action an entity can take."
)
),
conditions: CertificateProfileConditionSchema.describe(
"When specified, only matching conditions will be allowed to access given resource."
).optional()
}),
z.object({
subject: z.literal(ProjectPermissionSub.CertificateAuthorities).describe("The entity this permission pertains to."),
inverted: z.boolean().optional().describe("Whether rule allows or forbids."),
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionCertificateAuthorityActions).describe(
"Describe what action an entity can take."
),
conditions: CertificateAuthorityConditionSchema.describe(
"When specified, only matching conditions will be allowed to access given resource."
).optional()
}),
z.object({
subject: z.literal(ProjectPermissionSub.Certificates).describe("The entity this permission pertains to."),
inverted: z.boolean().optional().describe("Whether rule allows or forbids."),
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionCertificateActions).describe(
"Describe what action an entity can take."
),
conditions: CertificateConditionSchema.describe(
"When specified, only matching conditions will be allowed to access given resource."
).optional()
}),
...GeneralPermissionSchema
]);
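An illustrative ability check against the new certificate subject fields: since Certificates rules can now carry conditions (for example a $glob on commonName), a caller can pass a forced subject via @casl/ability's subject helper. The permission ability and the field values are placeholders.

import { MongoAbility, subject } from "@casl/ability";

declare const permission: MongoAbility; // placeholder for the project ability

const canImportCert = permission.can(
  ProjectPermissionCertificateActions.Import,
  subject(ProjectPermissionSub.Certificates, {
    commonName: "api.internal.example.com", // placeholder values
    status: "active"
  })
);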

View File

@@ -122,6 +122,11 @@ export const pkiAcmeChallengeDALFactory = (db: TDbClient) => {
const result = await (tx || db)(TableName.PkiAcmeChallenge)
.join(TableName.PkiAcmeAuth, `${TableName.PkiAcmeChallenge}.authId`, `${TableName.PkiAcmeAuth}.id`)
.join(TableName.PkiAcmeAccount, `${TableName.PkiAcmeAuth}.accountId`, `${TableName.PkiAcmeAccount}.id`)
.join(
TableName.PkiCertificateProfile,
`${TableName.PkiAcmeAccount}.profileId`,
`${TableName.PkiCertificateProfile}.id`
)
.select(
selectAllTableCols(TableName.PkiAcmeChallenge),
db.ref("id").withSchema(TableName.PkiAcmeAuth).as("authId"),
@@ -131,7 +136,9 @@ export const pkiAcmeChallengeDALFactory = (db: TDbClient) => {
db.ref("identifierValue").withSchema(TableName.PkiAcmeAuth).as("authIdentifierValue"),
db.ref("expiresAt").withSchema(TableName.PkiAcmeAuth).as("authExpiresAt"),
db.ref("id").withSchema(TableName.PkiAcmeAccount).as("accountId"),
db.ref("publicKeyThumbprint").withSchema(TableName.PkiAcmeAccount).as("accountPublicKeyThumbprint")
db.ref("publicKeyThumbprint").withSchema(TableName.PkiAcmeAccount).as("accountPublicKeyThumbprint"),
db.ref("profileId").withSchema(TableName.PkiAcmeAccount).as("profileId"),
db.ref("projectId").withSchema(TableName.PkiCertificateProfile).as("projectId")
)
// For all challenges, acquire update lock on the auth to avoid race conditions
.forUpdate(TableName.PkiAcmeAuth)
@@ -149,6 +156,8 @@ export const pkiAcmeChallengeDALFactory = (db: TDbClient) => {
authExpiresAt,
accountId,
accountPublicKeyThumbprint,
profileId,
projectId,
...challenge
} = result;
return {
@@ -161,7 +170,11 @@ export const pkiAcmeChallengeDALFactory = (db: TDbClient) => {
expiresAt: authExpiresAt,
account: {
id: accountId,
publicKeyThumbprint: accountPublicKeyThumbprint
publicKeyThumbprint: accountPublicKeyThumbprint,
project: {
id: projectId
},
profileId
}
}
};

View File

@@ -1,10 +1,13 @@
import axios, { AxiosError } from "axios";
import { TPkiAcmeChallenges } from "@app/db/schemas/pki-acme-challenges";
import { getConfig } from "@app/lib/config/env";
import { BadRequestError, NotFoundError } from "@app/lib/errors";
import { isPrivateIp } from "@app/lib/ip/ipRange";
import { logger } from "@app/lib/logger";
import { ActorType } from "@app/services/auth/auth-type";
import { EventType, TAuditLogServiceFactory } from "../audit-log/audit-log-types";
import { TPkiAcmeChallengeDALFactory } from "./pki-acme-challenge-dal";
import {
AcmeConnectionError,
@@ -18,17 +21,22 @@ import { TPkiAcmeChallengeServiceFactory } from "./pki-acme-types";
type TPkiAcmeChallengeServiceFactoryDep = {
acmeChallengeDAL: Pick<
TPkiAcmeChallengeDALFactory,
"transaction" | "findByIdForChallengeValidation" | "markAsValidCascadeById" | "markAsInvalidCascadeById"
| "transaction"
| "findByIdForChallengeValidation"
| "markAsValidCascadeById"
| "markAsInvalidCascadeById"
| "updateById"
>;
auditLogService: Pick<TAuditLogServiceFactory, "createAuditLog">;
};
export const pkiAcmeChallengeServiceFactory = ({
acmeChallengeDAL
acmeChallengeDAL,
auditLogService
}: TPkiAcmeChallengeServiceFactoryDep): TPkiAcmeChallengeServiceFactory => {
const appCfg = getConfig();
const validateChallengeResponse = async (challengeId: string): Promise<void> => {
const error: Error | undefined = await acmeChallengeDAL.transaction(async (tx) => {
const markChallengeAsReady = async (challengeId: string): Promise<TPkiAcmeChallenges> => {
return acmeChallengeDAL.transaction(async (tx) => {
logger.info({ challengeId }, "Validating ACME challenge response");
const challenge = await acmeChallengeDAL.findByIdForChallengeValidation(challengeId, tx);
if (!challenge) {
@@ -52,11 +60,27 @@ export const pkiAcmeChallengeServiceFactory = ({
if (challenge.type !== AcmeChallengeType.HTTP_01) {
throw new BadRequestError({ message: "Only HTTP-01 challenges are supported for now" });
}
let host = challenge.auth.identifierValue;
const host = challenge.auth.identifierValue;
// check if host is a private ip address
if (isPrivateIp(host)) {
throw new BadRequestError({ message: "Private IP addresses are not allowed" });
}
return acmeChallengeDAL.updateById(challengeId, { status: AcmeChallengeStatus.Processing }, tx);
});
};
const validateChallengeResponse = async (challengeId: string, retryCount: number): Promise<void> => {
logger.info({ challengeId, retryCount }, "Validating ACME challenge response");
const challenge = await acmeChallengeDAL.findByIdForChallengeValidation(challengeId);
if (!challenge) {
throw new NotFoundError({ message: "ACME challenge not found" });
}
if (challenge.status !== AcmeChallengeStatus.Processing) {
throw new BadRequestError({
message: `ACME challenge is ${challenge.status} instead of ${AcmeChallengeStatus.Processing}`
});
}
let host = challenge.auth.identifierValue;
if (appCfg.isAcmeDevelopmentMode && appCfg.ACME_DEVELOPMENT_HTTP01_CHALLENGE_HOST_OVERRIDES[host]) {
host = appCfg.ACME_DEVELOPMENT_HTTP01_CHALLENGE_HOST_OVERRIDES[host];
logger.warn(
@@ -91,10 +115,37 @@ export const pkiAcmeChallengeServiceFactory = ({
if (challengeResponseBody.trimEnd() !== expectedChallengeResponseBody) {
throw new AcmeIncorrectResponseError({ message: "ACME challenge response is not correct" });
}
await acmeChallengeDAL.markAsValidCascadeById(challengeId, tx);
logger.info({ challengeId }, "ACME challenge response is correct, marking challenge as valid");
await acmeChallengeDAL.markAsValidCascadeById(challengeId);
await auditLogService.createAuditLog({
projectId: challenge.auth.account.project.id,
actor: {
type: ActorType.ACME_ACCOUNT,
metadata: {
profileId: challenge.auth.account.profileId,
accountId: challenge.auth.account.id
}
},
event: {
type: EventType.PASS_ACME_CHALLENGE,
metadata: {
challengeId,
type: challenge.type as AcmeChallengeType
}
}
});
} catch (exp) {
// TODO: we should retry the challenge validation a few times, but let's keep it simple for now
await acmeChallengeDAL.markAsInvalidCascadeById(challengeId, tx);
let finalAttempt = false;
if (retryCount >= 2) {
logger.error(
exp,
`Last attempt to validate ACME challenge response failed, marking challenge ${challengeId} as invalid`
);
// This is the last attempt to validate the challenge response; if it fails, we mark the challenge as invalid
await acmeChallengeDAL.markAsInvalidCascadeById(challengeId);
finalAttempt = true;
}
try {
// Properly type and inspect the error
if (axios.isAxiosError(exp)) {
const axiosError = exp as AxiosError;
@@ -102,31 +153,51 @@ export const pkiAcmeChallengeServiceFactory = ({
const errorMessage = axiosError.message;
if (errorCode === "ECONNREFUSED" || errorMessage.includes("ECONNREFUSED")) {
return new AcmeConnectionError({ message: "Connection refused" });
throw new AcmeConnectionError({ message: "Connection refused" });
}
if (errorCode === "ENOTFOUND" || errorMessage.includes("ENOTFOUND")) {
return new AcmeDnsFailureError({ message: "Hostname could not be resolved (DNS failure)" });
throw new AcmeDnsFailureError({ message: "Hostname could not be resolved (DNS failure)" });
}
if (errorCode === "ECONNRESET" || errorMessage.includes("ECONNRESET")) {
throw new AcmeConnectionError({ message: "Connection reset by peer" });
}
if (errorCode === "ECONNABORTED" || errorMessage.includes("timeout")) {
logger.error(exp, "Connection timed out while validating ACME challenge response");
return new AcmeConnectionError({ message: "Connection timed out" });
throw new AcmeConnectionError({ message: "Connection timed out" });
}
logger.error(exp, "Unknown error validating ACME challenge response");
return new AcmeServerInternalError({ message: "Unknown error validating ACME challenge response" });
throw new AcmeServerInternalError({ message: "Unknown error validating ACME challenge response" });
}
if (exp instanceof Error) {
logger.error(exp, "Error validating ACME challenge response");
} else {
logger.error(exp, "Unknown error validating ACME challenge response");
return new AcmeServerInternalError({ message: "Unknown error validating ACME challenge response" });
throw exp;
}
logger.error(exp, "Unknown error validating ACME challenge response");
throw new AcmeServerInternalError({ message: "Unknown error validating ACME challenge response" });
} catch (outterExp) {
await auditLogService.createAuditLog({
projectId: challenge.auth.account.project.id,
actor: {
type: ActorType.ACME_ACCOUNT,
metadata: {
profileId: challenge.auth.account.profileId,
accountId: challenge.auth.account.id
}
},
event: {
type: finalAttempt ? EventType.FAIL_ACME_CHALLENGE : EventType.ATTEMPT_ACME_CHALLENGE,
metadata: {
challengeId,
type: challenge.type as AcmeChallengeType,
retryCount,
errorMessage: exp instanceof Error ? exp.message : "Unknown error"
}
return exp;
}
});
if (error) {
throw error;
throw outterExp;
}
}
};
return { validateChallengeResponse };
return { markChallengeAsReady, validateChallengeResponse };
};
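A sketch of how the reworked two-step flow might be driven; the queue wiring itself is not part of this diff. markChallengeAsReady flips the challenge to processing, validation is then attempted up to three times (retryCount 0 through 2), earlier failures are logged as ATTEMPT_ACME_CHALLENGE and leave the challenge retryable, and the final failure cascades it to invalid with a FAIL_ACME_CHALLENGE log.

const challengeId = "chl_123"; // placeholder ID

await acmeChallengeService.markChallengeAsReady(challengeId);

for (let retryCount = 0; retryCount <= 2; retryCount += 1) {
  try {
    await acmeChallengeService.validateChallengeResponse(challengeId, retryCount);
    break; // challenge marked valid, stop retrying
  } catch (err) {
    if (retryCount === 2) throw err; // final attempt already marked the challenge invalid
    // a real worker would back off or re-enqueue before the next attempt
  }
}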

View File

@@ -4,6 +4,7 @@ import { TDbClient } from "@app/db";
import { TableName } from "@app/db/schemas";
import { DatabaseError } from "@app/lib/errors";
import { ormify, selectAllTableCols, sqlNestRelationships } from "@app/lib/knex";
import { CertificateRequestStatus } from "@app/services/certificate-request/certificate-request-types";
export type TPkiAcmeOrderDALFactory = ReturnType<typeof pkiAcmeOrderDALFactory>;
@@ -19,6 +20,43 @@ export const pkiAcmeOrderDALFactory = (db: TDbClient) => {
}
};
const findWithCertificateRequestForSync = async (id: string, tx?: Knex) => {
try {
const order = await (tx || db)(TableName.PkiAcmeOrder)
.leftJoin(
TableName.CertificateRequests,
`${TableName.PkiAcmeOrder}.id`,
`${TableName.CertificateRequests}.acmeOrderId`
)
.select(
selectAllTableCols(TableName.PkiAcmeOrder),
db.ref("id").withSchema(TableName.CertificateRequests).as("certificateRequestId"),
db.ref("status").withSchema(TableName.CertificateRequests).as("certificateRequestStatus"),
db.ref("certificateId").withSchema(TableName.CertificateRequests).as("certificateId")
)
.forUpdate(TableName.PkiAcmeOrder)
.where(`${TableName.PkiAcmeOrder}.id`, id)
.first();
if (!order) {
return null;
}
const { certificateRequestId, certificateRequestStatus, certificateId, ...details } = order;
return {
...details,
certificateRequest:
certificateRequestId && certificateRequestStatus
? {
id: certificateRequestId,
status: certificateRequestStatus as CertificateRequestStatus,
certificateId
}
: undefined
};
} catch (error) {
throw new DatabaseError({ error, name: "Find PKI ACME order by id with certificate request" });
}
};
const findByAccountAndOrderIdWithAuthorizations = async (accountId: string, orderId: string, tx?: Knex) => {
try {
const rows = await (tx || db)(TableName.PkiAcmeOrder)
@@ -72,6 +110,7 @@ export const pkiAcmeOrderDALFactory = (db: TDbClient) => {
return {
...pkiAcmeOrderOrm,
findByIdForFinalization,
findWithCertificateRequestForSync,
findByAccountAndOrderIdWithAuthorizations,
listByAccountId
};
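An illustrative caller for the new locking query, assuming the ormify-provided transaction helper: the order row is taken forUpdate inside the transaction, and the joined certificate request (if present) tells the sync path whether a certificate has already been issued for the ACME order. NotFoundError comes from @app/lib/errors; the order ID is a placeholder.

const orderId = "ord_123"; // placeholder ID

await pkiAcmeOrderDAL.transaction(async (tx) => {
  const order = await pkiAcmeOrderDAL.findWithCertificateRequestForSync(orderId, tx);
  if (!order) throw new NotFoundError({ message: "ACME order not found" });

  if (order.certificateRequest?.certificateId) {
    // a certificate already exists for this order, so the sync can attach it
    // and advance the ACME order state accordingly
  }
});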

Some files were not shown because too many files have changed in this diff.