Merge branch 'main' of https://github.com/Infisical/infisical into aws-lambda-secret-sync-docs

Piyush Gupta
2025-11-21 19:33:29 +05:30
240 changed files with 8268 additions and 1545 deletions

1
.gitignore vendored
View File

@@ -74,3 +74,4 @@ cli/test/infisical-merge
backend/bdd/.bdd-infisical-bootstrap-result.json
/npm/bin
__pycache__

View File

@@ -54,4 +54,6 @@ k8-operator/config/samples/universalAuthIdentitySecret.yaml:generic-api-key:8
docs/integrations/app-connections/redis.mdx:generic-api-key:80
backend/src/ee/services/app-connections/chef/chef-connection-fns.ts:private-key:42
docs/documentation/platform/pki/enrollment-methods/api.mdx:generic-api-key:93
docs/documentation/platform/pki/enrollment-methods/api.mdx:private-key:139
docs/documentation/platform/pki/certificate-syncs/aws-secrets-manager.mdx:private-key:62
docs/documentation/platform/pki/certificate-syncs/chef.mdx:private-key:61

View File

@@ -3,6 +3,7 @@ import os
import pathlib
import typing
from copy import deepcopy
import httpx
from behave.runner import Context
@@ -185,28 +186,33 @@ def bootstrap_infisical(context: Context):
def before_all(context: Context):
base_vars = {
"BASE_URL": BASE_URL,
"PEBBLE_URL": PEBBLE_URL,
}
if BOOTSTRAP_INFISICAL:
details = bootstrap_infisical(context)
context.vars = {
"BASE_URL": BASE_URL,
"PEBBLE_URL": PEBBLE_URL,
vars = base_vars | {
"PROJECT_ID": details["project"]["id"],
"CERT_CA_ID": details["ca"]["id"],
"CERT_TEMPLATE_ID": details["cert_template"]["id"],
"AUTH_TOKEN": details["auth_token"],
}
else:
context.vars = {
"BASE_URL": BASE_URL,
"PEBBLE_URL": PEBBLE_URL,
vars = base_vars | {
"PROJECT_ID": PROJECT_ID,
"CERT_CA_ID": CERT_CA_ID,
"CERT_TEMPLATE_ID": CERT_TEMPLATE_ID,
"AUTH_TOKEN": AUTH_TOKEN,
}
context._initial_vars = vars
context.http_client = httpx.Client(base_url=BASE_URL)
def before_scenario(context: Context, scenario: typing.Any):
context.vars = deepcopy(context._initial_vars)
def after_scenario(context: Context, scenario: typing.Any):
if hasattr(context, "web_server"):
context.web_server.shutdown_and_server_close()

View File

@@ -221,7 +221,6 @@ Feature: Access Control
| order | .authorizations[0].uri | auth_uri | {auth_uri} | |
| order | .authorizations[0].body.challenges[0].url | challenge_uri | {challenge_uri} | {} |
Scenario Outline: URL mismatch
Given I have an ACME cert profile as "acme_profile"
When I have an ACME client connecting to "{BASE_URL}/api/v1/pki/acme/profiles/{acme_profile.id}/directory"
@@ -271,3 +270,52 @@ Feature: Access Control
| order | .authorizations[0].uri | auth_uri | {auth_uri} | https://example.com/acmes/auths/FOOBAR | URL mismatch in the protected header |
| order | .authorizations[0].body.challenges[0].url | challenge_uri | {challenge_uri} | BAD | Invalid URL in the protected header |
| order | .authorizations[0].body.challenges[0].url | challenge_uri | {challenge_uri} | https://example.com/acmes/challenges/FOOBAR | URL mismatch in the protected header |
Scenario Outline: Send KID and JWK at the same time
Given I have an ACME cert profile as "acme_profile"
When I have an ACME client connecting to "{BASE_URL}/api/v1/pki/acme/profiles/{acme_profile.id}/directory"
Then I register a new ACME account with email fangpen@infisical.com and EAB key id "{acme_profile.eab_kid}" with secret "{acme_profile.eab_secret}" as acme_account
And I memorize acme_account.uri with jq "capture("/(?<id>[^/]+)$") | .id" as account_id
When I create certificate signing request as csr
Then I add names to certificate signing request csr
"""
{
"COMMON_NAME": "localhost"
}
"""
Then I create a RSA private key pair as cert_key
And I sign the certificate signing request csr with private key cert_key and output it as csr_pem in PEM format
And I submit the certificate signing request PEM csr_pem certificate order to the ACME server as order
And I peak and memorize the next nonce as nonce_value
And I memorize <src_var> with jq "<jq>" as <dest_var>
When I send a raw ACME request to "<url>"
"""
{
"protected": {
"alg": "RS256",
"nonce": "{nonce_value}",
"url": "<url>",
"kid": "{acme_account.uri}",
"jwk": {
"n": "mmEWxUv2lUYDZe_M2FXJ_WDXgHoEG7PVvg-dfz1STzyMwx0qvM66KMenXSyVA0r-_Ssb6p8VexSWGOFKskM4ryKUihn2KNH5e8nXZBqzqYeKQ8vqaCdaWzTxFI1dg0xhk0CWptkZHxpRpLalztFJ1Pq7L2qvQOM2YT7wPYbwQhpaSiVNXAb1W4FwAPyC04v1mHehvST-esaDT7j_5-eU5cCcmyi4_g5nBawcinOjj5o3VCg4X8UjK--AjhAyYHx1nRMr-7xk4x-0VIpQ_OODjLB3WzN8s1YEb0Jx5Bv1JyeCw35zahqs3fAFyRje-p5ENk9NCxfz5x9ZGkszkkNt0Q",
"e": "AQAB",
"kty": "RSA"
}
},
"payload": {}
}
"""
Then the value response.status_code should be equal to 400
And the value response with jq ".status" should be equal to 400
And the value response with jq ".type" should be equal to "urn:ietf:params:acme:error:malformed"
And the value response with jq ".detail" should be equal to "Both JWK and KID are provided in the protected header"
Examples: Endpoints
| src_var | jq | dest_var | url |
| order | . | not_used | {BASE_URL}/api/v1/pki/acme/profiles/{acme_profile.id}/accounts/{account_id}/orders |
| order | . | not_used | {BASE_URL}/api/v1/pki/acme/profiles/{acme_profile.id}/new-order |
| order | . | not_used | {order.uri} |
| order | . | not_used | {order.uri}/finalize |
| order | . | not_used | {order.uri}/certificate |
| order | .authorizations[0].uri | auth_uri | {auth_uri} |
| order | .authorizations[0].body.challenges[0].url | challenge_uri | {challenge_uri} |
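
These 400s follow RFC 8555 §6.2: a JWS protected header must carry exactly one of "jwk" or "kid". A minimal TypeScript sketch of the server-side guard these examples exercise (type and function names here are illustrative, not Infisical's actual implementation):

type AcmeProtectedHeader = {
  alg: string;
  nonce: string;
  url: string;
  kid?: string;
  jwk?: Record<string, string>;
};

// Illustrative guard: reject a header that references the account key twice.
const assertSingleKeyReference = (header: AcmeProtectedHeader): void => {
  if (header.kid !== undefined && header.jwk !== undefined) {
    // Surfaced to the client as HTTP 400 with
    // type "urn:ietf:params:acme:error:malformed".
    throw new Error("Both JWK and KID are provided in the protected header");
  }
};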

View File

@@ -6,13 +6,32 @@ Feature: Account
Then I register a new ACME account with email fangpen@infisical.com and EAB key id "{acme_profile.eab_kid}" with secret "{acme_profile.eab_secret}" as acme_account
And the value acme_account.uri with jq "." should match pattern {BASE_URL}/api/v1/pki/acme/profiles/{acme_profile.id}/accounts/(.+)
Scenario: Create a new account with the same key pair twice
Given I have an ACME cert profile as "acme_profile"
When I have an ACME client connecting to "{BASE_URL}/api/v1/pki/acme/profiles/{acme_profile.id}/directory"
Then I register a new ACME account with email fangpen@infisical.com and EAB key id "{acme_profile.eab_kid}" with secret "{acme_profile.eab_secret}" as acme_account
And I memorize acme_account.uri as kid
And I register a new ACME account with email fangpen@infisical.com and EAB key id "{acme_profile.eab_kid}" with secret "{acme_profile.eab_secret}" as acme_account2
And the value error.__class__.__name__ should be equal to "ConflictError"
And the value error.location should be equal to "{kid}"
Scenario: Find an existing account
Given I have an ACME cert profile as "acme_profile"
When I have an ACME client connecting to "{BASE_URL}/api/v1/pki/acme/profiles/{acme_profile.id}/directory"
Then I register a new ACME account with email fangpen@infisical.com and EAB key id "{acme_profile.eab_kid}" with secret "{acme_profile.eab_secret}" as acme_account
And I memorize acme_account.uri as account_uri
And I find the existing ACME account with email fangpen@infisical.com and EAB key id "{acme_profile.eab_kid}" with secret "{acme_profile.eab_secret}" as acme_account
And the value acme_account.uri should be equal to "{account_uri}"
And I find the existing ACME account with email fangpen@infisical.com and EAB key id "{acme_profile.eab_kid}" with secret "{acme_profile.eab_secret}" as retrieved_account
And the value retrieved_account.uri should be equal to "{account_uri}"
# Note: This is a very special case for cert-manager.
Scenario: Create a new account with EAB then retrieve it without EAB
Given I have an ACME cert profile as "acme_profile"
When I have an ACME client connecting to "{BASE_URL}/api/v1/pki/acme/profiles/{acme_profile.id}/directory"
Then I register a new ACME account with email fangpen@infisical.com and EAB key id "{acme_profile.eab_kid}" with secret "{acme_profile.eab_secret}" as acme_account
And I memorize acme_account.uri as account_uri
And I find the existing ACME account without EAB as retrieved_account
And the value error should be absent
And the value retrieved_account.uri should be equal to "{account_uri}"
Scenario: Create a new account without EAB
Given I have an ACME cert profile as "acme_profile"

View File

@@ -9,6 +9,9 @@ Feature: Directory
{
"newNonce": "{BASE_URL}/api/v1/pki/acme/profiles/{acme_profile.id}/new-nonce",
"newAccount": "{BASE_URL}/api/v1/pki/acme/profiles/{acme_profile.id}/new-account",
"newOrder": "{BASE_URL}/api/v1/pki/acme/profiles/{acme_profile.id}/new-order"
"newOrder": "{BASE_URL}/api/v1/pki/acme/profiles/{acme_profile.id}/new-order",
"meta": {
"externalAccountRequired": true
}
}
"""

View File

@@ -387,6 +387,9 @@ def register_account_with_eab(
):
acme_client = context.acme_client
account_public_key = acme_client.net.key.public_key()
if not only_return_existing:
# clear the account in case we want to register twice
acme_client.net.account = None
if hasattr(context, "alt_eab_url"):
eab_directory = messages.Directory.from_json(
{"newAccount": context.alt_eab_url}
@@ -406,8 +409,14 @@ def register_account_with_eab(
only_return_existing=only_return_existing,
)
try:
context.vars[account_var] = acme_client.new_account(registration)
if not only_return_existing:
context.vars[account_var] = acme_client.new_account(registration)
else:
context.vars[account_var] = acme_client.query_registration(
acme_client.net.account
)
except Exception as exp:
logger.error(f"Failed to register: {exp}", exc_info=True)
context.vars["error"] = exp
@@ -434,6 +443,17 @@ def step_impl(context: Context, email: str, kid: str, secret: str, account_var:
)
@then("I find the existing ACME account without EAB as {account_var}")
def step_impl(context: Context, account_var: str):
acme_client = context.acme_client
# registration = messages.RegistrationResource.from_json(dict(uri=""))
registration = acme_client.net.account
try:
context.vars[account_var] = acme_client.query_registration(registration)
except Exception as exp:
context.vars["error"] = exp
@then("I register a new ACME account with email {email} without EAB")
def step_impl(context: Context, email: str):
acme_client = context.acme_client
@@ -600,6 +620,19 @@ def step_impl(context: Context, var_path: str, jq_query: str):
)
@then("the value {var_path} with should be absent")
def step_impl(context: Context, var_path: str):
try:
value = eval_var(context, var_path)
except Exception as exp:
if isinstance(exp, KeyError):
return
raise
assert False, (
f"value at {var_path!r} should be absent, but we got this instead: {value!r}"
)
@then('the value {var_path} with jq "{jq_query}" should be equal to {expected}')
def step_impl(context: Context, var_path: str, jq_query: str, expected: str):
value, result = apply_value_with_jq(
@@ -615,13 +648,14 @@ def step_impl(context: Context, var_path: str, jq_query: str, expected: str):
@then('the value {var_path} with jq "{jq_query}" should match pattern {regex}')
def step_impl(context: Context, var_path: str, jq_query: str, regex: str):
actual_regex = replace_vars(regex, context.vars)
value, result = apply_value_with_jq(
context=context,
var_path=var_path,
jq_query=jq_query,
)
assert re.match(replace_vars(regex, context.vars), result), (
f"{json.dumps(value)!r} with jq {jq_query!r}, the result {json.dumps(result)!r} does not match {regex!r}"
assert re.match(actual_regex, result), (
f"{json.dumps(value)!r} with jq {jq_query!r}, the result {json.dumps(result)!r} does not match {actual_regex!r}"
)

View File

@@ -15,6 +15,7 @@ from josepy import JSONObjectWithFields
ACC_KEY_BITS = 2048
ACC_KEY_PUBLIC_EXPONENT = 65537
NOCK_API_PREFIX = "/api/__bdd_nock__"
logger = logging.getLogger(__name__)
faker = Faker()
@@ -265,7 +266,7 @@ def x509_cert_to_dict(cert: x509.Certificate) -> dict:
def define_nock(context: Context, definitions: list[dict]):
jwt_token = context.vars["AUTH_TOKEN"]
response = context.http_client.post(
"/api/v1/bdd-nock/define",
f"{NOCK_API_PREFIX}/define",
headers=dict(authorization="Bearer {}".format(jwt_token)),
json=dict(definitions=definitions),
)
@@ -275,7 +276,7 @@ def define_nock(context: Context, definitions: list[dict]):
def restore_nock(context: Context):
jwt_token = context.vars["AUTH_TOKEN"]
response = context.http_client.post(
"/api/v1/bdd-nock/restore",
f"{NOCK_API_PREFIX}/restore",
headers=dict(authorization="Bearer {}".format(jwt_token)),
json=dict(),
)
@@ -285,7 +286,7 @@ def restore_nock(context: Context):
def clean_all_nock(context: Context):
jwt_token = context.vars["AUTH_TOKEN"]
response = context.http_client.post(
"/api/v1/bdd-nock/clean-all",
f"{NOCK_API_PREFIX}/clean-all",
headers=dict(authorization="Bearer {}".format(jwt_token)),
json=dict(),
)

View File

@@ -1,7 +1,12 @@
import { seedData1 } from "@app/db/seed-data";
import { ApproverType } from "@app/ee/services/access-approval-policy/access-approval-policy-types";
const createPolicy = async (dto: { name: string; secretPath: string; approvers: {type: ApproverType.User, id: string}[]; approvals: number }) => {
const createPolicy = async (dto: {
name: string;
secretPath: string;
approvers: { type: ApproverType.User; id: string }[];
approvals: number;
}) => {
const res = await testServer.inject({
method: "POST",
url: `/api/v1/secret-approvals`,
@@ -27,7 +32,7 @@ describe("Secret approval policy router", async () => {
const policy = await createPolicy({
secretPath: "/",
approvals: 1,
approvers: [{id:seedData1.id, type: ApproverType.User}],
approvers: [{ id: seedData1.id, type: ApproverType.User }],
name: "test-policy"
});

View File

@@ -1,6 +1,8 @@
{
"watch": ["src"],
"watch": [
"src"
],
"ext": ".ts,.js",
"ignore": [],
"exec": "tsx ./src/main.ts | pino-pretty --colorize --colorizeObjects --singleLine"
}
"exec": "tsx --tsconfig=./tsconfig.dev.json --inspect=0.0.0.0:9229 ./src/main.ts | pino-pretty --colorize --colorizeObjects --singleLine"
}

View File

@@ -128,6 +128,7 @@
"sjcl": "^1.0.8",
"smee-client": "^2.0.0",
"snowflake-sdk": "^1.14.0",
"ssh2": "^1.17.0",
"tedious": "^18.2.1",
"tweetnacl": "^1.0.3",
"tweetnacl-util": "^0.15.1",
@@ -164,6 +165,7 @@
"@types/resolve": "^1.20.6",
"@types/safe-regex": "^1.1.6",
"@types/sjcl": "^1.0.34",
"@types/ssh2": "^1.15.5",
"@types/uuid": "^9.0.7",
"@typescript-eslint/eslint-plugin": "^6.20.0",
"@typescript-eslint/parser": "^6.20.0",
@@ -15634,6 +15636,33 @@
"dev": true,
"license": "MIT"
},
"node_modules/@types/ssh2": {
"version": "1.15.5",
"resolved": "https://registry.npmjs.org/@types/ssh2/-/ssh2-1.15.5.tgz",
"integrity": "sha512-N1ASjp/nXH3ovBHddRJpli4ozpk6UdDYIX4RJWFa9L1YKnzdhTlVmiGHm4DZnj/jLbqZpes4aeR30EFGQtvhQQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"@types/node": "^18.11.18"
}
},
"node_modules/@types/ssh2/node_modules/@types/node": {
"version": "18.19.130",
"resolved": "https://registry.npmjs.org/@types/node/-/node-18.19.130.tgz",
"integrity": "sha512-GRaXQx6jGfL8sKfaIDD6OupbIHBr9jv7Jnaml9tB7l4v068PAOXqfcujMMo5PhbIs6ggR1XODELqahT2R8v0fg==",
"dev": true,
"license": "MIT",
"dependencies": {
"undici-types": "~5.26.4"
}
},
"node_modules/@types/ssh2/node_modules/undici-types": {
"version": "5.26.5",
"resolved": "https://registry.npmjs.org/undici-types/-/undici-types-5.26.5.tgz",
"integrity": "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==",
"dev": true,
"license": "MIT"
},
"node_modules/@types/sshpk": {
"version": "1.10.3",
"resolved": "https://registry.npmjs.org/@types/sshpk/-/sshpk-1.10.3.tgz",
@@ -18061,6 +18090,15 @@
"dev": true,
"license": "MIT"
},
"node_modules/buildcheck": {
"version": "0.0.6",
"resolved": "https://registry.npmjs.org/buildcheck/-/buildcheck-0.0.6.tgz",
"integrity": "sha512-8f9ZJCUXyT1M35Jx7MkBgmBMo3oHTTBIPLiY9xyL0pl3T5RwcPEY8cUHr5LBNfu/fk6c2T4DJZuVM/8ZZT2D2A==",
"optional": true,
"engines": {
"node": ">=10.0.0"
}
},
"node_modules/bullmq": {
"version": "5.4.2",
"resolved": "https://registry.npmjs.org/bullmq/-/bullmq-5.4.2.tgz",
@@ -18901,6 +18939,20 @@
"node": ">= 0.10"
}
},
"node_modules/cpu-features": {
"version": "0.0.10",
"resolved": "https://registry.npmjs.org/cpu-features/-/cpu-features-0.0.10.tgz",
"integrity": "sha512-9IkYqtX3YHPCzoVg1Py+o9057a3i0fp7S530UWokCSaFVTc7CwXPRiOjRjBQQ18ZCNafx78YfnG+HALxtVmOGA==",
"hasInstallScript": true,
"optional": true,
"dependencies": {
"buildcheck": "~0.0.6",
"nan": "^2.19.0"
},
"engines": {
"node": ">=10.0.0"
}
},
"node_modules/create-hash": {
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/create-hash/-/create-hash-1.2.0.tgz",
@@ -24996,9 +25048,9 @@
}
},
"node_modules/nan": {
"version": "2.22.2",
"resolved": "https://registry.npmjs.org/nan/-/nan-2.22.2.tgz",
"integrity": "sha512-DANghxFkS1plDdRsX0X9pm0Z6SJNN6gBdtXfanwoZ8hooC5gosGFSBGRYHUVPz1asKA/kMRqDRdHrluZ61SpBQ==",
"version": "2.23.1",
"resolved": "https://registry.npmjs.org/nan/-/nan-2.23.1.tgz",
"integrity": "sha512-r7bBUGKzlqk8oPBDYxt6Z0aEdF1G1rwlMcLk8LCOMbOzf0mG+JUfUzG4fIMWwHWP0iyaLWEQZJmtB7nOHEm/qw==",
"license": "MIT"
},
"node_modules/nanoid": {
@@ -31492,6 +31544,23 @@
"node": ">= 0.6"
}
},
"node_modules/ssh2": {
"version": "1.17.0",
"resolved": "https://registry.npmjs.org/ssh2/-/ssh2-1.17.0.tgz",
"integrity": "sha512-wPldCk3asibAjQ/kziWQQt1Wh3PgDFpC0XpwclzKcdT1vql6KeYxf5LIt4nlFkUeR8WuphYMKqUA56X4rjbfgQ==",
"hasInstallScript": true,
"dependencies": {
"asn1": "^0.2.6",
"bcrypt-pbkdf": "^1.0.2"
},
"engines": {
"node": ">=10.16.0"
},
"optionalDependencies": {
"cpu-features": "~0.0.10",
"nan": "^2.23.0"
}
},
"node_modules/sshpk": {
"version": "1.16.1",
"resolved": "https://registry.npmjs.org/sshpk/-/sshpk-1.16.1.tgz",

View File

@@ -32,7 +32,7 @@
"binary:clean": "rm -rf ./dist && rm -rf ./binary",
"binary:rename-imports": "ts-node ./scripts/rename-mjs.ts",
"test": "echo \"Error: no test specified\" && exit 1",
"dev": "tsx watch --clear-screen=false ./src/main.ts | pino-pretty --colorize --colorizeObjects --singleLine",
"dev": "tsx watch --clear-screen=false ./src/main.ts --config tsconfig.dev.json | pino-pretty --colorize --colorizeObjects --singleLine",
"dev:docker": "nodemon",
"build": "tsup --sourcemap",
"build:frontend": "npm run build --prefix ../frontend",
@@ -110,6 +110,7 @@
"@types/resolve": "^1.20.6",
"@types/safe-regex": "^1.1.6",
"@types/sjcl": "^1.0.34",
"@types/ssh2": "^1.15.5",
"@types/uuid": "^9.0.7",
"@typescript-eslint/eslint-plugin": "^6.20.0",
"@typescript-eslint/parser": "^6.20.0",
@@ -257,6 +258,7 @@
"sjcl": "^1.0.8",
"smee-client": "^2.0.0",
"snowflake-sdk": "^1.14.0",
"ssh2": "^1.17.0",
"tedious": "^18.2.1",
"tweetnacl": "^1.0.3",
"tweetnacl-util": "^0.15.1",
@@ -264,4 +266,4 @@
"zod": "^3.22.4",
"zod-to-json-schema": "^3.24.5"
}
}
}

View File

@@ -2,7 +2,7 @@
import { execSync } from "child_process";
import path from "path";
import promptSync from "prompt-sync";
import slugify from "@sindresorhus/slugify"
import slugify from "@sindresorhus/slugify";
const prompt = promptSync({ sigint: true });

View File

@@ -14,13 +14,16 @@ export async function up(knex: Knex): Promise<void> {
if (rows.length > 0) {
for (let i = 0; i < rows.length; i += BATCH_SIZE) {
const batch = rows.slice(i, i + BATCH_SIZE);
const ids = batch.map((row) => row.id);
// eslint-disable-next-line no-await-in-loop
await knex(TableName.SecretApprovalPolicy)
.whereIn(
"id",
batch.map((row) => row.id)
)
.update({ shouldCheckSecretPermission: true });
await knex.raw(
`
UPDATE ??
SET ?? = true
WHERE ?? IN (${ids.map(() => "?").join(",")})
`,
[TableName.SecretApprovalPolicy, "shouldCheckSecretPermission", "id", ids]
);
}
}
}
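
The raw statement keeps everything parameterized: knex binds identifiers with ?? and values with ?, so a batch of N ids expands to N value placeholders. A self-contained sketch of the same pattern (the connection setup is an assumption; table and column names are the ones used by the migration):

import knex from "knex";

const db = knex({ client: "pg", connection: process.env.DATABASE_URL });

const setFlagForBatch = async (ids: string[]) => {
  // ?? becomes a quoted identifier, ? a bound value; three ids yield three placeholders.
  await db.raw(
    `UPDATE ?? SET ?? = true WHERE ?? IN (${ids.map(() => "?").join(",")})`,
    ["secret_approval_policies", "shouldCheckSecretPermission", "id", ...ids]
  );
};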

View File

@@ -0,0 +1,32 @@
import { Knex } from "knex";
import { dropConstraintIfExists } from "@app/db/migrations/utils/dropConstraintIfExists";
import { TableName } from "@app/db/schemas";
const CONSTRAINT_NAME = "unique_pki_acme_account_public_key_and_profile_id";
export async function up(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.PkiAcmeAccount)) {
const hasProfileId = await knex.schema.hasColumn(TableName.PkiAcmeAccount, "profileId");
const hasPublicKeyThumbprint = await knex.schema.hasColumn(TableName.PkiAcmeAccount, "publicKeyThumbprint");
if (hasProfileId && hasPublicKeyThumbprint) {
await knex.schema.alterTable(TableName.PkiAcmeAccount, (table) => {
table.unique(["profileId", "publicKeyThumbprint"], { indexName: CONSTRAINT_NAME });
});
}
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.PkiAcmeAccount)) {
const hasProfileId = await knex.schema.hasColumn(TableName.PkiAcmeAccount, "profileId");
const hasPublicKeyThumbprint = await knex.schema.hasColumn(TableName.PkiAcmeAccount, "publicKeyThumbprint");
await knex.schema.alterTable(TableName.PkiAcmeAccount, async () => {
if (hasProfileId && hasPublicKeyThumbprint) {
await dropConstraintIfExists(TableName.PkiAcmeAccount, CONSTRAINT_NAME, knex);
}
});
}
}

View File

@@ -0,0 +1,19 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
if (await knex.schema.hasColumn(TableName.SecretApprovalPolicy, "shouldCheckSecretPermission")) {
await knex.schema.alterTable(TableName.SecretApprovalPolicy, (t) => {
t.dropColumn("shouldCheckSecretPermission");
});
}
}
export async function down(knex: Knex): Promise<void> {
if (!(await knex.schema.hasColumn(TableName.SecretApprovalPolicy, "shouldCheckSecretPermission"))) {
await knex.schema.alterTable(TableName.SecretApprovalPolicy, (t) => {
t.boolean("shouldCheckSecretPermission").nullable();
});
}
}

View File

@@ -17,8 +17,7 @@ export const SecretApprovalPoliciesSchema = z.object({
updatedAt: z.date(),
enforcementLevel: z.string().default("hard"),
deletedAt: z.date().nullable().optional(),
allowedSelfApprovals: z.boolean().default(true),
shouldCheckSecretPermission: z.boolean().nullable().optional()
allowedSelfApprovals: z.boolean().default(true)
});
export type TSecretApprovalPolicies = z.infer<typeof SecretApprovalPoliciesSchema>;

View File

@@ -1,8 +1,14 @@
import { z } from "zod";
import { GroupsSchema, OrgMembershipRole, UsersSchema } from "@app/db/schemas";
import { EFilterReturnedUsers } from "@app/ee/services/group/group-types";
import { GroupsSchema, OrgMembershipRole, ProjectsSchema, UsersSchema } from "@app/db/schemas";
import {
EFilterReturnedProjects,
EFilterReturnedUsers,
EGroupProjectsOrderBy
} from "@app/ee/services/group/group-types";
import { ApiDocsTags, GROUPS } from "@app/lib/api-docs";
import { OrderByDirection } from "@app/lib/types";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { slugSchema } from "@app/server/lib/schemas";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
@@ -11,6 +17,9 @@ export const registerGroupRouter = async (server: FastifyZodProvider) => {
server.route({
url: "/",
method: "POST",
config: {
rateLimit: writeLimit
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
hide: false,
@@ -40,6 +49,9 @@ export const registerGroupRouter = async (server: FastifyZodProvider) => {
server.route({
url: "/:id",
method: "GET",
config: {
rateLimit: readLimit
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
hide: false,
@@ -69,6 +81,9 @@ export const registerGroupRouter = async (server: FastifyZodProvider) => {
server.route({
url: "/",
method: "GET",
config: {
rateLimit: readLimit
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
hide: false,
@@ -93,6 +108,9 @@ export const registerGroupRouter = async (server: FastifyZodProvider) => {
server.route({
url: "/:id",
method: "PATCH",
config: {
rateLimit: writeLimit
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
hide: false,
@@ -128,6 +146,9 @@ export const registerGroupRouter = async (server: FastifyZodProvider) => {
server.route({
url: "/:id",
method: "DELETE",
config: {
rateLimit: writeLimit
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
hide: false,
@@ -155,6 +176,9 @@ export const registerGroupRouter = async (server: FastifyZodProvider) => {
server.route({
method: "GET",
url: "/:id/users",
config: {
rateLimit: readLimit
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
hide: false,
@@ -163,7 +187,7 @@ export const registerGroupRouter = async (server: FastifyZodProvider) => {
id: z.string().trim().describe(GROUPS.LIST_USERS.id)
}),
querystring: z.object({
offset: z.coerce.number().min(0).max(100).default(0).describe(GROUPS.LIST_USERS.offset),
offset: z.coerce.number().min(0).default(0).describe(GROUPS.LIST_USERS.offset),
limit: z.coerce.number().min(1).max(100).default(10).describe(GROUPS.LIST_USERS.limit),
username: z.string().trim().optional().describe(GROUPS.LIST_USERS.username),
search: z.string().trim().optional().describe(GROUPS.LIST_USERS.search),
@@ -203,9 +227,72 @@ export const registerGroupRouter = async (server: FastifyZodProvider) => {
}
});
server.route({
method: "GET",
url: "/:id/projects",
config: {
rateLimit: readLimit
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
hide: false,
tags: [ApiDocsTags.Groups],
params: z.object({
id: z.string().trim().describe(GROUPS.LIST_PROJECTS.id)
}),
querystring: z.object({
offset: z.coerce.number().min(0).default(0).describe(GROUPS.LIST_PROJECTS.offset),
limit: z.coerce.number().min(1).max(100).default(10).describe(GROUPS.LIST_PROJECTS.limit),
search: z.string().trim().optional().describe(GROUPS.LIST_PROJECTS.search),
filter: z.nativeEnum(EFilterReturnedProjects).optional().describe(GROUPS.LIST_PROJECTS.filterProjects),
orderBy: z
.nativeEnum(EGroupProjectsOrderBy)
.default(EGroupProjectsOrderBy.Name)
.describe(GROUPS.LIST_PROJECTS.orderBy),
orderDirection: z
.nativeEnum(OrderByDirection)
.default(OrderByDirection.ASC)
.describe(GROUPS.LIST_PROJECTS.orderDirection)
}),
response: {
200: z.object({
projects: ProjectsSchema.pick({
id: true,
name: true,
slug: true,
description: true,
type: true
})
.merge(
z.object({
joinedGroupAt: z.date().nullable()
})
)
.array(),
totalCount: z.number()
})
}
},
handler: async (req) => {
const { projects, totalCount } = await server.services.group.listGroupProjects({
id: req.params.id,
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
...req.query
});
return { projects, totalCount };
}
});
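
A hypothetical client call against this new endpoint, using the querystring and response shape defined above (the host and token are placeholders):

const listGroupProjects = async (groupId: string) => {
  const qs = new URLSearchParams({ limit: "10", offset: "0", orderBy: "name", orderDirection: "asc" });
  const res = await fetch(`https://app.infisical.com/api/v1/groups/${groupId}/projects?${qs}`, {
    headers: { authorization: `Bearer ${process.env.INFISICAL_TOKEN}` }
  });
  // Matches the 200 schema above: a projects array plus totalCount.
  const { projects, totalCount } = (await res.json()) as {
    projects: { id: string; name: string; slug: string; joinedGroupAt: string | null }[];
    totalCount: number;
  };
  return { projects, totalCount };
};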
server.route({
method: "POST",
url: "/:id/users/:username",
config: {
rateLimit: writeLimit
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
hide: false,
@@ -241,6 +328,9 @@ export const registerGroupRouter = async (server: FastifyZodProvider) => {
server.route({
method: "DELETE",
url: "/:id/users/:username",
config: {
rateLimit: writeLimit
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
hide: false,

View File

@@ -9,6 +9,11 @@ import {
SanitizedPostgresAccountWithResourceSchema,
UpdatePostgresAccountSchema
} from "@app/ee/services/pam-resource/postgres/postgres-resource-schemas";
import {
CreateSSHAccountSchema,
SanitizedSSHAccountWithResourceSchema,
UpdateSSHAccountSchema
} from "@app/ee/services/pam-resource/ssh/ssh-resource-schemas";
import { registerPamResourceEndpoints } from "./pam-account-endpoints";
@@ -30,5 +35,14 @@ export const PAM_ACCOUNT_REGISTER_ROUTER_MAP: Record<PamResource, (server: Fasti
createAccountSchema: CreateMySQLAccountSchema,
updateAccountSchema: UpdateMySQLAccountSchema
});
},
[PamResource.SSH]: async (server: FastifyZodProvider) => {
registerPamResourceEndpoints({
server,
resourceType: PamResource.SSH,
accountResponseSchema: SanitizedSSHAccountWithResourceSchema,
createAccountSchema: CreateSSHAccountSchema,
updateAccountSchema: UpdateSSHAccountSchema
});
}
};

View File

@@ -2,16 +2,21 @@ import { z } from "zod";
import { PamFoldersSchema } from "@app/db/schemas";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { PamAccountOrderBy, PamAccountView } from "@app/ee/services/pam-account/pam-account-enums";
import { SanitizedMySQLAccountWithResourceSchema } from "@app/ee/services/pam-resource/mysql/mysql-resource-schemas";
import { PamResource } from "@app/ee/services/pam-resource/pam-resource-enums";
import { SanitizedPostgresAccountWithResourceSchema } from "@app/ee/services/pam-resource/postgres/postgres-resource-schemas";
import { SanitizedSSHAccountWithResourceSchema } from "@app/ee/services/pam-resource/ssh/ssh-resource-schemas";
import { BadRequestError } from "@app/lib/errors";
import { removeTrailingSlash } from "@app/lib/fn";
import { ms } from "@app/lib/ms";
import { OrderByDirection } from "@app/lib/types";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
const SanitizedAccountSchema = z.union([
SanitizedSSHAccountWithResourceSchema, // ORDER MATTERS
SanitizedPostgresAccountWithResourceSchema,
SanitizedMySQLAccountWithResourceSchema
]);
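
The "ORDER MATTERS" note reflects zod's union semantics: parsing returns the first member that succeeds, and plain z.object() strips unknown keys, so a broader schema listed first can silently swallow fields meant for a sibling. A minimal sketch with illustrative schemas:

import { z } from "zod";

const Loose = z.object({ kind: z.string() });
const Strict = z.object({ kind: z.string(), port: z.number() });

// First successful member wins; unknown keys are stripped by default.
z.union([Loose, Strict]).parse({ kind: "ssh", port: 22 }); // → { kind: "ssh" }
z.union([Strict, Loose]).parse({ kind: "ssh", port: 22 }); // → { kind: "ssh", port: 22 }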
@@ -26,33 +31,69 @@ export const registerPamAccountRouter = async (server: FastifyZodProvider) => {
schema: {
description: "List PAM accounts",
querystring: z.object({
projectId: z.string().uuid()
projectId: z.string().uuid(),
accountPath: z.string().trim().default("/").transform(removeTrailingSlash),
accountView: z.nativeEnum(PamAccountView).default(PamAccountView.Flat),
offset: z.coerce.number().min(0).default(0),
limit: z.coerce.number().min(1).max(100).default(100),
orderBy: z.nativeEnum(PamAccountOrderBy).default(PamAccountOrderBy.Name),
orderDirection: z.nativeEnum(OrderByDirection).default(OrderByDirection.ASC),
search: z.string().trim().optional(),
filterResourceIds: z
.string()
.transform((val) =>
val
.split(",")
.map((s) => s.trim())
.filter(Boolean)
)
.optional()
}),
response: {
200: z.object({
accounts: SanitizedAccountSchema.array(),
folders: PamFoldersSchema.array()
folders: PamFoldersSchema.array(),
totalCount: z.number().default(0),
folderId: z.string().optional(),
folderPaths: z.record(z.string(), z.string())
})
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const response = await server.services.pamAccount.list(req.query.projectId, req.permission);
const { projectId, accountPath, accountView, limit, offset, search, orderBy, orderDirection, filterResourceIds } =
req.query;
const { accounts, folders, totalCount, folderId, folderPaths } = await server.services.pamAccount.list({
actorId: req.permission.id,
actor: req.permission.type,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
projectId,
accountPath,
accountView,
limit,
offset,
search,
orderBy,
orderDirection,
filterResourceIds
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
orgId: req.permission.orgId,
projectId: req.query.projectId,
projectId,
event: {
type: EventType.PAM_ACCOUNT_LIST,
metadata: {
accountCount: response.accounts.length,
folderCount: response.folders.length
accountCount: accounts.length,
folderCount: folders.length
}
}
});
return response;
return { accounts, folders, totalCount, folderId, folderPaths };
}
});
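
The comma-separated filterResourceIds parameter is handled by the zod transform above; the same behavior in isolation:

import { z } from "zod";

const filterResourceIds = z
  .string()
  .transform((val) =>
    val
      .split(",")
      .map((s) => s.trim())
      .filter(Boolean) // drops empties left by stray commas
  )
  .optional();

filterResourceIds.parse("id-1, id-2,,id-3"); // → ["id-1", "id-2", "id-3"]
filterResourceIds.parse(undefined); // → undefined (filter omitted entirely)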
@@ -93,7 +134,7 @@ export const registerPamAccountRouter = async (server: FastifyZodProvider) => {
gatewayClientPrivateKey: z.string(),
gatewayServerCertificateChain: z.string(),
relayHost: z.string(),
metadata: z.record(z.string(), z.string()).optional()
metadata: z.record(z.string(), z.string().optional()).optional()
})
}
},

View File

@@ -9,6 +9,11 @@ import {
SanitizedPostgresResourceSchema,
UpdatePostgresResourceSchema
} from "@app/ee/services/pam-resource/postgres/postgres-resource-schemas";
import {
CreateSSHResourceSchema,
SanitizedSSHResourceSchema,
UpdateSSHResourceSchema
} from "@app/ee/services/pam-resource/ssh/ssh-resource-schemas";
import { registerPamResourceEndpoints } from "./pam-resource-endpoints";
@@ -30,5 +35,14 @@ export const PAM_RESOURCE_REGISTER_ROUTER_MAP: Record<PamResource, (server: Fast
createResourceSchema: CreateMySQLResourceSchema,
updateResourceSchema: UpdateMySQLResourceSchema
});
},
[PamResource.SSH]: async (server: FastifyZodProvider) => {
registerPamResourceEndpoints({
server,
resourceType: PamResource.SSH,
resourceResponseSchema: SanitizedSSHResourceSchema,
createResourceSchema: CreateSSHResourceSchema,
updateResourceSchema: UpdateSSHResourceSchema
});
}
};

View File

@@ -5,19 +5,30 @@ import {
MySQLResourceListItemSchema,
SanitizedMySQLResourceSchema
} from "@app/ee/services/pam-resource/mysql/mysql-resource-schemas";
import { PamResourceOrderBy } from "@app/ee/services/pam-resource/pam-resource-enums";
import {
PostgresResourceListItemSchema,
SanitizedPostgresResourceSchema
} from "@app/ee/services/pam-resource/postgres/postgres-resource-schemas";
import {
SanitizedSSHResourceSchema,
SSHResourceListItemSchema
} from "@app/ee/services/pam-resource/ssh/ssh-resource-schemas";
import { OrderByDirection } from "@app/lib/types";
import { readLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
const SanitizedResourceSchema = z.union([SanitizedPostgresResourceSchema, SanitizedMySQLResourceSchema]);
const SanitizedResourceSchema = z.union([
SanitizedPostgresResourceSchema,
SanitizedMySQLResourceSchema,
SanitizedSSHResourceSchema
]);
const ResourceOptionsSchema = z.discriminatedUnion("resource", [
PostgresResourceListItemSchema,
MySQLResourceListItemSchema
MySQLResourceListItemSchema,
SSHResourceListItemSchema
]);
export const registerPamResourceRouter = async (server: FastifyZodProvider) => {
@@ -52,17 +63,46 @@ export const registerPamResourceRouter = async (server: FastifyZodProvider) => {
schema: {
description: "List PAM resources",
querystring: z.object({
projectId: z.string().uuid()
projectId: z.string().uuid(),
offset: z.coerce.number().min(0).default(0),
limit: z.coerce.number().min(1).max(100).default(100),
orderBy: z.nativeEnum(PamResourceOrderBy).default(PamResourceOrderBy.Name),
orderDirection: z.nativeEnum(OrderByDirection).default(OrderByDirection.ASC),
search: z.string().trim().optional(),
filterResourceTypes: z
.string()
.transform((val) =>
val
.split(",")
.map((s) => s.trim())
.filter(Boolean)
)
.optional()
}),
response: {
200: z.object({
resources: SanitizedResourceSchema.array()
resources: SanitizedResourceSchema.array(),
totalCount: z.number().default(0)
})
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const response = await server.services.pamResource.list(req.query.projectId, req.permission);
const { projectId, limit, offset, search, orderBy, orderDirection, filterResourceTypes } = req.query;
const { resources, totalCount } = await server.services.pamResource.list({
actorId: req.permission.id,
actor: req.permission.type,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
projectId,
limit,
offset,
search,
orderBy,
orderDirection,
filterResourceTypes
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
@@ -71,12 +111,12 @@ export const registerPamResourceRouter = async (server: FastifyZodProvider) => {
event: {
type: EventType.PAM_RESOURCE_LIST,
metadata: {
count: response.resources.length
count: resources.length
}
}
});
return response;
return { resources, totalCount };
}
});
};

View File

@@ -4,12 +4,21 @@ import { PamSessionsSchema } from "@app/db/schemas";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { MySQLSessionCredentialsSchema } from "@app/ee/services/pam-resource/mysql/mysql-resource-schemas";
import { PostgresSessionCredentialsSchema } from "@app/ee/services/pam-resource/postgres/postgres-resource-schemas";
import { PamSessionCommandLogSchema, SanitizedSessionSchema } from "@app/ee/services/pam-session/pam-session-schemas";
import { SSHSessionCredentialsSchema } from "@app/ee/services/pam-resource/ssh/ssh-resource-schemas";
import {
PamSessionCommandLogSchema,
SanitizedSessionSchema,
TerminalEventSchema
} from "@app/ee/services/pam-session/pam-session-schemas";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
const SessionCredentialsSchema = z.union([PostgresSessionCredentialsSchema, MySQLSessionCredentialsSchema]);
const SessionCredentialsSchema = z.union([
SSHSessionCredentialsSchema,
PostgresSessionCredentialsSchema,
MySQLSessionCredentialsSchema
]);
export const registerPamSessionRouter = async (server: FastifyZodProvider) => {
// Meant to be hit solely by gateway identities
@@ -32,17 +41,15 @@ export const registerPamSessionRouter = async (server: FastifyZodProvider) => {
},
onRequest: verifyAuth([AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const { credentials, projectId, account } = await server.services.pamAccount.getSessionCredentials(
req.params.sessionId,
req.permission
);
const { credentials, projectId, account, sessionStarted } =
await server.services.pamAccount.getSessionCredentials(req.params.sessionId, req.permission);
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
orgId: req.permission.orgId,
projectId,
event: {
type: EventType.PAM_SESSION_START,
type: EventType.PAM_SESSION_CREDENTIALS_GET,
metadata: {
sessionId: req.params.sessionId,
accountName: account.name
@@ -50,7 +57,22 @@ export const registerPamSessionRouter = async (server: FastifyZodProvider) => {
}
});
return { credentials };
if (sessionStarted) {
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
orgId: req.permission.orgId,
projectId,
event: {
type: EventType.PAM_SESSION_START,
metadata: {
sessionId: req.params.sessionId,
accountName: account.name
}
}
});
}
return { credentials: credentials as z.infer<typeof SessionCredentialsSchema> };
}
});
@@ -67,7 +89,7 @@ export const registerPamSessionRouter = async (server: FastifyZodProvider) => {
sessionId: z.string().uuid()
}),
body: z.object({
logs: PamSessionCommandLogSchema.array()
logs: z.array(z.union([PamSessionCommandLogSchema, TerminalEventSchema]))
}),
response: {
200: z.object({

View File

@@ -305,8 +305,7 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
secretPath: z.string().optional().nullable(),
enforcementLevel: z.string(),
deletedAt: z.date().nullish(),
allowedSelfApprovals: z.boolean(),
shouldCheckSecretPermission: z.boolean().nullable().optional()
allowedSelfApprovals: z.boolean()
}),
environment: z.string(),
statusChangedByUser: approvalRequestUser.optional(),

View File

@@ -27,6 +27,17 @@ export const getChefServerUrl = async (serverUrl?: string) => {
return chefServerUrl;
};
const buildSecureUrl = (baseUrl: string, path: string): string => {
try {
const url = new URL(path, baseUrl);
return url.toString();
} catch (error) {
throw new BadRequestError({
message: "Invalid URL construction parameters"
});
}
};
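
buildSecureUrl leans on the WHATWG URL constructor, which resolves the path against the base and throws on malformed input. A quick sketch of the behavior relied on here (the example host is a placeholder):

// Path resolution against a base URL:
new URL("/organizations/acme/data", "https://chef.example.com").toString();
// → "https://chef.example.com/organizations/acme/data"

// Malformed input throws a TypeError, which the helper maps to BadRequestError:
try {
  new URL("/path", "not-a-valid-base");
} catch {
  // → "Invalid URL construction parameters"
}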
// Helper to ensure private key is in proper PEM format
const formatPrivateKey = (key: string): string => {
let formattedKey = key.trim();
@@ -138,7 +149,8 @@ export const validateChefConnectionCredentials = async (config: TChefConnectionC
const headers = getChefAuthHeaders("GET", path, "", inputCredentials.userName, inputCredentials.privateKey);
await request.get(`${hostServerUrl}${path}`, {
const secureUrl = buildSecureUrl(hostServerUrl, path);
await request.get(secureUrl, {
headers
});
} catch (error: unknown) {
@@ -168,7 +180,8 @@ export const listChefDataBags = async (appConnection: TChefConnection): Promise<
const headers = getChefAuthHeaders("GET", path, body, userName, privateKey);
const res = await request.get<Record<string, string>>(`${hostServerUrl}${path}`, {
const secureUrl = buildSecureUrl(hostServerUrl, path);
const res = await request.get<Record<string, string>>(secureUrl, {
headers
});
@@ -203,7 +216,8 @@ export const listChefDataBagItems = async (
const headers = getChefAuthHeaders("GET", path, body, userName, privateKey);
const res = await request.get<Record<string, string>>(`${hostServerUrl}${path}`, {
const secureUrl = buildSecureUrl(hostServerUrl, path);
const res = await request.get<Record<string, string>>(secureUrl, {
headers
});
@@ -238,7 +252,8 @@ export const getChefDataBagItem = async ({
const headers = getChefAuthHeaders("GET", path, body, userName, privateKey);
const res = await request.get<TChefDataBagItemContent>(`${hostServerUrl}${path}`, {
const secureUrl = buildSecureUrl(hostServerUrl, path);
const res = await request.get<TChefDataBagItemContent>(secureUrl, {
headers
});
@@ -255,6 +270,38 @@ export const getChefDataBagItem = async ({
}
};
export const createChefDataBagItem = async ({
serverUrl,
userName,
privateKey,
orgName,
dataBagName,
data
}: Omit<TUpdateChefDataBagItem, "dataBagItemName">): Promise<void> => {
try {
const path = `/organizations/${orgName}/data/${dataBagName}`;
const body = JSON.stringify(data);
const hostServerUrl = await getChefServerUrl(serverUrl);
const headers = getChefAuthHeaders("POST", path, body, userName, privateKey);
const secureUrl = buildSecureUrl(hostServerUrl, path);
await request.post(secureUrl, data, {
headers
});
} catch (error) {
if (error instanceof AxiosError) {
throw new BadRequestError({
message: `Failed to create Chef data bag item: ${error.message || "Unknown error"}`
});
}
throw new BadRequestError({
message: "Unable to create Chef data bag item"
});
}
};
export const updateChefDataBagItem = async ({
serverUrl,
userName,
@@ -272,7 +319,8 @@ export const updateChefDataBagItem = async ({
const headers = getChefAuthHeaders("PUT", path, body, userName, privateKey);
await request.put(`${hostServerUrl}${path}`, data, {
const secureUrl = buildSecureUrl(hostServerUrl, path);
await request.put(secureUrl, data, {
headers
});
} catch (error) {
@@ -286,3 +334,35 @@ export const updateChefDataBagItem = async ({
});
}
};
export const removeChefDataBagItem = async ({
serverUrl,
userName,
privateKey,
orgName,
dataBagName,
dataBagItemName
}: Omit<TUpdateChefDataBagItem, "data">): Promise<void> => {
try {
const path = `/organizations/${orgName}/data/${dataBagName}/${dataBagItemName}`;
const body = "";
const hostServerUrl = await getChefServerUrl(serverUrl);
const headers = getChefAuthHeaders("DELETE", path, body, userName, privateKey);
const secureUrl = buildSecureUrl(hostServerUrl, path);
await request.delete(secureUrl, {
headers
});
} catch (error) {
if (error instanceof AxiosError) {
throw new BadRequestError({
message: `Failed to remove Chef data bag item: ${error.message || "Unknown error"}`
});
}
throw new BadRequestError({
message: "Unable to remove Chef data bag item"
});
}
};
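
Hypothetical call shapes for the new create/remove helpers (every argument value below is a placeholder; note that Chef data bag items key off their "id" field):

await createChefDataBagItem({
  serverUrl: "https://chef.example.com",
  userName: "svc-infisical",
  privateKey: "-----BEGIN RSA PRIVATE KEY-----\n...",
  orgName: "acme",
  dataBagName: "app-secrets",
  data: { id: "prod-db", value: "..." }
});

await removeChefDataBagItem({
  serverUrl: "https://chef.example.com",
  userName: "svc-infisical",
  privateKey: "-----BEGIN RSA PRIVATE KEY-----\n...",
  orgName: "acme",
  dataBagName: "app-secrets",
  dataBagItemName: "prod-db"
});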

View File

@@ -186,6 +186,7 @@ export enum EventType {
CREATE_TOKEN_IDENTITY_TOKEN_AUTH = "create-token-identity-token-auth",
UPDATE_TOKEN_IDENTITY_TOKEN_AUTH = "update-token-identity-token-auth",
GET_TOKENS_IDENTITY_TOKEN_AUTH = "get-tokens-identity-token-auth",
GET_TOKEN_IDENTITY_TOKEN_AUTH = "get-token-identity-token-auth",
ADD_IDENTITY_TOKEN_AUTH = "add-identity-token-auth",
UPDATE_IDENTITY_TOKEN_AUTH = "update-identity-token-auth",
@@ -535,6 +536,7 @@ export enum EventType {
DASHBOARD_GET_SECRET_VALUE = "dashboard-get-secret-value",
DASHBOARD_GET_SECRET_VERSION_VALUE = "dashboard-get-secret-version-value",
PAM_SESSION_CREDENTIALS_GET = "pam-session-credentials-get",
PAM_SESSION_START = "pam-session-start",
PAM_SESSION_LOGS_UPDATE = "pam-session-logs-update",
PAM_SESSION_END = "pam-session-end",
@@ -1029,6 +1031,15 @@ interface GetTokensIdentityTokenAuthEvent {
};
}
interface GetTokenIdentityTokenAuthEvent {
type: EventType.GET_TOKEN_IDENTITY_TOKEN_AUTH;
metadata: {
identityId: string;
identityName: string;
tokenId: string;
};
}
interface AddIdentityTokenAuthEvent {
type: EventType.ADD_IDENTITY_TOKEN_AUTH;
metadata: {
@@ -3978,6 +3989,14 @@ interface OrgRoleDeleteEvent {
};
}
interface PamSessionCredentialsGetEvent {
type: EventType.PAM_SESSION_CREDENTIALS_GET;
metadata: {
sessionId: string;
accountName: string;
};
}
interface PamSessionStartEvent {
type: EventType.PAM_SESSION_START;
metadata: {
@@ -4214,6 +4233,7 @@ export type Event =
| CreateTokenIdentityTokenAuthEvent
| UpdateTokenIdentityTokenAuthEvent
| GetTokensIdentityTokenAuthEvent
| GetTokenIdentityTokenAuthEvent
| AddIdentityTokenAuthEvent
| UpdateIdentityTokenAuthEvent
| GetIdentityTokenAuthEvent
@@ -4531,6 +4551,7 @@ export type Event =
| OrgRoleCreateEvent
| OrgRoleUpdateEvent
| OrgRoleDeleteEvent
| PamSessionCredentialsGetEvent
| PamSessionStartEvent
| PamSessionLogsUpdateEvent
| PamSessionEndEvent

View File

@@ -4,8 +4,9 @@ import { TDbClient } from "@app/db";
import { AccessScope, TableName, TGroups } from "@app/db/schemas";
import { DatabaseError } from "@app/lib/errors";
import { buildFindFilter, ormify, selectAllTableCols, TFindFilter, TFindOpt } from "@app/lib/knex";
import { OrderByDirection } from "@app/lib/types";
import { EFilterReturnedUsers } from "./group-types";
import { EFilterReturnedProjects, EFilterReturnedUsers, EGroupProjectsOrderBy } from "./group-types";
export type TGroupDALFactory = ReturnType<typeof groupDALFactory>;
@@ -166,6 +167,89 @@ export const groupDALFactory = (db: TDbClient) => {
}
};
const findAllGroupProjects = async ({
orgId,
groupId,
offset,
limit,
search,
filter,
orderBy,
orderDirection
}: {
orgId: string;
groupId: string;
offset?: number;
limit?: number;
search?: string;
filter?: EFilterReturnedProjects;
orderBy?: EGroupProjectsOrderBy;
orderDirection?: OrderByDirection;
}) => {
try {
const query = db
.replicaNode()(TableName.Project)
.where(`${TableName.Project}.orgId`, orgId)
.leftJoin(TableName.Membership, (bd) => {
bd.on(`${TableName.Project}.id`, "=", `${TableName.Membership}.scopeProjectId`)
.andOn(`${TableName.Membership}.actorGroupId`, "=", db.raw("?", [groupId]))
.andOn(`${TableName.Membership}.scope`, "=", db.raw("?", [AccessScope.Project]));
})
.select(
db.ref("id").withSchema(TableName.Project),
db.ref("name").withSchema(TableName.Project),
db.ref("slug").withSchema(TableName.Project),
db.ref("description").withSchema(TableName.Project),
db.ref("type").withSchema(TableName.Project),
db.ref("createdAt").withSchema(TableName.Membership).as("joinedGroupAt"),
db.raw(`count(*) OVER() as "totalCount"`)
)
.offset(offset ?? 0);
if (orderBy) {
void query.orderByRaw(
`LOWER(${TableName.Project}.??) ${orderDirection === OrderByDirection.ASC ? "asc" : "desc"}`,
[orderBy]
);
}
if (limit) {
void query.limit(limit);
}
if (search) {
void query.andWhereRaw(
`CONCAT_WS(' ', "${TableName.Project}"."name", "${TableName.Project}"."slug", "${TableName.Project}"."description") ilike ?`,
[`%${search}%`]
);
}
switch (filter) {
case EFilterReturnedProjects.ASSIGNED_PROJECTS:
void query.whereNotNull(`${TableName.Membership}.id`);
break;
case EFilterReturnedProjects.UNASSIGNED_PROJECTS:
void query.whereNull(`${TableName.Membership}.id`);
break;
default:
break;
}
const projects = await query;
return {
projects: projects.map(({ joinedGroupAt, ...project }) => ({
...project,
joinedGroupAt
})),
// @ts-expect-error col select is raw and not strongly typed
totalCount: Number(projects?.[0]?.totalCount ?? 0)
};
} catch (error) {
throw new DatabaseError({ error, name: "Find all group projects" });
}
};
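
The totalCount comes from count(*) OVER(), a window aggregate evaluated before LIMIT/OFFSET, so every returned row carries the full match count and no second query is needed. A reduced sketch of the pattern (the knex handle and table name here are assumptions):

import { Knex } from "knex";

const pageWithCount = async (db: Knex, orgId: string) => {
  const rows = await db("projects")
    .where("orgId", orgId)
    .select("id", db.raw(`count(*) OVER() as "totalCount"`))
    .limit(10);
  // Each row repeats the pre-LIMIT total; read it off the first row.
  const totalCount = Number((rows[0] as { totalCount?: string } | undefined)?.totalCount ?? 0);
  return { rows, totalCount };
};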
const findGroupsByProjectId = async (projectId: string, tx?: Knex) => {
try {
const docs = await (tx || db.replicaNode())(TableName.Groups)
@@ -230,6 +314,7 @@ export const groupDALFactory = (db: TDbClient) => {
findGroups,
findByOrgId,
findAllGroupPossibleMembers,
findAllGroupProjects,
findGroupsByProjectId,
findById,
findOne

View File

@@ -24,6 +24,7 @@ import {
TCreateGroupDTO,
TDeleteGroupDTO,
TGetGroupByIdDTO,
TListGroupProjectsDTO,
TListGroupUsersDTO,
TRemoveUserFromGroupDTO,
TUpdateGroupDTO
@@ -34,7 +35,14 @@ type TGroupServiceFactoryDep = {
userDAL: Pick<TUserDALFactory, "find" | "findUserEncKeyByUserIdsBatch" | "transaction" | "findUserByUsername">;
groupDAL: Pick<
TGroupDALFactory,
"create" | "findOne" | "update" | "delete" | "findAllGroupPossibleMembers" | "findById" | "transaction"
| "create"
| "findOne"
| "update"
| "delete"
| "findAllGroupPossibleMembers"
| "findById"
| "transaction"
| "findAllGroupProjects"
>;
membershipGroupDAL: Pick<TMembershipGroupDALFactory, "find" | "findOne" | "create">;
membershipRoleDAL: Pick<TMembershipRoleDALFactory, "create" | "delete">;
@@ -367,6 +375,55 @@ export const groupServiceFactory = ({
return { users: members, totalCount };
};
const listGroupProjects = async ({
id,
offset,
limit,
search,
filter,
orderBy,
orderDirection,
actor,
actorId,
actorAuthMethod,
actorOrgId
}: TListGroupProjectsDTO) => {
if (!actorOrgId) throw new UnauthorizedError({ message: "No organization ID provided in request" });
const { permission } = await permissionService.getOrgPermission({
scope: OrganizationActionScope.Any,
actor,
actorId,
orgId: actorOrgId,
actorAuthMethod,
actorOrgId
});
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionGroupActions.Read, OrgPermissionSubjects.Groups);
const group = await groupDAL.findOne({
orgId: actorOrgId,
id
});
if (!group)
throw new NotFoundError({
message: `Failed to find group with ID ${id}`
});
const { projects, totalCount } = await groupDAL.findAllGroupProjects({
orgId: group.orgId,
groupId: group.id,
offset,
limit,
search,
filter,
orderBy,
orderDirection
});
return { projects, totalCount };
};
const addUserToGroup = async ({ id, username, actor, actorId, actorAuthMethod, actorOrgId }: TAddUserToGroupDTO) => {
if (!actorOrgId) throw new UnauthorizedError({ message: "No organization ID provided in request" });
@@ -535,6 +592,7 @@ export const groupServiceFactory = ({
updateGroup,
deleteGroup,
listGroupUsers,
listGroupProjects,
addUserToGroup,
removeUserFromGroup,
getGroupById

View File

@@ -2,7 +2,7 @@ import { Knex } from "knex";
import { TGroups } from "@app/db/schemas";
import { TUserGroupMembershipDALFactory } from "@app/ee/services/group/user-group-membership-dal";
import { TGenericPermission } from "@app/lib/types";
import { OrderByDirection, TGenericPermission } from "@app/lib/types";
import { TMembershipGroupDALFactory } from "@app/services/membership-group/membership-group-dal";
import { TOrgDALFactory } from "@app/services/org/org-dal";
import { TProjectDALFactory } from "@app/services/project/project-dal";
@@ -42,6 +42,16 @@ export type TListGroupUsersDTO = {
filter?: EFilterReturnedUsers;
} & TGenericPermission;
export type TListGroupProjectsDTO = {
id: string;
offset: number;
limit: number;
search?: string;
filter?: EFilterReturnedProjects;
orderBy?: EGroupProjectsOrderBy;
orderDirection?: OrderByDirection;
} & TGenericPermission;
export type TListProjectGroupUsersDTO = TListGroupUsersDTO & {
projectId: string;
};
@@ -111,3 +121,12 @@ export enum EFilterReturnedUsers {
EXISTING_MEMBERS = "existingMembers",
NON_MEMBERS = "nonMembers"
}
export enum EFilterReturnedProjects {
ASSIGNED_PROJECTS = "assignedProjects",
UNASSIGNED_PROJECTS = "unassignedProjects"
}
export enum EGroupProjectsOrderBy {
Name = "name"
}

View File

@@ -39,3 +39,9 @@ export const getDefaultOnPremFeatures = () => {
};
export const setupLicenseRequestWithStore = () => {};
export const getLicenseKeyConfig = () => {
return {
isValid: false
};
};

View File

@@ -1,13 +1,56 @@
import axios, { AxiosError } from "axios";
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { getConfig } from "@app/lib/config/env";
import { getConfig, TEnvConfig } from "@app/lib/config/env";
import { request } from "@app/lib/config/request";
import { BadRequestError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";
import { UserAliasType } from "@app/services/user-alias/user-alias-types";
import { TFeatureSet } from "./license-types";
import { LicenseType, TFeatureSet, TLicenseKeyConfig, TOfflineLicenseContents } from "./license-types";
export const isOfflineLicenseKey = (licenseKey: string): boolean => {
try {
const contents = JSON.parse(Buffer.from(licenseKey, "base64").toString("utf8")) as TOfflineLicenseContents;
return "signature" in contents && "license" in contents;
} catch (error) {
return false;
}
};
export const getLicenseKeyConfig = (
config?: Pick<TEnvConfig, "LICENSE_KEY" | "LICENSE_KEY_OFFLINE">
): TLicenseKeyConfig => {
const cfg = config || getConfig();
if (!cfg) {
return { isValid: false };
}
const licenseKey = cfg.LICENSE_KEY;
if (licenseKey) {
if (isOfflineLicenseKey(licenseKey)) {
return { isValid: true, licenseKey, type: LicenseType.Offline };
}
return { isValid: true, licenseKey, type: LicenseType.Online };
}
const offlineLicenseKey = cfg.LICENSE_KEY_OFFLINE;
// backwards compatibility
if (offlineLicenseKey) {
if (isOfflineLicenseKey(offlineLicenseKey)) {
return { isValid: true, licenseKey: offlineLicenseKey, type: LicenseType.Offline };
}
return { isValid: false };
}
return { isValid: false };
};
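
As the detection implies, an offline key is base64-encoded JSON carrying both the license payload and its signature. A hypothetical key that passes isOfflineLicenseKey:

const offlineKey = Buffer.from(
  JSON.stringify({
    license: { plan: "enterprise", expiresAt: "2026-01-01" }, // illustrative payload
    signature: "base64-signature-here" // placeholder, not a real signature
  })
).toString("base64");

isOfflineLicenseKey(offlineKey); // true: parses and has both fields
isOfflineLicenseKey("inf-online-key-123"); // false: does not decode to JSON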
export const getDefaultOnPremFeatures = (): TFeatureSet => ({
_id: null,

View File

@@ -22,9 +22,10 @@ import { OrgPermissionBillingActions, OrgPermissionSubjects } from "../permissio
import { TPermissionServiceFactory } from "../permission/permission-service-types";
import { BillingPlanRows, BillingPlanTableHead } from "./licence-enums";
import { TLicenseDALFactory } from "./license-dal";
import { getDefaultOnPremFeatures, setupLicenseRequestWithStore } from "./license-fns";
import { getDefaultOnPremFeatures, getLicenseKeyConfig, setupLicenseRequestWithStore } from "./license-fns";
import {
InstanceType,
LicenseType,
TAddOrgPmtMethodDTO,
TAddOrgTaxIdDTO,
TCreateOrgPortalSession,
@@ -77,6 +78,7 @@ export const licenseServiceFactory = ({
let instanceType = InstanceType.OnPrem;
let onPremFeatures: TFeatureSet = getDefaultOnPremFeatures();
let selfHostedLicense: TOfflineLicense | null = null;
const licenseKeyConfig = getLicenseKeyConfig(envConfig);
const licenseServerCloudApi = setupLicenseRequestWithStore(
envConfig.LICENSE_SERVER_URL || "",
@@ -85,10 +87,13 @@ export const licenseServiceFactory = ({
envConfig.INTERNAL_REGION
);
const onlineLicenseKey =
licenseKeyConfig.isValid && licenseKeyConfig.type === LicenseType.Online ? licenseKeyConfig.licenseKey : "";
const licenseServerOnPremApi = setupLicenseRequestWithStore(
envConfig.LICENSE_SERVER_URL || "",
LICENSE_SERVER_ON_PREM_LOGIN,
envConfig.LICENSE_KEY || "",
onlineLicenseKey,
envConfig.INTERNAL_REGION
);
@@ -131,7 +136,7 @@ export const licenseServiceFactory = ({
return;
}
if (envConfig.LICENSE_KEY) {
if (licenseKeyConfig.isValid && licenseKeyConfig.type === LicenseType.Online) {
const token = await licenseServerOnPremApi.refreshLicense();
if (token) {
await syncLicenseKeyOnPremFeatures(true);
@@ -142,10 +147,10 @@ export const licenseServiceFactory = ({
return;
}
if (envConfig.LICENSE_KEY_OFFLINE) {
if (licenseKeyConfig.isValid && licenseKeyConfig.type === LicenseType.Offline) {
let isValidOfflineLicense = true;
const contents: TOfflineLicenseContents = JSON.parse(
Buffer.from(envConfig.LICENSE_KEY_OFFLINE, "base64").toString("utf8")
Buffer.from(licenseKeyConfig.licenseKey, "base64").toString("utf8")
);
const isVerified = await verifyOfflineLicense(JSON.stringify(contents.license), contents.signature);
@@ -184,7 +189,7 @@ export const licenseServiceFactory = ({
};
const initializeBackgroundSync = async () => {
if (envConfig.LICENSE_KEY) {
if (licenseKeyConfig?.isValid && licenseKeyConfig?.type === LicenseType.Online) {
logger.info("Setting up background sync process for refresh onPremFeatures");
const job = new CronJob("*/10 * * * *", syncLicenseKeyOnPremFeatures);
job.start();

View File

@@ -136,3 +136,18 @@ export type TDelOrgTaxIdDTO = TOrgPermission & { taxId: string };
export type TOrgInvoiceDTO = TOrgPermission;
export type TOrgLicensesDTO = TOrgPermission;
export enum LicenseType {
Offline = "offline",
Online = "online"
}
export type TLicenseKeyConfig =
| {
isValid: false;
}
| {
isValid: true;
licenseKey: string;
type: LicenseType;
};
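
Because isValid is the discriminant, TypeScript only exposes licenseKey and type on the valid branch, so callers cannot touch the key without checking first. Usage sketch:

const cfg = getLicenseKeyConfig();
if (cfg.isValid) {
  // Narrowed to { isValid: true; licenseKey: string; type: LicenseType }
  const mode = cfg.type === LicenseType.Offline ? "offline" : "online";
  logger.info(`license key loaded (${mode})`);
} else {
  // Only { isValid: false } here; accessing cfg.licenseKey is a compile-time error.
}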

View File

@@ -1,46 +1,109 @@
import { Knex } from "knex";
import { TDbClient } from "@app/db";
import { TableName, TPamAccounts } from "@app/db/schemas";
import { buildFindFilter, ormify, prependTableNameToFindFilter, selectAllTableCols } from "@app/lib/knex";
import { TableName } from "@app/db/schemas";
import { DatabaseError } from "@app/lib/errors";
import { ormify, selectAllTableCols } from "@app/lib/knex";
import { OrderByDirection } from "@app/lib/types";
import { PamAccountOrderBy, PamAccountView } from "./pam-account-enums";
export type TPamAccountDALFactory = ReturnType<typeof pamAccountDALFactory>;
type PamAccountFindFilter = Parameters<typeof buildFindFilter<TPamAccounts>>[0];
export const pamAccountDALFactory = (db: TDbClient) => {
const orm = ormify(db, TableName.PamAccount);
const findWithResourceDetails = async (filter: PamAccountFindFilter, tx?: Knex) => {
const query = (tx || db.replicaNode())(TableName.PamAccount)
.leftJoin(TableName.PamResource, `${TableName.PamAccount}.resourceId`, `${TableName.PamResource}.id`)
.select(selectAllTableCols(TableName.PamAccount))
.select(
const findByProjectIdWithResourceDetails = async (
{
projectId,
folderId,
accountView = PamAccountView.Nested,
search,
limit,
offset = 0,
orderBy = PamAccountOrderBy.Name,
orderDirection = OrderByDirection.ASC,
filterResourceIds
}: {
projectId: string;
folderId?: string | null;
accountView?: PamAccountView;
search?: string;
limit?: number;
offset?: number;
orderBy?: PamAccountOrderBy;
orderDirection?: OrderByDirection;
filterResourceIds?: string[];
},
tx?: Knex
) => {
try {
const dbInstance = tx || db.replicaNode();
const query = dbInstance(TableName.PamAccount)
.leftJoin(TableName.PamResource, `${TableName.PamAccount}.resourceId`, `${TableName.PamResource}.id`)
.where(`${TableName.PamAccount}.projectId`, projectId);
if (accountView === PamAccountView.Nested) {
if (folderId) {
void query.where(`${TableName.PamAccount}.folderId`, folderId);
} else {
void query.whereNull(`${TableName.PamAccount}.folderId`);
}
}
if (search) {
// escape special characters (`%`, `_`) and the escape character itself (`\`)
const escapedSearch = search.replace(/\\/g, "\\\\").replace(/%/g, "\\%").replace(/_/g, "\\_");
const pattern = `%${escapedSearch}%`;
void query.where((q) => {
void q
.whereRaw(`??.?? ILIKE ? ESCAPE '\\'`, [TableName.PamAccount, "name", pattern])
.orWhereRaw(`??.?? ILIKE ? ESCAPE '\\'`, [TableName.PamResource, "name", pattern])
.orWhereRaw(`??.?? ILIKE ? ESCAPE '\\'`, [TableName.PamAccount, "description", pattern]);
});
}
if (filterResourceIds && filterResourceIds.length) {
void query.whereIn(`${TableName.PamAccount}.resourceId`, filterResourceIds);
}
const countQuery = query.clone().count("*", { as: "count" }).first();
void query.select(selectAllTableCols(TableName.PamAccount)).select(
// resource
db.ref("name").withSchema(TableName.PamResource).as("resourceName"),
db.ref("resourceType").withSchema(TableName.PamResource),
db.ref("encryptedRotationAccountCredentials").withSchema(TableName.PamResource)
);
if (filter) {
/* eslint-disable @typescript-eslint/no-misused-promises */
void query.where(buildFindFilter(prependTableNameToFindFilter(TableName.PamAccount, filter)));
const direction = orderDirection === OrderByDirection.ASC ? "ASC" : "DESC";
void query.orderByRaw(`${TableName.PamAccount}.?? COLLATE "en-x-icu" ${direction}`, [orderBy]);
if (typeof limit === "number") {
void query.limit(limit).offset(offset);
}
const [results, countResult] = await Promise.all([query, countQuery]);
const totalCount = Number(countResult?.count || 0);
const accounts = results.map(
// @ts-expect-error resourceName, resourceType, encryptedRotationAccountCredentials are from joined table
({ resourceId, resourceName, resourceType, encryptedRotationAccountCredentials, ...account }) => ({
...account,
resourceId,
resource: {
id: resourceId,
name: resourceName as string,
resourceType,
encryptedRotationAccountCredentials
}
})
);
return { accounts, totalCount };
} catch (error) {
throw new DatabaseError({ error, name: "Find PAM accounts with resource details" });
}
const accounts = await query;
return accounts.map(
({ resourceId, resourceName, resourceType, encryptedRotationAccountCredentials, ...account }) => ({
...account,
resourceId,
resource: {
id: resourceId,
name: resourceName,
resourceType,
encryptedRotationAccountCredentials
}
})
);
};
const findAccountsDueForRotation = async (tx?: Knex) => {
@@ -59,5 +122,9 @@ export const pamAccountDALFactory = (db: TDbClient) => {
return accounts;
};
return { ...orm, findWithResourceDetails, findAccountsDueForRotation };
return {
...orm,
findByProjectIdWithResourceDetails,
findAccountsDueForRotation
};
};
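
The search handling above escapes `\`, `%`, and `_` before building the ILIKE pattern so user input matches literally. A hedged standalone sketch of that escaping (the helper name is illustrative):

// Escape the ILIKE wildcards and the escape character itself.
const escapeIlikeSearch = (search: string): string =>
  search.replace(/\\/g, "\\\\").replace(/%/g, "\\%").replace(/_/g, "\\_");

// e.g. escapeIlikeSearch("50%_done") === "50\\%\\_done";
// the DAL then matches with `%${escaped}%` and ESCAPE '\'.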

View File

@@ -0,0 +1,8 @@
export enum PamAccountOrderBy {
Name = "name"
}
export enum PamAccountView {
Flat = "flat",
Nested = "nested"
}

View File

@@ -1,6 +1,6 @@
import { ForbiddenError, subject } from "@casl/ability";
import { ActionProjectType, OrganizationActionScope, TPamAccounts, TPamResources } from "@app/db/schemas";
import { ActionProjectType, OrganizationActionScope, TPamAccounts, TPamFolders, TPamResources } from "@app/db/schemas";
import { PAM_RESOURCE_FACTORY_MAP } from "@app/ee/services/pam-resource/pam-resource-factory";
import { decryptResource, decryptResourceConnectionDetails } from "@app/ee/services/pam-resource/pam-resource-fns";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service-types";
@@ -27,12 +27,14 @@ import { getFullPamFolderPath } from "../pam-folder/pam-folder-fns";
import { TPamResourceDALFactory } from "../pam-resource/pam-resource-dal";
import { PamResource } from "../pam-resource/pam-resource-enums";
import { TPamAccountCredentials } from "../pam-resource/pam-resource-types";
import { TSqlResourceConnectionDetails } from "../pam-resource/shared/sql/sql-resource-types";
import { TPamSessionDALFactory } from "../pam-session/pam-session-dal";
import { PamSessionStatus } from "../pam-session/pam-session-enums";
import { OrgPermissionGatewayActions, OrgPermissionSubjects } from "../permission/org-permission";
import { TPamAccountDALFactory } from "./pam-account-dal";
import { PamAccountView } from "./pam-account-enums";
import { decryptAccount, decryptAccountCredentials, encryptAccountCredentials } from "./pam-account-fns";
import { TAccessAccountDTO, TCreateAccountDTO, TUpdateAccountDTO } from "./pam-account-types";
import { TAccessAccountDTO, TCreateAccountDTO, TListAccountsDTO, TUpdateAccountDTO } from "./pam-account-types";
type TPamAccountServiceFactoryDep = {
pamResourceDAL: TPamResourceDALFactory;
@@ -251,17 +253,17 @@ export const pamAccountServiceFactory = ({
gatewayV2Service
);
// Logic to prevent overwriting unedited censored values
const finalCredentials = { ...credentials };
if (credentials.password === "__INFISICAL_UNCHANGED__") {
const decryptedCredentials = await decryptAccountCredentials({
encryptedCredentials: account.encryptedCredentials,
projectId: account.projectId,
kmsService
});
const decryptedCredentials = await decryptAccountCredentials({
encryptedCredentials: account.encryptedCredentials,
projectId: account.projectId,
kmsService
});
finalCredentials.password = decryptedCredentials.password;
}
// Logic to prevent overwriting unedited censored values
const finalCredentials = await factory.handleOverwritePreventionForCensoredValues(
credentials,
decryptedCredentials
);
const validatedCredentials = await factory.validateAccountCredentials(finalCredentials);
const encryptedCredentials = await encryptAccountCredentials({
@@ -334,21 +336,96 @@ export const pamAccountServiceFactory = ({
};
};
const list = async (projectId: string, actor: OrgServiceActor) => {
const list = async ({
projectId,
accountPath,
accountView,
actor,
actorId,
actorAuthMethod,
actorOrgId,
...params
}: TListAccountsDTO) => {
const { permission } = await permissionService.getProjectPermission({
actor: actor.type,
actorAuthMethod: actor.authMethod,
actorId: actor.id,
actorOrgId: actor.orgId,
actor,
actorId,
projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.PAM
});
const accountsWithResourceDetails = await pamAccountDAL.findWithResourceDetails({ projectId });
const limit = params.limit || 20;
const offset = params.offset || 0;
const canReadFolders = permission.can(ProjectPermissionActions.Read, ProjectPermissionSub.PamFolders);
const folders = canReadFolders ? await pamFolderDAL.find({ projectId }) : [];
const folder = accountPath === "/" ? null : await pamFolderDAL.findByPath(projectId, accountPath);
if (accountPath !== "/" && !folder) {
return { accounts: [], folders: [], totalCount: 0, folderPaths: {} };
}
const folderId = folder?.id;
let totalFolderCount = 0;
if (canReadFolders && accountView === PamAccountView.Nested) {
const { totalCount } = await pamFolderDAL.findByProjectId({
projectId,
parentId: folderId,
search: params.search
});
totalFolderCount = totalCount;
}
let folders: TPamFolders[] = [];
if (canReadFolders && accountView === PamAccountView.Nested && offset < totalFolderCount) {
const folderLimit = Math.min(limit, totalFolderCount - offset);
const { folders: foldersResp } = await pamFolderDAL.findByProjectId({
projectId,
parentId: folderId,
limit: folderLimit,
offset,
search: params.search,
orderBy: params.orderBy,
orderDirection: params.orderDirection
});
folders = foldersResp;
}
let accountsWithResourceDetails: Awaited<
ReturnType<typeof pamAccountDAL.findByProjectIdWithResourceDetails>
>["accounts"] = [];
let totalAccountCount = 0;
const accountsToFetch = limit - folders.length;
if (accountsToFetch > 0) {
const accountOffset = Math.max(0, offset - totalFolderCount);
const { accounts, totalCount } = await pamAccountDAL.findByProjectIdWithResourceDetails({
projectId,
folderId,
accountView,
offset: accountOffset,
limit: accountsToFetch,
search: params.search,
orderBy: params.orderBy,
orderDirection: params.orderDirection,
filterResourceIds: params.filterResourceIds
});
accountsWithResourceDetails = accounts;
totalAccountCount = totalCount;
} else {
// if no accounts are to be fetched for the current page, we still need the total count for pagination
const { totalCount } = await pamAccountDAL.findByProjectIdWithResourceDetails({
projectId,
folderId,
accountView,
search: params.search,
filterResourceIds: params.filterResourceIds
});
totalAccountCount = totalCount;
}
const totalCount = totalFolderCount + totalAccountCount;
const decryptedAndPermittedAccounts: Array<
TPamAccounts & {
@@ -359,12 +436,6 @@ export const pamAccountServiceFactory = ({
> = [];
for await (const account of accountsWithResourceDetails) {
const accountPath = await getFullPamFolderPath({
pamFolderDAL,
folderId: account.folderId,
projectId: account.projectId
});
// Check permission for each individual account
if (
permission.can(
@@ -391,9 +462,27 @@ export const pamAccountServiceFactory = ({
}
}
const folderPaths: Record<string, string> = {};
const accountFolderIds = [
...new Set(decryptedAndPermittedAccounts.flatMap((a) => (a.folderId ? [a.folderId] : [])))
];
await Promise.all(
accountFolderIds.map(async (fId) => {
folderPaths[fId] = await getFullPamFolderPath({
pamFolderDAL,
folderId: fId,
projectId
});
})
);
return {
accounts: decryptedAndPermittedAccounts,
folders
folders,
totalCount,
folderId,
folderPaths
};
};
@@ -486,11 +575,11 @@ export const pamAccountServiceFactory = ({
case PamResource.Postgres:
case PamResource.MySQL:
{
const connectionCredentials = await decryptResourceConnectionDetails({
const connectionCredentials = (await decryptResourceConnectionDetails({
encryptedConnectionDetails: resource.encryptedConnectionDetails,
kmsService,
projectId: account.projectId
});
})) as TSqlResourceConnectionDetails;
const credentials = await decryptAccountCredentials({
encryptedCredentials: account.encryptedCredentials,
@@ -506,6 +595,19 @@ export const pamAccountServiceFactory = ({
};
}
break;
case PamResource.SSH:
{
const credentials = await decryptAccountCredentials({
encryptedCredentials: account.encryptedCredentials,
kmsService,
projectId: account.projectId
});
metadata = {
username: credentials.username
};
}
break;
default:
break;
}
@@ -566,11 +668,6 @@ export const pamAccountServiceFactory = ({
throw new BadRequestError({ message: "Session has ended or expired" });
}
// Verify that the session has not already had credentials fetched
if (session.status !== PamSessionStatus.Starting) {
throw new BadRequestError({ message: "Session has already been started" });
}
const account = await pamAccountDAL.findById(session.accountId);
if (!account) throw new NotFoundError({ message: `Account with ID '${session.accountId}' not found` });
@@ -587,11 +684,16 @@ export const pamAccountServiceFactory = ({
const decryptedResource = await decryptResource(resource, session.projectId, kmsService);
let sessionStarted = false;
// Mark session as started
await pamSessionDAL.updateById(sessionId, {
status: PamSessionStatus.Active,
startedAt: new Date()
});
if (session.status === PamSessionStatus.Starting) {
await pamSessionDAL.updateById(sessionId, {
status: PamSessionStatus.Active,
startedAt: new Date()
});
sessionStarted = true;
}
return {
credentials: {
@@ -599,7 +701,8 @@ export const pamAccountServiceFactory = ({
...decryptedAccount.credentials
},
projectId: project.id,
account
account,
sessionStarted
};
};
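
The `list` flow above pages folders and accounts as one combined list: folders fill the page first, and accounts take whatever room is left, with the account offset shifted past the folder total. A simplified standalone restatement of that arithmetic (the function name is illustrative):

const pageSlices = (totalFolderCount: number, limit: number, offset: number) => {
  // Folders occupy the front of the combined list.
  const folderLimit = Math.max(0, Math.min(limit, totalFolderCount - offset));
  // Accounts fill the remainder of the page, offset past all folders.
  const accountLimit = limit - folderLimit;
  const accountOffset = Math.max(0, offset - totalFolderCount);
  return { folderLimit, accountLimit, accountOffset };
};

// pageSlices(3, 20, 0)  -> { folderLimit: 3, accountLimit: 17, accountOffset: 0 }
// pageSlices(3, 20, 20) -> { folderLimit: 0, accountLimit: 20, accountOffset: 17 }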

View File

@@ -1,4 +1,7 @@
import { OrderByDirection, TProjectPermission } from "@app/lib/types";
import { TPamAccount } from "../pam-resource/pam-resource-types";
import { PamAccountOrderBy, PamAccountView } from "./pam-account-enums";
// DTOs
export type TCreateAccountDTO = Pick<
@@ -18,3 +21,14 @@ export type TAccessAccountDTO = {
actorUserAgent: string;
duration: number;
};
export type TListAccountsDTO = {
accountPath: string;
accountView: PamAccountView;
search?: string;
orderBy?: PamAccountOrderBy;
orderDirection?: OrderByDirection;
limit?: number;
offset?: number;
filterResourceIds?: string[];
} & TProjectPermission;

View File

@@ -1,9 +1,106 @@
import { Knex } from "knex";
import { TDbClient } from "@app/db";
import { TableName } from "@app/db/schemas";
import { ormify } from "@app/lib/knex";
import { DatabaseError } from "@app/lib/errors";
import { ormify, selectAllTableCols } from "@app/lib/knex";
import { OrderByDirection } from "@app/lib/types";
import { PamAccountOrderBy } from "../pam-account/pam-account-enums";
export type TPamFolderDALFactory = ReturnType<typeof pamFolderDALFactory>;
export const pamFolderDALFactory = (db: TDbClient) => {
const orm = ormify(db, TableName.PamFolder);
return { ...orm };
const findByProjectId = async (
{
projectId,
parentId,
search,
limit,
offset = 0,
orderBy = PamAccountOrderBy.Name,
orderDirection = OrderByDirection.ASC
}: {
projectId: string;
parentId?: string | null;
search?: string;
limit?: number;
offset?: number;
orderBy?: PamAccountOrderBy;
orderDirection?: OrderByDirection;
},
tx?: Knex
) => {
try {
const dbInstance = tx || db.replicaNode();
const query = dbInstance(TableName.PamFolder).where(`${TableName.PamFolder}.projectId`, projectId);
if (parentId) {
void query.where(`${TableName.PamFolder}.parentId`, parentId);
} else {
void query.whereNull(`${TableName.PamFolder}.parentId`);
}
if (search) {
// escape special characters (`%`, `_`) and the escape character itself (`\`)
const escapedSearch = search.replace(/\\/g, "\\\\").replace(/%/g, "\\%").replace(/_/g, "\\_");
void query.whereRaw(`??.?? ILIKE ? ESCAPE '\\'`, [TableName.PamFolder, "name", `%${escapedSearch}%`]);
}
const countQuery = query.clone().count("*", { as: "count" }).first();
void query.select(selectAllTableCols(TableName.PamFolder));
const direction = orderDirection === OrderByDirection.ASC ? "ASC" : "DESC";
void query.orderByRaw(`${TableName.PamFolder}.?? COLLATE "en-x-icu" ${direction}`, [orderBy]);
if (typeof limit === "number") {
void query.limit(limit).offset(offset);
}
const [folders, countResult] = await Promise.all([query, countQuery]);
const totalCount = Number(countResult?.count || 0);
return { folders, totalCount };
} catch (error) {
throw new DatabaseError({ error, name: "Find PAM folders" });
}
};
const findByPath = async (projectId: string, path: string, tx?: Knex) => {
try {
const dbInstance = tx || db.replicaNode();
const pathSegments = path.split("/").filter(Boolean);
let parentId: string | null = null;
let currentFolder: Awaited<ReturnType<typeof orm.findOne>> | undefined;
for await (const segment of pathSegments) {
const query = dbInstance(TableName.PamFolder)
.where(`${TableName.PamFolder}.projectId`, projectId)
.where(`${TableName.PamFolder}.name`, segment);
if (parentId) {
void query.where(`${TableName.PamFolder}.parentId`, parentId);
} else {
void query.whereNull(`${TableName.PamFolder}.parentId`);
}
currentFolder = await query.first();
if (!currentFolder) {
return undefined;
}
parentId = currentFolder.id;
}
return currentFolder;
} catch (error) {
throw new DatabaseError({ error, name: "Find PAM folder by path" });
}
};
return { ...orm, findByProjectId, findByPath };
};
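
`findByPath` walks the path one segment at a time, scoping each lookup to the folder found for the previous segment, and returns `undefined` on the first miss. A hedged usage sketch (identifiers assumed in scope):

// Resolves the folder at /prod/db, or undefined if any segment is missing.
const folder = await pamFolderDAL.findByPath(projectId, "/prod/db");
if (!folder) {
  // Either "prod" does not exist at the root, or "db" does not exist under it.
}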

View File

@@ -2,7 +2,11 @@ import { Knex } from "knex";
import { TDbClient } from "@app/db";
import { TableName } from "@app/db/schemas";
import { DatabaseError } from "@app/lib/errors";
import { ormify, selectAllTableCols } from "@app/lib/knex";
import { OrderByDirection } from "@app/lib/types";
import { PamResourceOrderBy } from "./pam-resource-enums";
export type TPamResourceDALFactory = ReturnType<typeof pamResourceDALFactory>;
export const pamResourceDALFactory = (db: TDbClient) => {
@@ -20,5 +24,65 @@ export const pamResourceDALFactory = (db: TDbClient) => {
return doc;
};
return { ...orm, findById };
const findByProjectId = async (
{
projectId,
search,
limit,
offset = 0,
orderBy = PamResourceOrderBy.Name,
orderDirection = OrderByDirection.ASC,
filterResourceTypes
}: {
projectId: string;
search?: string;
limit?: number;
offset?: number;
orderBy?: PamResourceOrderBy;
orderDirection?: OrderByDirection;
filterResourceTypes?: string[];
},
tx?: Knex
) => {
try {
const dbInstance = tx || db.replicaNode();
const query = dbInstance(TableName.PamResource).where(`${TableName.PamResource}.projectId`, projectId);
if (search) {
// escape special characters (`%`, `_`) and the escape character itself (`\`)
const escapedSearch = search.replace(/\\/g, "\\\\").replace(/%/g, "\\%").replace(/_/g, "\\_");
const pattern = `%${escapedSearch}%`;
void query.where((q) => {
void q
.whereRaw(`??.?? ILIKE ? ESCAPE '\\'`, [TableName.PamResource, "name", pattern])
.orWhereRaw(`??.?? ILIKE ? ESCAPE '\\'`, [TableName.PamResource, "resourceType", pattern]);
});
}
if (filterResourceTypes && filterResourceTypes.length) {
void query.whereIn(`${TableName.PamResource}.resourceType`, filterResourceTypes);
}
const countQuery = query.clone().count("*", { as: "count" }).first();
void query.select(selectAllTableCols(TableName.PamResource));
const direction = orderDirection === OrderByDirection.ASC ? "ASC" : "DESC";
void query.orderByRaw(`${TableName.PamResource}.?? COLLATE "en-x-icu" ${direction}`, [orderBy]);
if (typeof limit === "number") {
void query.limit(limit).offset(offset);
}
const [resources, countResult] = await Promise.all([query, countQuery]);
const totalCount = Number(countResult?.count || 0);
return { resources, totalCount };
} catch (error) {
throw new DatabaseError({ error, name: "Find PAM resources" });
}
};
return { ...orm, findById, findByProjectId };
};
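
A note on the count pattern shared by these DALs: the count query is cloned from the base query before `select`, `orderBy`, and `limit` are applied, so `totalCount` reflects every matching row while the main query returns only the requested page. A condensed restatement (identifiers as in the function above):

// Clone for the total before pagination is applied to the main query.
const countQuery = query.clone().count("*", { as: "count" }).first();
void query.select(selectAllTableCols(TableName.PamResource)).limit(limit).offset(offset);
const [rows, countResult] = await Promise.all([query, countQuery]);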

View File

@@ -1,4 +1,9 @@
export enum PamResource {
Postgres = "postgres",
MySQL = "mysql"
MySQL = "mysql",
SSH = "ssh"
}
export enum PamResourceOrderBy {
Name = "name"
}

View File

@@ -1,10 +1,12 @@
import { PamResource } from "./pam-resource-enums";
import { TPamAccountCredentials, TPamResourceConnectionDetails, TPamResourceFactory } from "./pam-resource-types";
import { sqlResourceFactory } from "./shared/sql/sql-resource-factory";
import { sshResourceFactory } from "./ssh/ssh-resource-factory";
type TPamResourceFactoryImplementation = TPamResourceFactory<TPamResourceConnectionDetails, TPamAccountCredentials>;
export const PAM_RESOURCE_FACTORY_MAP: Record<PamResource, TPamResourceFactoryImplementation> = {
[PamResource.Postgres]: sqlResourceFactory as TPamResourceFactoryImplementation,
[PamResource.MySQL]: sqlResourceFactory as TPamResourceFactoryImplementation
[PamResource.MySQL]: sqlResourceFactory as TPamResourceFactoryImplementation,
[PamResource.SSH]: sshResourceFactory as TPamResourceFactoryImplementation
};
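
A hedged sketch of dispatching through the map above: the resource's type selects the factory implementation, which is then invoked with the resource's connection details (values and surrounding identifiers are illustrative):

const factory = PAM_RESOURCE_FACTORY_MAP[PamResource.SSH](
  PamResource.SSH,
  { host: "10.0.0.5", port: 22 }, // SSH connection details
  gatewayId,       // assumed in scope
  gatewayV2Service // assumed in scope
);
await factory.validateConnection();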

View File

@@ -20,7 +20,7 @@ import {
encryptResourceConnectionDetails,
listResourceOptions
} from "./pam-resource-fns";
import { TCreateResourceDTO, TUpdateResourceDTO } from "./pam-resource-types";
import { TCreateResourceDTO, TListResourcesDTO, TUpdateResourceDTO } from "./pam-resource-types";
type TPamResourceServiceFactoryDep = {
pamResourceDAL: TPamResourceDALFactory;
@@ -192,19 +192,18 @@ export const pamResourceServiceFactory = ({
gatewayV2Service
);
// Logic to prevent overwriting unedited censored values
const finalCredentials = { ...rotationAccountCredentials };
if (
resource.encryptedRotationAccountCredentials &&
rotationAccountCredentials.password === "__INFISICAL_UNCHANGED__"
) {
let finalCredentials = { ...rotationAccountCredentials };
if (resource.encryptedRotationAccountCredentials) {
const decryptedCredentials = await decryptAccountCredentials({
encryptedCredentials: resource.encryptedRotationAccountCredentials,
projectId: resource.projectId,
kmsService
});
finalCredentials.password = decryptedCredentials.password;
finalCredentials = await factory.handleOverwritePreventionForCensoredValues(
rotationAccountCredentials,
decryptedCredentials
);
}
try {
@@ -268,22 +267,23 @@ export const pamResourceServiceFactory = ({
}
};
const list = async (projectId: string, actor: OrgServiceActor) => {
const list = async ({ projectId, actor, actorId, actorAuthMethod, actorOrgId, ...params }: TListResourcesDTO) => {
const { permission } = await permissionService.getProjectPermission({
actor: actor.type,
actorAuthMethod: actor.authMethod,
actorId: actor.id,
actorOrgId: actor.orgId,
actor,
actorId,
actorAuthMethod,
actorOrgId,
projectId,
actionProjectType: ActionProjectType.PAM
});
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.PamResources);
const resources = await pamResourceDAL.find({ projectId });
const { resources, totalCount } = await pamResourceDAL.findByProjectId({ projectId, ...params });
return {
resources: await Promise.all(resources.map((resource) => decryptResource(resource, projectId, kmsService)))
resources: await Promise.all(resources.map((resource) => decryptResource(resource, projectId, kmsService))),
totalCount
};
};

View File

@@ -1,3 +1,5 @@
import { OrderByDirection, TProjectPermission } from "@app/lib/types";
import { TGatewayV2ServiceFactory } from "../gateway-v2/gateway-v2-service";
import {
TMySQLAccount,
@@ -5,22 +7,31 @@ import {
TMySQLResource,
TMySQLResourceConnectionDetails
} from "./mysql/mysql-resource-types";
import { PamResource } from "./pam-resource-enums";
import { PamResource, PamResourceOrderBy } from "./pam-resource-enums";
import {
TPostgresAccount,
TPostgresAccountCredentials,
TPostgresResource,
TPostgresResourceConnectionDetails
} from "./postgres/postgres-resource-types";
import {
TSSHAccount,
TSSHAccountCredentials,
TSSHResource,
TSSHResourceConnectionDetails
} from "./ssh/ssh-resource-types";
// Resource types
export type TPamResource = TPostgresResource | TMySQLResource;
export type TPamResourceConnectionDetails = TPostgresResourceConnectionDetails | TMySQLResourceConnectionDetails;
export type TPamResource = TPostgresResource | TMySQLResource | TSSHResource;
export type TPamResourceConnectionDetails =
| TPostgresResourceConnectionDetails
| TMySQLResourceConnectionDetails
| TSSHResourceConnectionDetails;
// Account types
export type TPamAccount = TPostgresAccount | TMySQLAccount;
export type TPamAccount = TPostgresAccount | TMySQLAccount | TSSHAccount;
// eslint-disable-next-line @typescript-eslint/no-duplicate-type-constituents
export type TPamAccountCredentials = TPostgresAccountCredentials | TMySQLAccountCredentials;
export type TPamAccountCredentials = TPostgresAccountCredentials | TMySQLAccountCredentials | TSSHAccountCredentials;
// Resource DTOs
export type TCreateResourceDTO = Pick<
@@ -32,6 +43,15 @@ export type TUpdateResourceDTO = Partial<Omit<TCreateResourceDTO, "resourceType"
resourceId: string;
};
export type TListResourcesDTO = {
search?: string;
orderBy?: PamResourceOrderBy;
orderDirection?: OrderByDirection;
limit?: number;
offset?: number;
filterResourceTypes?: string[];
} & TProjectPermission;
// Resource factory
export type TPamResourceFactoryValidateConnection<T extends TPamResourceConnectionDetails> = () => Promise<T>;
export type TPamResourceFactoryValidateAccountCredentials<C extends TPamAccountCredentials> = (
@@ -51,4 +71,5 @@ export type TPamResourceFactory<T extends TPamResourceConnectionDetails, C exten
validateConnection: TPamResourceFactoryValidateConnection<T>;
validateAccountCredentials: TPamResourceFactoryValidateAccountCredentials<C>;
rotateAccountCredentials: TPamResourceFactoryRotateAccountCredentials<C>;
handleOverwritePreventionForCensoredValues: (updatedAccountCredentials: C, currentCredentials: C) => Promise<C>;
};

View File

@@ -337,9 +337,24 @@ export const sqlResourceFactory: TPamResourceFactory<TSqlResourceConnectionDetai
}
};
const handleOverwritePreventionForCensoredValues = async (
updatedAccountCredentials: TSqlAccountCredentials,
currentCredentials: TSqlAccountCredentials
) => {
if (updatedAccountCredentials.password === "__INFISICAL_UNCHANGED__") {
return {
...updatedAccountCredentials,
password: currentCredentials.password
};
}
return updatedAccountCredentials;
};
return {
validateConnection,
validateAccountCredentials,
rotateAccountCredentials
rotateAccountCredentials,
handleOverwritePreventionForCensoredValues
};
};
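
The helper above implements the censored-value round trip: the API returns the sentinel `__INFISICAL_UNCHANGED__` in place of secrets, and on update the sentinel is swapped back for the stored value so an untouched field never overwrites the real secret. A hedged usage sketch (`factory` and the credential values are illustrative):

const merged = await factory.handleOverwritePreventionForCensoredValues(
  { username: "app", password: "__INFISICAL_UNCHANGED__" }, // client payload
  { username: "app", password: "s3cret" } // decrypted stored credentials
);
// merged.password === "s3cret" -- the stored value is preserved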

View File

@@ -0,0 +1,5 @@
export enum SSHAuthMethod {
Password = "password",
PublicKey = "public-key",
Certificate = "certificate"
}

View File

@@ -0,0 +1,265 @@
import { Client } from "ssh2";
import { BadRequestError } from "@app/lib/errors";
import { GatewayProxyProtocol } from "@app/lib/gateway";
import { withGatewayV2Proxy } from "@app/lib/gateway-v2/gateway-v2";
import { logger } from "@app/lib/logger";
import { verifyHostInputValidity } from "../../dynamic-secret/dynamic-secret-fns";
import { TGatewayV2ServiceFactory } from "../../gateway-v2/gateway-v2-service";
import { PamResource } from "../pam-resource-enums";
import {
TPamResourceFactory,
TPamResourceFactoryRotateAccountCredentials,
TPamResourceFactoryValidateAccountCredentials
} from "../pam-resource-types";
import { SSHAuthMethod } from "./ssh-resource-enums";
import { TSSHAccountCredentials, TSSHResourceConnectionDetails } from "./ssh-resource-types";
const EXTERNAL_REQUEST_TIMEOUT = 10 * 1000;
export const executeWithGateway = async <T>(
config: {
connectionDetails: TSSHResourceConnectionDetails;
resourceType: PamResource;
gatewayId: string;
},
gatewayV2Service: Pick<TGatewayV2ServiceFactory, "getPlatformConnectionDetailsByGatewayId">,
operation: (proxyPort: number) => Promise<T>
): Promise<T> => {
const { connectionDetails, gatewayId } = config;
const [targetHost] = await verifyHostInputValidity(connectionDetails.host, true);
const platformConnectionDetails = await gatewayV2Service.getPlatformConnectionDetailsByGatewayId({
gatewayId,
targetHost,
targetPort: connectionDetails.port
});
if (!platformConnectionDetails) {
throw new BadRequestError({ message: "Unable to connect to gateway, no platform connection details found" });
}
return withGatewayV2Proxy(
async (proxyPort) => {
return operation(proxyPort);
},
{
protocol: GatewayProxyProtocol.Tcp,
relayHost: platformConnectionDetails.relayHost,
gateway: platformConnectionDetails.gateway,
relay: platformConnectionDetails.relay
}
);
};
export const sshResourceFactory: TPamResourceFactory<TSSHResourceConnectionDetails, TSSHAccountCredentials> = (
resourceType,
connectionDetails,
gatewayId,
gatewayV2Service
) => {
const validateConnection = async () => {
try {
await executeWithGateway({ connectionDetails, gatewayId, resourceType }, gatewayV2Service, async (proxyPort) => {
return new Promise<void>((resolve, reject) => {
const client = new Client();
let handshakeComplete = false;
client.on("error", (err) => {
logger.info(
{ error: err.message, handshakeComplete },
"[SSH Resource Factory] SSH client error event received"
);
// If we got an authentication error, it means we successfully reached the SSH server
// and completed the SSH handshake - that's good enough for connection validation
if (handshakeComplete || err.message.includes("authentication") || err.message.includes("publickey")) {
logger.info(
{ handshakeComplete, errorMessage: err.message },
"[SSH Resource Factory] SSH connection validation succeeded (auth error after handshake)"
);
client.end();
resolve();
} else {
logger.error(
{ error: err.message, handshakeComplete },
"[SSH Resource Factory] SSH connection validation failed"
);
reject(err);
}
});
client.on("handshake", () => {
// SSH handshake completed - the server is reachable and responding
logger.info("[SSH Resource Factory] SSH handshake event received - setting handshakeComplete to true");
handshakeComplete = true;
client.end();
resolve();
});
client.on("timeout", () => {
logger.error("[SSH Resource Factory] SSH connection timeout");
reject(new Error("Connection timeout"));
});
// Attempt connection with a dummy username (we don't care about auth success)
// The goal is just to verify the SSH server is reachable and responding
client.connect({
host: "localhost",
port: proxyPort,
username: "infisical-connection-test",
password: "infisical-connection-test-password",
readyTimeout: EXTERNAL_REQUEST_TIMEOUT,
tryKeyboard: false,
// We want to fail fast on auth; we're just testing reachability
authHandler: () => {
// If authHandler is called, SSH handshake succeeded
handshakeComplete = true;
return false; // Don't continue with auth
}
});
});
});
return connectionDetails;
} catch (error) {
throw new BadRequestError({
message: `Unable to validate connection to ${resourceType}: ${(error as Error).message || String(error)}`
});
}
};
const validateAccountCredentials: TPamResourceFactoryValidateAccountCredentials<TSSHAccountCredentials> = async (
credentials
) => {
try {
await executeWithGateway({ connectionDetails, gatewayId, resourceType }, gatewayV2Service, async (proxyPort) => {
return new Promise<void>((resolve, reject) => {
const client = new Client();
client.on("ready", () => {
logger.info(
{ username: credentials.username, authMethod: credentials.authMethod },
"[SSH Resource Factory] SSH authentication successful"
);
client.end();
resolve();
});
client.on("error", (err) => {
logger.error(
{ error: err.message, username: credentials.username, authMethod: credentials.authMethod },
"[SSH Resource Factory] SSH authentication failed"
);
reject(err);
});
client.on("timeout", () => {
logger.error(
{ username: credentials.username, authMethod: credentials.authMethod },
"[SSH Resource Factory] SSH authentication timeout"
);
reject(new Error("Connection timeout"));
});
// Build connection config based on auth method
const baseConfig = {
host: "localhost",
port: proxyPort,
username: credentials.username,
readyTimeout: EXTERNAL_REQUEST_TIMEOUT
};
switch (credentials.authMethod) {
case SSHAuthMethod.Password:
client.connect({
...baseConfig,
password: credentials.password,
tryKeyboard: false
});
break;
case SSHAuthMethod.PublicKey:
client.connect({
...baseConfig,
privateKey: credentials.privateKey,
tryKeyboard: false
});
break;
default:
reject(new Error(`Unsupported SSH auth method: ${(credentials as TSSHAccountCredentials).authMethod}`));
}
});
});
return credentials;
} catch (error) {
if (error instanceof Error) {
// Check for common authentication failure messages
if (
error.message.includes("authentication") ||
error.message.includes("All configured authentication methods failed") ||
error.message.includes("publickey")
) {
throw new BadRequestError({
message: "Account credentials invalid."
});
}
if (error.message === "Connection timeout") {
throw new BadRequestError({
message: "Connection timeout. Verify that the SSH server is reachable"
});
}
}
throw new BadRequestError({
message: `Unable to validate account credentials for ${resourceType}: ${(error as Error).message || String(error)}`
});
}
};
const rotateAccountCredentials: TPamResourceFactoryRotateAccountCredentials<TSSHAccountCredentials> = async (
rotationAccountCredentials
) => {
return rotationAccountCredentials;
};
const handleOverwritePreventionForCensoredValues = async (
updatedAccountCredentials: TSSHAccountCredentials,
currentCredentials: TSSHAccountCredentials
) => {
if (updatedAccountCredentials.authMethod !== currentCredentials.authMethod) {
return updatedAccountCredentials;
}
if (
updatedAccountCredentials.authMethod === SSHAuthMethod.Password &&
currentCredentials.authMethod === SSHAuthMethod.Password
) {
if (updatedAccountCredentials.password === "__INFISICAL_UNCHANGED__") {
return {
...updatedAccountCredentials,
password: currentCredentials.password
};
}
}
if (
updatedAccountCredentials.authMethod === SSHAuthMethod.PublicKey &&
currentCredentials.authMethod === SSHAuthMethod.PublicKey
) {
if (updatedAccountCredentials.privateKey === "__INFISICAL_UNCHANGED__") {
return {
...updatedAccountCredentials,
privateKey: currentCredentials.privateKey
};
}
}
return updatedAccountCredentials;
};
return {
validateConnection,
validateAccountCredentials,
rotateAccountCredentials,
handleOverwritePreventionForCensoredValues
};
};
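
The `validateConnection` flow above deliberately treats an authentication failure as success: if the server rejected our dummy credentials, the TCP connection and SSH handshake must already have completed. A condensed standalone restatement of that decision (the function name is illustrative):

// True when the error implies the SSH server was actually reached.
const reachedSshServer = (err: Error, handshakeComplete: boolean): boolean =>
  handshakeComplete ||
  err.message.includes("authentication") ||
  err.message.includes("publickey");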

View File

@@ -0,0 +1,117 @@
import { z } from "zod";
import { PamResource } from "../pam-resource-enums";
import {
BaseCreatePamAccountSchema,
BaseCreatePamResourceSchema,
BasePamAccountSchema,
BasePamAccountSchemaWithResource,
BasePamResourceSchema,
BaseUpdatePamAccountSchema,
BaseUpdatePamResourceSchema
} from "../pam-resource-schemas";
import { SSHAuthMethod } from "./ssh-resource-enums";
export const BaseSSHResourceSchema = BasePamResourceSchema.extend({ resourceType: z.literal(PamResource.SSH) });
export const SSHResourceListItemSchema = z.object({
name: z.literal("SSH"),
resource: z.literal(PamResource.SSH)
});
export const SSHResourceConnectionDetailsSchema = z.object({
host: z.string().trim().max(255),
port: z.number()
});
export const SSHPasswordCredentialsSchema = z.object({
authMethod: z.literal(SSHAuthMethod.Password),
username: z.string().trim().max(255),
password: z.string().trim().max(255)
});
export const SSHPublicKeyCredentialsSchema = z.object({
authMethod: z.literal(SSHAuthMethod.PublicKey),
username: z.string().trim().max(255),
privateKey: z.string().trim().max(5000)
});
export const SSHCertificateCredentialsSchema = z.object({
authMethod: z.literal(SSHAuthMethod.Certificate),
username: z.string().trim().max(255)
});
export const SSHAccountCredentialsSchema = z.discriminatedUnion("authMethod", [
SSHPasswordCredentialsSchema,
SSHPublicKeyCredentialsSchema,
SSHCertificateCredentialsSchema
]);
export const SSHResourceSchema = BaseSSHResourceSchema.extend({
connectionDetails: SSHResourceConnectionDetailsSchema,
rotationAccountCredentials: SSHAccountCredentialsSchema.nullable().optional()
});
export const SanitizedSSHResourceSchema = BaseSSHResourceSchema.extend({
connectionDetails: SSHResourceConnectionDetailsSchema,
rotationAccountCredentials: z
.discriminatedUnion("authMethod", [
z.object({
authMethod: z.literal(SSHAuthMethod.Password),
username: z.string()
}),
z.object({
authMethod: z.literal(SSHAuthMethod.PublicKey),
username: z.string()
}),
z.object({
authMethod: z.literal(SSHAuthMethod.Certificate),
username: z.string()
})
])
.nullable()
.optional()
});
export const CreateSSHResourceSchema = BaseCreatePamResourceSchema.extend({
connectionDetails: SSHResourceConnectionDetailsSchema,
rotationAccountCredentials: SSHAccountCredentialsSchema.nullable().optional()
});
export const UpdateSSHResourceSchema = BaseUpdatePamResourceSchema.extend({
connectionDetails: SSHResourceConnectionDetailsSchema.optional(),
rotationAccountCredentials: SSHAccountCredentialsSchema.nullable().optional()
});
// Accounts
export const SSHAccountSchema = BasePamAccountSchema.extend({
credentials: SSHAccountCredentialsSchema
});
export const CreateSSHAccountSchema = BaseCreatePamAccountSchema.extend({
credentials: SSHAccountCredentialsSchema
});
export const UpdateSSHAccountSchema = BaseUpdatePamAccountSchema.extend({
credentials: SSHAccountCredentialsSchema.optional()
});
export const SanitizedSSHAccountWithResourceSchema = BasePamAccountSchemaWithResource.extend({
credentials: z.discriminatedUnion("authMethod", [
z.object({
authMethod: z.literal(SSHAuthMethod.Password),
username: z.string()
}),
z.object({
authMethod: z.literal(SSHAuthMethod.PublicKey),
username: z.string()
}),
z.object({
authMethod: z.literal(SSHAuthMethod.Certificate),
username: z.string()
})
])
});
// Sessions
export const SSHSessionCredentialsSchema = SSHResourceConnectionDetailsSchema.and(SSHAccountCredentialsSchema);
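
A small sketch of how the discriminated union above validates: `authMethod` selects which credential shape applies, so a password payload cannot satisfy the public-key branch (sample values are illustrative):

// Parses: the "password" branch requires username + password.
SSHAccountCredentialsSchema.parse({
  authMethod: "password",
  username: "deploy",
  password: "hunter2"
});

// Fails: the "public-key" branch requires privateKey, not password.
SSHAccountCredentialsSchema.safeParse({
  authMethod: "public-key",
  username: "deploy",
  password: "hunter2"
}).success; // false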

View File

@@ -0,0 +1,16 @@
import { z } from "zod";
import {
SSHAccountCredentialsSchema,
SSHAccountSchema,
SSHResourceConnectionDetailsSchema,
SSHResourceSchema
} from "./ssh-resource-schemas";
// Resources
export type TSSHResource = z.infer<typeof SSHResourceSchema>;
export type TSSHResourceConnectionDetails = z.infer<typeof SSHResourceConnectionDetailsSchema>;
// Accounts
export type TSSHAccount = z.infer<typeof SSHAccountSchema>;
export type TSSHAccountCredentials = z.infer<typeof SSHAccountCredentialsSchema>;

View File

@@ -2,7 +2,7 @@ import { TPamSessions } from "@app/db/schemas";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { TPamSanitizedSession, TPamSessionCommandLog } from "./pam-session.types";
import { TPamSanitizedSession, TPamSessionCommandLog, TTerminalEvent } from "./pam-session-types";
export const decryptSessionCommandLogs = async ({
projectId,
@@ -22,7 +22,7 @@ export const decryptSessionCommandLogs = async ({
cipherTextBlob: encryptedLogs
});
return JSON.parse(decryptedPlainTextBlob.toString()) as TPamSessionCommandLog;
return JSON.parse(decryptedPlainTextBlob.toString()) as (TPamSessionCommandLog | TTerminalEvent)[];
};
export const decryptSession = async (
@@ -32,7 +32,7 @@ export const decryptSession = async (
) => {
return {
...session,
commandLogs: session.encryptedLogsBlob
logs: session.encryptedLogsBlob
? await decryptSessionCommandLogs({
projectId,
encryptedLogs: session.encryptedLogsBlob,

View File

@@ -8,8 +8,18 @@ export const PamSessionCommandLogSchema = z.object({
timestamp: z.coerce.date()
});
// SSH Terminal Event schemas
export const TerminalEventTypeSchema = z.enum(["input", "output", "resize", "error"]);
export const TerminalEventSchema = z.object({
timestamp: z.coerce.date(),
eventType: TerminalEventTypeSchema,
data: z.string(), // Base64 encoded binary data
elapsedTime: z.number() // Seconds since session start (for replay)
});
export const SanitizedSessionSchema = PamSessionsSchema.omit({
encryptedLogsBlob: true
}).extend({
commandLogs: PamSessionCommandLogSchema.array()
logs: z.array(z.union([PamSessionCommandLogSchema, TerminalEventSchema]))
});
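
An illustrative event that satisfies `TerminalEventSchema` above (values are made up; `timestamp` is coerced from the ISO string to a `Date`):

const event = TerminalEventSchema.parse({
  timestamp: "2025-11-21T14:03:09.000Z",
  eventType: "output",
  data: Buffer.from("$ ls\n").toString("base64"), // base64-encoded terminal bytes
  elapsedTime: 1.25 // seconds since session start
});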

View File

@@ -12,10 +12,10 @@ import { TProjectDALFactory } from "@app/services/project/project-dal";
import { TLicenseServiceFactory } from "../license/license-service";
import { OrgPermissionGatewayActions, OrgPermissionSubjects } from "../permission/org-permission";
import { ProjectPermissionPamSessionActions, ProjectPermissionSub } from "../permission/project-permission";
import { TUpdateSessionLogsDTO } from "./pam-session.types";
import { TPamSessionDALFactory } from "./pam-session-dal";
import { PamSessionStatus } from "./pam-session-enums";
import { decryptSession } from "./pam-session-fns";
import { TUpdateSessionLogsDTO } from "./pam-session-types";
type TPamSessionServiceFactoryDep = {
pamSessionDAL: TPamSessionDALFactory;

View File

@@ -1,12 +1,13 @@
import { z } from "zod";
import { PamSessionCommandLogSchema, SanitizedSessionSchema } from "./pam-session-schemas";
import { PamSessionCommandLogSchema, SanitizedSessionSchema, TerminalEventSchema } from "./pam-session-schemas";
export type TPamSessionCommandLog = z.infer<typeof PamSessionCommandLogSchema>;
export type TTerminalEvent = z.infer<typeof TerminalEventSchema>;
export type TPamSanitizedSession = z.infer<typeof SanitizedSessionSchema>;
// DTOs
export type TUpdateSessionLogsDTO = {
sessionId: string;
logs: TPamSessionCommandLog[];
logs: (TPamSessionCommandLog | TTerminalEvent)[];
};

View File

@@ -74,7 +74,12 @@ export const pkiAcmeChallengeServiceFactory = ({
// Notice: we are inside a transaction, and ideally we should not hold a transaction open while
// performing a long-running operation. But assuming we are not performing tons of
// challenge validations at the same time, it should be fine.
const challengeResponse = await fetch(challengeUrl, { signal: AbortSignal.timeout(timeoutMs) });
const challengeResponse = await fetch(challengeUrl, {
// In case we override the host in development mode, still provide the original host in the header
// to help the upstream server validate the request
headers: { Host: host },
signal: AbortSignal.timeout(timeoutMs)
});
if (challengeResponse.status !== 200) {
throw new AcmeIncorrectResponseError({
message: `ACME challenge response is not 200: ${challengeResponse.status}`

View File

@@ -58,7 +58,15 @@ export const GetAcmeDirectoryResponseSchema = z.object({
newNonce: z.string(),
newAccount: z.string(),
newOrder: z.string(),
revokeCert: z.string().optional()
revokeCert: z.string().optional(),
meta: z
.object({
termsOfService: z.string().optional(),
website: z.string().optional(),
caaIdentities: z.array(z.string()).optional(),
externalAccountRequired: z.boolean().optional()
})
.optional()
});
// New Account payload schema

View File

@@ -206,6 +206,9 @@ export const pkiAcmeServiceFactory = ({
const { protectedHeader: rawProtectedHeader, payload: rawPayload } = result;
try {
const protectedHeader = ProtectedHeaderSchema.parse(rawProtectedHeader);
if (protectedHeader.jwk && protectedHeader.kid) {
throw new AcmeMalformedError({ message: "Both JWK and KID are provided in the protected header" });
}
const parsedUrl = (() => {
try {
return new URL(protectedHeader.url);
@@ -288,6 +291,7 @@ export const pkiAcmeServiceFactory = ({
url,
rawJwsPayload,
getJWK: async (protectedHeader) => {
// resolve the account's JWK from the KID in the protected header
if (!protectedHeader.kid) {
throw new AcmeMalformedError({ message: "KID is required in the protected header" });
}
@@ -353,7 +357,10 @@ export const pkiAcmeServiceFactory = ({
return {
newNonce: buildUrl(profile.id, "/new-nonce"),
newAccount: buildUrl(profile.id, "/new-account"),
newOrder: buildUrl(profile.id, "/new-order")
newOrder: buildUrl(profile.id, "/new-order"),
meta: {
externalAccountRequired: true
}
};
};
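
With the `meta` block added above, the directory response now advertises that external account binding is mandatory, which clients such as cert-manager consult before registering. An illustrative response shape (URLs abridged, values made up):

const directory = {
  newNonce: "https://.../new-nonce",
  newAccount: "https://.../new-account",
  newOrder: "https://.../new-order",
  meta: { externalAccountRequired: true }
};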
@@ -386,11 +393,61 @@ export const pkiAcmeServiceFactory = ({
payload: TCreateAcmeAccountPayload;
}): Promise<TAcmeResponse<TCreateAcmeAccountResponse>> => {
const profile = await validateAcmeProfile(profileId);
const publicKeyThumbprint = await calculateJwkThumbprint(jwk, "sha256");
const existingAccount: TPkiAcmeAccounts | null = await acmeAccountDAL.findByProfileIdAndPublicKeyThumbprintAndAlg(
profileId,
alg,
publicKeyThumbprint
);
if (onlyReturnExisting) {
if (!existingAccount) {
throw new AcmeAccountDoesNotExistError({ message: "ACME account not found" });
}
return {
status: 200,
body: {
status: "valid",
contact: existingAccount.emails,
orders: buildUrl(profile.id, `/accounts/${existingAccount.id}/orders`)
},
headers: {
Location: buildUrl(profile.id, `/accounts/${existingAccount.id}`),
Link: `<${buildUrl(profile.id, "/directory")}>;rel="index"`
}
};
}
// Note: We only check EAB for new-account requests. This is a very special case for cert-manager.
// There's a bug in their ACME client implementation: instead of reusing the account KID value they
// already have, they rely on a '{"onlyReturnExisting": true}' new-account request to find out their
// KID value. The problem is that this new-account request doesn't come with EAB. And when the
// get-existing-account operation fails, they just discard the error and proceed to request a new
// order. Since no KID is provided, their ACME client sends a JWK instead. As a result, we were
// seeing a "KID not provided in header" error on the new-order endpoint.
//
// To solve the problem, we loosen the EAB check for the onlyReturnExisting new-account request.
// This should be fine as we've already checked EAB when the account was created, and ownership of
// the private key indicates it is the same user.
// ref: https://github.com/cert-manager/cert-manager/issues/7388#issuecomment-3535630925
if (!externalAccountBinding) {
throw new AcmeExternalAccountRequiredError({ message: "External account binding is required" });
}
if (existingAccount) {
return {
status: 200,
body: {
status: "valid",
contact: existingAccount.emails,
orders: buildUrl(profile.id, `/accounts/${existingAccount.id}/orders`)
},
headers: {
Location: buildUrl(profile.id, `/accounts/${existingAccount.id}`),
Link: `<${buildUrl(profile.id, "/directory")}>;rel="index"`
}
};
}
const publicKeyThumbprint = await calculateJwkThumbprint(jwk, "sha256");
const certificateManagerKmsId = await getProjectKmsCertificateKeyId({
projectId: profile.projectId,
projectDAL,
@@ -441,30 +498,7 @@ export const pkiAcmeServiceFactory = ({
});
}
const existingAccount: TPkiAcmeAccounts | null = await acmeAccountDAL.findByProfileIdAndPublicKeyThumbprintAndAlg(
profileId,
alg,
publicKeyThumbprint
);
if (onlyReturnExisting && !existingAccount) {
throw new AcmeAccountDoesNotExistError({ message: "ACME account not found" });
}
if (existingAccount) {
// With the same public key, we found an existing account, just return it
return {
status: 200,
body: {
status: "valid",
contact: existingAccount.emails,
orders: buildUrl(profile.id, `/accounts/${existingAccount.id}/orders`)
},
headers: {
Location: buildUrl(profile.id, `/accounts/${existingAccount.id}`),
Link: `<${buildUrl(profile.id, "/directory")}>;rel="index"`
}
};
}
// TODO: handle unique constraint violation error, should be very very rare
const newAccount = await acmeAccountDAL.create({
profileId: profile.id,
alg,

View File

@@ -181,11 +181,7 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
tx.ref("enforcementLevel").withSchema(TableName.SecretApprovalPolicy).as("policyEnforcementLevel"),
tx.ref("allowedSelfApprovals").withSchema(TableName.SecretApprovalPolicy).as("policyAllowedSelfApprovals"),
tx.ref("approvals").withSchema(TableName.SecretApprovalPolicy).as("policyApprovals"),
tx.ref("deletedAt").withSchema(TableName.SecretApprovalPolicy).as("policyDeletedAt"),
tx
.ref("shouldCheckSecretPermission")
.withSchema(TableName.SecretApprovalPolicy)
.as("policySecretReadAccessCompat")
tx.ref("deletedAt").withSchema(TableName.SecretApprovalPolicy).as("policyDeletedAt")
);
const findById = async (id: string, tx?: Knex) => {
@@ -225,8 +221,7 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
enforcementLevel: el.policyEnforcementLevel,
envId: el.policyEnvId,
deletedAt: el.policyDeletedAt,
allowedSelfApprovals: el.policyAllowedSelfApprovals,
shouldCheckSecretPermission: el.policySecretReadAccessCompat
allowedSelfApprovals: el.policyAllowedSelfApprovals
}
}),
childrenMapper: [

View File

@@ -106,6 +106,16 @@ export const GROUPS = {
filterUsers:
"Whether to filter the list of returned users. 'existingMembers' will only return existing users in the group, 'nonMembers' will only return users not in the group, undefined will return all users in the organization."
},
LIST_PROJECTS: {
id: "The ID of the group to list projects for.",
offset: "The offset to start from. If you enter 10, it will start from the 10th project.",
limit: "The number of projects to return.",
search: "The text string that project name or slug will be filtered by.",
filterProjects:
"Whether to filter the list of returned projects. 'assignedProjects' will only return projects assigned to the group, 'unassignedProjects' will only return projects not assigned to the group, undefined will return all projects in the organization.",
orderBy: "The column to order projects by.",
orderDirection: "The direction to order projects in."
},
ADD_USER: {
id: "The ID of the group to add the user to.",
username: "The username of the user to add to the group."
@@ -584,6 +594,10 @@ export const TOKEN_AUTH = {
offset: "The offset to start from. If you enter 10, it will start from the 10th token.",
limit: "The number of tokens to return."
},
GET_TOKEN: {
identityId: "The ID of the machine identity to get the token for.",
tokenId: "The ID of the token to get metadata for."
},
CREATE_TOKEN: {
identityId: "The ID of the machine identity to create the token for.",
name: "The name of the token to create."

View File

@@ -400,7 +400,7 @@ const envSchema = z
isAcmeDevelopmentMode: data.NODE_ENV === "development" && data.ACME_DEVELOPMENT_MODE,
isProductionMode: data.NODE_ENV === "production" || IS_PACKAGED,
isRedisSentinelMode: Boolean(data.REDIS_SENTINEL_HOSTS),
isBddNockApiEnabled: data.NODE_ENV === "development" && data.BDD_NOCK_API_ENABLED,
isBddNockApiEnabled: data.NODE_ENV !== "production" && data.BDD_NOCK_API_ENABLED,
REDIS_SENTINEL_HOSTS: data.REDIS_SENTINEL_HOSTS?.trim()
?.split(",")
.map((el) => {

View File

@@ -0,0 +1,104 @@
import type { Definition } from "nock";
import { z } from "zod";
import { getConfig } from "@app/lib/config/env";
import { ForbiddenRequestError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
// When running in production, we don't even want to import nock, because it's not needed and it increases memory usage a lot.
// It once caused an outage in the production environment.
// This is why we would rather crash the app if it's not in development mode (in that case, Kubernetes should stop it from rolling out).
if (process.env.NODE_ENV === "production") {
throw new Error("BDD Nock API can only be enabled in development or test mode");
}
export const registerBddNockRouter = async (server: FastifyZodProvider) => {
const appCfg = getConfig();
const importNock = async () => {
// eslint-disable-next-line import/no-extraneous-dependencies
const { default: nock } = await import("nock");
return nock;
};
const checkIfBddNockApiEnabled = () => {
// Note: this API is only available in development mode and only for BDD tests.
// This endpoint should NEVER BE ENABLED IN PRODUCTION!
if (appCfg.NODE_ENV === "production" || !appCfg.isBddNockApiEnabled) {
throw new ForbiddenRequestError({ message: "BDD Nock API is not enabled" });
}
};
server.route({
method: "POST",
url: "/define",
schema: {
body: z.object({ definitions: z.unknown().array() }),
response: {
200: z.object({ status: z.string() })
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
checkIfBddNockApiEnabled();
const { body } = req;
const { definitions } = body;
logger.info(definitions, "Defining nock");
const processedDefinitions = definitions.map((definition: unknown) => {
const { path, ...rest } = definition as Definition;
return {
...rest,
path:
path !== undefined && typeof path === "string"
? path
: new RegExp((path as unknown as { regex: string }).regex ?? "")
} as Definition;
});
const nock = await importNock();
nock.define(processedDefinitions);
// Ensure we are activating the nocks, because we could have called `nock.restore()` before this call.
if (!nock.isActive()) {
nock.activate();
}
return { status: "ok" };
}
});
server.route({
method: "POST",
url: "/clean-all",
schema: {
response: {
200: z.object({ status: z.string() })
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async () => {
checkIfBddNockApiEnabled();
logger.info("Cleaning all nocks");
const nock = await importNock();
nock.cleanAll();
return { status: "ok" };
}
});
server.route({
method: "POST",
url: "/restore",
schema: {
response: {
200: z.object({ status: z.string() })
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async () => {
checkIfBddNockApiEnabled();
logger.info("Restore network requests from nock");
const nock = await importNock();
nock.restore();
return { status: "ok" };
}
});
};
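
A hedged sketch of a client-side call to the `/define` endpoint above. Regex paths cannot survive JSON serialization, so they are sent as `{ regex: "..." }` objects, which the handler rebuilds with `new RegExp(...)` (the base URL and JWT are assumptions):

await fetch(`${baseUrl}/api/__bdd_nock__/define`, {
  method: "POST",
  headers: { Authorization: `Bearer ${jwt}`, "Content-Type": "application/json" },
  body: JSON.stringify({
    definitions: [
      {
        scope: "https://upstream.example.com",
        method: "GET",
        path: { regex: "^/v1/.*$" }, // rebuilt as new RegExp("^/v1/.*$") server-side
        status: 200,
        response: { ok: true }
      }
    ]
  })
});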

View File

@@ -0,0 +1,6 @@
export const registerBddNockRouter = async () => {
// This route is only available in development or test mode.
// The actual implementation lives in the dev.ts file; this module is aliased to that file in development or test mode.
// If we somehow try to enable it in production, we throw an error.
throw new Error("BDD Nock should not be enabled in production");
};

View File

@@ -1,3 +1,4 @@
import { registerBddNockRouter } from "@bdd_routes/bdd-nock-router";
import { CronJob } from "cron";
import { Knex } from "knex";
import { monitorEventLoopDelay } from "perf_hooks";
@@ -2431,6 +2432,7 @@ export const registerRoutes = async (
}
}
await kmsService.startService(hsmStatus);
await telemetryQueue.startTelemetryCheck();
await telemetryQueue.startAggregatedEventsJob();
await dailyResourceCleanUp.init();
@@ -2443,7 +2445,6 @@ export const registerRoutes = async (
await pkiSubscriberQueue.startDailyAutoRenewalJob();
await pkiAlertV2Queue.init();
await certificateV3Queue.init();
await kmsService.startService(hsmStatus);
await microsoftTeamsService.start();
await dynamicSecretQueueService.init();
await eventBusService.init();
@@ -2698,6 +2699,12 @@ export const registerRoutes = async (
await server.register(registerV3Routes, { prefix: "/api/v3" });
await server.register(registerV4Routes, { prefix: "/api/v4" });
// Note: This is a special route for BDD tests, available only in development mode.
// This route should NEVER BE ENABLED IN PRODUCTION!
if (getConfig().isBddNockApiEnabled) {
await server.register(registerBddNockRouter, { prefix: "/api/__bdd_nock__" });
}
server.addHook("onClose", async () => {
cronJobs.forEach((job) => job.stop());
await telemetryService.flushAll();

View File

@@ -9,6 +9,8 @@ import {
SuperAdminSchema,
UsersSchema
} from "@app/db/schemas";
import { getLicenseKeyConfig } from "@app/ee/services/license/license-fns";
import { LicenseType } from "@app/ee/services/license/license-types";
import { getConfig, overridableKeys } from "@app/lib/config/env";
import { crypto } from "@app/lib/crypto/cryptography";
import { BadRequestError } from "@app/lib/errors";
@@ -65,6 +67,9 @@ export const registerAdminRouter = async (server: FastifyZodProvider) => {
const config = await getServerCfg();
const serverEnvs = getConfig();
const licenseKeyConfig = getLicenseKeyConfig();
const hasOfflineLicense = licenseKeyConfig.isValid && licenseKeyConfig.type === LicenseType.Offline;
return {
config: {
...config,
@@ -73,7 +78,7 @@ export const registerAdminRouter = async (server: FastifyZodProvider) => {
isSecretScanningDisabled: serverEnvs.DISABLE_SECRET_SCANNING,
kubernetesAutoFetchServiceAccountToken: serverEnvs.KUBERNETES_AUTO_FETCH_SERVICE_ACCOUNT_TOKEN,
paramsFolderSecretDetectionEnabled: serverEnvs.PARAMS_FOLDER_SECRET_DETECTION_ENABLED,
isOfflineUsageReportsEnabled: !!serverEnvs.LICENSE_KEY_OFFLINE
isOfflineUsageReportsEnabled: hasOfflineLicense
}
};
}

View File

@@ -1,87 +0,0 @@
// import { z } from "zod";
// import { getConfig } from "@app/lib/config/env";
// import { ForbiddenRequestError } from "@app/lib/errors";
// import { logger } from "@app/lib/logger";
// import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
// import { AuthMode } from "@app/services/auth/auth-type";
// export const registerBddNockRouter = async (server: FastifyZodProvider) => {
// const checkIfBddNockApiEnabled = () => {
// const appCfg = getConfig();
// // Note: Please note that this API is only available in development mode and only for BDD tests.
// // This endpoint should NEVER BE ENABLED IN PRODUCTION!
// if (appCfg.NODE_ENV !== "development" || !appCfg.isBddNockApiEnabled) {
// throw new ForbiddenRequestError({ message: "BDD Nock API is not enabled" });
// }
// };
// server.route({
// method: "POST",
// url: "/define",
// schema: {
// body: z.object({ definitions: z.unknown().array() }),
// response: {
// 200: z.object({ status: z.string() })
// }
// },
// onRequest: verifyAuth([AuthMode.JWT]),
// handler: async (req) => {
// checkIfBddNockApiEnabled();
// const { body } = req;
// const { definitions } = body;
// logger.info(definitions, "Defining nock");
// const processedDefinitions = definitions.map((definition: unknown) => {
// const { path, ...rest } = definition as Definition;
// return {
// ...rest,
// path:
// path !== undefined && typeof path === "string"
// ? path
// : new RegExp((path as unknown as { regex: string }).regex ?? "")
// } as Definition;
// });
// nock.define(processedDefinitions);
// // Ensure we are activating the nocks, because we could have called `nock.restore()` before this call.
// if (!nock.isActive()) {
// nock.activate();
// }
// return { status: "ok" };
// }
// });
// server.route({
// method: "POST",
// url: "/clean-all",
// schema: {
// response: {
// 200: z.object({ status: z.string() })
// }
// },
// onRequest: verifyAuth([AuthMode.JWT]),
// handler: async () => {
// checkIfBddNockApiEnabled();
// logger.info("Cleaning all nocks");
// nock.cleanAll();
// return { status: "ok" };
// }
// });
// server.route({
// method: "POST",
// url: "/restore",
// schema: {
// response: {
// 200: z.object({ status: z.string() })
// }
// },
// onRequest: verifyAuth([AuthMode.JWT]),
// handler: async () => {
// checkIfBddNockApiEnabled();
// logger.info("Restore network requests from nock");
// nock.restore();
// return { status: "ok" };
// }
// });
// };

View File

@@ -314,7 +314,8 @@ export const registerIdentityTokenAuthRouter = async (server: FastifyZodProvider
accessToken: z.string(),
expiresIn: z.coerce.number(),
accessTokenMaxTTL: z.coerce.number(),
tokenType: z.literal("Bearer")
tokenType: z.literal("Bearer"),
tokenData: IdentityAccessTokensSchema
})
}
},
@@ -346,7 +347,8 @@ export const registerIdentityTokenAuthRouter = async (server: FastifyZodProvider
accessToken,
tokenType: "Bearer" as const,
expiresIn: identityTokenAuth.accessTokenTTL,
accessTokenMaxTTL: identityTokenAuth.accessTokenMaxTTL
accessTokenMaxTTL: identityTokenAuth.accessTokenMaxTTL,
tokenData: identityAccessToken
};
}
});
@@ -406,6 +408,60 @@ export const registerIdentityTokenAuthRouter = async (server: FastifyZodProvider
}
});
server.route({
method: "GET",
url: "/token-auth/identities/:identityId/tokens/:tokenId",
config: {
rateLimit: readLimit
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
hide: false,
tags: [ApiDocsTags.TokenAuth],
description: "Get token for machine identity with Token Auth",
security: [
{
bearerAuth: []
}
],
params: z.object({
identityId: z.string().describe(TOKEN_AUTH.GET_TOKEN.identityId),
tokenId: z.string().describe(TOKEN_AUTH.GET_TOKEN.tokenId)
}),
response: {
200: z.object({
token: IdentityAccessTokensSchema
})
}
},
handler: async (req) => {
const { token, identityMembershipOrg } = await server.services.identityTokenAuth.getTokenAuthTokenById({
identityId: req.params.identityId,
tokenId: req.params.tokenId,
actor: req.permission.type,
actorId: req.permission.id,
actorOrgId: req.permission.orgId,
actorAuthMethod: req.permission.authMethod,
isActorSuperAdmin: isSuperAdmin(req.auth)
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
orgId: identityMembershipOrg.scopeOrgId,
event: {
type: EventType.GET_TOKEN_IDENTITY_TOKEN_AUTH,
metadata: {
identityId: token.identityId,
identityName: identityMembershipOrg.identity.name,
tokenId: token.id
}
}
});
return { token };
}
});
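A hedged usage sketch for the new GET endpoint; the path params and response shape come from the schema above, while the base URL, mount prefix, and credentials are placeholders:

// Fetch a single Token Auth access token by ID (assuming the v1 auth prefix).
const res = await fetch(
  `https://infisical.example.com/api/v1/auth/token-auth/identities/${identityId}/tokens/${tokenId}`,
  { headers: { Authorization: `Bearer ${accessToken}` } }
);
const { token } = (await res.json()) as { token: unknown }; // matches IdentityAccessTokensSchema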
server.route({
method: "PATCH",
url: "/token-auth/tokens/:tokenId",

View File

@@ -8,7 +8,6 @@ import { registerSecretSyncRouter, SECRET_SYNC_REGISTER_ROUTER_MAP } from "@app/
import { registerAdminRouter } from "./admin-router";
import { registerAuthRoutes } from "./auth-router";
// import { registerBddNockRouter } from "./bdd-nock-router";
import { registerProjectBotRouter } from "./bot-router";
import { registerCaRouter } from "./certificate-authority-router";
import { CERTIFICATE_AUTHORITY_REGISTER_ROUTER_MAP } from "./certificate-authority-routers";
@@ -238,10 +237,4 @@ export const registerV1Routes = async (server: FastifyZodProvider) => {
await server.register(registerEventRouter, { prefix: "/events" });
await server.register(registerUpgradePathRouter, { prefix: "/upgrade-path" });
// Note: This is a special route for BDD tests; it's only available in development mode.
// This route should NEVER BE ENABLED IN PRODUCTION!
// if (getConfig().isBddNockApiEnabled) {
// await server.register(registerBddNockRouter, { prefix: "/bdd-nock" });
// }
};

View File

@@ -0,0 +1,22 @@
import {
AWS_SECRETS_MANAGER_PKI_SYNC_LIST_OPTION,
AwsSecretsManagerPkiSyncSchema,
CreateAwsSecretsManagerPkiSyncSchema,
UpdateAwsSecretsManagerPkiSyncSchema
} from "@app/services/pki-sync/aws-secrets-manager";
import { PkiSync } from "@app/services/pki-sync/pki-sync-enums";
import { registerSyncPkiEndpoints } from "./pki-sync-endpoints";
export const registerAwsSecretsManagerPkiSyncRouter = async (server: FastifyZodProvider) =>
registerSyncPkiEndpoints({
destination: PkiSync.AwsSecretsManager,
server,
responseSchema: AwsSecretsManagerPkiSyncSchema,
createSchema: CreateAwsSecretsManagerPkiSyncSchema,
updateSchema: UpdateAwsSecretsManagerPkiSyncSchema,
syncOptions: {
canImportCertificates: AWS_SECRETS_MANAGER_PKI_SYNC_LIST_OPTION.canImportCertificates,
canRemoveCertificates: AWS_SECRETS_MANAGER_PKI_SYNC_LIST_OPTION.canRemoveCertificates
}
});

View File

@@ -0,0 +1,17 @@
import { ChefPkiSyncSchema, CreateChefPkiSyncSchema, UpdateChefPkiSyncSchema } from "@app/services/pki-sync/chef";
import { PkiSync } from "@app/services/pki-sync/pki-sync-enums";
import { registerSyncPkiEndpoints } from "./pki-sync-endpoints";
export const registerChefPkiSyncRouter = async (server: FastifyZodProvider) =>
registerSyncPkiEndpoints({
destination: PkiSync.Chef,
server,
responseSchema: ChefPkiSyncSchema,
createSchema: CreateChefPkiSyncSchema,
updateSchema: UpdateChefPkiSyncSchema,
syncOptions: {
canImportCertificates: false,
canRemoveCertificates: true
}
});

View File

@@ -1,11 +1,15 @@
import { PkiSync } from "@app/services/pki-sync/pki-sync-enums";
import { registerAwsCertificateManagerPkiSyncRouter } from "./aws-certificate-manager-pki-sync-router";
import { registerAwsSecretsManagerPkiSyncRouter } from "./aws-secrets-manager-pki-sync-router";
import { registerAzureKeyVaultPkiSyncRouter } from "./azure-key-vault-pki-sync-router";
import { registerChefPkiSyncRouter } from "./chef-pki-sync-router";
export * from "./pki-sync-router";
export const PKI_SYNC_REGISTER_ROUTER_MAP: Record<PkiSync, (server: FastifyZodProvider) => Promise<void>> = {
[PkiSync.AzureKeyVault]: registerAzureKeyVaultPkiSyncRouter,
[PkiSync.AwsCertificateManager]: registerAwsCertificateManagerPkiSyncRouter
[PkiSync.AwsCertificateManager]: registerAwsCertificateManagerPkiSyncRouter,
[PkiSync.AwsSecretsManager]: registerAwsSecretsManagerPkiSyncRouter,
[PkiSync.Chef]: registerChefPkiSyncRouter
};

View File

@@ -23,6 +23,8 @@ import { mapEnumsForValidation } from "@app/services/certificate-common/certific
import { EnrollmentType } from "@app/services/certificate-profile/certificate-profile-types";
import { validateTemplateRegexField } from "@app/services/certificate-template/certificate-template-validators";
import { booleanSchema } from "../sanitizedSchemas";
interface CertificateRequestForService {
commonName?: string;
keyUsages?: CertKeyUsageType[];
@@ -87,7 +89,8 @@ export const registerCertificatesRouter = async (server: FastifyZodProvider) =>
)
.optional(),
signatureAlgorithm: z.nativeEnum(CertSignatureAlgorithm),
keyAlgorithm: z.nativeEnum(CertKeyAlgorithm)
keyAlgorithm: z.nativeEnum(CertKeyAlgorithm),
removeRootsFromChain: booleanSchema.default(false).optional()
})
.refine(validateTtlAndDateFields, {
message:
@@ -131,7 +134,8 @@ export const registerCertificatesRouter = async (server: FastifyZodProvider) =>
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
profileId: req.body.profileId,
certificateRequest: mappedCertificateRequest
certificateRequest: mappedCertificateRequest,
removeRootsFromChain: req.body.removeRootsFromChain
});
await server.services.auditLog.createAuditLog({
@@ -171,7 +175,8 @@ export const registerCertificatesRouter = async (server: FastifyZodProvider) =>
.min(1, "TTL cannot be empty")
.refine((val) => ms(val) > 0, "TTL must be a positive number"),
notBefore: validateCaDateField.optional(),
notAfter: validateCaDateField.optional()
notAfter: validateCaDateField.optional(),
removeRootsFromChain: booleanSchema.default(false).optional()
})
.refine(validateTtlAndDateFields, {
message:
@@ -206,7 +211,8 @@ export const registerCertificatesRouter = async (server: FastifyZodProvider) =>
},
notBefore: req.body.notBefore ? new Date(req.body.notBefore) : undefined,
notAfter: req.body.notAfter ? new Date(req.body.notAfter) : undefined,
enrollmentType: EnrollmentType.API
enrollmentType: EnrollmentType.API,
removeRootsFromChain: req.body.removeRootsFromChain
});
await server.services.auditLog.createAuditLog({
@@ -262,7 +268,8 @@ export const registerCertificatesRouter = async (server: FastifyZodProvider) =>
notAfter: validateCaDateField.optional(),
commonName: validateTemplateRegexField.optional(),
signatureAlgorithm: z.nativeEnum(CertSignatureAlgorithm),
keyAlgorithm: z.nativeEnum(CertKeyAlgorithm)
keyAlgorithm: z.nativeEnum(CertKeyAlgorithm),
removeRootsFromChain: booleanSchema.default(false).optional()
})
.refine(validateTtlAndDateFields, {
message:
@@ -325,7 +332,8 @@ export const registerCertificatesRouter = async (server: FastifyZodProvider) =>
notAfter: req.body.notAfter ? new Date(req.body.notAfter) : undefined,
signatureAlgorithm: req.body.signatureAlgorithm,
keyAlgorithm: req.body.keyAlgorithm
}
},
removeRootsFromChain: req.body.removeRootsFromChain
});
await server.services.auditLog.createAuditLog({
@@ -357,6 +365,11 @@ export const registerCertificatesRouter = async (server: FastifyZodProvider) =>
params: z.object({
certificateId: z.string().uuid()
}),
body: z
.object({
removeRootsFromChain: booleanSchema.default(false).optional()
})
.optional(),
response: {
200: z.object({
certificate: z.string().trim(),
@@ -375,7 +388,8 @@ export const registerCertificatesRouter = async (server: FastifyZodProvider) =>
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
certificateId: req.params.certificateId
certificateId: req.params.certificateId,
removeRootsFromChain: req.body?.removeRootsFromChain
});
await server.services.auditLog.createAuditLog({
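Each issuance, sign, order, and retrieval body above now accepts the same flag. A minimal request-body sketch (field names from the schemas above; required enum fields omitted for brevity, other values illustrative):

// Issue a certificate but strip the root CA from the returned chain.
const body = {
  profileId: "…",                 // existing required field
  commonName: "service.internal", // illustrative
  ttl: "30d",
  removeRootsFromChain: true      // new flag; defaults to false when omitted
};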

View File

@@ -170,7 +170,8 @@ const PKI_APP_CONNECTIONS = [
AppConnection.AWS,
AppConnection.Cloudflare,
AppConnection.AzureADCS,
AppConnection.AzureKeyVault
AppConnection.AzureKeyVault,
AppConnection.Chef
];
export const listAppConnectionOptions = (projectType?: ProjectType) => {

View File

@@ -196,3 +196,62 @@ export const convertExtendedKeyUsageArrayToLegacy = (
): CertExtendedKeyUsage[] | undefined => {
return usages?.map(convertToLegacyExtendedKeyUsage);
};
/**
* Parses a PEM-formatted certificate chain and returns individual certificates
* @param certificateChain - PEM-formatted certificate chain
* @returns Array of individual PEM certificates
*/
const parseCertificateChain = (certificateChain: string): string[] => {
if (!certificateChain || typeof certificateChain !== "string") {
return [];
}
const certRegex = new RE2(/-----BEGIN CERTIFICATE-----[\s\S]*?-----END CERTIFICATE-----/g);
const certificates = certificateChain.match(certRegex);
return certificates ? certificates.map((cert) => cert.trim()) : [];
};
/**
 * Removes the trailing root CA certificate from a certificate chain, assuming the chain is
 * ordered with the root CA last. If the chain contains only a single (root) certificate,
 * returns an empty string.
 *
 * @param certificateChain - PEM-formatted certificate chain ordered leaf/intermediates first, root CA last
 * @returns PEM-formatted certificate chain with the final (root CA) certificate removed
 */
export const removeRootCaFromChain = (certificateChain?: string): string => {
if (!certificateChain || typeof certificateChain !== "string") {
return "";
}
const certificates = parseCertificateChain(certificateChain);
if (certificates.length === 0) {
return "";
}
const intermediateCerts = certificates.slice(0, -1);
return intermediateCerts.join("\n");
};
/**
 * Extracts the root CA certificate from a certificate chain, assuming the root CA is the
 * last certificate in the chain.
 *
 * @param certificateChain - PEM-formatted certificate chain ordered leaf/intermediates first, root CA last
 * @returns PEM-formatted root CA certificate, or an empty string if the chain is empty
 */
export const extractRootCaFromChain = (certificateChain?: string): string => {
if (!certificateChain || typeof certificateChain !== "string") {
return "";
}
const certificates = parseCertificateChain(certificateChain);
if (certificates.length === 0) {
return "";
}
return certificates[certificates.length - 1];
};
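A quick usage sketch of the two helpers above on a two-certificate chain (PEM bodies elided):

const chain = [
  "-----BEGIN CERTIFICATE-----\n…intermediate…\n-----END CERTIFICATE-----",
  "-----BEGIN CERTIFICATE-----\n…root…\n-----END CERTIFICATE-----"
].join("\n");

removeRootCaFromChain(chain);  // -> intermediate PEM only
extractRootCaFromChain(chain); // -> root PEM (last certificate in the chain)
removeRootCaFromChain("");     // -> "" (empty input yields an empty string)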

View File

@@ -428,7 +428,13 @@ describe("CertificateProfileService", () => {
service.createProfile({
...mockActor,
projectId: "project-123",
data: validProfileData
data: {
...validProfileData,
enrollmentType: EnrollmentType.ACME,
acmeConfig: {},
apiConfig: undefined,
estConfig: undefined
}
})
).rejects.toThrowError(
new BadRequestError({

View File

@@ -213,7 +213,7 @@ export const certificateProfileServiceFactory = ({
throw new NotFoundError({ message: "Project not found" });
}
const plan = await licenseService.getPlan(project.orgId);
if (!plan.pkiAcme) {
if (!plan.pkiAcme && data.enrollmentType === EnrollmentType.ACME) {
throw new BadRequestError({
message: "Failed to create certificate profile: Plan restriction. Upgrade plan to continue"
});

View File

@@ -47,7 +47,8 @@ import {
convertKeyUsageArrayFromLegacy,
convertKeyUsageArrayToLegacy,
mapEnumsForValidation,
normalizeDateForApi
normalizeDateForApi,
removeRootCaFromChain
} from "../certificate-common/certificate-utils";
import { TCertificateSyncDALFactory } from "../certificate-sync/certificate-sync-dal";
import { TPkiSyncDALFactory } from "../pki-sync/pki-sync-dal";
@@ -366,7 +367,8 @@ export const certificateV3ServiceFactory = ({
actor,
actorId,
actorAuthMethod,
actorOrgId
actorOrgId,
removeRootsFromChain
}: TIssueCertificateFromProfileDTO): Promise<TCertificateFromProfileResponse> => {
const profile = await validateProfileAndPermissions(
profileId,
@@ -480,10 +482,15 @@ export const certificateV3ServiceFactory = ({
renewBeforeDays: finalRenewBeforeDays
});
let finalCertificateChain = bufferToString(certificateChain);
if (removeRootsFromChain) {
finalCertificateChain = removeRootCaFromChain(finalCertificateChain);
}
return {
certificate: bufferToString(certificate),
issuingCaCertificate: bufferToString(issuingCaCertificate),
certificateChain: bufferToString(certificateChain),
certificateChain: finalCertificateChain,
privateKey: bufferToString(privateKey),
serialNumber,
certificateId: cert.id,
@@ -503,7 +510,8 @@ export const certificateV3ServiceFactory = ({
actorId,
actorAuthMethod,
actorOrgId,
enrollmentType
enrollmentType,
removeRootsFromChain
}: TSignCertificateFromProfileDTO): Promise<Omit<TCertificateFromProfileResponse, "privateKey">> => {
const profile = await validateProfileAndPermissions(
profileId,
@@ -590,7 +598,10 @@ export const certificateV3ServiceFactory = ({
});
const certificateString = extractCertificateFromBuffer(certificate as unknown as Buffer);
const certificateChainString = extractCertificateFromBuffer(certificateChain as unknown as Buffer);
let certificateChainString = extractCertificateFromBuffer(certificateChain as unknown as Buffer);
if (removeRootsFromChain) {
certificateChainString = removeRootCaFromChain(certificateChainString);
}
return {
certificate: certificateString,
@@ -610,7 +621,8 @@ export const certificateV3ServiceFactory = ({
actor,
actorId,
actorAuthMethod,
actorOrgId
actorOrgId,
removeRootsFromChain
}: TOrderCertificateFromProfileDTO): Promise<TCertificateOrderResponse> => {
const profile = await validateProfileAndPermissions(
profileId,
@@ -665,7 +677,8 @@ export const certificateV3ServiceFactory = ({
actor,
actorId,
actorAuthMethod,
actorOrgId
actorOrgId,
removeRootsFromChain
});
const orderId = randomUUID();
@@ -703,7 +716,8 @@ export const certificateV3ServiceFactory = ({
actorId,
actorAuthMethod,
actorOrgId,
internal = false
internal = false,
removeRootsFromChain
}: TRenewCertificateDTO & { internal?: boolean }): Promise<TCertificateFromProfileResponse> => {
const renewalResult = await certificateDAL.transaction(async (tx) => {
const originalCert = await certificateDAL.findById(certificateId, tx);
@@ -929,10 +943,14 @@ export const certificateV3ServiceFactory = ({
pkiSyncQueue
});
let finalCertificateChain = renewalResult.certificateChain;
if (removeRootsFromChain) {
finalCertificateChain = removeRootCaFromChain(finalCertificateChain);
}
return {
certificate: renewalResult.certificate,
issuingCaCertificate: renewalResult.issuingCaCertificate,
certificateChain: renewalResult.certificateChain,
certificateChain: finalCertificateChain,
serialNumber: renewalResult.serialNumber,
certificateId: renewalResult.newCert.id,
projectId: renewalResult.profile.projectId,

View File

@@ -26,6 +26,7 @@ export type TIssueCertificateFromProfileDTO = {
signatureAlgorithm?: string;
keyAlgorithm?: string;
};
removeRootsFromChain?: boolean;
} & Omit<TProjectPermission, "projectId">;
export type TSignCertificateFromProfileDTO = {
@@ -37,6 +38,7 @@ export type TSignCertificateFromProfileDTO = {
notBefore?: Date;
notAfter?: Date;
enrollmentType: EnrollmentType;
removeRootsFromChain?: boolean;
} & Omit<TProjectPermission, "projectId">;
export type TOrderCertificateFromProfileDTO = {
@@ -57,6 +59,7 @@ export type TOrderCertificateFromProfileDTO = {
signatureAlgorithm?: string;
keyAlgorithm?: string;
};
removeRootsFromChain?: boolean;
} & Omit<TProjectPermission, "projectId">;
export type TCertificateFromProfileResponse = {
@@ -101,6 +104,7 @@ export type TCertificateOrderResponse = {
export type TRenewCertificateDTO = {
certificateId: string;
removeRootsFromChain?: boolean;
} & Omit<TProjectPermission, "projectId">;
export type TUpdateRenewalConfigDTO = {

View File

@@ -18,7 +18,6 @@ export const identityAccessTokenDALFactory = (db: TDbClient) => {
.where(filter)
.join(TableName.Identity, `${TableName.Identity}.id`, `${TableName.IdentityAccessToken}.identityId`)
.select(selectAllTableCols(TableName.IdentityAccessToken))
.select(db.ref("name").withSchema(TableName.Identity))
.select(db.ref("orgId").withSchema(TableName.Identity).as("identityScopeOrgId"))
.first();

View File

@@ -38,6 +38,7 @@ import {
TAttachTokenAuthDTO,
TCreateTokenAuthTokenDTO,
TGetTokenAuthDTO,
TGetTokenAuthTokenByIdDTO,
TGetTokenAuthTokensDTO,
TRevokeTokenAuthDTO,
TRevokeTokenAuthTokenDTO,
@@ -618,6 +619,52 @@ export const identityTokenAuthServiceFactory = ({
return { tokens, identityMembershipOrg };
};
const getTokenAuthTokenById = async ({
tokenId,
identityId,
isActorSuperAdmin,
actorId,
actor,
actorAuthMethod,
actorOrgId
}: TGetTokenAuthTokenByIdDTO) => {
await validateIdentityUpdateForSuperAdminPrivileges(identityId, isActorSuperAdmin);
const identityMembershipOrg = await membershipIdentityDAL.getIdentityById({
scopeData: {
scope: AccessScope.Organization,
orgId: actorOrgId
},
identityId
});
if (!identityMembershipOrg) throw new NotFoundError({ message: `Failed to find identity with ID ${identityId}` });
if (!identityMembershipOrg.identity.authMethods.includes(IdentityAuthMethod.TOKEN_AUTH)) {
throw new BadRequestError({
message: "The identity does not have Token Auth"
});
}
const { permission } = await permissionService.getOrgPermission({
scope: OrganizationActionScope.Any,
actor,
actorId,
orgId: identityMembershipOrg.scopeOrgId,
actorAuthMethod,
actorOrgId
});
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionIdentityActions.Read, OrgPermissionSubjects.Identity);
const token = await identityAccessTokenDAL.findOne({
[`${TableName.IdentityAccessToken}.id` as "id"]: tokenId,
[`${TableName.IdentityAccessToken}.authMethod` as "authMethod"]: IdentityAuthMethod.TOKEN_AUTH,
[`${TableName.IdentityAccessToken}.identityId` as "identityId"]: identityId
});
if (!token) throw new NotFoundError({ message: `Token with ID ${tokenId} not found` });
return { token, identityMembershipOrg };
};
const updateTokenAuthToken = async ({
tokenId,
name,
@@ -797,6 +844,7 @@ export const identityTokenAuthServiceFactory = ({
revokeIdentityTokenAuth,
createTokenAuthToken,
getTokenAuthTokens,
getTokenAuthTokenById,
updateTokenAuthToken,
revokeTokenAuthToken
};

View File

@@ -40,6 +40,12 @@ export type TGetTokenAuthTokensDTO = {
isActorSuperAdmin?: boolean;
} & Omit<TProjectPermission, "projectId">;
export type TGetTokenAuthTokenByIdDTO = {
tokenId: string;
identityId: string;
isActorSuperAdmin?: boolean;
} & Omit<TProjectPermission, "projectId">;
export type TUpdateTokenAuthTokenDTO = {
tokenId: string;
name?: string;

View File

@@ -93,6 +93,7 @@ export const membershipGroupServiceFactory = ({
}
const scopeDatabaseFields = factory.getScopeDatabaseFields(dto.scopeData);
await factory.onCreateMembershipGroupGuard(dto);
const customInputRoles = data.roles.filter((el) => factory.isCustomRole(el.role));
@@ -112,6 +113,19 @@ export const membershipGroupServiceFactory = ({
const customRolesGroupBySlug = groupBy(customRoles, ({ slug }) => slug);
const membership = await membershipGroupDAL.transaction(async (tx) => {
const existingMembership = await membershipGroupDAL.findOne(
{
scope: scopeData.scope,
...scopeDatabaseFields,
actorGroupId: dto.data.groupId
},
tx
);
if (existingMembership)
throw new BadRequestError({
message: "Group is already a member"
});
const doc = await membershipGroupDAL.create(
{
scope: scopeData.scope,

View File

@@ -105,6 +105,19 @@ export const membershipIdentityServiceFactory = ({
const customRolesGroupBySlug = groupBy(customRoles, ({ slug }) => slug);
const membership = await membershipIdentityDAL.transaction(async (tx) => {
const existingMembership = await membershipIdentityDAL.findOne(
{
scope: scopeData.scope,
...scopeDatabaseFields,
actorIdentityId: dto.data.identityId
},
tx
);
if (existingMembership)
throw new BadRequestError({
message: "Identity is already a member"
});
const doc = await membershipIdentityDAL.create(
{
scope: scopeData.scope,
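Both membership services now apply the same duplicate guard inside their DAL transactions; as a compact sketch (membershipDAL and actorId stand in for the group/identity variants above):

// Reject duplicates before creating the membership row, inside the same transaction.
const existing = await membershipDAL.findOne({ scope: scopeData.scope, ...scopeDatabaseFields, actorId }, tx);
if (existing) throw new BadRequestError({ message: "Actor is already a member" });
const doc = await membershipDAL.create({ scope: scopeData.scope, ...scopeDatabaseFields, actorId }, tx);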

View File

@@ -1,7 +1,8 @@
import crypto from "crypto";
import { getLicenseKeyConfig } from "@app/ee/services/license/license-fns";
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { getConfig } from "@app/lib/config/env";
import { LicenseType } from "@app/ee/services/license/license-types";
import { BadRequestError } from "@app/lib/errors";
import { TOfflineUsageReportDALFactory } from "./offline-usage-report-dal";
@@ -30,10 +31,13 @@ export const offlineUsageReportServiceFactory = ({
};
const generateUsageReportCSV = async () => {
const cfg = getConfig();
if (!cfg.LICENSE_KEY_OFFLINE) {
const licenseKeyConfig = getLicenseKeyConfig();
const hasOfflineLicense = licenseKeyConfig.isValid && licenseKeyConfig.type === LicenseType.Offline;
if (!hasOfflineLicense) {
throw new BadRequestError({
message: "Offline usage reports are not enabled. LICENSE_KEY_OFFLINE must be configured."
message:
"Offline usage reports are not enabled. Usage reports are only available for self-hosted offline instances"
});
}

View File

@@ -14,6 +14,7 @@ export const AwsCertificateManagerPkiSyncConfigSchema = z.object({
const AwsCertificateManagerPkiSyncOptionsSchema = z.object({
canImportCertificates: z.boolean().default(false),
canRemoveCertificates: z.boolean().default(true),
includeRootCa: z.boolean().default(false),
preserveArn: z.boolean().default(true),
certificateNameSchema: z
.string()

View File

@@ -0,0 +1,71 @@
import RE2 from "re2";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import { PkiSync } from "@app/services/pki-sync/pki-sync-enums";
/**
* AWS Secrets Manager naming constraints for secrets
*/
export const AWS_SECRETS_MANAGER_PKI_SYNC_CERTIFICATE_NAMING = {
/**
* Regular expression pattern for valid AWS Secrets Manager secret names
* Must contain only alphanumeric characters, hyphens, and underscores
* Must be 1-512 characters long
*/
NAME_PATTERN: new RE2("^[\\w-]+$"),
/**
* String of characters that are forbidden in AWS Secrets Manager secret names
*/
FORBIDDEN_CHARACTERS: " @#$%^&*()+=[]{}|;':\"<>?,./",
/**
* Minimum length for secret names in AWS Secrets Manager
*/
MIN_LENGTH: 1,
/**
* Maximum length for secret names in AWS Secrets Manager
*/
MAX_LENGTH: 512,
/**
* String representation of the allowed character pattern (for UI display)
*/
ALLOWED_CHARACTER_PATTERN: "^[\\w-]+$"
} as const;
export const AWS_SECRETS_MANAGER_PKI_SYNC_DEFAULTS = {
INFISICAL_PREFIX: "infisical-",
DEFAULT_ENVIRONMENT: "production",
DEFAULT_CERTIFICATE_NAME_SCHEMA: "infisical-{{certificateId}}",
DEFAULT_FIELD_MAPPINGS: {
certificate: "certificate",
privateKey: "private_key",
certificateChain: "certificate_chain",
caCertificate: "ca_certificate"
}
};
export const AWS_SECRETS_MANAGER_PKI_SYNC_OPTIONS = {
DEFAULT_CAN_REMOVE_CERTIFICATES: true,
DEFAULT_PRESERVE_SECRET_ON_RENEWAL: true,
DEFAULT_UPDATE_EXISTING_CERTIFICATES: true,
DEFAULT_CAN_IMPORT_CERTIFICATES: false
};
/**
* AWS Secrets Manager PKI Sync list option configuration
*/
export const AWS_SECRETS_MANAGER_PKI_SYNC_LIST_OPTION = {
name: "AWS Secrets Manager" as const,
connection: AppConnection.AWS,
destination: PkiSync.AwsSecretsManager,
canImportCertificates: false,
canRemoveCertificates: true,
defaultCertificateNameSchema: "infisical-{{certificateId}}",
forbiddenCharacters: AWS_SECRETS_MANAGER_PKI_SYNC_CERTIFICATE_NAMING.FORBIDDEN_CHARACTERS,
allowedCharacterPattern: AWS_SECRETS_MANAGER_PKI_SYNC_CERTIFICATE_NAMING.ALLOWED_CHARACTER_PATTERN,
maxCertificateNameLength: AWS_SECRETS_MANAGER_PKI_SYNC_CERTIFICATE_NAMING.MAX_LENGTH,
minCertificateNameLength: AWS_SECRETS_MANAGER_PKI_SYNC_CERTIFICATE_NAMING.MIN_LENGTH
} as const;
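A small sketch showing how these constraints compose when validating a candidate secret name (the helper is illustrative, not part of the diff):

// Hypothetical helper for illustration only.
const isValidSecretName = (name: string): boolean =>
  name.length >= AWS_SECRETS_MANAGER_PKI_SYNC_CERTIFICATE_NAMING.MIN_LENGTH &&
  name.length <= AWS_SECRETS_MANAGER_PKI_SYNC_CERTIFICATE_NAMING.MAX_LENGTH &&
  AWS_SECRETS_MANAGER_PKI_SYNC_CERTIFICATE_NAMING.NAME_PATTERN.test(name);

isValidSecretName("infisical-9f2c"); // true: letters, digits, and hyphens only
isValidSecretName("bad name!");      // false: space and "!" are forbidden characters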

View File

@@ -0,0 +1,555 @@
/* eslint-disable no-continue */
/* eslint-disable no-await-in-loop */
import {
CreateSecretCommand,
DeleteSecretCommand,
ListSecretsCommand,
SecretsManagerClient,
UpdateSecretCommand
} from "@aws-sdk/client-secrets-manager";
import RE2 from "re2";
import { TCertificateSyncs } from "@app/db/schemas";
import { CustomAWSHasher } from "@app/lib/aws/hashing";
import { crypto } from "@app/lib/crypto";
import { logger } from "@app/lib/logger";
import { AWSRegion } from "@app/services/app-connection/app-connection-enums";
import { getAwsConnectionConfig } from "@app/services/app-connection/aws/aws-connection-fns";
import { TAwsConnectionConfig } from "@app/services/app-connection/aws/aws-connection-types";
import { TCertificateDALFactory } from "@app/services/certificate/certificate-dal";
import { TCertificateSyncDALFactory } from "@app/services/certificate-sync/certificate-sync-dal";
import { CertificateSyncStatus } from "@app/services/certificate-sync/certificate-sync-enums";
import { createConnectionQueue, RateLimitConfig } from "@app/services/connection-queue";
import { matchesCertificateNameSchema } from "@app/services/pki-sync/pki-sync-fns";
import { TCertificateMap, TPkiSyncWithCredentials } from "@app/services/pki-sync/pki-sync-types";
import { AWS_SECRETS_MANAGER_PKI_SYNC_DEFAULTS } from "./aws-secrets-manager-pki-sync-constants";
import {
AwsSecretsManagerCertificateSecret,
SyncCertificatesResult,
TAwsSecretsManagerPkiSyncWithCredentials
} from "./aws-secrets-manager-pki-sync-types";
const AWS_SECRETS_MANAGER_RATE_LIMIT_CONFIG: RateLimitConfig = {
MAX_CONCURRENT_REQUESTS: 10,
BASE_DELAY: 1000,
MAX_DELAY: 30000,
MAX_RETRIES: 3,
RATE_LIMIT_STATUS_CODES: [429, 503]
};
const awsSecretsManagerConnectionQueue = createConnectionQueue(AWS_SECRETS_MANAGER_RATE_LIMIT_CONFIG);
const { withRateLimitRetry } = awsSecretsManagerConnectionQueue;
const MAX_RETRIES = 10;
const sleep = async () =>
new Promise((resolve) => {
setTimeout(resolve, 1000);
});
const isInfisicalManagedCertificate = (secretName: string, pkiSync: TPkiSyncWithCredentials): boolean => {
const syncOptions = pkiSync.syncOptions as { certificateNameSchema?: string } | undefined;
const certificateNameSchema = syncOptions?.certificateNameSchema;
if (certificateNameSchema) {
const environment = AWS_SECRETS_MANAGER_PKI_SYNC_DEFAULTS.DEFAULT_ENVIRONMENT;
return matchesCertificateNameSchema(secretName, environment, certificateNameSchema);
}
return secretName.startsWith(AWS_SECRETS_MANAGER_PKI_SYNC_DEFAULTS.INFISICAL_PREFIX);
};
const parseErrorMessage = (error: unknown): string => {
if (error instanceof Error) {
return error.message;
}
if (typeof error === "string") {
return error;
}
if (error && typeof error === "object" && "message" in error) {
const { message } = error as { message: unknown };
if (typeof message === "string") {
return message;
}
}
return "Unknown error occurred";
};
const getSecretsManagerClient = async (pkiSync: TAwsSecretsManagerPkiSyncWithCredentials) => {
const { destinationConfig, connection } = pkiSync;
const config = await getAwsConnectionConfig(
connection as TAwsConnectionConfig,
destinationConfig.region as AWSRegion
);
if (!config.credentials) {
throw new Error("AWS credentials not found in connection configuration");
}
const secretsManagerClient = new SecretsManagerClient({
region: config.region,
useFipsEndpoint: crypto.isFipsModeEnabled(),
sha256: CustomAWSHasher,
credentials: config.credentials
});
return secretsManagerClient;
};
type TAwsSecretsManagerPkiSyncFactoryDeps = {
certificateDAL: Pick<TCertificateDALFactory, "findById">;
certificateSyncDAL: Pick<
TCertificateSyncDALFactory,
| "removeCertificates"
| "addCertificates"
| "findByPkiSyncAndCertificate"
| "updateById"
| "findByPkiSyncId"
| "updateSyncStatus"
>;
};
export const awsSecretsManagerPkiSyncFactory = ({
certificateDAL,
certificateSyncDAL
}: TAwsSecretsManagerPkiSyncFactoryDeps) => {
const $getSecretsManagerSecrets = async (
pkiSync: TAwsSecretsManagerPkiSyncWithCredentials,
syncId = "unknown"
): Promise<Record<string, string>> => {
const client = await getSecretsManagerClient(pkiSync);
const secrets: Record<string, string> = {};
let hasNext = true;
let nextToken: string | undefined;
let attempt = 0;
while (hasNext) {
try {
const currentToken = nextToken;
const output = await withRateLimitRetry(
() => client.send(new ListSecretsCommand({ NextToken: currentToken })),
{
operation: "list-secrets-manager-secrets",
syncId
}
);
attempt = 0;
if (output.SecretList) {
output.SecretList.forEach((secretEntry) => {
if (
secretEntry.Name &&
isInfisicalManagedCertificate(secretEntry.Name, pkiSync as unknown as TPkiSyncWithCredentials)
) {
secrets[secretEntry.Name] = secretEntry.ARN || secretEntry.Name;
}
});
}
hasNext = Boolean(output.NextToken);
nextToken = output.NextToken;
} catch (e) {
if (
e &&
typeof e === "object" &&
"name" in e &&
(e as { name: string }).name === "ThrottlingException" &&
attempt < MAX_RETRIES
) {
attempt += 1;
await sleep();
continue;
}
throw e;
}
}
return secrets;
};
const syncCertificates = async (
pkiSync: TPkiSyncWithCredentials,
certificateMap: TCertificateMap
): Promise<SyncCertificatesResult> => {
const awsPkiSync = pkiSync as unknown as TAwsSecretsManagerPkiSyncWithCredentials;
const client = await getSecretsManagerClient(awsPkiSync);
const existingSecrets = await $getSecretsManagerSecrets(awsPkiSync, pkiSync.id);
const existingSyncRecords = await certificateSyncDAL.findByPkiSyncId(pkiSync.id);
const syncRecordsByCertId = new Map<string, TCertificateSyncs>();
const syncRecordsByExternalId = new Map<string, TCertificateSyncs>();
existingSyncRecords.forEach((record: TCertificateSyncs) => {
if (record.certificateId) {
syncRecordsByCertId.set(record.certificateId, record);
}
if (record.externalIdentifier) {
syncRecordsByExternalId.set(record.externalIdentifier, record);
}
});
type CertificateUploadData = {
secretName: string;
certificateData: AwsSecretsManagerCertificateSecret;
certificateId: string;
isUpdate: boolean;
targetSecretName: string;
oldCertificateIdToRemove?: string;
};
const setCertificates: CertificateUploadData[] = [];
const validationErrors: Array<{ name: string; error: string }> = [];
const syncOptions = pkiSync.syncOptions as
| {
canRemoveCertificates?: boolean;
preserveSecretOnRenewal?: boolean;
fieldMappings?: {
certificate?: string;
privateKey?: string;
certificateChain?: string;
caCertificate?: string;
};
certificateNameSchema?: string;
}
| undefined;
const canRemoveCertificates = syncOptions?.canRemoveCertificates ?? true;
const preserveSecretOnRenewal = syncOptions?.preserveSecretOnRenewal ?? true;
const fieldMappings = {
certificate: syncOptions?.fieldMappings?.certificate ?? "certificate",
privateKey: syncOptions?.fieldMappings?.privateKey ?? "private_key",
certificateChain: syncOptions?.fieldMappings?.certificateChain ?? "certificate_chain",
caCertificate: syncOptions?.fieldMappings?.caCertificate ?? "ca_certificate"
};
const activeExternalIdentifiers = new Set<string>();
for (const [certName, certData] of Object.entries(certificateMap)) {
const { cert, privateKey: certPrivateKey, certificateChain, caCertificate, certificateId } = certData;
if (!cert || cert.trim().length === 0) {
validationErrors.push({
name: certName,
error: "Certificate content is empty or missing"
});
continue;
}
if (!certPrivateKey || certPrivateKey.trim().length === 0) {
validationErrors.push({
name: certName,
error: "Private key content is empty or missing"
});
continue;
}
if (!certificateId || typeof certificateId !== "string") {
continue;
}
const certificateData: AwsSecretsManagerCertificateSecret = {
[fieldMappings.certificate]: cert,
[fieldMappings.privateKey]: certPrivateKey
};
if (certificateChain && certificateChain.trim().length > 0) {
certificateData[fieldMappings.certificateChain] = certificateChain;
}
if (caCertificate && typeof caCertificate === "string" && caCertificate.trim().length > 0) {
certificateData[fieldMappings.caCertificate] = caCertificate;
}
let targetSecretName = certName;
if (syncOptions?.certificateNameSchema) {
const extendedCertData = certData as Record<string, unknown>;
const safeCommonName = typeof extendedCertData.commonName === "string" ? extendedCertData.commonName : "";
targetSecretName = syncOptions.certificateNameSchema
.replace(new RE2("\\{\\{certificateId\\}\\}", "g"), certificateId)
.replace(new RE2("\\{\\{commonName\\}\\}", "g"), safeCommonName);
} else {
targetSecretName = `${AWS_SECRETS_MANAGER_PKI_SYNC_DEFAULTS.INFISICAL_PREFIX}${certificateId}`;
}
const certificate = await certificateDAL.findById(certificateId);
if (certificate?.renewedByCertificateId) {
continue;
}
const syncRecordLookupId = certificate?.renewedFromCertificateId || certificateId;
const existingRecord = syncRecordsByCertId.get(syncRecordLookupId);
let shouldProcess = true;
let isUpdate = false;
if (existingRecord?.externalIdentifier) {
const existingSecret = existingSecrets[existingRecord.externalIdentifier];
if (existingSecret) {
if (certificate?.renewedFromCertificateId && preserveSecretOnRenewal) {
targetSecretName = existingRecord.externalIdentifier;
isUpdate = true;
} else if (certificate?.renewedFromCertificateId && !preserveSecretOnRenewal) {
activeExternalIdentifiers.add(existingRecord.externalIdentifier);
} else if (!certificate?.renewedFromCertificateId) {
activeExternalIdentifiers.add(existingRecord.externalIdentifier);
shouldProcess = false;
}
}
}
if (!shouldProcess) {
continue;
}
if (existingSecrets[targetSecretName]) {
isUpdate = true;
}
activeExternalIdentifiers.add(targetSecretName);
setCertificates.push({
secretName: certName,
certificateData,
certificateId,
isUpdate,
targetSecretName,
oldCertificateIdToRemove:
certificate?.renewedFromCertificateId && preserveSecretOnRenewal
? certificate.renewedFromCertificateId
: undefined
});
}
const result: SyncCertificatesResult = {
uploaded: 0,
updated: 0,
removed: 0,
failedRemovals: 0,
skipped: 0,
details: {
failedUploads: [],
failedRemovals: [],
validationErrors
}
};
for (const certData of setCertificates) {
const { secretName, certificateData, certificateId, isUpdate, targetSecretName, oldCertificateIdToRemove } =
certData;
try {
const secretValue = JSON.stringify(certificateData);
const configKeyId: unknown = awsPkiSync.destinationConfig.keyId;
const keyId: string = typeof configKeyId === "string" ? configKeyId : "alias/aws/secretsmanager";
if (isUpdate) {
await withRateLimitRetry(
() =>
client.send(
new UpdateSecretCommand({
SecretId: targetSecretName,
SecretString: secretValue,
KmsKeyId: keyId
})
),
{
operation: "update-secret",
syncId: pkiSync.id
}
);
result.updated += 1;
} else {
await withRateLimitRetry(
() =>
client.send(
new CreateSecretCommand({
Name: targetSecretName,
SecretString: secretValue,
KmsKeyId: keyId,
Description: `Certificate managed by Infisical`
})
),
{
operation: "create-secret",
syncId: pkiSync.id
}
);
result.uploaded += 1;
}
const existingRecord = syncRecordsByCertId.get(certificateId);
if (existingRecord?.id) {
await certificateSyncDAL.updateById(existingRecord.id, {
externalIdentifier: targetSecretName,
syncStatus: CertificateSyncStatus.Succeeded,
lastSyncedAt: new Date(),
lastSyncMessage: "Certificate successfully synced to AWS Secrets Manager"
});
if (oldCertificateIdToRemove && oldCertificateIdToRemove !== certificateId) {
await certificateSyncDAL.removeCertificates(pkiSync.id, [oldCertificateIdToRemove]);
}
} else {
await certificateSyncDAL.addCertificates(pkiSync.id, [
{
certificateId,
externalIdentifier: targetSecretName
}
]);
const newCertSync = await certificateSyncDAL.findByPkiSyncAndCertificate(pkiSync.id, certificateId);
if (newCertSync?.id) {
await certificateSyncDAL.updateById(newCertSync.id, {
syncStatus: CertificateSyncStatus.Succeeded,
lastSyncedAt: new Date(),
lastSyncMessage: "Certificate successfully synced to AWS Secrets Manager"
});
}
}
} catch (error) {
result.details?.failedUploads?.push({
name: secretName,
error: parseErrorMessage(error)
});
logger.error(
{
secretName,
certificateId,
error: parseErrorMessage(error),
pkiSyncId: pkiSync.id
},
"Failed to sync certificate"
);
const existingRecord = syncRecordsByCertId.get(certificateId);
if (existingRecord?.id) {
await certificateSyncDAL.updateById(existingRecord.id, {
syncStatus: CertificateSyncStatus.Failed,
lastSyncMessage: parseErrorMessage(error)
});
}
}
}
if (canRemoveCertificates) {
for (const [secretName] of Object.entries(existingSecrets)) {
if (!activeExternalIdentifiers.has(secretName)) {
try {
await withRateLimitRetry(
() =>
client.send(
new DeleteSecretCommand({
SecretId: secretName,
ForceDeleteWithoutRecovery: true
})
),
{
operation: "delete-secret",
syncId: pkiSync.id
}
);
result.removed += 1;
} catch (error) {
result.failedRemovals += 1;
result.details?.failedRemovals?.push({
name: secretName,
error: parseErrorMessage(error)
});
logger.error(
{
secretName,
error: parseErrorMessage(error),
pkiSyncId: pkiSync.id
},
"Failed to remove certificate secret"
);
}
}
}
}
return result;
};
const removeCertificates = async (
pkiSync: TPkiSyncWithCredentials,
certificateMap: TCertificateMap
): Promise<{ removed: number; failed: number }> => {
const awsPkiSync = pkiSync as unknown as TAwsSecretsManagerPkiSyncWithCredentials;
const client = await getSecretsManagerClient(awsPkiSync);
const existingSecrets = await $getSecretsManagerSecrets(awsPkiSync, pkiSync.id);
const existingSyncRecords = await certificateSyncDAL.findByPkiSyncId(pkiSync.id);
let removed = 0;
let failed = 0;
for (const [, certData] of Object.entries(certificateMap)) {
if (!certData.certificateId) continue;
const syncRecord = existingSyncRecords.find((record) => record.certificateId === certData.certificateId);
if (!syncRecord?.externalIdentifier) continue;
const secretName = syncRecord.externalIdentifier;
if (existingSecrets[secretName]) {
try {
await withRateLimitRetry(
() =>
client.send(
new DeleteSecretCommand({
SecretId: secretName,
ForceDeleteWithoutRecovery: true
})
),
{
operation: "delete-secret",
syncId: pkiSync.id
}
);
if (syncRecord.id) {
// The destination secret is gone; drop the sync record rather than leaving a stale status.
await certificateSyncDAL.removeCertificates(pkiSync.id, [certData.certificateId]);
}
removed += 1;
} catch (error) {
failed += 1;
logger.error(
{
secretName,
certificateId: certData.certificateId,
error: parseErrorMessage(error),
pkiSyncId: pkiSync.id
},
"Failed to remove certificate secret"
);
}
}
}
return { removed, failed };
};
return {
syncCertificates,
removeCertificates
};
};
export type TAwsSecretsManagerPkiSyncFactory = ReturnType<typeof awsSecretsManagerPkiSyncFactory>;
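Wiring the factory is a one-liner; a minimal sketch, assuming certificateDAL and certificateSyncDAL instances satisfying the Pick'd interfaces above, plus a sync and certificate map supplied by the queue worker:

const awsSmPkiSync = awsSecretsManagerPkiSyncFactory({ certificateDAL, certificateSyncDAL });

// Later, e.g. inside the sync queue worker (certificateMap keyed by certificate name):
const result = await awsSmPkiSync.syncCertificates(pkiSyncWithCredentials, certificateMap);
logger.info({ uploaded: result.uploaded, updated: result.updated, removed: result.removed });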

View File

@@ -0,0 +1,104 @@
import RE2 from "re2";
import { z } from "zod";
import { AppConnection, AWSRegion } from "@app/services/app-connection/app-connection-enums";
import { PkiSync } from "@app/services/pki-sync/pki-sync-enums";
import { PkiSyncSchema } from "@app/services/pki-sync/pki-sync-schemas";
import { AWS_SECRETS_MANAGER_PKI_SYNC_CERTIFICATE_NAMING } from "./aws-secrets-manager-pki-sync-constants";
export const AwsSecretsManagerPkiSyncConfigSchema = z.object({
region: z.nativeEnum(AWSRegion),
keyId: z.string().trim().optional()
});
export const AwsSecretsManagerFieldMappingsSchema = z.object({
certificate: z.string().min(1, "Certificate field name is required").default("certificate"),
privateKey: z.string().min(1, "Private key field name is required").default("private_key"),
certificateChain: z.string().min(1, "Certificate chain field name is required").default("certificate_chain"),
caCertificate: z.string().min(1, "CA certificate field name is required").default("ca_certificate")
});
const AwsSecretsManagerPkiSyncOptionsSchema = z.object({
canImportCertificates: z.boolean().default(false),
canRemoveCertificates: z.boolean().default(true),
includeRootCa: z.boolean().default(false),
preserveSecretOnRenewal: z.boolean().default(true),
updateExistingCertificates: z.boolean().default(true),
certificateNameSchema: z
.string()
.optional()
.refine(
(schema) => {
if (!schema) return true;
if (!schema.includes("{{certificateId}}")) {
return false;
}
const testName = schema
.replace(new RE2("\\{\\{certificateId\\}\\}", "g"), "test-cert-id")
.replace(new RE2("\\{\\{profileId\\}\\}", "g"), "test-profile-id")
.replace(new RE2("\\{\\{commonName\\}\\}", "g"), "test-common-name")
.replace(new RE2("\\{\\{friendlyName\\}\\}", "g"), "test-friendly-name")
.replace(new RE2("\\{\\{environment\\}\\}", "g"), "test-env");
const hasForbiddenChars = AWS_SECRETS_MANAGER_PKI_SYNC_CERTIFICATE_NAMING.FORBIDDEN_CHARACTERS.split("").some(
(char) => testName.includes(char)
);
return (
AWS_SECRETS_MANAGER_PKI_SYNC_CERTIFICATE_NAMING.NAME_PATTERN.test(testName) &&
!hasForbiddenChars &&
testName.length >= AWS_SECRETS_MANAGER_PKI_SYNC_CERTIFICATE_NAMING.MIN_LENGTH &&
testName.length <= AWS_SECRETS_MANAGER_PKI_SYNC_CERTIFICATE_NAMING.MAX_LENGTH
);
},
{
message:
"Certificate name schema must include {{certificateId}} placeholder and result in names that contain only alphanumeric characters, underscores, and hyphens and be 1-512 characters long for AWS Secrets Manager."
}
),
fieldMappings: AwsSecretsManagerFieldMappingsSchema.optional().default({
certificate: "certificate",
privateKey: "private_key",
certificateChain: "certificate_chain",
caCertificate: "ca_certificate"
})
});
export const AwsSecretsManagerPkiSyncSchema = PkiSyncSchema.extend({
destination: z.literal(PkiSync.AwsSecretsManager),
destinationConfig: AwsSecretsManagerPkiSyncConfigSchema,
syncOptions: AwsSecretsManagerPkiSyncOptionsSchema
});
export const CreateAwsSecretsManagerPkiSyncSchema = z.object({
name: z.string().trim().min(1).max(64),
description: z.string().optional(),
isAutoSyncEnabled: z.boolean().default(true),
destinationConfig: AwsSecretsManagerPkiSyncConfigSchema,
syncOptions: AwsSecretsManagerPkiSyncOptionsSchema.optional().default({}),
subscriberId: z.string().nullish(),
connectionId: z.string(),
projectId: z.string().trim().min(1),
certificateIds: z.array(z.string().uuid()).optional()
});
export const UpdateAwsSecretsManagerPkiSyncSchema = z.object({
name: z.string().trim().min(1).max(64).optional(),
description: z.string().optional(),
isAutoSyncEnabled: z.boolean().optional(),
destinationConfig: AwsSecretsManagerPkiSyncConfigSchema.optional(),
syncOptions: AwsSecretsManagerPkiSyncOptionsSchema.optional(),
subscriberId: z.string().nullish(),
connectionId: z.string().optional()
});
export const AwsSecretsManagerPkiSyncListItemSchema = z.object({
name: z.literal("AWS Secrets Manager"),
connection: z.literal(AppConnection.AWS),
destination: z.literal(PkiSync.AwsSecretsManager),
canImportCertificates: z.literal(false),
canRemoveCertificates: z.literal(true)
});
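The refine above validates a schema by substituting test values; at sync time the placeholders resolve the same way. A sketch mirroring the replacements in aws-secrets-manager-pki-sync-fns.ts:

import RE2 from "re2";

// Resolve {{certificateId}} / {{commonName}} placeholders as the sync does.
const resolveName = (schema: string, certificateId: string, commonName = ""): string =>
  schema
    .replace(new RE2("\\{\\{certificateId\\}\\}", "g"), certificateId)
    .replace(new RE2("\\{\\{commonName\\}\\}", "g"), commonName);

resolveName("infisical-{{certificateId}}", "9f2c"); // -> "infisical-9f2c"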

View File

@@ -0,0 +1,59 @@
import { z } from "zod";
import { TAwsConnection } from "@app/services/app-connection/aws/aws-connection-types";
import {
AwsSecretsManagerFieldMappingsSchema,
AwsSecretsManagerPkiSyncConfigSchema,
AwsSecretsManagerPkiSyncSchema,
CreateAwsSecretsManagerPkiSyncSchema,
UpdateAwsSecretsManagerPkiSyncSchema
} from "./aws-secrets-manager-pki-sync-schemas";
export type TAwsSecretsManagerPkiSyncConfig = z.infer<typeof AwsSecretsManagerPkiSyncConfigSchema>;
export type TAwsSecretsManagerFieldMappings = z.infer<typeof AwsSecretsManagerFieldMappingsSchema>;
export type TAwsSecretsManagerPkiSync = z.infer<typeof AwsSecretsManagerPkiSyncSchema>;
export type TAwsSecretsManagerPkiSyncInput = z.infer<typeof CreateAwsSecretsManagerPkiSyncSchema>;
export type TAwsSecretsManagerPkiSyncUpdate = z.infer<typeof UpdateAwsSecretsManagerPkiSyncSchema>;
export type TAwsSecretsManagerPkiSyncWithCredentials = TAwsSecretsManagerPkiSync & {
connection: TAwsConnection;
appConnectionName: string;
appConnectionApp: string;
};
export interface AwsSecretsManagerCertificateSecret {
[key: string]: string;
}
export interface SyncCertificatesResult {
uploaded: number;
updated: number;
removed: number;
failedRemovals: number;
skipped: number;
details?: {
failedUploads?: Array<{ name: string; error: string }>;
failedRemovals?: Array<{ name: string; error: string }>;
validationErrors?: Array<{ name: string; error: string }>;
};
}
export interface RemoveCertificatesResult {
removed: number;
failed: number;
skipped: number;
}
export interface CertificateImportRequest {
name: string;
certificate: string;
privateKey: string;
certificateChain?: string;
caCertificate?: string;
certificateId?: string;
}

View File

@@ -0,0 +1,4 @@
export * from "./aws-secrets-manager-pki-sync-constants";
export * from "./aws-secrets-manager-pki-sync-fns";
export * from "./aws-secrets-manager-pki-sync-schemas";
export * from "./aws-secrets-manager-pki-sync-types";

View File

@@ -14,6 +14,7 @@ export const AzureKeyVaultPkiSyncConfigSchema = z.object({
const AzureKeyVaultPkiSyncOptionsSchema = z.object({
canImportCertificates: z.boolean().default(false),
canRemoveCertificates: z.boolean().default(true),
includeRootCa: z.boolean().default(false),
enableVersioning: z.boolean().default(true),
certificateNameSchema: z
.string()

View File

@@ -0,0 +1,23 @@
import RE2 from "re2";
export const CHEF_PKI_SYNC_CERTIFICATE_NAMING = {
NAME_PATTERN: new RE2("^[a-zA-Z0-9_-]+$"),
FORBIDDEN_CHARACTERS: "[]{}()<>|\\:;\"'=+*&^%$#@!~`?/",
MIN_LENGTH: 1,
MAX_LENGTH: 255,
DEFAULT_SCHEMA: "{{certificateId}}"
};
export const CHEF_PKI_SYNC_DATA_BAG_NAMING = {
NAME_PATTERN: new RE2("^[a-zA-Z0-9_-]+$"),
FORBIDDEN_CHARACTERS: "[]{}()<>|\\:;\"'=+*&^%$#@!~`?/.",
MIN_LENGTH: 1,
MAX_LENGTH: 255
};
export const CHEF_PKI_SYNC_DEFAULTS = {
CERTIFICATE_DATA_BAG: "ssl_certificates",
ITEM_NAME_TEMPLATE: "{{certificateId}}",
INFISICAL_PREFIX: "Infisical-",
DEFAULT_ENVIRONMENT: "global"
} as const;
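Given these defaults, a data bag item counts as Infisical-managed when it matches the configured name schema, or, absent one, when it carries the prefix; a tiny sketch (values illustrative):

CHEF_PKI_SYNC_DEFAULTS.ITEM_NAME_TEMPLATE.replace("{{certificateId}}", "9f2c"); // -> "9f2c"

// Fallback check used when no certificateNameSchema is configured:
"Infisical-9f2c".startsWith(CHEF_PKI_SYNC_DEFAULTS.INFISICAL_PREFIX); // -> true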

View File

@@ -0,0 +1,595 @@
/* eslint-disable no-continue */
/* eslint-disable no-await-in-loop */
import { TCertificateSyncs } from "@app/db/schemas";
import {
createChefDataBagItem,
listChefDataBagItems,
removeChefDataBagItem,
updateChefDataBagItem
} from "@app/ee/services/app-connections/chef";
import { TChefDataBagItemContent } from "@app/ee/services/secret-sync/chef";
import { logger } from "@app/lib/logger";
import { TCertificateDALFactory } from "@app/services/certificate/certificate-dal";
import { TCertificateSyncDALFactory } from "@app/services/certificate-sync/certificate-sync-dal";
import { CertificateSyncStatus } from "@app/services/certificate-sync/certificate-sync-enums";
import { createConnectionQueue, RateLimitConfig } from "@app/services/connection-queue";
import { matchesCertificateNameSchema } from "@app/services/pki-sync/pki-sync-fns";
import { TCertificateMap, TPkiSyncWithCredentials } from "@app/services/pki-sync/pki-sync-types";
import { CHEF_PKI_SYNC_DEFAULTS } from "./chef-pki-sync-constants";
import { ChefCertificateDataBagItem, SyncCertificatesResult, TChefPkiSyncWithCredentials } from "./chef-pki-sync-types";
const CHEF_RATE_LIMIT_CONFIG: RateLimitConfig = {
MAX_CONCURRENT_REQUESTS: 5, // Chef servers generally have lower rate limits
BASE_DELAY: 1500,
MAX_DELAY: 30000,
MAX_RETRIES: 3,
RATE_LIMIT_STATUS_CODES: [429, 503]
};
const chefConnectionQueue = createConnectionQueue(CHEF_RATE_LIMIT_CONFIG);
const { withRateLimitRetry } = chefConnectionQueue;
const isInfisicalManagedCertificate = (certificateName: string, pkiSync: TPkiSyncWithCredentials): boolean => {
const syncOptions = pkiSync.syncOptions as { certificateNameSchema?: string } | undefined;
const certificateNameSchema = syncOptions?.certificateNameSchema;
if (certificateNameSchema) {
const environment = CHEF_PKI_SYNC_DEFAULTS.DEFAULT_ENVIRONMENT;
return matchesCertificateNameSchema(certificateName, environment, certificateNameSchema);
}
return certificateName.startsWith(CHEF_PKI_SYNC_DEFAULTS.INFISICAL_PREFIX);
};
const parseErrorMessage = (error: unknown): string => {
if (error instanceof Error) {
return error.message;
}
if (typeof error === "string") {
return error;
}
if (error && typeof error === "object" && "message" in error) {
const { message } = error as { message: unknown };
if (typeof message === "string") {
return message;
}
}
return "Unknown error occurred";
};
type TChefPkiSyncFactoryDeps = {
certificateDAL: Pick<TCertificateDALFactory, "findById">;
certificateSyncDAL: Pick<
TCertificateSyncDALFactory,
| "removeCertificates"
| "addCertificates"
| "findByPkiSyncAndCertificate"
| "updateById"
| "findByPkiSyncId"
| "updateSyncStatus"
>;
};
export const chefPkiSyncFactory = ({ certificateDAL, certificateSyncDAL }: TChefPkiSyncFactoryDeps) => {
const $getChefDataBagItems = async (
pkiSync: TChefPkiSyncWithCredentials,
syncId = "unknown"
): Promise<Record<string, boolean>> => {
const {
connection,
destinationConfig: { dataBagName }
} = pkiSync;
const { serverUrl, userName, privateKey, orgName } = connection.credentials;
const dataBagItems = await withRateLimitRetry(
() =>
listChefDataBagItems(
{
credentials: { serverUrl, userName, privateKey, orgName }
} as Parameters<typeof listChefDataBagItems>[0],
dataBagName
),
{
operation: "list-chef-data-bag-items",
syncId
}
);
const chefDataBagItems: Record<string, boolean> = {};
dataBagItems.forEach((item) => {
chefDataBagItems[item.name] = true;
});
return chefDataBagItems;
};
const syncCertificates = async (
pkiSync: TPkiSyncWithCredentials,
certificateMap: TCertificateMap
): Promise<SyncCertificatesResult> => {
const chefPkiSync = pkiSync as unknown as TChefPkiSyncWithCredentials;
const {
connection,
destinationConfig: { dataBagName }
} = chefPkiSync;
const { serverUrl, userName, privateKey, orgName } = connection.credentials;
const chefDataBagItems = await $getChefDataBagItems(chefPkiSync, pkiSync.id);
const existingSyncRecords = await certificateSyncDAL.findByPkiSyncId(pkiSync.id);
const syncRecordsByCertId = new Map<string, TCertificateSyncs>();
const syncRecordsByExternalId = new Map<string, TCertificateSyncs>();
existingSyncRecords.forEach((record: TCertificateSyncs) => {
if (record.certificateId) {
syncRecordsByCertId.set(record.certificateId, record);
}
if (record.externalIdentifier) {
syncRecordsByExternalId.set(record.externalIdentifier, record);
}
});
type CertificateUploadData = {
key: string;
name: string;
cert: string;
privateKey: string;
certificateChain?: string;
caCertificate?: string;
certificateId: string;
isUpdate: boolean;
targetItemName: string;
oldCertificateIdToRemove?: string;
};
const setCertificates: CertificateUploadData[] = [];
const validationErrors: Array<{ name: string; error: string }> = [];
const syncOptions = pkiSync.syncOptions as
| {
canRemoveCertificates?: boolean;
preserveItemOnRenewal?: boolean;
fieldMappings?: {
certificate?: string;
privateKey?: string;
certificateChain?: string;
caCertificate?: string;
metadata?: string;
};
}
| undefined;
const canRemoveCertificates = syncOptions?.canRemoveCertificates ?? true;
const preserveItemOnRenewal = syncOptions?.preserveItemOnRenewal ?? true;
const fieldMappings = {
certificate: syncOptions?.fieldMappings?.certificate ?? "certificate",
privateKey: syncOptions?.fieldMappings?.privateKey ?? "private_key",
certificateChain: syncOptions?.fieldMappings?.certificateChain ?? "certificate_chain",
caCertificate: syncOptions?.fieldMappings?.caCertificate ?? "ca_certificate"
};
const activeExternalIdentifiers = new Set<string>();
for (const [certName, certData] of Object.entries(certificateMap)) {
const { cert, privateKey: certPrivateKey, certificateChain, caCertificate, certificateId } = certData;
if (!cert || cert.trim().length === 0) {
validationErrors.push({
name: certName,
error: "Certificate content is empty or missing"
});
continue;
}
if (!certPrivateKey || certPrivateKey.trim().length === 0) {
validationErrors.push({
name: certName,
error: "Private key content is empty or missing"
});
continue;
}
if (!certificateId || typeof certificateId !== "string") {
continue;
}
const targetCertificateName = certName;
const certificate = await certificateDAL.findById(certificateId);
if (certificate?.renewedByCertificateId) {
continue;
}
const syncRecordLookupId = certificate?.renewedFromCertificateId || certificateId;
const existingSyncRecord = syncRecordsByCertId.get(syncRecordLookupId);
let shouldProcess = true;
let isUpdate = false;
let targetItemName = targetCertificateName;
if (existingSyncRecord?.externalIdentifier) {
const existingChefItem = chefDataBagItems[existingSyncRecord.externalIdentifier];
if (existingChefItem) {
if (certificate?.renewedFromCertificateId && preserveItemOnRenewal) {
targetItemName = existingSyncRecord.externalIdentifier;
isUpdate = true;
} else if (!certificate?.renewedFromCertificateId) {
shouldProcess = false;
}
}
}
if (!shouldProcess) {
continue;
}
setCertificates.push({
key: certName,
name: certName,
cert,
privateKey: certPrivateKey,
certificateChain,
caCertificate,
certificateId,
isUpdate,
targetItemName,
oldCertificateIdToRemove:
certificate?.renewedFromCertificateId && preserveItemOnRenewal
? certificate.renewedFromCertificateId
: undefined
});
activeExternalIdentifiers.add(targetItemName);
}
type UploadResult =
| { status: "fulfilled"; certificate: CertificateUploadData }
| { status: "rejected"; certificate: CertificateUploadData; error: unknown };
const uploadPromises = setCertificates.map(async (certificateData): Promise<UploadResult> => {
const {
targetItemName,
cert,
privateKey: certPrivateKey,
certificateChain,
caCertificate,
certificateId
} = certificateData;
try {
const chefDataBagItem: ChefCertificateDataBagItem = {
id: targetItemName,
[fieldMappings.certificate]: cert,
[fieldMappings.privateKey]: certPrivateKey,
...(certificateChain && { [fieldMappings.certificateChain]: certificateChain }),
...(caCertificate && { [fieldMappings.caCertificate]: caCertificate })
};
const itemExists = chefDataBagItems[targetItemName] === true;
if (itemExists) {
await withRateLimitRetry(
() =>
updateChefDataBagItem({
serverUrl,
userName,
privateKey,
orgName,
dataBagName,
dataBagItemName: targetItemName,
data: chefDataBagItem as unknown as TChefDataBagItemContent
}),
{
operation: "update-chef-data-bag-item",
syncId: pkiSync.id
}
);
} else {
await withRateLimitRetry(
() =>
createChefDataBagItem({
serverUrl,
userName,
privateKey,
orgName,
dataBagName,
data: chefDataBagItem as unknown as TChefDataBagItemContent
}),
{
operation: "create-chef-data-bag-item",
syncId: pkiSync.id
}
);
}
return { status: "fulfilled" as const, certificate: certificateData };
} catch (error) {
logger.error(
{
syncId: pkiSync.id,
certificateId,
targetItemName,
error: error instanceof Error ? error.message : String(error)
},
"Failed to sync certificate to Chef"
);
return { status: "rejected" as const, certificate: certificateData, error };
}
});
const uploadResults = await Promise.allSettled(uploadPromises);
const successfulUploads = uploadResults.filter(
(result): result is PromiseFulfilledResult<UploadResult> =>
result.status === "fulfilled" && result.value.status === "fulfilled"
);
const failedUploads = uploadResults.filter(
(
result
): result is
| PromiseRejectedResult
| PromiseFulfilledResult<{ status: "rejected"; certificate: CertificateUploadData; error: unknown }> =>
result.status === "rejected" || (result.status === "fulfilled" && result.value.status === "rejected")
);
let removedCount = 0;
let failedRemovals: Array<{ name: string; error: string }> = [];
if (canRemoveCertificates) {
const itemsToRemove: string[] = [];
Object.keys(chefDataBagItems).forEach((itemName) => {
if (!activeExternalIdentifiers.has(itemName) && isInfisicalManagedCertificate(itemName, pkiSync)) {
itemsToRemove.push(itemName);
}
});
if (itemsToRemove.length > 0) {
const removalPromises = itemsToRemove.map(async (itemName) => {
try {
await withRateLimitRetry(
() =>
removeChefDataBagItem({
serverUrl,
userName,
privateKey,
orgName,
dataBagName,
dataBagItemName: itemName
}),
{
operation: "remove-chef-data-bag-item",
syncId: pkiSync.id
}
);
const syncRecord = syncRecordsByExternalId.get(itemName);
if (syncRecord?.certificateId) {
await certificateSyncDAL.removeCertificates(pkiSync.id, [syncRecord.certificateId]);
}
return { status: "fulfilled" as const, itemName };
} catch (error) {
logger.error(
{
syncId: pkiSync.id,
itemName,
error: error instanceof Error ? error.message : String(error)
},
"Failed to remove Chef data bag item"
);
return { status: "rejected" as const, itemName, error };
}
});
const removalResults = await Promise.allSettled(removalPromises);
const successfulRemovals = removalResults.filter(
(result): result is PromiseFulfilledResult<{ status: "fulfilled"; itemName: string }> =>
result.status === "fulfilled" && result.value.status === "fulfilled"
);
removedCount = successfulRemovals.length;
const failedRemovalPromises = removalResults.filter(
(
result
): result is
| PromiseRejectedResult
| PromiseFulfilledResult<{ status: "rejected"; itemName: string; error: unknown }> =>
result.status === "rejected" || (result.status === "fulfilled" && result.value.status === "rejected")
);
failedRemovals = failedRemovalPromises.map((result) => {
if (result.status === "rejected") {
return {
name: "unknown",
error: parseErrorMessage(result.reason)
};
}
const { itemName, error } = result.value;
return {
name: String(itemName),
error: parseErrorMessage(error)
};
});
}
}
for (const result of successfulUploads) {
const { certificateId, targetItemName, oldCertificateIdToRemove } = result.value.certificate;
if (certificateId && typeof certificateId === "string") {
const existingCertSync = await certificateSyncDAL.findByPkiSyncAndCertificate(pkiSync.id, certificateId);
if (existingCertSync) {
await certificateSyncDAL.updateById(existingCertSync.id, {
externalIdentifier: targetItemName,
syncStatus: CertificateSyncStatus.Succeeded,
lastSyncedAt: new Date(),
lastSyncMessage: "Certificate successfully synced to destination"
});
} else {
await certificateSyncDAL.addCertificates(pkiSync.id, [
{
certificateId,
externalIdentifier: targetItemName
}
]);
const newCertSync = await certificateSyncDAL.findByPkiSyncAndCertificate(pkiSync.id, certificateId);
if (newCertSync) {
await certificateSyncDAL.updateById(newCertSync.id, {
syncStatus: CertificateSyncStatus.Succeeded,
lastSyncedAt: new Date(),
lastSyncMessage: "Certificate successfully synced to destination"
});
}
}
if (oldCertificateIdToRemove) {
await certificateSyncDAL.removeCertificates(pkiSync.id, [oldCertificateIdToRemove]);
}
}
}
await Promise.all(
    failedUploads.map(async (result) => {
      // Rejected promise results carry no certificate context, so only failures
      // caught inside the upload handler (fulfilled with status "rejected") can
      // be written back to the certificate sync record.
      if (result.status === "rejected") {
        return;
      }
      const { certificate, error } = result.value;
      const { certificateId } = certificate;
      const errorMessage = error instanceof Error ? error.message : String(error);
      const existingSyncRecord = syncRecordsByCertId.get(certificateId);
      if (existingSyncRecord) {
        await certificateSyncDAL.updateSyncStatus(
          pkiSync.id,
          certificateId,
          CertificateSyncStatus.Failed,
          errorMessage
        );
      }
    })
);
return {
uploaded: successfulUploads.filter((result) => !result.value.certificate.isUpdate).length,
updated: successfulUploads.filter((result) => result.value.certificate.isUpdate).length,
removed: removedCount,
failedRemovals: failedRemovals.length,
skipped: validationErrors.length,
details: {
failedUploads: failedUploads.map((result) => {
if (result.status === "rejected") {
return {
name: "unknown",
error: result.reason instanceof Error ? result.reason.message : String(result.reason)
};
}
const { certificate, error } = result.value;
return {
name: certificate.name,
error: error instanceof Error ? error.message : String(error)
};
}),
failedRemovals,
validationErrors
}
};
};
const importCertificates = async (): Promise<SyncCertificatesResult> => {
throw new Error("Chef PKI Sync does not support importing certificates from Chef data bags");
};
const removeCertificates = async (
sync: TPkiSyncWithCredentials,
certificateNames: string[],
deps?: { certificateSyncDAL?: TCertificateSyncDALFactory; certificateMap?: TCertificateMap }
): Promise<void> => {
const chefPkiSync = sync as unknown as TChefPkiSyncWithCredentials;
const {
connection,
destinationConfig: { dataBagName }
} = chefPkiSync;
const { serverUrl, userName, privateKey, orgName } = connection.credentials;
const existingSyncRecords = await certificateSyncDAL.findByPkiSyncId(sync.id);
const certificateIdsToRemove: string[] = [];
const itemsToRemove: string[] = [];
for (const certName of certificateNames) {
const certificateData = deps?.certificateMap?.[certName];
if (certificateData?.certificateId && typeof certificateData.certificateId === "string") {
const syncRecord = existingSyncRecords.find((record) => record.certificateId === certificateData.certificateId);
if (syncRecord) {
certificateIdsToRemove.push(certificateData.certificateId);
if (syncRecord.externalIdentifier) {
itemsToRemove.push(syncRecord.externalIdentifier);
}
}
} else {
const targetName = certName;
const syncRecord = existingSyncRecords.find((record) => record.externalIdentifier === targetName);
if (syncRecord && syncRecord.certificateId) {
certificateIdsToRemove.push(syncRecord.certificateId);
itemsToRemove.push(targetName);
}
}
}
const removalPromises = itemsToRemove.map(async (itemName) => {
try {
await withRateLimitRetry(
() =>
removeChefDataBagItem({
serverUrl,
userName,
privateKey,
orgName,
dataBagName,
dataBagItemName: itemName
}),
{
operation: "remove-chef-data-bag-item",
syncId: sync.id
}
);
} catch (error) {
logger.error(
{
syncId: sync.id,
itemName,
error: error instanceof Error ? error.message : String(error)
},
"Failed to remove Chef data bag item during certificate removal"
);
}
});
await Promise.allSettled(removalPromises);
if (certificateIdsToRemove.length > 0) {
await certificateSyncDAL.removeCertificates(sync.id, certificateIdsToRemove);
}
};
return {
syncCertificates,
importCertificates,
removeCertificates
};
};
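// A minimal usage sketch (editor annotation, not part of this diff): how the
// factory above might be wired up. `certificateDAL`, `certificateSyncDAL`,
// `chefSync`, and `certificateMap` are hypothetical values supplied by the caller.
//
//   const chefPkiSync = chefPkiSyncFactory({ certificateDAL, certificateSyncDAL });
//   const result = await chefPkiSync.syncCertificates(chefSync, certificateMap);
//   // result.uploaded / result.updated / result.removed summarize the run
//   await chefPkiSync.removeCertificates(chefSync, ["infisical-cert-abc123"]);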


@@ -0,0 +1,10 @@
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import { PkiSync } from "@app/services/pki-sync/pki-sync-enums";
export const CHEF_PKI_SYNC_LIST_OPTION = {
name: "Chef" as const,
connection: AppConnection.Chef,
destination: PkiSync.Chef,
canImportCertificates: false,
canRemoveCertificates: true
} as const;


@@ -0,0 +1,113 @@
import RE2 from "re2";
import { z } from "zod";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import { PkiSync } from "@app/services/pki-sync/pki-sync-enums";
import { PkiSyncSchema } from "@app/services/pki-sync/pki-sync-schemas";
import { CHEF_PKI_SYNC_CERTIFICATE_NAMING, CHEF_PKI_SYNC_DATA_BAG_NAMING } from "./chef-pki-sync-constants";
export const ChefPkiSyncConfigSchema = z.object({
dataBagName: z
.string()
.trim()
.min(1, "Data bag name required")
.max(255, "Data bag name cannot exceed 255 characters")
.refine(
(name) => CHEF_PKI_SYNC_DATA_BAG_NAMING.NAME_PATTERN.test(name),
"Data bag name can only contain alphanumeric characters, underscores, and hyphens"
)
});
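// Sketch of what the refine above accepts, assuming NAME_PATTERN permits only
// alphanumerics, underscores, and hyphens (per the error message):
//
//   ChefPkiSyncConfigSchema.parse({ dataBagName: "infisical-certs" }); // passes
//   ChefPkiSyncConfigSchema.parse({ dataBagName: "my certs!" });       // throws ZodError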
const ChefFieldMappingsSchema = z.object({
certificate: z.string().min(1, "Certificate field name is required").default("certificate"),
privateKey: z.string().min(1, "Private key field name is required").default("private_key"),
certificateChain: z.string().min(1, "Certificate chain field name is required").default("certificate_chain"),
caCertificate: z.string().min(1, "CA certificate field name is required").default("ca_certificate")
});
const ChefPkiSyncOptionsSchema = z.object({
canImportCertificates: z.boolean().default(false),
canRemoveCertificates: z.boolean().default(true),
includeRootCa: z.boolean().default(false),
preserveItemOnRenewal: z.boolean().default(true),
updateExistingCertificates: z.boolean().default(true),
certificateNameSchema: z
.string()
.optional()
.refine(
(schema) => {
if (!schema) return true;
if (!schema.includes("{{certificateId}}")) {
return false;
}
const testName = schema
.replace(new RE2("\\{\\{certificateId\\}\\}", "g"), "test-cert-id")
.replace(new RE2("\\{\\{profileId\\}\\}", "g"), "test-profile-id")
.replace(new RE2("\\{\\{commonName\\}\\}", "g"), "test-common-name")
.replace(new RE2("\\{\\{friendlyName\\}\\}", "g"), "test-friendly-name")
.replace(new RE2("\\{\\{environment\\}\\}", "g"), "test-env");
const hasForbiddenChars = CHEF_PKI_SYNC_CERTIFICATE_NAMING.FORBIDDEN_CHARACTERS.split("").some((char) =>
testName.includes(char)
);
return (
CHEF_PKI_SYNC_CERTIFICATE_NAMING.NAME_PATTERN.test(testName) &&
!hasForbiddenChars &&
testName.length >= CHEF_PKI_SYNC_CERTIFICATE_NAMING.MIN_LENGTH &&
testName.length <= CHEF_PKI_SYNC_CERTIFICATE_NAMING.MAX_LENGTH
);
},
{
message:
"Certificate item name schema must include {{certificateId}} placeholder and result in names that contain only alphanumeric characters, underscores, and hyphens and be 1-255 characters long for Chef data bag items."
}
),
fieldMappings: ChefFieldMappingsSchema.optional().default({
certificate: "certificate",
privateKey: "private_key",
certificateChain: "certificate_chain",
caCertificate: "ca_certificate"
})
});
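// Examples for the certificateNameSchema refine above: the template must contain
// {{certificateId}} and must expand (after placeholder substitution) to a valid
// data bag item name.
//
//   "cert-{{certificateId}}"            // valid
//   "{{commonName}}-{{certificateId}}"  // valid
//   "{{commonName}}"                    // rejected: missing {{certificateId}}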
export const ChefPkiSyncSchema = PkiSyncSchema.extend({
destination: z.literal(PkiSync.Chef),
destinationConfig: ChefPkiSyncConfigSchema,
syncOptions: ChefPkiSyncOptionsSchema
});
export const CreateChefPkiSyncSchema = z.object({
name: z.string().trim().min(1).max(64),
description: z.string().optional(),
isAutoSyncEnabled: z.boolean().default(true),
destinationConfig: ChefPkiSyncConfigSchema,
syncOptions: ChefPkiSyncOptionsSchema.optional().default({}),
subscriberId: z.string().nullish(),
connectionId: z.string(),
projectId: z.string().trim().min(1),
certificateIds: z.array(z.string().uuid()).optional()
});
export const UpdateChefPkiSyncSchema = z.object({
name: z.string().trim().min(1).max(64).optional(),
description: z.string().optional(),
isAutoSyncEnabled: z.boolean().optional(),
destinationConfig: ChefPkiSyncConfigSchema.optional(),
syncOptions: ChefPkiSyncOptionsSchema.optional(),
subscriberId: z.string().nullish(),
connectionId: z.string().optional()
});
export const ChefPkiSyncListItemSchema = z.object({
name: z.literal("Chef"),
connection: z.literal(AppConnection.Chef),
destination: z.literal(PkiSync.Chef),
canImportCertificates: z.literal(false),
canRemoveCertificates: z.literal(true)
});
export { ChefFieldMappingsSchema };
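// A minimal create payload accepted by CreateChefPkiSyncSchema (sketch; the id
// values are hypothetical). Omitted fields fall back to their schema defaults,
// e.g. isAutoSyncEnabled: true and the default syncOptions/fieldMappings.
//
//   CreateChefPkiSyncSchema.parse({
//     name: "chef-prod-sync",
//     destinationConfig: { dataBagName: "infisical-certs" },
//     connectionId: "app-connection-id",
//     projectId: "project-id"
//   });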


@@ -0,0 +1,59 @@
import { z } from "zod";
import { TChefConnection } from "@app/ee/services/app-connections/chef/chef-connection-types";
import {
ChefFieldMappingsSchema,
ChefPkiSyncConfigSchema,
ChefPkiSyncSchema,
CreateChefPkiSyncSchema,
UpdateChefPkiSyncSchema
} from "./chef-pki-sync-schemas";
export type TChefPkiSyncConfig = z.infer<typeof ChefPkiSyncConfigSchema>;
export type TChefFieldMappings = z.infer<typeof ChefFieldMappingsSchema>;
export type TChefPkiSync = z.infer<typeof ChefPkiSyncSchema>;
export type TChefPkiSyncInput = z.infer<typeof CreateChefPkiSyncSchema>;
export type TChefPkiSyncUpdate = z.infer<typeof UpdateChefPkiSyncSchema>;
export type TChefPkiSyncWithCredentials = TChefPkiSync & {
connection: TChefConnection;
};
export interface ChefCertificateDataBagItem {
id: string;
[key: string]: string;
}
export interface SyncCertificatesResult {
uploaded: number;
updated: number;
removed: number;
failedRemovals: number;
skipped: number;
details?: {
failedUploads?: Array<{ name: string; error: string }>;
failedRemovals?: Array<{ name: string; error: string }>;
validationErrors?: Array<{ name: string; error: string }>;
};
}
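// Shape sketch: a run that uploaded two certificates, updated one, and skipped
// one invalid name might produce
//
//   { uploaded: 2, updated: 1, removed: 0, failedRemovals: 0, skipped: 1,
//     details: { validationErrors: [{ name: "bad name!", error: "invalid characters" }] } }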
export interface RemoveCertificatesResult {
removed: number;
failed: number;
skipped: number;
}
export interface CertificateImportRequest {
id: string;
name: string;
certificate: string;
privateKey: string;
certificateChain?: string;
alternativeNames?: string[];
certificateId?: string;
}


@@ -0,0 +1,4 @@
export * from "./chef-pki-sync-constants";
export * from "./chef-pki-sync-fns";
export * from "./chef-pki-sync-schemas";
export * from "./chef-pki-sync-types";


@@ -1,6 +1,8 @@
export enum PkiSync {
AzureKeyVault = "azure-key-vault",
AwsCertificateManager = "aws-certificate-manager"
AwsCertificateManager = "aws-certificate-manager",
AwsSecretsManager = "aws-secrets-manager",
Chef = "chef"
}
export enum PkiSyncStatus {


@@ -10,8 +10,12 @@ import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { AWS_CERTIFICATE_MANAGER_PKI_SYNC_LIST_OPTION } from "./aws-certificate-manager/aws-certificate-manager-pki-sync-constants";
import { awsCertificateManagerPkiSyncFactory } from "./aws-certificate-manager/aws-certificate-manager-pki-sync-fns";
import { AWS_SECRETS_MANAGER_PKI_SYNC_LIST_OPTION } from "./aws-secrets-manager/aws-secrets-manager-pki-sync-constants";
import { awsSecretsManagerPkiSyncFactory } from "./aws-secrets-manager/aws-secrets-manager-pki-sync-fns";
import { AZURE_KEY_VAULT_PKI_SYNC_LIST_OPTION } from "./azure-key-vault/azure-key-vault-pki-sync-constants";
import { azureKeyVaultPkiSyncFactory } from "./azure-key-vault/azure-key-vault-pki-sync-fns";
import { chefPkiSyncFactory } from "./chef/chef-pki-sync-fns";
import { CHEF_PKI_SYNC_LIST_OPTION } from "./chef/chef-pki-sync-list-constants";
import { PkiSync } from "./pki-sync-enums";
import { TCertificateMap, TPkiSyncWithCredentials } from "./pki-sync-types";
@@ -19,7 +23,9 @@ const ENTERPRISE_PKI_SYNCS: PkiSync[] = [];
const PKI_SYNC_LIST_OPTIONS = {
[PkiSync.AzureKeyVault]: AZURE_KEY_VAULT_PKI_SYNC_LIST_OPTION,
-  [PkiSync.AwsCertificateManager]: AWS_CERTIFICATE_MANAGER_PKI_SYNC_LIST_OPTION
+  [PkiSync.AwsCertificateManager]: AWS_CERTIFICATE_MANAGER_PKI_SYNC_LIST_OPTION,
+  [PkiSync.AwsSecretsManager]: AWS_SECRETS_MANAGER_PKI_SYNC_LIST_OPTION,
+  [PkiSync.Chef]: CHEF_PKI_SYNC_LIST_OPTION
};
export const enterprisePkiSyncCheck = async (
@@ -162,6 +168,8 @@ export const PkiSyncFns = {
dependencies: {
appConnectionDAL: Pick<TAppConnectionDALFactory, "findById" | "updateById">;
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
certificateDAL: TCertificateDALFactory;
certificateSyncDAL: TCertificateSyncDALFactory;
}
): Promise<TCertificateMap> => {
switch (pkiSync.destination) {
@@ -175,6 +183,14 @@ export const PkiSyncFns = {
"AWS Certificate Manager does not support importing certificates into Infisical (private keys cannot be extracted)"
);
}
case PkiSync.AwsSecretsManager: {
throw new Error("AWS Secrets Manager does not support importing certificates into Infisical");
}
case PkiSync.Chef: {
throw new Error(
"Chef does not support importing certificates into Infisical (private keys cannot be extracted securely)"
);
}
default:
throw new Error(`Unsupported PKI sync destination: ${String(pkiSync.destination)}`);
}
@@ -203,7 +219,7 @@ export const PkiSyncFns = {
}> => {
switch (pkiSync.destination) {
case PkiSync.AzureKeyVault: {
-      checkPkiSyncDestination(pkiSync, PkiSync.AzureKeyVault);
+      checkPkiSyncDestination(pkiSync, PkiSync.AzureKeyVault as PkiSync);
const azureKeyVaultPkiSync = azureKeyVaultPkiSyncFactory({
appConnectionDAL: dependencies.appConnectionDAL,
kmsService: dependencies.kmsService,
@@ -213,7 +229,7 @@ export const PkiSyncFns = {
return azureKeyVaultPkiSync.syncCertificates(pkiSync, certificateMap);
}
case PkiSync.AwsCertificateManager: {
-      checkPkiSyncDestination(pkiSync, PkiSync.AwsCertificateManager);
+      checkPkiSyncDestination(pkiSync, PkiSync.AwsCertificateManager as PkiSync);
const awsCertificateManagerPkiSync = awsCertificateManagerPkiSyncFactory({
appConnectionDAL: dependencies.appConnectionDAL,
kmsService: dependencies.kmsService,
@@ -222,6 +238,22 @@ export const PkiSyncFns = {
});
return awsCertificateManagerPkiSync.syncCertificates(pkiSync, certificateMap);
}
case PkiSync.AwsSecretsManager: {
checkPkiSyncDestination(pkiSync, PkiSync.AwsSecretsManager as PkiSync);
const awsSecretsManagerPkiSync = awsSecretsManagerPkiSyncFactory({
certificateDAL: dependencies.certificateDAL,
certificateSyncDAL: dependencies.certificateSyncDAL
});
return awsSecretsManagerPkiSync.syncCertificates(pkiSync, certificateMap);
}
case PkiSync.Chef: {
checkPkiSyncDestination(pkiSync, PkiSync.Chef as PkiSync);
const chefPkiSync = chefPkiSyncFactory({
certificateDAL: dependencies.certificateDAL,
certificateSyncDAL: dependencies.certificateSyncDAL
});
return chefPkiSync.syncCertificates(pkiSync, certificateMap);
}
default:
throw new Error(`Unsupported PKI sync destination: ${String(pkiSync.destination)}`);
}
@@ -240,7 +272,7 @@ export const PkiSyncFns = {
): Promise<void> => {
switch (pkiSync.destination) {
case PkiSync.AzureKeyVault: {
-      checkPkiSyncDestination(pkiSync, PkiSync.AzureKeyVault);
+      checkPkiSyncDestination(pkiSync, PkiSync.AzureKeyVault as PkiSync);
const azureKeyVaultPkiSync = azureKeyVaultPkiSyncFactory({
appConnectionDAL: dependencies.appConnectionDAL,
kmsService: dependencies.kmsService,
@@ -254,7 +286,7 @@ export const PkiSyncFns = {
break;
}
case PkiSync.AwsCertificateManager: {
-      checkPkiSyncDestination(pkiSync, PkiSync.AwsCertificateManager);
+      checkPkiSyncDestination(pkiSync, PkiSync.AwsCertificateManager as PkiSync);
const awsCertificateManagerPkiSync = awsCertificateManagerPkiSyncFactory({
appConnectionDAL: dependencies.appConnectionDAL,
kmsService: dependencies.kmsService,
@@ -267,6 +299,27 @@ export const PkiSyncFns = {
});
break;
}
case PkiSync.AwsSecretsManager: {
checkPkiSyncDestination(pkiSync, PkiSync.AwsSecretsManager as PkiSync);
const awsSecretsManagerPkiSync = awsSecretsManagerPkiSyncFactory({
certificateDAL: dependencies.certificateDAL,
certificateSyncDAL: dependencies.certificateSyncDAL
});
await awsSecretsManagerPkiSync.removeCertificates(pkiSync, dependencies.certificateMap);
break;
}
case PkiSync.Chef: {
checkPkiSyncDestination(pkiSync, PkiSync.Chef as PkiSync);
const chefPkiSync = chefPkiSyncFactory({
certificateDAL: dependencies.certificateDAL,
certificateSyncDAL: dependencies.certificateSyncDAL
});
await chefPkiSync.removeCertificates(pkiSync, certificateNames, {
certificateSyncDAL: dependencies.certificateSyncDAL,
certificateMap: dependencies.certificateMap
});
break;
}
default:
throw new Error(`Unsupported PKI sync destination: ${String(pkiSync.destination)}`);
}


@@ -4,10 +4,14 @@ import { PkiSync } from "./pki-sync-enums";
export const PKI_SYNC_NAME_MAP: Record<PkiSync, string> = {
[PkiSync.AzureKeyVault]: "Azure Key Vault",
[PkiSync.AwsCertificateManager]: "AWS Certificate Manager"
[PkiSync.AwsCertificateManager]: "AWS Certificate Manager",
[PkiSync.AwsSecretsManager]: "AWS Secrets Manager",
[PkiSync.Chef]: "Chef"
};
export const PKI_SYNC_CONNECTION_MAP: Record<PkiSync, AppConnection> = {
[PkiSync.AzureKeyVault]: AppConnection.AzureKeyVault,
-  [PkiSync.AwsCertificateManager]: AppConnection.AWS
+  [PkiSync.AwsCertificateManager]: AppConnection.AWS,
+  [PkiSync.AwsSecretsManager]: AppConnection.AWS,
+  [PkiSync.Chef]: AppConnection.Chef
};


@@ -26,6 +26,7 @@ import { TCertificateSecretDALFactory } from "../certificate/certificate-secret-
import { TCertificateAuthorityCertDALFactory } from "../certificate-authority/certificate-authority-cert-dal";
import { TCertificateAuthorityDALFactory } from "../certificate-authority/certificate-authority-dal";
import { getCaCertChain } from "../certificate-authority/certificate-authority-fns";
import { extractRootCaFromChain, removeRootCaFromChain } from "../certificate-common/certificate-utils";
import { TCertificateSyncDALFactory } from "../certificate-sync/certificate-sync-dal";
import { CertificateSyncStatus } from "../certificate-sync/certificate-sync-enums";
import { TPkiSyncDALFactory } from "./pki-sync-dal";
@@ -180,11 +181,16 @@ export const pkiSyncQueueFactory = ({
(cert, index, self) => self.findIndex((c) => c.id === cert.id) === index
);
-    if (uniqueCertificates.length === 0) {
+    const activeCertificates = uniqueCertificates.filter((cert) => {
+      const typedCert = cert as TCertificates;
+      return !typedCert.renewedByCertificateId;
+    });
+    if (activeCertificates.length === 0) {
      return { certificateMap, certificateMetadata };
    }
-    certificates = uniqueCertificates;
+    certificates = activeCertificates;
for (const certificate of certificates) {
const cert = certificate as TCertificates;
@@ -231,13 +237,15 @@ export const pkiSyncQueueFactory = ({
}
let certificateChain: string | undefined;
+    let caCertificate: string | undefined;
try {
if (certBody.encryptedCertificateChain) {
const decryptedCertChain = await kmsDecryptor({
cipherTextBlob: certBody.encryptedCertificateChain
});
certificateChain = decryptedCertChain.toString();
-      } else if (certificate.caCertId) {
+      }
+      if (certificate.caCertId) {
const { caCert, caCertChain } = await getCaCertChain({
caCertId: certificate.caCertId,
certificateAuthorityDAL,
@@ -245,7 +253,10 @@ export const pkiSyncQueueFactory = ({
projectDAL,
kmsService
});
-        certificateChain = `${caCert}\n${caCertChain}`.trim();
+        if (!certBody.encryptedCertificateChain) {
+          certificateChain = `${caCert}\n${caCertChain}`.trim();
+        }
+        caCertificate = certificateChain ? extractRootCaFromChain(certificateChain) : caCert;
}
} catch (chainError) {
logger.warn(
@@ -254,10 +265,16 @@ export const pkiSyncQueueFactory = ({
);
// Continue without certificate chain
certificateChain = undefined;
+      caCertificate = undefined;
}
let certificateName: string;
-    const syncOptions = pkiSync.syncOptions as { certificateNameSchema?: string } | undefined;
+    const syncOptions = pkiSync.syncOptions as
+      | {
+          certificateNameSchema?: string;
+          includeRootCa?: boolean;
+        }
+      | undefined;
const certificateNameSchema = syncOptions?.certificateNameSchema;
if (certificateNameSchema) {
@@ -289,10 +306,16 @@ export const pkiSyncQueueFactory = ({
alternativeNames.push(originalLegacyName);
}
+    let processedCertificateChain = certificateChain;
+    if (certificateChain && syncOptions?.includeRootCa === false) {
+      processedCertificateChain = removeRootCaFromChain(certificateChain);
+    }
certificateMap[certificateName] = {
cert: certificatePem,
privateKey: certPrivateKey || "",
-      certificateChain,
+      certificateChain: processedCertificateChain,
+      caCertificate,
alternativeNames,
certificateId: certificate.id
};


@@ -7,6 +7,7 @@ import { PkiSync } from "./pki-sync-enums";
export const PkiSyncOptionsSchema = z.object({
canImportCertificates: z.boolean(),
canRemoveCertificates: z.boolean().optional(),
includeRootCa: z.boolean().optional().default(false),
certificateNameSchema: z
.string()
.optional()


@@ -73,7 +73,14 @@ export type TPkiSyncListItem = TPkiSync & {
export type TCertificateMap = Record<
string,
-  { cert: string; privateKey: string; certificateChain?: string; alternativeNames?: string[]; certificateId?: string }
+  {
+    cert: string;
+    privateKey: string;
+    certificateChain?: string;
+    caCertificate?: string;
+    alternativeNames?: string[];
+    certificateId?: string;
+  }
>;
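// Example entry (editor annotation): the map is keyed by the rendered certificate
// name, with values matching the widened shape above; PEM bodies are elided here.
//
//   const certificateMap: TCertificateMap = {
//     "cert-abc123": {
//       cert: "-----BEGIN CERTIFICATE-----...",
//       privateKey: "-----BEGIN PRIVATE KEY-----...",
//       certificateChain: "-----BEGIN CERTIFICATE-----...",
//       caCertificate: "-----BEGIN CERTIFICATE-----...",
//       certificateId: "abc123"
//     }
//   };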
export type TCreatePkiSyncDTO = {

Some files were not shown because too many files have changed in this diff.