diff --git a/.gitignore b/.gitignore index b4e9a07c2f..0ad950da30 100644 --- a/.gitignore +++ b/.gitignore @@ -74,3 +74,4 @@ cli/test/infisical-merge backend/bdd/.bdd-infisical-bootstrap-result.json /npm/bin +__pycache__ diff --git a/.infisicalignore b/.infisicalignore index ec1cbfe167..7a07a95045 100644 --- a/.infisicalignore +++ b/.infisicalignore @@ -54,4 +54,6 @@ k8-operator/config/samples/universalAuthIdentitySecret.yaml:generic-api-key:8 docs/integrations/app-connections/redis.mdx:generic-api-key:80 backend/src/ee/services/app-connections/chef/chef-connection-fns.ts:private-key:42 docs/documentation/platform/pki/enrollment-methods/api.mdx:generic-api-key:93 -docs/documentation/platform/pki/enrollment-methods/api.mdx:private-key:139 \ No newline at end of file +docs/documentation/platform/pki/enrollment-methods/api.mdx:private-key:139 +docs/documentation/platform/pki/certificate-syncs/aws-secrets-manager.mdx:private-key:62 +docs/documentation/platform/pki/certificate-syncs/chef.mdx:private-key:61 diff --git a/backend/bdd/features/environment.py b/backend/bdd/features/environment.py index 9a2e9f90b0..976998c72b 100644 --- a/backend/bdd/features/environment.py +++ b/backend/bdd/features/environment.py @@ -3,6 +3,7 @@ import os import pathlib import typing +from copy import deepcopy import httpx from behave.runner import Context @@ -185,28 +186,33 @@ def bootstrap_infisical(context: Context): def before_all(context: Context): + base_vars = { + "BASE_URL": BASE_URL, + "PEBBLE_URL": PEBBLE_URL, + } if BOOTSTRAP_INFISICAL: details = bootstrap_infisical(context) - context.vars = { - "BASE_URL": BASE_URL, - "PEBBLE_URL": PEBBLE_URL, + vars = base_vars | { "PROJECT_ID": details["project"]["id"], "CERT_CA_ID": details["ca"]["id"], "CERT_TEMPLATE_ID": details["cert_template"]["id"], "AUTH_TOKEN": details["auth_token"], } else: - context.vars = { - "BASE_URL": BASE_URL, - "PEBBLE_URL": PEBBLE_URL, + vars = base_vars | { "PROJECT_ID": PROJECT_ID, "CERT_CA_ID": CERT_CA_ID, 
"CERT_TEMPLATE_ID": CERT_TEMPLATE_ID, "AUTH_TOKEN": AUTH_TOKEN, } + context._initial_vars = vars context.http_client = httpx.Client(base_url=BASE_URL) +def before_scenario(context: Context, scenario: typing.Any): + context.vars = deepcopy(context._initial_vars) + + def after_scenario(context: Context, scenario: typing.Any): if hasattr(context, "web_server"): context.web_server.shutdown_and_server_close() diff --git a/backend/bdd/features/pki/acme/access-control.feature b/backend/bdd/features/pki/acme/access-control.feature index 6615d00f80..50588be765 100644 --- a/backend/bdd/features/pki/acme/access-control.feature +++ b/backend/bdd/features/pki/acme/access-control.feature @@ -221,7 +221,6 @@ Feature: Access Control | order | .authorizations[0].uri | auth_uri | {auth_uri} | | | order | .authorizations[0].body.challenges[0].url | challenge_uri | {challenge_uri} | {} | - Scenario Outline: URL mismatch Given I have an ACME cert profile as "acme_profile" When I have an ACME client connecting to "{BASE_URL}/api/v1/pki/acme/profiles/{acme_profile.id}/directory" @@ -271,3 +270,52 @@ Feature: Access Control | order | .authorizations[0].uri | auth_uri | {auth_uri} | https://example.com/acmes/auths/FOOBAR | URL mismatch in the protected header | | order | .authorizations[0].body.challenges[0].url | challenge_uri | {challenge_uri} | BAD | Invalid URL in the protected header | | order | .authorizations[0].body.challenges[0].url | challenge_uri | {challenge_uri} | https://example.com/acmes/challenges/FOOBAR | URL mismatch in the protected header | + + Scenario Outline: Send KID and JWK in the same time + Given I have an ACME cert profile as "acme_profile" + When I have an ACME client connecting to "{BASE_URL}/api/v1/pki/acme/profiles/{acme_profile.id}/directory" + Then I register a new ACME account with email fangpen@infisical.com and EAB key id "{acme_profile.eab_kid}" with secret "{acme_profile.eab_secret}" as acme_account + And I memorize acme_account.uri with jq 
"capture("/(?[^/]+)$") | .id" as account_id + When I create certificate signing request as csr + Then I add names to certificate signing request csr + """ + { + "COMMON_NAME": "localhost" + } + """ + Then I create a RSA private key pair as cert_key + And I sign the certificate signing request csr with private key cert_key and output it as csr_pem in PEM format + And I submit the certificate signing request PEM csr_pem certificate order to the ACME server as order + And I peak and memorize the next nonce as nonce_value + And I memorize with jq "" as + When I send a raw ACME request to "" + """ + { + "protected": { + "alg": "RS256", + "nonce": "{nonce_value}", + "url": "", + "kid": "{acme_account.uri}", + "jwk": { + "n": "mmEWxUv2lUYDZe_M2FXJ_WDXgHoEG7PVvg-dfz1STzyMwx0qvM66KMenXSyVA0r-_Ssb6p8VexSWGOFKskM4ryKUihn2KNH5e8nXZBqzqYeKQ8vqaCdaWzTxFI1dg0xhk0CWptkZHxpRpLalztFJ1Pq7L2qvQOM2YT7wPYbwQhpaSiVNXAb1W4FwAPyC04v1mHehvST-esaDT7j_5-eU5cCcmyi4_g5nBawcinOjj5o3VCg4X8UjK--AjhAyYHx1nRMr-7xk4x-0VIpQ_OODjLB3WzN8s1YEb0Jx5Bv1JyeCw35zahqs3fAFyRje-p5ENk9NCxfz5x9ZGkszkkNt0Q", + "e": "AQAB", + "kty": "RSA" + } + }, + "payload": {} + } + """ + Then the value response.status_code should be equal to 400 + And the value response with jq ".status" should be equal to 400 + And the value response with jq ".type" should be equal to "urn:ietf:params:acme:error:malformed" + And the value response with jq ".detail" should be equal to "Both JWK and KID are provided in the protected header" + + Examples: Endpoints + | src_var | jq | dest_var | url | + | order | . | not_used | {BASE_URL}/api/v1/pki/acme/profiles/{acme_profile.id}/accounts/{account_id}/orders | + | order | . | not_used | {BASE_URL}/api/v1/pki/acme/profiles/{acme_profile.id}/new-order | + | order | . | not_used | {order.uri} | + | order | . | not_used | {order.uri}/finalize | + | order | . 
| not_used | {order.uri}/certificate | + | order | .authorizations[0].uri | auth_uri | {auth_uri} | + | order | .authorizations[0].body.challenges[0].url | challenge_uri | {challenge_uri} | diff --git a/backend/bdd/features/pki/acme/account.feature b/backend/bdd/features/pki/acme/account.feature index 589c5ab244..14e304c6ca 100644 --- a/backend/bdd/features/pki/acme/account.feature +++ b/backend/bdd/features/pki/acme/account.feature @@ -6,13 +6,32 @@ Feature: Account Then I register a new ACME account with email fangpen@infisical.com and EAB key id "{acme_profile.eab_kid}" with secret "{acme_profile.eab_secret}" as acme_account And the value acme_account.uri with jq "." should match pattern {BASE_URL}/api/v1/pki/acme/profiles/{acme_profile.id}/accounts/(.+) + Scenario: Create a new account with the same key pair twice + Given I have an ACME cert profile as "acme_profile" + When I have an ACME client connecting to "{BASE_URL}/api/v1/pki/acme/profiles/{acme_profile.id}/directory" + Then I register a new ACME account with email fangpen@infisical.com and EAB key id "{acme_profile.eab_kid}" with secret "{acme_profile.eab_secret}" as acme_account + And I memorize acme_account.uri as kid + And I register a new ACME account with email fangpen@infisical.com and EAB key id "{acme_profile.eab_kid}" with secret "{acme_profile.eab_secret}" as acme_account2 + And the value error.__class__.__name__ should be equal to "ConflictError" + And the value error.location should be equal to "{kid}" + Scenario: Find an existing account Given I have an ACME cert profile as "acme_profile" When I have an ACME client connecting to "{BASE_URL}/api/v1/pki/acme/profiles/{acme_profile.id}/directory" Then I register a new ACME account with email fangpen@infisical.com and EAB key id "{acme_profile.eab_kid}" with secret "{acme_profile.eab_secret}" as acme_account And I memorize acme_account.uri as account_uri - And I find the existing ACME account with email fangpen@infisical.com and EAB key id 
"{acme_profile.eab_kid}" with secret "{acme_profile.eab_secret}" as acme_account - And the value acme_account.uri should be equal to "{account_uri}" + And I find the existing ACME account with email fangpen@infisical.com and EAB key id "{acme_profile.eab_kid}" with secret "{acme_profile.eab_secret}" as retrieved_account + And the value retrieved_account.uri should be equal to "{account_uri}" + + # Note: This is a very special case for cert-manager. + Scenario: Create a new account with EAB then retrieve it without EAB + Given I have an ACME cert profile as "acme_profile" + When I have an ACME client connecting to "{BASE_URL}/api/v1/pki/acme/profiles/{acme_profile.id}/directory" + Then I register a new ACME account with email fangpen@infisical.com and EAB key id "{acme_profile.eab_kid}" with secret "{acme_profile.eab_secret}" as acme_account + And I memorize acme_account.uri as account_uri + And I find the existing ACME account without EAB as retrieved_account + And the value error with should be absent + And the value retrieved_account.uri should be equal to "{account_uri}" Scenario: Create a new account without EAB Given I have an ACME cert profile as "acme_profile" diff --git a/backend/bdd/features/pki/acme/dicrectory.feature b/backend/bdd/features/pki/acme/directory.feature similarity index 86% rename from backend/bdd/features/pki/acme/dicrectory.feature rename to backend/bdd/features/pki/acme/directory.feature index 664ff7457d..53084a6817 100644 --- a/backend/bdd/features/pki/acme/dicrectory.feature +++ b/backend/bdd/features/pki/acme/directory.feature @@ -9,6 +9,9 @@ Feature: Directory { "newNonce": "{BASE_URL}/api/v1/pki/acme/profiles/{acme_profile.id}/new-nonce", "newAccount": "{BASE_URL}/api/v1/pki/acme/profiles/{acme_profile.id}/new-account", - "newOrder": "{BASE_URL}/api/v1/pki/acme/profiles/{acme_profile.id}/new-order" + "newOrder": "{BASE_URL}/api/v1/pki/acme/profiles/{acme_profile.id}/new-order", + "meta": { + "externalAccountRequired": true + } } """ 
diff --git a/backend/bdd/features/steps/pki_acme.py b/backend/bdd/features/steps/pki_acme.py index 46b10c13e0..353ec942da 100644 --- a/backend/bdd/features/steps/pki_acme.py +++ b/backend/bdd/features/steps/pki_acme.py @@ -387,6 +387,9 @@ def register_account_with_eab( ): acme_client = context.acme_client account_public_key = acme_client.net.key.public_key() + if not only_return_existing: + # clear the account in case if we want to register twice + acme_client.net.account = None if hasattr(context, "alt_eab_url"): eab_directory = messages.Directory.from_json( {"newAccount": context.alt_eab_url} @@ -406,8 +409,14 @@ def register_account_with_eab( only_return_existing=only_return_existing, ) try: - context.vars[account_var] = acme_client.new_account(registration) + if not only_return_existing: + context.vars[account_var] = acme_client.new_account(registration) + else: + context.vars[account_var] = acme_client.query_registration( + acme_client.net.account + ) except Exception as exp: + logger.error(f"Failed to register: {exp}", exc_info=True) context.vars["error"] = exp @@ -434,6 +443,17 @@ def step_impl(context: Context, email: str, kid: str, secret: str, account_var: ) +@then("I find the existing ACME account without EAB as {account_var}") +def step_impl(context: Context, account_var: str): + acme_client = context.acme_client + # registration = messages.RegistrationResource.from_json(dict(uri="")) + registration = acme_client.net.account + try: + context.vars[account_var] = acme_client.query_registration(registration) + except Exception as exp: + context.vars["error"] = exp + + @then("I register a new ACME account with email {email} without EAB") def step_impl(context: Context, email: str): acme_client = context.acme_client @@ -600,6 +620,19 @@ def step_impl(context: Context, var_path: str, jq_query: str): ) +@then("the value {var_path} with should be absent") +def step_impl(context: Context, var_path: str): + try: + value = eval_var(context, var_path) + except 
Exception as exp: + if isinstance(exp, KeyError): + return + raise + assert False, ( + f"value at {var_path!r} should be absent, but we got this instead: {value!r}" + ) + + @then('the value {var_path} with jq "{jq_query}" should be equal to {expected}') def step_impl(context: Context, var_path: str, jq_query: str, expected: str): value, result = apply_value_with_jq( @@ -615,13 +648,14 @@ def step_impl(context: Context, var_path: str, jq_query: str, expected: str): @then('the value {var_path} with jq "{jq_query}" should match pattern {regex}') def step_impl(context: Context, var_path: str, jq_query: str, regex: str): + actual_regex = replace_vars(regex, context.vars) value, result = apply_value_with_jq( context=context, var_path=var_path, jq_query=jq_query, ) - assert re.match(replace_vars(regex, context.vars), result), ( - f"{json.dumps(value)!r} with jq {jq_query!r}, the result {json.dumps(result)!r} does not match {regex!r}" + assert re.match(actual_regex, result), ( + f"{json.dumps(value)!r} with jq {jq_query!r}, the result {json.dumps(result)!r} does not match {actual_regex!r}" ) diff --git a/backend/bdd/features/steps/utils.py b/backend/bdd/features/steps/utils.py index 4ee7c8921d..93269d8bcb 100644 --- a/backend/bdd/features/steps/utils.py +++ b/backend/bdd/features/steps/utils.py @@ -15,6 +15,7 @@ from josepy import JSONObjectWithFields ACC_KEY_BITS = 2048 ACC_KEY_PUBLIC_EXPONENT = 65537 +NOCK_API_PREFIX = "/api/__bdd_nock__" logger = logging.getLogger(__name__) faker = Faker() @@ -265,7 +266,7 @@ def x509_cert_to_dict(cert: x509.Certificate) -> dict: def define_nock(context: Context, definitions: list[dict]): jwt_token = context.vars["AUTH_TOKEN"] response = context.http_client.post( - "/api/v1/bdd-nock/define", + f"{NOCK_API_PREFIX}/define", headers=dict(authorization="Bearer {}".format(jwt_token)), json=dict(definitions=definitions), ) @@ -275,7 +276,7 @@ def define_nock(context: Context, definitions: list[dict]): def restore_nock(context: Context): 
jwt_token = context.vars["AUTH_TOKEN"] response = context.http_client.post( - "/api/v1/bdd-nock/restore", + f"{NOCK_API_PREFIX}/restore", headers=dict(authorization="Bearer {}".format(jwt_token)), json=dict(), ) @@ -285,7 +286,7 @@ def restore_nock(context: Context): def clean_all_nock(context: Context): jwt_token = context.vars["AUTH_TOKEN"] response = context.http_client.post( - "/api/v1/bdd-nock/clean-all", + f"{NOCK_API_PREFIX}/clean-all", headers=dict(authorization="Bearer {}".format(jwt_token)), json=dict(), ) diff --git a/backend/e2e-test/routes/v1/secret-approval-policy.spec.ts b/backend/e2e-test/routes/v1/secret-approval-policy.spec.ts index 6244cf735a..d67dc1d5e0 100644 --- a/backend/e2e-test/routes/v1/secret-approval-policy.spec.ts +++ b/backend/e2e-test/routes/v1/secret-approval-policy.spec.ts @@ -1,7 +1,12 @@ import { seedData1 } from "@app/db/seed-data"; import { ApproverType } from "@app/ee/services/access-approval-policy/access-approval-policy-types"; -const createPolicy = async (dto: { name: string; secretPath: string; approvers: {type: ApproverType.User, id: string}[]; approvals: number }) => { +const createPolicy = async (dto: { + name: string; + secretPath: string; + approvers: { type: ApproverType.User; id: string }[]; + approvals: number; +}) => { const res = await testServer.inject({ method: "POST", url: `/api/v1/secret-approvals`, @@ -27,7 +32,7 @@ describe("Secret approval policy router", async () => { const policy = await createPolicy({ secretPath: "/", approvals: 1, - approvers: [{id:seedData1.id, type: ApproverType.User}], + approvers: [{ id: seedData1.id, type: ApproverType.User }], name: "test-policy" }); diff --git a/backend/nodemon.json b/backend/nodemon.json index 856f9ee51c..2542bca4dc 100644 --- a/backend/nodemon.json +++ b/backend/nodemon.json @@ -1,6 +1,8 @@ { - "watch": ["src"], + "watch": [ + "src" + ], "ext": ".ts,.js", "ignore": [], - "exec": "tsx ./src/main.ts | pino-pretty --colorize --colorizeObjects --singleLine" -} + 
"exec": "tsx --tsconfig=./tsconfig.dev.json --inspect=0.0.0.0:9229 ./src/main.ts | pino-pretty --colorize --colorizeObjects --singleLine" +} \ No newline at end of file diff --git a/backend/package.json b/backend/package.json index 9a0d9772db..0e17bb2b73 100644 --- a/backend/package.json +++ b/backend/package.json @@ -32,7 +32,7 @@ "binary:clean": "rm -rf ./dist && rm -rf ./binary", "binary:rename-imports": "ts-node ./scripts/rename-mjs.ts", "test": "echo \"Error: no test specified\" && exit 1", - "dev": "tsx watch --clear-screen=false ./src/main.ts | pino-pretty --colorize --colorizeObjects --singleLine", + "dev": "tsx watch --clear-screen=false ./src/main.ts --config tsconfig.dev.json | pino-pretty --colorize --colorizeObjects --singleLine", "dev:docker": "nodemon", "build": "tsup --sourcemap", "build:frontend": "npm run build --prefix ../frontend", @@ -266,4 +266,4 @@ "zod": "^3.22.4", "zod-to-json-schema": "^3.24.5" } -} +} \ No newline at end of file diff --git a/backend/scripts/create-migration.ts b/backend/scripts/create-migration.ts index 34f4aca419..3e34b94139 100644 --- a/backend/scripts/create-migration.ts +++ b/backend/scripts/create-migration.ts @@ -2,7 +2,7 @@ import { execSync } from "child_process"; import path from "path"; import promptSync from "prompt-sync"; -import slugify from "@sindresorhus/slugify" +import slugify from "@sindresorhus/slugify"; const prompt = promptSync({ sigint: true }); diff --git a/backend/src/db/migrations/20250824192801_backfill-secret-read-compat-flag.ts b/backend/src/db/migrations/20250824192801_backfill-secret-read-compat-flag.ts index 7a629ca520..bb9e3ac9a1 100644 --- a/backend/src/db/migrations/20250824192801_backfill-secret-read-compat-flag.ts +++ b/backend/src/db/migrations/20250824192801_backfill-secret-read-compat-flag.ts @@ -14,13 +14,16 @@ export async function up(knex: Knex): Promise { if (rows.length > 0) { for (let i = 0; i < rows.length; i += BATCH_SIZE) { const batch = rows.slice(i, i + BATCH_SIZE); + 
const ids = batch.map((row) => row.id); // eslint-disable-next-line no-await-in-loop - await knex(TableName.SecretApprovalPolicy) - .whereIn( - "id", - batch.map((row) => row.id) - ) - .update({ shouldCheckSecretPermission: true }); + await knex.raw( + ` + UPDATE ?? + SET ?? = true + WHERE ?? IN (${ids.map(() => "?").join(",")}) + `, + [TableName.SecretApprovalPolicy, "shouldCheckSecretPermission", "id", ids] + ); } } } diff --git a/backend/src/db/migrations/20251119025017_add-unique-constraint-for-pki-acme-account-public-key-and-profile-id.ts b/backend/src/db/migrations/20251119025017_add-unique-constraint-for-pki-acme-account-public-key-and-profile-id.ts new file mode 100644 index 0000000000..5bc4601e33 --- /dev/null +++ b/backend/src/db/migrations/20251119025017_add-unique-constraint-for-pki-acme-account-public-key-and-profile-id.ts @@ -0,0 +1,32 @@ +import { Knex } from "knex"; + +import { dropConstraintIfExists } from "@app/db/migrations/utils/dropConstraintIfExists"; +import { TableName } from "@app/db/schemas"; + +const CONSTRAINT_NAME = "unique_pki_acme_account_public_key_and_profile_id"; + +export async function up(knex: Knex): Promise { + if (await knex.schema.hasTable(TableName.PkiAcmeAccount)) { + const hasProfileId = await knex.schema.hasColumn(TableName.PkiAcmeAccount, "profileId"); + const hasPublicKeyThumbprint = await knex.schema.hasColumn(TableName.PkiAcmeAccount, "publicKeyThumbprint"); + + if (hasProfileId && hasPublicKeyThumbprint) { + await knex.schema.alterTable(TableName.PkiAcmeAccount, (table) => { + table.unique(["profileId", "publicKeyThumbprint"], { indexName: CONSTRAINT_NAME }); + }); + } + } +} + +export async function down(knex: Knex): Promise { + if (await knex.schema.hasTable(TableName.PkiAcmeAccount)) { + const hasProfileId = await knex.schema.hasColumn(TableName.PkiAcmeAccount, "profileId"); + const hasPublicKeyThumbprint = await knex.schema.hasColumn(TableName.PkiAcmeAccount, "publicKeyThumbprint"); + + await 
knex.schema.alterTable(TableName.PkiAcmeAccount, async () => { + if (hasProfileId && hasPublicKeyThumbprint) { + await dropConstraintIfExists(TableName.PkiAcmeAccount, CONSTRAINT_NAME, knex); + } + }); + } +} diff --git a/backend/src/db/migrations/20251119213350_remove-should-check-secret-permission.ts b/backend/src/db/migrations/20251119213350_remove-should-check-secret-permission.ts new file mode 100644 index 0000000000..7c1758095e --- /dev/null +++ b/backend/src/db/migrations/20251119213350_remove-should-check-secret-permission.ts @@ -0,0 +1,19 @@ +import { Knex } from "knex"; + +import { TableName } from "../schemas"; + +export async function up(knex: Knex): Promise { + if (await knex.schema.hasColumn(TableName.SecretApprovalPolicy, "shouldCheckSecretPermission")) { + await knex.schema.alterTable(TableName.SecretApprovalPolicy, (t) => { + t.dropColumn("shouldCheckSecretPermission"); + }); + } +} + +export async function down(knex: Knex): Promise { + if (!(await knex.schema.hasColumn(TableName.SecretApprovalPolicy, "shouldCheckSecretPermission"))) { + await knex.schema.alterTable(TableName.SecretApprovalPolicy, (t) => { + t.boolean("shouldCheckSecretPermission").nullable(); + }); + } +} diff --git a/backend/src/db/schemas/secret-approval-policies.ts b/backend/src/db/schemas/secret-approval-policies.ts index dbb881db39..0273e617cd 100644 --- a/backend/src/db/schemas/secret-approval-policies.ts +++ b/backend/src/db/schemas/secret-approval-policies.ts @@ -17,8 +17,7 @@ export const SecretApprovalPoliciesSchema = z.object({ updatedAt: z.date(), enforcementLevel: z.string().default("hard"), deletedAt: z.date().nullable().optional(), - allowedSelfApprovals: z.boolean().default(true), - shouldCheckSecretPermission: z.boolean().nullable().optional() + allowedSelfApprovals: z.boolean().default(true) }); export type TSecretApprovalPolicies = z.infer; diff --git a/backend/src/ee/routes/v1/group-router.ts b/backend/src/ee/routes/v1/group-router.ts index 
ec235d34eb..4696bef26d 100644 --- a/backend/src/ee/routes/v1/group-router.ts +++ b/backend/src/ee/routes/v1/group-router.ts @@ -1,8 +1,14 @@ import { z } from "zod"; -import { GroupsSchema, OrgMembershipRole, UsersSchema } from "@app/db/schemas"; -import { EFilterReturnedUsers } from "@app/ee/services/group/group-types"; +import { GroupsSchema, OrgMembershipRole, ProjectsSchema, UsersSchema } from "@app/db/schemas"; +import { + EFilterReturnedProjects, + EFilterReturnedUsers, + EGroupProjectsOrderBy +} from "@app/ee/services/group/group-types"; import { ApiDocsTags, GROUPS } from "@app/lib/api-docs"; +import { OrderByDirection } from "@app/lib/types"; +import { readLimit, writeLimit } from "@app/server/config/rateLimiter"; import { slugSchema } from "@app/server/lib/schemas"; import { verifyAuth } from "@app/server/plugins/auth/verify-auth"; import { AuthMode } from "@app/services/auth/auth-type"; @@ -11,6 +17,9 @@ export const registerGroupRouter = async (server: FastifyZodProvider) => { server.route({ url: "/", method: "POST", + config: { + rateLimit: writeLimit + }, onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), schema: { hide: false, @@ -40,6 +49,9 @@ export const registerGroupRouter = async (server: FastifyZodProvider) => { server.route({ url: "/:id", method: "GET", + config: { + rateLimit: readLimit + }, onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), schema: { hide: false, @@ -69,6 +81,9 @@ export const registerGroupRouter = async (server: FastifyZodProvider) => { server.route({ url: "/", method: "GET", + config: { + rateLimit: readLimit + }, onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), schema: { hide: false, @@ -93,6 +108,9 @@ export const registerGroupRouter = async (server: FastifyZodProvider) => { server.route({ url: "/:id", method: "PATCH", + config: { + rateLimit: writeLimit + }, onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), schema: { hide: false, @@ 
-128,6 +146,9 @@ export const registerGroupRouter = async (server: FastifyZodProvider) => { server.route({ url: "/:id", method: "DELETE", + config: { + rateLimit: writeLimit + }, onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), schema: { hide: false, @@ -155,6 +176,9 @@ export const registerGroupRouter = async (server: FastifyZodProvider) => { server.route({ method: "GET", url: "/:id/users", + config: { + rateLimit: readLimit + }, onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), schema: { hide: false, @@ -163,7 +187,7 @@ export const registerGroupRouter = async (server: FastifyZodProvider) => { id: z.string().trim().describe(GROUPS.LIST_USERS.id) }), querystring: z.object({ - offset: z.coerce.number().min(0).max(100).default(0).describe(GROUPS.LIST_USERS.offset), + offset: z.coerce.number().min(0).default(0).describe(GROUPS.LIST_USERS.offset), limit: z.coerce.number().min(1).max(100).default(10).describe(GROUPS.LIST_USERS.limit), username: z.string().trim().optional().describe(GROUPS.LIST_USERS.username), search: z.string().trim().optional().describe(GROUPS.LIST_USERS.search), @@ -203,9 +227,72 @@ export const registerGroupRouter = async (server: FastifyZodProvider) => { } }); + server.route({ + method: "GET", + url: "/:id/projects", + config: { + rateLimit: readLimit + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + schema: { + hide: false, + tags: [ApiDocsTags.Groups], + params: z.object({ + id: z.string().trim().describe(GROUPS.LIST_PROJECTS.id) + }), + querystring: z.object({ + offset: z.coerce.number().min(0).default(0).describe(GROUPS.LIST_PROJECTS.offset), + limit: z.coerce.number().min(1).max(100).default(10).describe(GROUPS.LIST_PROJECTS.limit), + search: z.string().trim().optional().describe(GROUPS.LIST_PROJECTS.search), + filter: z.nativeEnum(EFilterReturnedProjects).optional().describe(GROUPS.LIST_PROJECTS.filterProjects), + orderBy: z + .nativeEnum(EGroupProjectsOrderBy) + 
.default(EGroupProjectsOrderBy.Name) + .describe(GROUPS.LIST_PROJECTS.orderBy), + orderDirection: z + .nativeEnum(OrderByDirection) + .default(OrderByDirection.ASC) + .describe(GROUPS.LIST_PROJECTS.orderDirection) + }), + response: { + 200: z.object({ + projects: ProjectsSchema.pick({ + id: true, + name: true, + slug: true, + description: true, + type: true + }) + .merge( + z.object({ + joinedGroupAt: z.date().nullable() + }) + ) + .array(), + totalCount: z.number() + }) + } + }, + handler: async (req) => { + const { projects, totalCount } = await server.services.group.listGroupProjects({ + id: req.params.id, + actor: req.permission.type, + actorId: req.permission.id, + actorAuthMethod: req.permission.authMethod, + actorOrgId: req.permission.orgId, + ...req.query + }); + + return { projects, totalCount }; + } + }); + server.route({ method: "POST", url: "/:id/users/:username", + config: { + rateLimit: writeLimit + }, onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), schema: { hide: false, @@ -241,6 +328,9 @@ export const registerGroupRouter = async (server: FastifyZodProvider) => { server.route({ method: "DELETE", url: "/:id/users/:username", + config: { + rateLimit: writeLimit + }, onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), schema: { hide: false, diff --git a/backend/src/ee/routes/v1/secret-approval-request-router.ts b/backend/src/ee/routes/v1/secret-approval-request-router.ts index bd5bacc5fc..925b92fda8 100644 --- a/backend/src/ee/routes/v1/secret-approval-request-router.ts +++ b/backend/src/ee/routes/v1/secret-approval-request-router.ts @@ -305,8 +305,7 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv secretPath: z.string().optional().nullable(), enforcementLevel: z.string(), deletedAt: z.date().nullish(), - allowedSelfApprovals: z.boolean(), - shouldCheckSecretPermission: z.boolean().nullable().optional() + allowedSelfApprovals: z.boolean() }), environment: z.string(), 
statusChangedByUser: approvalRequestUser.optional(), diff --git a/backend/src/ee/services/app-connections/chef/chef-connection-fns.ts b/backend/src/ee/services/app-connections/chef/chef-connection-fns.ts index 6cef8373fd..48b2508fc5 100644 --- a/backend/src/ee/services/app-connections/chef/chef-connection-fns.ts +++ b/backend/src/ee/services/app-connections/chef/chef-connection-fns.ts @@ -27,6 +27,17 @@ export const getChefServerUrl = async (serverUrl?: string) => { return chefServerUrl; }; +const buildSecureUrl = (baseUrl: string, path: string): string => { + try { + const url = new URL(path, baseUrl); + return url.toString(); + } catch (error) { + throw new BadRequestError({ + message: "Invalid URL construction parameters" + }); + } +}; + // Helper to ensure private key is in proper PEM format const formatPrivateKey = (key: string): string => { let formattedKey = key.trim(); @@ -138,7 +149,8 @@ export const validateChefConnectionCredentials = async (config: TChefConnectionC const headers = getChefAuthHeaders("GET", path, "", inputCredentials.userName, inputCredentials.privateKey); - await request.get(`${hostServerUrl}${path}`, { + const secureUrl = buildSecureUrl(hostServerUrl, path); + await request.get(secureUrl, { headers }); } catch (error: unknown) { @@ -168,7 +180,8 @@ export const listChefDataBags = async (appConnection: TChefConnection): Promise< const headers = getChefAuthHeaders("GET", path, body, userName, privateKey); - const res = await request.get>(`${hostServerUrl}${path}`, { + const secureUrl = buildSecureUrl(hostServerUrl, path); + const res = await request.get>(secureUrl, { headers }); @@ -203,7 +216,8 @@ export const listChefDataBagItems = async ( const headers = getChefAuthHeaders("GET", path, body, userName, privateKey); - const res = await request.get>(`${hostServerUrl}${path}`, { + const secureUrl = buildSecureUrl(hostServerUrl, path); + const res = await request.get>(secureUrl, { headers }); @@ -238,7 +252,8 @@ export const 
getChefDataBagItem = async ({ const headers = getChefAuthHeaders("GET", path, body, userName, privateKey); - const res = await request.get(`${hostServerUrl}${path}`, { + const secureUrl = buildSecureUrl(hostServerUrl, path); + const res = await request.get(secureUrl, { headers }); @@ -255,6 +270,38 @@ export const getChefDataBagItem = async ({ } }; +export const createChefDataBagItem = async ({ + serverUrl, + userName, + privateKey, + orgName, + dataBagName, + data +}: Omit): Promise => { + try { + const path = `/organizations/${orgName}/data/${dataBagName}`; + const body = JSON.stringify(data); + + const hostServerUrl = await getChefServerUrl(serverUrl); + + const headers = getChefAuthHeaders("POST", path, body, userName, privateKey); + + const secureUrl = buildSecureUrl(hostServerUrl, path); + await request.post(secureUrl, data, { + headers + }); + } catch (error) { + if (error instanceof AxiosError) { + throw new BadRequestError({ + message: `Failed to create Chef data bag item: ${error.message || "Unknown error"}` + }); + } + throw new BadRequestError({ + message: "Unable to create Chef data bag item" + }); + } +}; + export const updateChefDataBagItem = async ({ serverUrl, userName, @@ -272,7 +319,8 @@ export const updateChefDataBagItem = async ({ const headers = getChefAuthHeaders("PUT", path, body, userName, privateKey); - await request.put(`${hostServerUrl}${path}`, data, { + const secureUrl = buildSecureUrl(hostServerUrl, path); + await request.put(secureUrl, data, { headers }); } catch (error) { @@ -286,3 +334,35 @@ export const updateChefDataBagItem = async ({ }); } }; + +export const removeChefDataBagItem = async ({ + serverUrl, + userName, + privateKey, + orgName, + dataBagName, + dataBagItemName +}: Omit): Promise => { + try { + const path = `/organizations/${orgName}/data/${dataBagName}/${dataBagItemName}`; + const body = ""; + + const hostServerUrl = await getChefServerUrl(serverUrl); + + const headers = getChefAuthHeaders("DELETE", path, body, 
userName, privateKey); + + const secureUrl = buildSecureUrl(hostServerUrl, path); + await request.delete(secureUrl, { + headers + }); + } catch (error) { + if (error instanceof AxiosError) { + throw new BadRequestError({ + message: `Failed to remove Chef data bag item: ${error.message || "Unknown error"}` + }); + } + throw new BadRequestError({ + message: "Unable to remove Chef data bag item" + }); + } +}; diff --git a/backend/src/ee/services/group/group-dal.ts b/backend/src/ee/services/group/group-dal.ts index 6fb02207da..ced8410b76 100644 --- a/backend/src/ee/services/group/group-dal.ts +++ b/backend/src/ee/services/group/group-dal.ts @@ -4,8 +4,9 @@ import { TDbClient } from "@app/db"; import { AccessScope, TableName, TGroups } from "@app/db/schemas"; import { DatabaseError } from "@app/lib/errors"; import { buildFindFilter, ormify, selectAllTableCols, TFindFilter, TFindOpt } from "@app/lib/knex"; +import { OrderByDirection } from "@app/lib/types"; -import { EFilterReturnedUsers } from "./group-types"; +import { EFilterReturnedProjects, EFilterReturnedUsers, EGroupProjectsOrderBy } from "./group-types"; export type TGroupDALFactory = ReturnType; @@ -166,6 +167,89 @@ export const groupDALFactory = (db: TDbClient) => { } }; + const findAllGroupProjects = async ({ + orgId, + groupId, + offset, + limit, + search, + filter, + orderBy, + orderDirection + }: { + orgId: string; + groupId: string; + offset?: number; + limit?: number; + search?: string; + filter?: EFilterReturnedProjects; + orderBy?: EGroupProjectsOrderBy; + orderDirection?: OrderByDirection; + }) => { + try { + const query = db + .replicaNode()(TableName.Project) + .where(`${TableName.Project}.orgId`, orgId) + .leftJoin(TableName.Membership, (bd) => { + bd.on(`${TableName.Project}.id`, "=", `${TableName.Membership}.scopeProjectId`) + .andOn(`${TableName.Membership}.actorGroupId`, "=", db.raw("?", [groupId])) + .andOn(`${TableName.Membership}.scope`, "=", db.raw("?", [AccessScope.Project])); + }) + 
.select( + db.ref("id").withSchema(TableName.Project), + db.ref("name").withSchema(TableName.Project), + db.ref("slug").withSchema(TableName.Project), + db.ref("description").withSchema(TableName.Project), + db.ref("type").withSchema(TableName.Project), + db.ref("createdAt").withSchema(TableName.Membership).as("joinedGroupAt"), + db.raw(`count(*) OVER() as "totalCount"`) + ) + .offset(offset ?? 0); + + if (orderBy) { + void query.orderByRaw( + `LOWER(${TableName.Project}.??) ${orderDirection === OrderByDirection.ASC ? "asc" : "desc"}`, + [orderBy] + ); + } + + if (limit) { + void query.limit(limit); + } + + if (search) { + void query.andWhereRaw( + `CONCAT_WS(' ', "${TableName.Project}"."name", "${TableName.Project}"."slug", "${TableName.Project}"."description") ilike ?`, + [`%${search}%`] + ); + } + + switch (filter) { + case EFilterReturnedProjects.ASSIGNED_PROJECTS: + void query.whereNotNull(`${TableName.Membership}.id`); + break; + case EFilterReturnedProjects.UNASSIGNED_PROJECTS: + void query.whereNull(`${TableName.Membership}.id`); + break; + default: + break; + } + + const projects = await query; + + return { + projects: projects.map(({ joinedGroupAt, ...project }) => ({ + ...project, + joinedGroupAt + })), + // @ts-expect-error col select is raw and not strongly typed + totalCount: Number(projects?.[0]?.totalCount ?? 
0) + }; + } catch (error) { + throw new DatabaseError({ error, name: "Find all group projects" }); + } + }; + const findGroupsByProjectId = async (projectId: string, tx?: Knex) => { try { const docs = await (tx || db.replicaNode())(TableName.Groups) @@ -230,6 +314,7 @@ export const groupDALFactory = (db: TDbClient) => { findGroups, findByOrgId, findAllGroupPossibleMembers, + findAllGroupProjects, findGroupsByProjectId, findById, findOne diff --git a/backend/src/ee/services/group/group-service.ts b/backend/src/ee/services/group/group-service.ts index 956d7853a5..1a6a046a6b 100644 --- a/backend/src/ee/services/group/group-service.ts +++ b/backend/src/ee/services/group/group-service.ts @@ -24,6 +24,7 @@ import { TCreateGroupDTO, TDeleteGroupDTO, TGetGroupByIdDTO, + TListGroupProjectsDTO, TListGroupUsersDTO, TRemoveUserFromGroupDTO, TUpdateGroupDTO @@ -34,7 +35,14 @@ type TGroupServiceFactoryDep = { userDAL: Pick; groupDAL: Pick< TGroupDALFactory, - "create" | "findOne" | "update" | "delete" | "findAllGroupPossibleMembers" | "findById" | "transaction" + | "create" + | "findOne" + | "update" + | "delete" + | "findAllGroupPossibleMembers" + | "findById" + | "transaction" + | "findAllGroupProjects" >; membershipGroupDAL: Pick; membershipRoleDAL: Pick; @@ -367,6 +375,55 @@ export const groupServiceFactory = ({ return { users: members, totalCount }; }; + const listGroupProjects = async ({ + id, + offset, + limit, + search, + filter, + orderBy, + orderDirection, + actor, + actorId, + actorAuthMethod, + actorOrgId + }: TListGroupProjectsDTO) => { + if (!actorOrgId) throw new UnauthorizedError({ message: "No organization ID provided in request" }); + + const { permission } = await permissionService.getOrgPermission({ + scope: OrganizationActionScope.Any, + actor, + actorId, + orgId: actorOrgId, + actorAuthMethod, + actorOrgId + }); + ForbiddenError.from(permission).throwUnlessCan(OrgPermissionGroupActions.Read, OrgPermissionSubjects.Groups); + + const group = await 
groupDAL.findOne({ + orgId: actorOrgId, + id + }); + + if (!group) + throw new NotFoundError({ + message: `Failed to find group with ID ${id}` + }); + + const { projects, totalCount } = await groupDAL.findAllGroupProjects({ + orgId: group.orgId, + groupId: group.id, + offset, + limit, + search, + filter, + orderBy, + orderDirection + }); + + return { projects, totalCount }; + }; + const addUserToGroup = async ({ id, username, actor, actorId, actorAuthMethod, actorOrgId }: TAddUserToGroupDTO) => { if (!actorOrgId) throw new UnauthorizedError({ message: "No organization ID provided in request" }); @@ -535,6 +592,7 @@ export const groupServiceFactory = ({ updateGroup, deleteGroup, listGroupUsers, + listGroupProjects, addUserToGroup, removeUserFromGroup, getGroupById diff --git a/backend/src/ee/services/group/group-types.ts b/backend/src/ee/services/group/group-types.ts index 4b07422018..335b6d72bb 100644 --- a/backend/src/ee/services/group/group-types.ts +++ b/backend/src/ee/services/group/group-types.ts @@ -2,7 +2,7 @@ import { Knex } from "knex"; import { TGroups } from "@app/db/schemas"; import { TUserGroupMembershipDALFactory } from "@app/ee/services/group/user-group-membership-dal"; -import { TGenericPermission } from "@app/lib/types"; +import { OrderByDirection, TGenericPermission } from "@app/lib/types"; import { TMembershipGroupDALFactory } from "@app/services/membership-group/membership-group-dal"; import { TOrgDALFactory } from "@app/services/org/org-dal"; import { TProjectDALFactory } from "@app/services/project/project-dal"; @@ -42,6 +42,16 @@ export type TListGroupUsersDTO = { filter?: EFilterReturnedUsers; } & TGenericPermission; +export type TListGroupProjectsDTO = { + id: string; + offset: number; + limit: number; + search?: string; + filter?: EFilterReturnedProjects; + orderBy?: EGroupProjectsOrderBy; + orderDirection?: OrderByDirection; +} & TGenericPermission; + export type TListProjectGroupUsersDTO = TListGroupUsersDTO & { projectId: string; }; @@ 
-111,3 +121,12 @@ export enum EFilterReturnedUsers { EXISTING_MEMBERS = "existingMembers", NON_MEMBERS = "nonMembers" } + +export enum EFilterReturnedProjects { + ASSIGNED_PROJECTS = "assignedProjects", + UNASSIGNED_PROJECTS = "unassignedProjects" +} + +export enum EGroupProjectsOrderBy { + Name = "name" +} diff --git a/backend/src/ee/services/license/__mocks__/license-fns.ts b/backend/src/ee/services/license/__mocks__/license-fns.ts index d303859bb3..2f29e4812a 100644 --- a/backend/src/ee/services/license/__mocks__/license-fns.ts +++ b/backend/src/ee/services/license/__mocks__/license-fns.ts @@ -39,3 +39,9 @@ export const getDefaultOnPremFeatures = () => { }; export const setupLicenseRequestWithStore = () => {}; + +export const getLicenseKeyConfig = () => { + return { + isValid: false + }; +}; diff --git a/backend/src/ee/services/license/license-fns.ts b/backend/src/ee/services/license/license-fns.ts index 14b7bcfbd8..09ff9e1081 100644 --- a/backend/src/ee/services/license/license-fns.ts +++ b/backend/src/ee/services/license/license-fns.ts @@ -1,13 +1,56 @@ import axios, { AxiosError } from "axios"; import { TLicenseServiceFactory } from "@app/ee/services/license/license-service"; -import { getConfig } from "@app/lib/config/env"; +import { getConfig, TEnvConfig } from "@app/lib/config/env"; import { request } from "@app/lib/config/request"; import { BadRequestError } from "@app/lib/errors"; import { logger } from "@app/lib/logger"; import { UserAliasType } from "@app/services/user-alias/user-alias-types"; -import { TFeatureSet } from "./license-types"; +import { LicenseType, TFeatureSet, TLicenseKeyConfig, TOfflineLicenseContents } from "./license-types"; + +export const isOfflineLicenseKey = (licenseKey: string): boolean => { + try { + const contents = JSON.parse(Buffer.from(licenseKey, "base64").toString("utf8")) as TOfflineLicenseContents; + + return "signature" in contents && "license" in contents; + } catch (error) { + return false; + } +}; + +export const 
getLicenseKeyConfig = ( + config?: Pick +): TLicenseKeyConfig => { + const cfg = config || getConfig(); + + if (!cfg) { + return { isValid: false }; + } + + const licenseKey = cfg.LICENSE_KEY; + + if (licenseKey) { + if (isOfflineLicenseKey(licenseKey)) { + return { isValid: true, licenseKey, type: LicenseType.Offline }; + } + + return { isValid: true, licenseKey, type: LicenseType.Online }; + } + + const offlineLicenseKey = cfg.LICENSE_KEY_OFFLINE; + + // backwards compatibility + if (offlineLicenseKey) { + if (isOfflineLicenseKey(offlineLicenseKey)) { + return { isValid: true, licenseKey: offlineLicenseKey, type: LicenseType.Offline }; + } + + return { isValid: false }; + } + + return { isValid: false }; +}; export const getDefaultOnPremFeatures = (): TFeatureSet => ({ _id: null, diff --git a/backend/src/ee/services/license/license-service.ts b/backend/src/ee/services/license/license-service.ts index bbd6147ed3..3bbd58831a 100644 --- a/backend/src/ee/services/license/license-service.ts +++ b/backend/src/ee/services/license/license-service.ts @@ -22,9 +22,10 @@ import { OrgPermissionBillingActions, OrgPermissionSubjects } from "../permissio import { TPermissionServiceFactory } from "../permission/permission-service-types"; import { BillingPlanRows, BillingPlanTableHead } from "./licence-enums"; import { TLicenseDALFactory } from "./license-dal"; -import { getDefaultOnPremFeatures, setupLicenseRequestWithStore } from "./license-fns"; +import { getDefaultOnPremFeatures, getLicenseKeyConfig, setupLicenseRequestWithStore } from "./license-fns"; import { InstanceType, + LicenseType, TAddOrgPmtMethodDTO, TAddOrgTaxIdDTO, TCreateOrgPortalSession, @@ -77,6 +78,7 @@ export const licenseServiceFactory = ({ let instanceType = InstanceType.OnPrem; let onPremFeatures: TFeatureSet = getDefaultOnPremFeatures(); let selfHostedLicense: TOfflineLicense | null = null; + const licenseKeyConfig = getLicenseKeyConfig(envConfig); const licenseServerCloudApi = 
setupLicenseRequestWithStore( envConfig.LICENSE_SERVER_URL || "", @@ -85,10 +87,13 @@ export const licenseServiceFactory = ({ envConfig.INTERNAL_REGION ); + const onlineLicenseKey = + licenseKeyConfig.isValid && licenseKeyConfig.type === LicenseType.Online ? licenseKeyConfig.licenseKey : ""; + const licenseServerOnPremApi = setupLicenseRequestWithStore( envConfig.LICENSE_SERVER_URL || "", LICENSE_SERVER_ON_PREM_LOGIN, - envConfig.LICENSE_KEY || "", + onlineLicenseKey, envConfig.INTERNAL_REGION ); @@ -131,7 +136,7 @@ export const licenseServiceFactory = ({ return; } - if (envConfig.LICENSE_KEY) { + if (licenseKeyConfig.isValid && licenseKeyConfig.type === LicenseType.Online) { const token = await licenseServerOnPremApi.refreshLicense(); if (token) { await syncLicenseKeyOnPremFeatures(true); @@ -142,10 +147,10 @@ export const licenseServiceFactory = ({ return; } - if (envConfig.LICENSE_KEY_OFFLINE) { + if (licenseKeyConfig.isValid && licenseKeyConfig.type === LicenseType.Offline) { let isValidOfflineLicense = true; const contents: TOfflineLicenseContents = JSON.parse( - Buffer.from(envConfig.LICENSE_KEY_OFFLINE, "base64").toString("utf8") + Buffer.from(licenseKeyConfig.licenseKey, "base64").toString("utf8") ); const isVerified = await verifyOfflineLicense(JSON.stringify(contents.license), contents.signature); @@ -184,7 +189,7 @@ export const licenseServiceFactory = ({ }; const initializeBackgroundSync = async () => { - if (envConfig.LICENSE_KEY) { + if (licenseKeyConfig?.isValid && licenseKeyConfig?.type === LicenseType.Online) { logger.info("Setting up background sync process for refresh onPremFeatures"); const job = new CronJob("*/10 * * * *", syncLicenseKeyOnPremFeatures); job.start(); diff --git a/backend/src/ee/services/license/license-types.ts b/backend/src/ee/services/license/license-types.ts index 5157b0730d..8897eaabcf 100644 --- a/backend/src/ee/services/license/license-types.ts +++ b/backend/src/ee/services/license/license-types.ts @@ -136,3 +136,18 @@ 
export type TDelOrgTaxIdDTO = TOrgPermission & { taxId: string }; export type TOrgInvoiceDTO = TOrgPermission; export type TOrgLicensesDTO = TOrgPermission; + +export enum LicenseType { + Offline = "offline", + Online = "online" +} + +export type TLicenseKeyConfig = + | { + isValid: false; + } + | { + isValid: true; + licenseKey: string; + type: LicenseType; + }; diff --git a/backend/src/ee/services/pki-acme/pki-acme-challenge-service.ts b/backend/src/ee/services/pki-acme/pki-acme-challenge-service.ts index 9148b03366..7a3747fedf 100644 --- a/backend/src/ee/services/pki-acme/pki-acme-challenge-service.ts +++ b/backend/src/ee/services/pki-acme/pki-acme-challenge-service.ts @@ -74,7 +74,12 @@ export const pkiAcmeChallengeServiceFactory = ({ // Notice: well, we are in a transaction, ideally we should not hold transaction and perform // a long running operation for long time. But assuming we are not performing a tons of // challenge validation at the same time, it should be fine. - const challengeResponse = await fetch(challengeUrl, { signal: AbortSignal.timeout(timeoutMs) }); + const challengeResponse = await fetch(challengeUrl, { + // In case if we override the host in the development mode, still provide the original host in the header + // to help the upstream server to validate the request + headers: { Host: host }, + signal: AbortSignal.timeout(timeoutMs) + }); if (challengeResponse.status !== 200) { throw new AcmeIncorrectResponseError({ message: `ACME challenge response is not 200: ${challengeResponse.status}` diff --git a/backend/src/ee/services/pki-acme/pki-acme-schemas.ts b/backend/src/ee/services/pki-acme/pki-acme-schemas.ts index 58ca7e8333..23b86d172c 100644 --- a/backend/src/ee/services/pki-acme/pki-acme-schemas.ts +++ b/backend/src/ee/services/pki-acme/pki-acme-schemas.ts @@ -58,7 +58,15 @@ export const GetAcmeDirectoryResponseSchema = z.object({ newNonce: z.string(), newAccount: z.string(), newOrder: z.string(), - revokeCert: z.string().optional() + 
revokeCert: z.string().optional(), + meta: z + .object({ + termsOfService: z.string().optional(), + website: z.string().optional(), + caaIdentities: z.array(z.string()).optional(), + externalAccountRequired: z.boolean().optional() + }) + .optional() }); // New Account payload schema diff --git a/backend/src/ee/services/pki-acme/pki-acme-service.ts b/backend/src/ee/services/pki-acme/pki-acme-service.ts index 43da08b1cd..4f560ade76 100644 --- a/backend/src/ee/services/pki-acme/pki-acme-service.ts +++ b/backend/src/ee/services/pki-acme/pki-acme-service.ts @@ -206,6 +206,9 @@ export const pkiAcmeServiceFactory = ({ const { protectedHeader: rawProtectedHeader, payload: rawPayload } = result; try { const protectedHeader = ProtectedHeaderSchema.parse(rawProtectedHeader); + if (protectedHeader.jwk && protectedHeader.kid) { + throw new AcmeMalformedError({ message: "Both JWK and KID are provided in the protected header" }); + } const parsedUrl = (() => { try { return new URL(protectedHeader.url); @@ -288,6 +291,7 @@ export const pkiAcmeServiceFactory = ({ url, rawJwsPayload, getJWK: async (protectedHeader) => { + // get jwk instead of kid if (!protectedHeader.kid) { throw new AcmeMalformedError({ message: "KID is required in the protected header" }); } @@ -353,7 +357,10 @@ export const pkiAcmeServiceFactory = ({ return { newNonce: buildUrl(profile.id, "/new-nonce"), newAccount: buildUrl(profile.id, "/new-account"), - newOrder: buildUrl(profile.id, "/new-order") + newOrder: buildUrl(profile.id, "/new-order"), + meta: { + externalAccountRequired: true + } }; }; @@ -386,11 +393,61 @@ export const pkiAcmeServiceFactory = ({ payload: TCreateAcmeAccountPayload; }): Promise> => { const profile = await validateAcmeProfile(profileId); + const publicKeyThumbprint = await calculateJwkThumbprint(jwk, "sha256"); + + const existingAccount: TPkiAcmeAccounts | null = await acmeAccountDAL.findByProfileIdAndPublicKeyThumbprintAndAlg( + profileId, + alg, + publicKeyThumbprint + ); + if 
(onlyReturnExisting) { + if (!existingAccount) { + throw new AcmeAccountDoesNotExistError({ message: "ACME account not found" }); + } + return { + status: 200, + body: { + status: "valid", + contact: existingAccount.emails, + orders: buildUrl(profile.id, `/accounts/${existingAccount.id}/orders`) + }, + headers: { + Location: buildUrl(profile.id, `/accounts/${existingAccount.id}`), + Link: `<${buildUrl(profile.id, "/directory")}>;rel="index"` + } + }; + } + + // Note: We only check EAB for the new account request. This is a very special case for cert-manager. + // There's a bug in their ACME client implementation: they don't take the account KID value they have + // and rely on a '{"onlyReturnExisting": true}' new-account request to find out their KID value. + // But the problem is that the new-account request doesn't come with EAB. And when the get-existing-account operation + // fails, they just discard the error and proceed to request a new order. Since no KID is provided, their ACME + // client will send JWK instead. As a result, we are seeing a "KID not provided in header" error for the new-order + // endpoint. + // + // To solve the problem, we loosen the EAB check a bit for the onlyReturnExisting new-account request. + // It should be fine as we've already checked EAB when they created the account. + // And the private key ownership indicates they are the same user.
+ // ref: https://github.com/cert-manager/cert-manager/issues/7388#issuecomment-3535630925 if (!externalAccountBinding) { throw new AcmeExternalAccountRequiredError({ message: "External account binding is required" }); } + if (existingAccount) { + return { + status: 200, + body: { + status: "valid", + contact: existingAccount.emails, + orders: buildUrl(profile.id, `/accounts/${existingAccount.id}/orders`) + }, + headers: { + Location: buildUrl(profile.id, `/accounts/${existingAccount.id}`), + Link: `<${buildUrl(profile.id, "/directory")}>;rel="index"` + } + }; + } - const publicKeyThumbprint = await calculateJwkThumbprint(jwk, "sha256"); const certificateManagerKmsId = await getProjectKmsCertificateKeyId({ projectId: profile.projectId, projectDAL, @@ -441,30 +498,7 @@ export const pkiAcmeServiceFactory = ({ }); } - const existingAccount: TPkiAcmeAccounts | null = await acmeAccountDAL.findByProfileIdAndPublicKeyThumbprintAndAlg( - profileId, - alg, - publicKeyThumbprint - ); - if (onlyReturnExisting && !existingAccount) { - throw new AcmeAccountDoesNotExistError({ message: "ACME account not found" }); - } - if (existingAccount) { - // With the same public key, we found an existing account, just return it - return { - status: 200, - body: { - status: "valid", - contact: existingAccount.emails, - orders: buildUrl(profile.id, `/accounts/${existingAccount.id}/orders`) - }, - headers: { - Location: buildUrl(profile.id, `/accounts/${existingAccount.id}`), - Link: `<${buildUrl(profile.id, "/directory")}>;rel="index"` - } - }; - } - + // TODO: handle unique constraint violation error, should be very very rare const newAccount = await acmeAccountDAL.create({ profileId: profile.id, alg, diff --git a/backend/src/ee/services/secret-approval-request/secret-approval-request-dal.ts b/backend/src/ee/services/secret-approval-request/secret-approval-request-dal.ts index 9f9b0604c8..395dd13bd1 100644 --- a/backend/src/ee/services/secret-approval-request/secret-approval-request-dal.ts 
+++ b/backend/src/ee/services/secret-approval-request/secret-approval-request-dal.ts @@ -181,11 +181,7 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => { tx.ref("enforcementLevel").withSchema(TableName.SecretApprovalPolicy).as("policyEnforcementLevel"), tx.ref("allowedSelfApprovals").withSchema(TableName.SecretApprovalPolicy).as("policyAllowedSelfApprovals"), tx.ref("approvals").withSchema(TableName.SecretApprovalPolicy).as("policyApprovals"), - tx.ref("deletedAt").withSchema(TableName.SecretApprovalPolicy).as("policyDeletedAt"), - tx - .ref("shouldCheckSecretPermission") - .withSchema(TableName.SecretApprovalPolicy) - .as("policySecretReadAccessCompat") + tx.ref("deletedAt").withSchema(TableName.SecretApprovalPolicy).as("policyDeletedAt") ); const findById = async (id: string, tx?: Knex) => { @@ -225,8 +221,7 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => { enforcementLevel: el.policyEnforcementLevel, envId: el.policyEnvId, deletedAt: el.policyDeletedAt, - allowedSelfApprovals: el.policyAllowedSelfApprovals, - shouldCheckSecretPermission: el.policySecretReadAccessCompat + allowedSelfApprovals: el.policyAllowedSelfApprovals } }), childrenMapper: [ diff --git a/backend/src/lib/api-docs/constants.ts b/backend/src/lib/api-docs/constants.ts index 8bd2827fcb..970f9a1a00 100644 --- a/backend/src/lib/api-docs/constants.ts +++ b/backend/src/lib/api-docs/constants.ts @@ -106,6 +106,16 @@ export const GROUPS = { filterUsers: "Whether to filter the list of returned users. 'existingMembers' will only return existing users in the group, 'nonMembers' will only return users not in the group, undefined will return all users in the organization." }, + LIST_PROJECTS: { + id: "The ID of the group to list projects for.", + offset: "The offset to start from. 
If you enter 10, it will start from the 10th project.", + limit: "The number of projects to return.", + search: "The text string that project name or slug will be filtered by.", + filterProjects: + "Whether to filter the list of returned projects. 'assignedProjects' will only return projects assigned to the group, 'unassignedProjects' will only return projects not assigned to the group, undefined will return all projects in the organization.", + orderBy: "The column to order projects by.", + orderDirection: "The direction to order projects in." + }, ADD_USER: { id: "The ID of the group to add the user to.", username: "The username of the user to add to the group." diff --git a/backend/src/lib/config/env.ts b/backend/src/lib/config/env.ts index 96107306f7..11de576670 100644 --- a/backend/src/lib/config/env.ts +++ b/backend/src/lib/config/env.ts @@ -400,7 +400,7 @@ const envSchema = z isAcmeDevelopmentMode: data.NODE_ENV === "development" && data.ACME_DEVELOPMENT_MODE, isProductionMode: data.NODE_ENV === "production" || IS_PACKAGED, isRedisSentinelMode: Boolean(data.REDIS_SENTINEL_HOSTS), - isBddNockApiEnabled: data.NODE_ENV === "development" && data.BDD_NOCK_API_ENABLED, + isBddNockApiEnabled: data.NODE_ENV !== "production" && data.BDD_NOCK_API_ENABLED, REDIS_SENTINEL_HOSTS: data.REDIS_SENTINEL_HOSTS?.trim() ?.split(",") .map((el) => { diff --git a/backend/src/server/routes/bdd/bdd-nock-router.dev.ts b/backend/src/server/routes/bdd/bdd-nock-router.dev.ts new file mode 100644 index 0000000000..c5f6001f55 --- /dev/null +++ b/backend/src/server/routes/bdd/bdd-nock-router.dev.ts @@ -0,0 +1,104 @@ +import type { Definition } from "nock"; +import { z } from "zod"; + +import { getConfig } from "@app/lib/config/env"; +import { ForbiddenRequestError } from "@app/lib/errors"; +import { logger } from "@app/lib/logger"; +import { verifyAuth } from "@app/server/plugins/auth/verify-auth"; +import { AuthMode } from "@app/services/auth/auth-type"; + +// When running in production, 
we don't want to even import nock, because it's not needed and it increases memory usage a lot. +// It once caused an outage in the production environment. +// This is why we would rather crash the app if it's not in development mode (in that case, Kubernetes should stop it from rolling out). +if (process.env.NODE_ENV === "production") { + throw new Error("BDD Nock API can only be enabled in development or test mode"); +} + +export const registerBddNockRouter = async (server: FastifyZodProvider) => { + const appCfg = getConfig(); + const importNock = async () => { + // eslint-disable-next-line import/no-extraneous-dependencies + const { default: nock } = await import("nock"); + return nock; + }; + + const checkIfBddNockApiEnabled = () => { + // Note: Please note that this API is only available in development or test mode and only for BDD tests. + // This endpoint should NEVER BE ENABLED IN PRODUCTION! + if (appCfg.NODE_ENV === "production" || !appCfg.isBddNockApiEnabled) { + throw new ForbiddenRequestError({ message: "BDD Nock API is not enabled" }); + } + }; + + server.route({ + method: "POST", + url: "/define", + schema: { + body: z.object({ definitions: z.unknown().array() }), + response: { + 200: z.object({ status: z.string() }) + } + }, + onRequest: verifyAuth([AuthMode.JWT]), + handler: async (req) => { + checkIfBddNockApiEnabled(); + const { body } = req; + const { definitions } = body; + logger.info(definitions, "Defining nock"); + const processedDefinitions = definitions.map((definition: unknown) => { + const { path, ...rest } = definition as Definition; + return { + ...rest, + path: + path !== undefined && typeof path === "string" + ? path + : new RegExp((path as unknown as { regex: string }).regex ?? "") + } as Definition; + }); + + const nock = await importNock(); + nock.define(processedDefinitions); + // Ensure we are activating the nocks, because we could have called `nock.restore()` before this call.
+ if (!nock.isActive()) { + nock.activate(); + } + return { status: "ok" }; + } + }); + + server.route({ + method: "POST", + url: "/clean-all", + schema: { + response: { + 200: z.object({ status: z.string() }) + } + }, + onRequest: verifyAuth([AuthMode.JWT]), + handler: async () => { + checkIfBddNockApiEnabled(); + logger.info("Cleaning all nocks"); + const nock = await importNock(); + nock.cleanAll(); + return { status: "ok" }; + } + }); + + server.route({ + method: "POST", + url: "/restore", + schema: { + response: { + 200: z.object({ status: z.string() }) + } + }, + onRequest: verifyAuth([AuthMode.JWT]), + handler: async () => { + checkIfBddNockApiEnabled(); + logger.info("Restore network requests from nock"); + const nock = await importNock(); + nock.restore(); + return { status: "ok" }; + } + }); +}; diff --git a/backend/src/server/routes/bdd/bdd-nock-router.ts b/backend/src/server/routes/bdd/bdd-nock-router.ts new file mode 100644 index 0000000000..90f2ed00c6 --- /dev/null +++ b/backend/src/server/routes/bdd/bdd-nock-router.ts @@ -0,0 +1,6 @@ +export const registerBddNockRouter = async () => { + // This route is only available in development or test mode. + // The actual implementation is in the dev.ts file and will be aliased to that file in development or test mode. + // And if somehow we try to enable it in production, we will throw an error. 
+ throw new Error("BDD Nock should not be enabled in production"); +}; diff --git a/backend/src/server/routes/index.ts b/backend/src/server/routes/index.ts index 5dd7a1c22a..2b2023eb2b 100644 --- a/backend/src/server/routes/index.ts +++ b/backend/src/server/routes/index.ts @@ -1,3 +1,4 @@ +import { registerBddNockRouter } from "@bdd_routes/bdd-nock-router"; import { CronJob } from "cron"; import { Knex } from "knex"; import { monitorEventLoopDelay } from "perf_hooks"; @@ -2431,6 +2432,7 @@ export const registerRoutes = async ( } } + await kmsService.startService(hsmStatus); await telemetryQueue.startTelemetryCheck(); await telemetryQueue.startAggregatedEventsJob(); await dailyResourceCleanUp.init(); @@ -2443,7 +2445,6 @@ export const registerRoutes = async ( await pkiSubscriberQueue.startDailyAutoRenewalJob(); await pkiAlertV2Queue.init(); await certificateV3Queue.init(); - await kmsService.startService(hsmStatus); await microsoftTeamsService.start(); await dynamicSecretQueueService.init(); await eventBusService.init(); @@ -2698,6 +2699,12 @@ export const registerRoutes = async ( await server.register(registerV3Routes, { prefix: "/api/v3" }); await server.register(registerV4Routes, { prefix: "/api/v4" }); + // Note: This is a special route for BDD tests. It's only available in development mode and only for BDD tests. + // This route should NEVER BE ENABLED IN PRODUCTION! 
+ if (getConfig().isBddNockApiEnabled) { + await server.register(registerBddNockRouter, { prefix: "/api/__bdd_nock__" }); + } + server.addHook("onClose", async () => { cronJobs.forEach((job) => job.stop()); await telemetryService.flushAll(); diff --git a/backend/src/server/routes/v1/admin-router.ts b/backend/src/server/routes/v1/admin-router.ts index ddb3f23264..f6ec36f6f0 100644 --- a/backend/src/server/routes/v1/admin-router.ts +++ b/backend/src/server/routes/v1/admin-router.ts @@ -9,6 +9,8 @@ import { SuperAdminSchema, UsersSchema } from "@app/db/schemas"; +import { getLicenseKeyConfig } from "@app/ee/services/license/license-fns"; +import { LicenseType } from "@app/ee/services/license/license-types"; import { getConfig, overridableKeys } from "@app/lib/config/env"; import { crypto } from "@app/lib/crypto/cryptography"; import { BadRequestError } from "@app/lib/errors"; @@ -65,6 +67,9 @@ export const registerAdminRouter = async (server: FastifyZodProvider) => { const config = await getServerCfg(); const serverEnvs = getConfig(); + const licenseKeyConfig = getLicenseKeyConfig(); + const hasOfflineLicense = licenseKeyConfig.isValid && licenseKeyConfig.type === LicenseType.Offline; + return { config: { ...config, @@ -73,7 +78,7 @@ export const registerAdminRouter = async (server: FastifyZodProvider) => { isSecretScanningDisabled: serverEnvs.DISABLE_SECRET_SCANNING, kubernetesAutoFetchServiceAccountToken: serverEnvs.KUBERNETES_AUTO_FETCH_SERVICE_ACCOUNT_TOKEN, paramsFolderSecretDetectionEnabled: serverEnvs.PARAMS_FOLDER_SECRET_DETECTION_ENABLED, - isOfflineUsageReportsEnabled: !!serverEnvs.LICENSE_KEY_OFFLINE + isOfflineUsageReportsEnabled: hasOfflineLicense } }; } diff --git a/backend/src/server/routes/v1/bdd-nock-router.ts b/backend/src/server/routes/v1/bdd-nock-router.ts deleted file mode 100644 index 6a32cac20a..0000000000 --- a/backend/src/server/routes/v1/bdd-nock-router.ts +++ /dev/null @@ -1,87 +0,0 @@ -// import { z } from "zod"; - -// import { getConfig } 
from "@app/lib/config/env"; -// import { ForbiddenRequestError } from "@app/lib/errors"; -// import { logger } from "@app/lib/logger"; -// import { verifyAuth } from "@app/server/plugins/auth/verify-auth"; -// import { AuthMode } from "@app/services/auth/auth-type"; - -// export const registerBddNockRouter = async (server: FastifyZodProvider) => { -// const checkIfBddNockApiEnabled = () => { -// const appCfg = getConfig(); -// // Note: Please note that this API is only available in development mode and only for BDD tests. -// // This endpoint should NEVER BE ENABLED IN PRODUCTION! -// if (appCfg.NODE_ENV !== "development" || !appCfg.isBddNockApiEnabled) { -// throw new ForbiddenRequestError({ message: "BDD Nock API is not enabled" }); -// } -// }; - -// server.route({ -// method: "POST", -// url: "/define", -// schema: { -// body: z.object({ definitions: z.unknown().array() }), -// response: { -// 200: z.object({ status: z.string() }) -// } -// }, -// onRequest: verifyAuth([AuthMode.JWT]), -// handler: async (req) => { -// checkIfBddNockApiEnabled(); -// const { body } = req; -// const { definitions } = body; -// logger.info(definitions, "Defining nock"); -// const processedDefinitions = definitions.map((definition: unknown) => { -// const { path, ...rest } = definition as Definition; -// return { -// ...rest, -// path: -// path !== undefined && typeof path === "string" -// ? path -// : new RegExp((path as unknown as { regex: string }).regex ?? "") -// } as Definition; -// }); - -// nock.define(processedDefinitions); -// // Ensure we are activating the nocks, because we could have called `nock.restore()` before this call. 
-// if (!nock.isActive()) { -// nock.activate(); -// } -// return { status: "ok" }; -// } -// }); - -// server.route({ -// method: "POST", -// url: "/clean-all", -// schema: { -// response: { -// 200: z.object({ status: z.string() }) -// } -// }, -// onRequest: verifyAuth([AuthMode.JWT]), -// handler: async () => { -// checkIfBddNockApiEnabled(); -// logger.info("Cleaning all nocks"); -// nock.cleanAll(); -// return { status: "ok" }; -// } -// }); - -// server.route({ -// method: "POST", -// url: "/restore", -// schema: { -// response: { -// 200: z.object({ status: z.string() }) -// } -// }, -// onRequest: verifyAuth([AuthMode.JWT]), -// handler: async () => { -// checkIfBddNockApiEnabled(); -// logger.info("Restore network requests from nock"); -// nock.restore(); -// return { status: "ok" }; -// } -// }); -// }; diff --git a/backend/src/server/routes/v1/index.ts b/backend/src/server/routes/v1/index.ts index 68099e50e4..b480a5144a 100644 --- a/backend/src/server/routes/v1/index.ts +++ b/backend/src/server/routes/v1/index.ts @@ -8,7 +8,6 @@ import { registerSecretSyncRouter, SECRET_SYNC_REGISTER_ROUTER_MAP } from "@app/ import { registerAdminRouter } from "./admin-router"; import { registerAuthRoutes } from "./auth-router"; -// import { registerBddNockRouter } from "./bdd-nock-router"; import { registerProjectBotRouter } from "./bot-router"; import { registerCaRouter } from "./certificate-authority-router"; import { CERTIFICATE_AUTHORITY_REGISTER_ROUTER_MAP } from "./certificate-authority-routers"; @@ -238,10 +237,4 @@ export const registerV1Routes = async (server: FastifyZodProvider) => { await server.register(registerEventRouter, { prefix: "/events" }); await server.register(registerUpgradePathRouter, { prefix: "/upgrade-path" }); - - // Note: This is a special route for BDD tests. It's only available in development mode and only for BDD tests. - // This route should NEVER BE ENABLED IN PRODUCTION! 
- // if (getConfig().isBddNockApiEnabled) { - // await server.register(registerBddNockRouter, { prefix: "/bdd-nock" }); - // } }; diff --git a/backend/src/server/routes/v1/pki-sync-routers/aws-secrets-manager-pki-sync-router.ts b/backend/src/server/routes/v1/pki-sync-routers/aws-secrets-manager-pki-sync-router.ts new file mode 100644 index 0000000000..ca40c4b4f4 --- /dev/null +++ b/backend/src/server/routes/v1/pki-sync-routers/aws-secrets-manager-pki-sync-router.ts @@ -0,0 +1,22 @@ +import { + AWS_SECRETS_MANAGER_PKI_SYNC_LIST_OPTION, + AwsSecretsManagerPkiSyncSchema, + CreateAwsSecretsManagerPkiSyncSchema, + UpdateAwsSecretsManagerPkiSyncSchema +} from "@app/services/pki-sync/aws-secrets-manager"; +import { PkiSync } from "@app/services/pki-sync/pki-sync-enums"; + +import { registerSyncPkiEndpoints } from "./pki-sync-endpoints"; + +export const registerAwsSecretsManagerPkiSyncRouter = async (server: FastifyZodProvider) => + registerSyncPkiEndpoints({ + destination: PkiSync.AwsSecretsManager, + server, + responseSchema: AwsSecretsManagerPkiSyncSchema, + createSchema: CreateAwsSecretsManagerPkiSyncSchema, + updateSchema: UpdateAwsSecretsManagerPkiSyncSchema, + syncOptions: { + canImportCertificates: AWS_SECRETS_MANAGER_PKI_SYNC_LIST_OPTION.canImportCertificates, + canRemoveCertificates: AWS_SECRETS_MANAGER_PKI_SYNC_LIST_OPTION.canRemoveCertificates + } + }); diff --git a/backend/src/server/routes/v1/pki-sync-routers/chef-pki-sync-router.ts b/backend/src/server/routes/v1/pki-sync-routers/chef-pki-sync-router.ts new file mode 100644 index 0000000000..b7f04ebb44 --- /dev/null +++ b/backend/src/server/routes/v1/pki-sync-routers/chef-pki-sync-router.ts @@ -0,0 +1,17 @@ +import { ChefPkiSyncSchema, CreateChefPkiSyncSchema, UpdateChefPkiSyncSchema } from "@app/services/pki-sync/chef"; +import { PkiSync } from "@app/services/pki-sync/pki-sync-enums"; + +import { registerSyncPkiEndpoints } from "./pki-sync-endpoints"; + +export const registerChefPkiSyncRouter = async 
(server: FastifyZodProvider) => + registerSyncPkiEndpoints({ + destination: PkiSync.Chef, + server, + responseSchema: ChefPkiSyncSchema, + createSchema: CreateChefPkiSyncSchema, + updateSchema: UpdateChefPkiSyncSchema, + syncOptions: { + canImportCertificates: false, + canRemoveCertificates: true + } + }); diff --git a/backend/src/server/routes/v1/pki-sync-routers/index.ts b/backend/src/server/routes/v1/pki-sync-routers/index.ts index 4b81db27f2..e961d370cd 100644 --- a/backend/src/server/routes/v1/pki-sync-routers/index.ts +++ b/backend/src/server/routes/v1/pki-sync-routers/index.ts @@ -1,11 +1,15 @@ import { PkiSync } from "@app/services/pki-sync/pki-sync-enums"; import { registerAwsCertificateManagerPkiSyncRouter } from "./aws-certificate-manager-pki-sync-router"; +import { registerAwsSecretsManagerPkiSyncRouter } from "./aws-secrets-manager-pki-sync-router"; import { registerAzureKeyVaultPkiSyncRouter } from "./azure-key-vault-pki-sync-router"; +import { registerChefPkiSyncRouter } from "./chef-pki-sync-router"; export * from "./pki-sync-router"; export const PKI_SYNC_REGISTER_ROUTER_MAP: Record Promise> = { [PkiSync.AzureKeyVault]: registerAzureKeyVaultPkiSyncRouter, - [PkiSync.AwsCertificateManager]: registerAwsCertificateManagerPkiSyncRouter + [PkiSync.AwsCertificateManager]: registerAwsCertificateManagerPkiSyncRouter, + [PkiSync.AwsSecretsManager]: registerAwsSecretsManagerPkiSyncRouter, + [PkiSync.Chef]: registerChefPkiSyncRouter }; diff --git a/backend/src/server/routes/v3/certificates-router.ts b/backend/src/server/routes/v3/certificates-router.ts index f590aa1118..f13f77c346 100644 --- a/backend/src/server/routes/v3/certificates-router.ts +++ b/backend/src/server/routes/v3/certificates-router.ts @@ -23,6 +23,8 @@ import { mapEnumsForValidation } from "@app/services/certificate-common/certific import { EnrollmentType } from "@app/services/certificate-profile/certificate-profile-types"; import { validateTemplateRegexField } from 
"@app/services/certificate-template/certificate-template-validators"; +import { booleanSchema } from "../sanitizedSchemas"; + interface CertificateRequestForService { commonName?: string; keyUsages?: CertKeyUsageType[]; @@ -87,7 +89,8 @@ export const registerCertificatesRouter = async (server: FastifyZodProvider) => ) .optional(), signatureAlgorithm: z.nativeEnum(CertSignatureAlgorithm), - keyAlgorithm: z.nativeEnum(CertKeyAlgorithm) + keyAlgorithm: z.nativeEnum(CertKeyAlgorithm), + removeRootsFromChain: booleanSchema.default(false).optional() }) .refine(validateTtlAndDateFields, { message: @@ -131,7 +134,8 @@ export const registerCertificatesRouter = async (server: FastifyZodProvider) => actorAuthMethod: req.permission.authMethod, actorOrgId: req.permission.orgId, profileId: req.body.profileId, - certificateRequest: mappedCertificateRequest + certificateRequest: mappedCertificateRequest, + removeRootsFromChain: req.body.removeRootsFromChain }); await server.services.auditLog.createAuditLog({ @@ -171,7 +175,8 @@ export const registerCertificatesRouter = async (server: FastifyZodProvider) => .min(1, "TTL cannot be empty") .refine((val) => ms(val) > 0, "TTL must be a positive number"), notBefore: validateCaDateField.optional(), - notAfter: validateCaDateField.optional() + notAfter: validateCaDateField.optional(), + removeRootsFromChain: booleanSchema.default(false).optional() }) .refine(validateTtlAndDateFields, { message: @@ -206,7 +211,8 @@ export const registerCertificatesRouter = async (server: FastifyZodProvider) => }, notBefore: req.body.notBefore ? new Date(req.body.notBefore) : undefined, notAfter: req.body.notAfter ? 
new Date(req.body.notAfter) : undefined, - enrollmentType: EnrollmentType.API + enrollmentType: EnrollmentType.API, + removeRootsFromChain: req.body.removeRootsFromChain }); await server.services.auditLog.createAuditLog({ @@ -262,7 +268,8 @@ export const registerCertificatesRouter = async (server: FastifyZodProvider) => notAfter: validateCaDateField.optional(), commonName: validateTemplateRegexField.optional(), signatureAlgorithm: z.nativeEnum(CertSignatureAlgorithm), - keyAlgorithm: z.nativeEnum(CertKeyAlgorithm) + keyAlgorithm: z.nativeEnum(CertKeyAlgorithm), + removeRootsFromChain: booleanSchema.default(false).optional() }) .refine(validateTtlAndDateFields, { message: @@ -325,7 +332,8 @@ export const registerCertificatesRouter = async (server: FastifyZodProvider) => notAfter: req.body.notAfter ? new Date(req.body.notAfter) : undefined, signatureAlgorithm: req.body.signatureAlgorithm, keyAlgorithm: req.body.keyAlgorithm - } + }, + removeRootsFromChain: req.body.removeRootsFromChain }); await server.services.auditLog.createAuditLog({ @@ -357,6 +365,11 @@ export const registerCertificatesRouter = async (server: FastifyZodProvider) => params: z.object({ certificateId: z.string().uuid() }), + body: z + .object({ + removeRootsFromChain: booleanSchema.default(false).optional() + }) + .optional(), response: { 200: z.object({ certificate: z.string().trim(), @@ -375,7 +388,8 @@ export const registerCertificatesRouter = async (server: FastifyZodProvider) => actorId: req.permission.id, actorAuthMethod: req.permission.authMethod, actorOrgId: req.permission.orgId, - certificateId: req.params.certificateId + certificateId: req.params.certificateId, + removeRootsFromChain: req.body?.removeRootsFromChain }); await server.services.auditLog.createAuditLog({ diff --git a/backend/src/services/app-connection/app-connection-fns.ts b/backend/src/services/app-connection/app-connection-fns.ts index 863fa75f9a..aeef2b25ba 100644 --- 
a/backend/src/services/app-connection/app-connection-fns.ts +++ b/backend/src/services/app-connection/app-connection-fns.ts @@ -170,7 +170,8 @@ const PKI_APP_CONNECTIONS = [ AppConnection.AWS, AppConnection.Cloudflare, AppConnection.AzureADCS, - AppConnection.AzureKeyVault + AppConnection.AzureKeyVault, + AppConnection.Chef ]; export const listAppConnectionOptions = (projectType?: ProjectType) => { diff --git a/backend/src/services/certificate-common/certificate-utils.ts b/backend/src/services/certificate-common/certificate-utils.ts index b88f183db7..51754955f8 100644 --- a/backend/src/services/certificate-common/certificate-utils.ts +++ b/backend/src/services/certificate-common/certificate-utils.ts @@ -196,3 +196,62 @@ export const convertExtendedKeyUsageArrayToLegacy = ( ): CertExtendedKeyUsage[] | undefined => { return usages?.map(convertToLegacyExtendedKeyUsage); }; + +/** + * Parses a PEM-formatted certificate chain and returns individual certificates + * @param certificateChain - PEM-formatted certificate chain + * @returns Array of individual PEM certificates + */ +const parseCertificateChain = (certificateChain: string): string[] => { + if (!certificateChain || typeof certificateChain !== "string") { + return []; + } + + const certRegex = new RE2(/-----BEGIN CERTIFICATE-----[\s\S]*?-----END CERTIFICATE-----/g); + const certificates = certificateChain.match(certRegex); + + return certificates ? certificates.map((cert) => cert.trim()) : []; +}; + +/** + * Removes the root CA certificate from a certificate chain, leaving only intermediate certificates. + * If the chain contains only the root CA certificate, returns an empty string. 
+ * + * @param certificateChain - PEM-formatted certificate chain containing leaf + intermediates + root CA + * @returns PEM-formatted certificate chain with only intermediate certificates (no root CA) + */ +export const removeRootCaFromChain = (certificateChain?: string): string => { + if (!certificateChain || typeof certificateChain !== "string") { + return ""; + } + + const certificates = parseCertificateChain(certificateChain); + + if (certificates.length === 0) { + return ""; + } + + const intermediateCerts = certificates.slice(0, -1); + + return intermediateCerts.join("\n"); +}; + +/** + * Extracts the root CA certificate from a certificate chain. + * + * @param certificateChain - PEM-formatted certificate chain containing leaf + intermediates + root CA + * @returns PEM-formatted root CA certificate, or empty string if not found + */ +export const extractRootCaFromChain = (certificateChain?: string): string => { + if (!certificateChain || typeof certificateChain !== "string") { + return ""; + } + + const certificates = parseCertificateChain(certificateChain); + + if (certificates.length === 0) { + return ""; + } + + return certificates[certificates.length - 1]; +}; diff --git a/backend/src/services/certificate-profile/certificate-profile-service.test.ts b/backend/src/services/certificate-profile/certificate-profile-service.test.ts index 1e31d5788a..3b75c10883 100644 --- a/backend/src/services/certificate-profile/certificate-profile-service.test.ts +++ b/backend/src/services/certificate-profile/certificate-profile-service.test.ts @@ -428,7 +428,13 @@ describe("CertificateProfileService", () => { service.createProfile({ ...mockActor, projectId: "project-123", - data: validProfileData + data: { + ...validProfileData, + enrollmentType: EnrollmentType.ACME, + acmeConfig: {}, + apiConfig: undefined, + estConfig: undefined + } }) ).rejects.toThrowError( new BadRequestError({ diff --git a/backend/src/services/certificate-v3/certificate-v3-service.ts 
b/backend/src/services/certificate-v3/certificate-v3-service.ts index 7b6538ab24..a537ddc06b 100644 --- a/backend/src/services/certificate-v3/certificate-v3-service.ts +++ b/backend/src/services/certificate-v3/certificate-v3-service.ts @@ -47,7 +47,8 @@ import { convertKeyUsageArrayFromLegacy, convertKeyUsageArrayToLegacy, mapEnumsForValidation, - normalizeDateForApi + normalizeDateForApi, + removeRootCaFromChain } from "../certificate-common/certificate-utils"; import { TCertificateSyncDALFactory } from "../certificate-sync/certificate-sync-dal"; import { TPkiSyncDALFactory } from "../pki-sync/pki-sync-dal"; @@ -366,7 +367,8 @@ export const certificateV3ServiceFactory = ({ actor, actorId, actorAuthMethod, - actorOrgId + actorOrgId, + removeRootsFromChain }: TIssueCertificateFromProfileDTO): Promise => { const profile = await validateProfileAndPermissions( profileId, @@ -480,10 +482,15 @@ export const certificateV3ServiceFactory = ({ renewBeforeDays: finalRenewBeforeDays }); + let finalCertificateChain = bufferToString(certificateChain); + if (removeRootsFromChain) { + finalCertificateChain = removeRootCaFromChain(finalCertificateChain); + } + return { certificate: bufferToString(certificate), issuingCaCertificate: bufferToString(issuingCaCertificate), - certificateChain: bufferToString(certificateChain), + certificateChain: finalCertificateChain, privateKey: bufferToString(privateKey), serialNumber, certificateId: cert.id, @@ -503,7 +510,8 @@ export const certificateV3ServiceFactory = ({ actorId, actorAuthMethod, actorOrgId, - enrollmentType + enrollmentType, + removeRootsFromChain }: TSignCertificateFromProfileDTO): Promise> => { const profile = await validateProfileAndPermissions( profileId, @@ -590,7 +598,10 @@ export const certificateV3ServiceFactory = ({ }); const certificateString = extractCertificateFromBuffer(certificate as unknown as Buffer); - const certificateChainString = extractCertificateFromBuffer(certificateChain as unknown as Buffer); + let 
certificateChainString = extractCertificateFromBuffer(certificateChain as unknown as Buffer); + if (removeRootsFromChain) { + certificateChainString = removeRootCaFromChain(certificateChainString); + } return { certificate: certificateString, @@ -610,7 +621,8 @@ export const certificateV3ServiceFactory = ({ actor, actorId, actorAuthMethod, - actorOrgId + actorOrgId, + removeRootsFromChain }: TOrderCertificateFromProfileDTO): Promise => { const profile = await validateProfileAndPermissions( profileId, @@ -665,7 +677,8 @@ export const certificateV3ServiceFactory = ({ actor, actorId, actorAuthMethod, - actorOrgId + actorOrgId, + removeRootsFromChain }); const orderId = randomUUID(); @@ -703,7 +716,8 @@ export const certificateV3ServiceFactory = ({ actorId, actorAuthMethod, actorOrgId, - internal = false + internal = false, + removeRootsFromChain }: TRenewCertificateDTO & { internal?: boolean }): Promise => { const renewalResult = await certificateDAL.transaction(async (tx) => { const originalCert = await certificateDAL.findById(certificateId, tx); @@ -929,10 +943,14 @@ export const certificateV3ServiceFactory = ({ pkiSyncQueue }); + let finalCertificateChain = renewalResult.certificateChain; + if (removeRootsFromChain) { + finalCertificateChain = removeRootCaFromChain(finalCertificateChain); + } return { certificate: renewalResult.certificate, issuingCaCertificate: renewalResult.issuingCaCertificate, - certificateChain: renewalResult.certificateChain, + certificateChain: finalCertificateChain, serialNumber: renewalResult.serialNumber, certificateId: renewalResult.newCert.id, projectId: renewalResult.profile.projectId, diff --git a/backend/src/services/certificate-v3/certificate-v3-types.ts b/backend/src/services/certificate-v3/certificate-v3-types.ts index 8a2cf70f77..ab638c5ed8 100644 --- a/backend/src/services/certificate-v3/certificate-v3-types.ts +++ b/backend/src/services/certificate-v3/certificate-v3-types.ts @@ -26,6 +26,7 @@ export type 
TIssueCertificateFromProfileDTO = { signatureAlgorithm?: string; keyAlgorithm?: string; }; + removeRootsFromChain?: boolean; } & Omit; export type TSignCertificateFromProfileDTO = { @@ -37,6 +38,7 @@ export type TSignCertificateFromProfileDTO = { notBefore?: Date; notAfter?: Date; enrollmentType: EnrollmentType; + removeRootsFromChain?: boolean; } & Omit; export type TOrderCertificateFromProfileDTO = { @@ -57,6 +59,7 @@ export type TOrderCertificateFromProfileDTO = { signatureAlgorithm?: string; keyAlgorithm?: string; }; + removeRootsFromChain?: boolean; } & Omit; export type TCertificateFromProfileResponse = { @@ -101,6 +104,7 @@ export type TCertificateOrderResponse = { export type TRenewCertificateDTO = { certificateId: string; + removeRootsFromChain?: boolean; } & Omit; export type TUpdateRenewalConfigDTO = { diff --git a/backend/src/services/offline-usage-report/offline-usage-report-service.ts b/backend/src/services/offline-usage-report/offline-usage-report-service.ts index 179232aa4a..1c34425a22 100644 --- a/backend/src/services/offline-usage-report/offline-usage-report-service.ts +++ b/backend/src/services/offline-usage-report/offline-usage-report-service.ts @@ -1,7 +1,8 @@ import crypto from "crypto"; +import { getLicenseKeyConfig } from "@app/ee/services/license/license-fns"; import { TLicenseServiceFactory } from "@app/ee/services/license/license-service"; -import { getConfig } from "@app/lib/config/env"; +import { LicenseType } from "@app/ee/services/license/license-types"; import { BadRequestError } from "@app/lib/errors"; import { TOfflineUsageReportDALFactory } from "./offline-usage-report-dal"; @@ -30,10 +31,13 @@ export const offlineUsageReportServiceFactory = ({ }; const generateUsageReportCSV = async () => { - const cfg = getConfig(); - if (!cfg.LICENSE_KEY_OFFLINE) { + const licenseKeyConfig = getLicenseKeyConfig(); + const hasOfflineLicense = licenseKeyConfig.isValid && licenseKeyConfig.type === LicenseType.Offline; + + if (!hasOfflineLicense) { 
throw new BadRequestError({ - message: "Offline usage reports are not enabled. LICENSE_KEY_OFFLINE must be configured." + message: + "Offline usage reports are not enabled. Usage reports are only available for self-hosted offline instances" }); } diff --git a/backend/src/services/pki-sync/aws-certificate-manager/aws-certificate-manager-pki-sync-schemas.ts b/backend/src/services/pki-sync/aws-certificate-manager/aws-certificate-manager-pki-sync-schemas.ts index 3b9f5c8819..4dee71b827 100644 --- a/backend/src/services/pki-sync/aws-certificate-manager/aws-certificate-manager-pki-sync-schemas.ts +++ b/backend/src/services/pki-sync/aws-certificate-manager/aws-certificate-manager-pki-sync-schemas.ts @@ -14,6 +14,7 @@ export const AwsCertificateManagerPkiSyncConfigSchema = z.object({ const AwsCertificateManagerPkiSyncOptionsSchema = z.object({ canImportCertificates: z.boolean().default(false), canRemoveCertificates: z.boolean().default(true), + includeRootCa: z.boolean().default(false), preserveArn: z.boolean().default(true), certificateNameSchema: z .string() diff --git a/backend/src/services/pki-sync/aws-secrets-manager/aws-secrets-manager-pki-sync-constants.ts b/backend/src/services/pki-sync/aws-secrets-manager/aws-secrets-manager-pki-sync-constants.ts new file mode 100644 index 0000000000..fd325a3f5f --- /dev/null +++ b/backend/src/services/pki-sync/aws-secrets-manager/aws-secrets-manager-pki-sync-constants.ts @@ -0,0 +1,71 @@ +import RE2 from "re2"; + +import { AppConnection } from "@app/services/app-connection/app-connection-enums"; +import { PkiSync } from "@app/services/pki-sync/pki-sync-enums"; + +/** + * AWS Secrets Manager naming constraints for secrets + */ +export const AWS_SECRETS_MANAGER_PKI_SYNC_CERTIFICATE_NAMING = { + /** + * Regular expression pattern for valid AWS Secrets Manager secret names + * Must contain only alphanumeric characters, hyphens, and underscores + * Must be 1-512 characters long + */ + NAME_PATTERN: new RE2("^[\\w-]+$"), + + /** + * 
String of characters that are forbidden in AWS Secrets Manager secret names + */ + FORBIDDEN_CHARACTERS: " @#$%^&*()+=[]{}|;':\"<>?,./", + + /** + * Minimum length for secret names in AWS Secrets Manager + */ + MIN_LENGTH: 1, + + /** + * Maximum length for secret names in AWS Secrets Manager + */ + MAX_LENGTH: 512, + + /** + * String representation of the allowed character pattern (for UI display) + */ + ALLOWED_CHARACTER_PATTERN: "^[\\w-]+$" +} as const; + +export const AWS_SECRETS_MANAGER_PKI_SYNC_DEFAULTS = { + INFISICAL_PREFIX: "infisical-", + DEFAULT_ENVIRONMENT: "production", + DEFAULT_CERTIFICATE_NAME_SCHEMA: "infisical-{{certificateId}}", + DEFAULT_FIELD_MAPPINGS: { + certificate: "certificate", + privateKey: "private_key", + certificateChain: "certificate_chain", + caCertificate: "ca_certificate" + } +}; + +export const AWS_SECRETS_MANAGER_PKI_SYNC_OPTIONS = { + DEFAULT_CAN_REMOVE_CERTIFICATES: true, + DEFAULT_PRESERVE_SECRET_ON_RENEWAL: true, + DEFAULT_UPDATE_EXISTING_CERTIFICATES: true, + DEFAULT_CAN_IMPORT_CERTIFICATES: false +}; + +/** + * AWS Secrets Manager PKI Sync list option configuration + */ +export const AWS_SECRETS_MANAGER_PKI_SYNC_LIST_OPTION = { + name: "AWS Secrets Manager" as const, + connection: AppConnection.AWS, + destination: PkiSync.AwsSecretsManager, + canImportCertificates: false, + canRemoveCertificates: true, + defaultCertificateNameSchema: "infisical-{{certificateId}}", + forbiddenCharacters: AWS_SECRETS_MANAGER_PKI_SYNC_CERTIFICATE_NAMING.FORBIDDEN_CHARACTERS, + allowedCharacterPattern: AWS_SECRETS_MANAGER_PKI_SYNC_CERTIFICATE_NAMING.ALLOWED_CHARACTER_PATTERN, + maxCertificateNameLength: AWS_SECRETS_MANAGER_PKI_SYNC_CERTIFICATE_NAMING.MAX_LENGTH, + minCertificateNameLength: AWS_SECRETS_MANAGER_PKI_SYNC_CERTIFICATE_NAMING.MIN_LENGTH +} as const; diff --git a/backend/src/services/pki-sync/aws-secrets-manager/aws-secrets-manager-pki-sync-fns.ts b/backend/src/services/pki-sync/aws-secrets-manager/aws-secrets-manager-pki-sync-fns.ts 
new file mode 100644 index 0000000000..a385247a82 --- /dev/null +++ b/backend/src/services/pki-sync/aws-secrets-manager/aws-secrets-manager-pki-sync-fns.ts @@ -0,0 +1,555 @@ +/* eslint-disable no-continue */ +/* eslint-disable no-await-in-loop */ +import { + CreateSecretCommand, + DeleteSecretCommand, + ListSecretsCommand, + SecretsManagerClient, + UpdateSecretCommand +} from "@aws-sdk/client-secrets-manager"; +import RE2 from "re2"; + +import { TCertificateSyncs } from "@app/db/schemas"; +import { CustomAWSHasher } from "@app/lib/aws/hashing"; +import { crypto } from "@app/lib/crypto"; +import { logger } from "@app/lib/logger"; +import { AWSRegion } from "@app/services/app-connection/app-connection-enums"; +import { getAwsConnectionConfig } from "@app/services/app-connection/aws/aws-connection-fns"; +import { TAwsConnectionConfig } from "@app/services/app-connection/aws/aws-connection-types"; +import { TCertificateDALFactory } from "@app/services/certificate/certificate-dal"; +import { TCertificateSyncDALFactory } from "@app/services/certificate-sync/certificate-sync-dal"; +import { CertificateSyncStatus } from "@app/services/certificate-sync/certificate-sync-enums"; +import { createConnectionQueue, RateLimitConfig } from "@app/services/connection-queue"; +import { matchesCertificateNameSchema } from "@app/services/pki-sync/pki-sync-fns"; +import { TCertificateMap, TPkiSyncWithCredentials } from "@app/services/pki-sync/pki-sync-types"; + +import { AWS_SECRETS_MANAGER_PKI_SYNC_DEFAULTS } from "./aws-secrets-manager-pki-sync-constants"; +import { + AwsSecretsManagerCertificateSecret, + SyncCertificatesResult, + TAwsSecretsManagerPkiSyncWithCredentials +} from "./aws-secrets-manager-pki-sync-types"; + +const AWS_SECRETS_MANAGER_RATE_LIMIT_CONFIG: RateLimitConfig = { + MAX_CONCURRENT_REQUESTS: 10, + BASE_DELAY: 1000, + MAX_DELAY: 30000, + MAX_RETRIES: 3, + RATE_LIMIT_STATUS_CODES: [429, 503] +}; + +const awsSecretsManagerConnectionQueue = 
createConnectionQueue(AWS_SECRETS_MANAGER_RATE_LIMIT_CONFIG); +const { withRateLimitRetry } = awsSecretsManagerConnectionQueue; + +const MAX_RETRIES = 10; + +const sleep = async () => + new Promise((resolve) => { + setTimeout(resolve, 1000); + }); + +const isInfisicalManagedCertificate = (secretName: string, pkiSync: TPkiSyncWithCredentials): boolean => { + const syncOptions = pkiSync.syncOptions as { certificateNameSchema?: string } | undefined; + const certificateNameSchema = syncOptions?.certificateNameSchema; + + if (certificateNameSchema) { + const environment = AWS_SECRETS_MANAGER_PKI_SYNC_DEFAULTS.DEFAULT_ENVIRONMENT; + return matchesCertificateNameSchema(secretName, environment, certificateNameSchema); + } + + return secretName.startsWith(AWS_SECRETS_MANAGER_PKI_SYNC_DEFAULTS.INFISICAL_PREFIX); +}; + +const parseErrorMessage = (error: unknown): string => { + if (error instanceof Error) { + return error.message; + } + + if (typeof error === "string") { + return error; + } + + if (error && typeof error === "object" && "message" in error) { + const { message } = error as { message: unknown }; + if (typeof message === "string") { + return message; + } + } + + return "Unknown error occurred"; +}; + +const getSecretsManagerClient = async (pkiSync: TAwsSecretsManagerPkiSyncWithCredentials) => { + const { destinationConfig, connection } = pkiSync; + + const config = await getAwsConnectionConfig( + connection as TAwsConnectionConfig, + destinationConfig.region as AWSRegion + ); + + if (!config.credentials) { + throw new Error("AWS credentials not found in connection configuration"); + } + + const secretsManagerClient = new SecretsManagerClient({ + region: config.region, + useFipsEndpoint: crypto.isFipsModeEnabled(), + sha256: CustomAWSHasher, + credentials: config.credentials + }); + + return secretsManagerClient; +}; + +type TAwsSecretsManagerPkiSyncFactoryDeps = { + certificateDAL: Pick; + certificateSyncDAL: Pick< + TCertificateSyncDALFactory, + | 
"removeCertificates" + | "addCertificates" + | "findByPkiSyncAndCertificate" + | "updateById" + | "findByPkiSyncId" + | "updateSyncStatus" + >; +}; + +export const awsSecretsManagerPkiSyncFactory = ({ + certificateDAL, + certificateSyncDAL +}: TAwsSecretsManagerPkiSyncFactoryDeps) => { + const $getSecretsManagerSecrets = async ( + pkiSync: TAwsSecretsManagerPkiSyncWithCredentials, + syncId = "unknown" + ): Promise> => { + const client = await getSecretsManagerClient(pkiSync); + const secrets: Record = {}; + let hasNext = true; + let nextToken: string | undefined; + let attempt = 0; + + while (hasNext) { + try { + const currentToken = nextToken; + const output = await withRateLimitRetry( + () => client.send(new ListSecretsCommand({ NextToken: currentToken })), + { + operation: "list-secrets-manager-secrets", + syncId + } + ); + + attempt = 0; + + if (output.SecretList) { + output.SecretList.forEach((secretEntry) => { + if ( + secretEntry.Name && + isInfisicalManagedCertificate(secretEntry.Name, pkiSync as unknown as TPkiSyncWithCredentials) + ) { + secrets[secretEntry.Name] = secretEntry.ARN || secretEntry.Name; + } + }); + } + + hasNext = Boolean(output.NextToken); + nextToken = output.NextToken; + } catch (e) { + if ( + e && + typeof e === "object" && + "name" in e && + (e as { name: string }).name === "ThrottlingException" && + attempt < MAX_RETRIES + ) { + attempt += 1; + await sleep(); + continue; + } + throw e; + } + } + + return secrets; + }; + + const syncCertificates = async ( + pkiSync: TPkiSyncWithCredentials, + certificateMap: TCertificateMap + ): Promise => { + const awsPkiSync = pkiSync as unknown as TAwsSecretsManagerPkiSyncWithCredentials; + const client = await getSecretsManagerClient(awsPkiSync); + + const existingSecrets = await $getSecretsManagerSecrets(awsPkiSync, pkiSync.id); + + const existingSyncRecords = await certificateSyncDAL.findByPkiSyncId(pkiSync.id); + const syncRecordsByCertId = new Map(); + const syncRecordsByExternalId = new Map(); 
+ + existingSyncRecords.forEach((record: TCertificateSyncs) => { + if (record.certificateId) { + syncRecordsByCertId.set(record.certificateId, record); + } + if (record.externalIdentifier) { + syncRecordsByExternalId.set(record.externalIdentifier, record); + } + }); + + type CertificateUploadData = { + secretName: string; + certificateData: AwsSecretsManagerCertificateSecret; + certificateId: string; + isUpdate: boolean; + targetSecretName: string; + oldCertificateIdToRemove?: string; + }; + + const setCertificates: CertificateUploadData[] = []; + const validationErrors: Array<{ name: string; error: string }> = []; + + const syncOptions = pkiSync.syncOptions as + | { + canRemoveCertificates?: boolean; + preserveSecretOnRenewal?: boolean; + fieldMappings?: { + certificate?: string; + privateKey?: string; + certificateChain?: string; + caCertificate?: string; + }; + certificateNameSchema?: string; + } + | undefined; + + const canRemoveCertificates = syncOptions?.canRemoveCertificates ?? true; + const preserveSecretOnRenewal = syncOptions?.preserveSecretOnRenewal ?? true; + + const fieldMappings = { + certificate: syncOptions?.fieldMappings?.certificate ?? "certificate", + privateKey: syncOptions?.fieldMappings?.privateKey ?? "private_key", + certificateChain: syncOptions?.fieldMappings?.certificateChain ?? "certificate_chain", + caCertificate: syncOptions?.fieldMappings?.caCertificate ?? 
"ca_certificate" + }; + + const activeExternalIdentifiers = new Set(); + + for (const [certName, certData] of Object.entries(certificateMap)) { + const { cert, privateKey: certPrivateKey, certificateChain, caCertificate, certificateId } = certData; + + if (!cert || cert.trim().length === 0) { + validationErrors.push({ + name: certName, + error: "Certificate content is empty or missing" + }); + continue; + } + + if (!certPrivateKey || certPrivateKey.trim().length === 0) { + validationErrors.push({ + name: certName, + error: "Private key content is empty or missing" + }); + continue; + } + + if (!certificateId || typeof certificateId !== "string") { + continue; + } + + const certificateData: AwsSecretsManagerCertificateSecret = { + [fieldMappings.certificate]: cert, + [fieldMappings.privateKey]: certPrivateKey + }; + + if (certificateChain && certificateChain.trim().length > 0) { + certificateData[fieldMappings.certificateChain] = certificateChain; + } + + if (caCertificate && typeof caCertificate === "string" && caCertificate.trim().length > 0) { + certificateData[fieldMappings.caCertificate] = caCertificate; + } + + let targetSecretName = certName; + if (syncOptions?.certificateNameSchema) { + const extendedCertData = certData as Record; + const safeCommonName = typeof extendedCertData.commonName === "string" ? 
extendedCertData.commonName : ""; + + targetSecretName = syncOptions.certificateNameSchema + .replace(new RE2("\\{\\{certificateId\\}\\}", "g"), certificateId) + .replace(new RE2("\\{\\{commonName\\}\\}", "g"), safeCommonName); + } else { + targetSecretName = `${AWS_SECRETS_MANAGER_PKI_SYNC_DEFAULTS.INFISICAL_PREFIX}${certificateId}`; + } + + const certificate = await certificateDAL.findById(certificateId); + + if (certificate?.renewedByCertificateId) { + continue; + } + + const syncRecordLookupId = certificate?.renewedFromCertificateId || certificateId; + const existingRecord = syncRecordsByCertId.get(syncRecordLookupId); + + let shouldProcess = true; + let isUpdate = false; + + if (existingRecord?.externalIdentifier) { + const existingSecret = existingSecrets[existingRecord.externalIdentifier]; + + if (existingSecret) { + if (certificate?.renewedFromCertificateId && preserveSecretOnRenewal) { + targetSecretName = existingRecord.externalIdentifier; + isUpdate = true; + } else if (certificate?.renewedFromCertificateId && !preserveSecretOnRenewal) { + activeExternalIdentifiers.add(existingRecord.externalIdentifier); + } else if (!certificate?.renewedFromCertificateId) { + activeExternalIdentifiers.add(existingRecord.externalIdentifier); + shouldProcess = false; + } + } + } + + if (!shouldProcess) { + continue; + } + + if (existingSecrets[targetSecretName]) { + isUpdate = true; + } + + activeExternalIdentifiers.add(targetSecretName); + + setCertificates.push({ + secretName: certName, + certificateData, + certificateId, + isUpdate, + targetSecretName, + oldCertificateIdToRemove: + certificate?.renewedFromCertificateId && preserveSecretOnRenewal + ? 
certificate.renewedFromCertificateId + : undefined + }); + } + + const result: SyncCertificatesResult = { + uploaded: 0, + updated: 0, + removed: 0, + failedRemovals: 0, + skipped: 0, + details: { + failedUploads: [], + failedRemovals: [], + validationErrors + } + }; + + for (const certData of setCertificates) { + const { secretName, certificateData, certificateId, isUpdate, targetSecretName, oldCertificateIdToRemove } = + certData; + + try { + const secretValue = JSON.stringify(certificateData); + const configKeyId: unknown = awsPkiSync.destinationConfig.keyId; + const keyId: string = typeof configKeyId === "string" ? configKeyId : "alias/aws/secretsmanager"; + + if (isUpdate) { + await withRateLimitRetry( + () => + client.send( + new UpdateSecretCommand({ + SecretId: targetSecretName, + SecretString: secretValue, + KmsKeyId: keyId + }) + ), + { + operation: "update-secret", + syncId: pkiSync.id + } + ); + result.updated += 1; + } else { + await withRateLimitRetry( + () => + client.send( + new CreateSecretCommand({ + Name: targetSecretName, + SecretString: secretValue, + KmsKeyId: keyId, + Description: `Certificate managed by Infisical` + }) + ), + { + operation: "create-secret", + syncId: pkiSync.id + } + ); + result.uploaded += 1; + } + + const existingRecord = syncRecordsByCertId.get(certificateId); + if (existingRecord?.id) { + await certificateSyncDAL.updateById(existingRecord.id, { + externalIdentifier: targetSecretName, + syncStatus: CertificateSyncStatus.Succeeded, + lastSyncedAt: new Date(), + lastSyncMessage: "Certificate successfully synced to AWS Secrets Manager" + }); + + if (oldCertificateIdToRemove && oldCertificateIdToRemove !== certificateId) { + await certificateSyncDAL.removeCertificates(pkiSync.id, [oldCertificateIdToRemove]); + } + } else { + await certificateSyncDAL.addCertificates(pkiSync.id, [ + { + certificateId, + externalIdentifier: targetSecretName + } + ]); + + const newCertSync = await 
certificateSyncDAL.findByPkiSyncAndCertificate(pkiSync.id, certificateId); + if (newCertSync?.id) { + await certificateSyncDAL.updateById(newCertSync.id, { + syncStatus: CertificateSyncStatus.Succeeded, + lastSyncedAt: new Date(), + lastSyncMessage: "Certificate successfully synced to AWS Secrets Manager" + }); + } + } + } catch (error) { + result.details?.failedUploads?.push({ + name: secretName, + error: parseErrorMessage(error) + }); + logger.error( + { + secretName, + certificateId, + error: parseErrorMessage(error), + pkiSyncId: pkiSync.id + }, + "Failed to sync certificate" + ); + + const existingRecord = syncRecordsByCertId.get(certificateId); + if (existingRecord?.id) { + await certificateSyncDAL.updateById(existingRecord.id, { + syncStatus: CertificateSyncStatus.Failed, + lastSyncMessage: parseErrorMessage(error) + }); + } + } + } + + if (canRemoveCertificates) { + for (const [secretName] of Object.entries(existingSecrets)) { + if (!activeExternalIdentifiers.has(secretName)) { + try { + await withRateLimitRetry( + () => + client.send( + new DeleteSecretCommand({ + SecretId: secretName, + ForceDeleteWithoutRecovery: true + }) + ), + { + operation: "delete-secret", + syncId: pkiSync.id + } + ); + + result.removed += 1; + } catch (error) { + result.failedRemovals += 1; + result.details?.failedRemovals?.push({ + name: secretName, + error: parseErrorMessage(error) + }); + logger.error( + { + secretName, + error: parseErrorMessage(error), + pkiSyncId: pkiSync.id + }, + "Failed to remove certificate secret" + ); + } + } + } + } + + return result; + }; + + const removeCertificates = async ( + pkiSync: TPkiSyncWithCredentials, + certificateMap: TCertificateMap + ): Promise<{ removed: number; failed: number }> => { + const awsPkiSync = pkiSync as unknown as TAwsSecretsManagerPkiSyncWithCredentials; + const client = await getSecretsManagerClient(awsPkiSync); + + const existingSecrets = await $getSecretsManagerSecrets(awsPkiSync, pkiSync.id); + const 
existingSyncRecords = await certificateSyncDAL.findByPkiSyncId(pkiSync.id); + + let removed = 0; + let failed = 0; + + for (const [, certData] of Object.entries(certificateMap)) { + if (!certData.certificateId) continue; + + const syncRecord = existingSyncRecords.find((record) => record.certificateId === certData.certificateId); + if (!syncRecord?.externalIdentifier) continue; + + const secretName = syncRecord.externalIdentifier; + + if (existingSecrets[secretName]) { + try { + await withRateLimitRetry( + () => + client.send( + new DeleteSecretCommand({ + SecretId: secretName, + ForceDeleteWithoutRecovery: true + }) + ), + { + operation: "delete-secret", + syncId: pkiSync.id + } + ); + + if (syncRecord.id) { + await certificateSyncDAL.updateById(syncRecord.id, { + syncStatus: CertificateSyncStatus.Failed + }); + } + + removed += 1; + } catch (error) { + failed += 1; + logger.error( + { + secretName, + certificateId: certData.certificateId, + error: parseErrorMessage(error), + pkiSyncId: pkiSync.id + }, + "Failed to remove certificate secret" + ); + } + } + } + + return { removed, failed }; + }; + + return { + syncCertificates, + removeCertificates + }; +}; + +export type TAwsSecretsManagerPkiSyncFactory = ReturnType; diff --git a/backend/src/services/pki-sync/aws-secrets-manager/aws-secrets-manager-pki-sync-schemas.ts b/backend/src/services/pki-sync/aws-secrets-manager/aws-secrets-manager-pki-sync-schemas.ts new file mode 100644 index 0000000000..3005357a51 --- /dev/null +++ b/backend/src/services/pki-sync/aws-secrets-manager/aws-secrets-manager-pki-sync-schemas.ts @@ -0,0 +1,104 @@ +import RE2 from "re2"; +import { z } from "zod"; + +import { AppConnection, AWSRegion } from "@app/services/app-connection/app-connection-enums"; +import { PkiSync } from "@app/services/pki-sync/pki-sync-enums"; +import { PkiSyncSchema } from "@app/services/pki-sync/pki-sync-schemas"; + +import { AWS_SECRETS_MANAGER_PKI_SYNC_CERTIFICATE_NAMING } from 
"./aws-secrets-manager-pki-sync-constants"; + +export const AwsSecretsManagerPkiSyncConfigSchema = z.object({ + region: z.nativeEnum(AWSRegion), + keyId: z.string().trim().optional() +}); + +export const AwsSecretsManagerFieldMappingsSchema = z.object({ + certificate: z.string().min(1, "Certificate field name is required").default("certificate"), + privateKey: z.string().min(1, "Private key field name is required").default("private_key"), + certificateChain: z.string().min(1, "Certificate chain field name is required").default("certificate_chain"), + caCertificate: z.string().min(1, "CA certificate field name is required").default("ca_certificate") +}); + +const AwsSecretsManagerPkiSyncOptionsSchema = z.object({ + canImportCertificates: z.boolean().default(false), + canRemoveCertificates: z.boolean().default(true), + includeRootCa: z.boolean().default(false), + preserveSecretOnRenewal: z.boolean().default(true), + updateExistingCertificates: z.boolean().default(true), + certificateNameSchema: z + .string() + .optional() + .refine( + (schema) => { + if (!schema) return true; + + if (!schema.includes("{{certificateId}}")) { + return false; + } + + const testName = schema + .replace(new RE2("\\{\\{certificateId\\}\\}", "g"), "test-cert-id") + .replace(new RE2("\\{\\{profileId\\}\\}", "g"), "test-profile-id") + .replace(new RE2("\\{\\{commonName\\}\\}", "g"), "test-common-name") + .replace(new RE2("\\{\\{friendlyName\\}\\}", "g"), "test-friendly-name") + .replace(new RE2("\\{\\{environment\\}\\}", "g"), "test-env"); + + const hasForbiddenChars = AWS_SECRETS_MANAGER_PKI_SYNC_CERTIFICATE_NAMING.FORBIDDEN_CHARACTERS.split("").some( + (char) => testName.includes(char) + ); + + return ( + AWS_SECRETS_MANAGER_PKI_SYNC_CERTIFICATE_NAMING.NAME_PATTERN.test(testName) && + !hasForbiddenChars && + testName.length >= AWS_SECRETS_MANAGER_PKI_SYNC_CERTIFICATE_NAMING.MIN_LENGTH && + testName.length <= AWS_SECRETS_MANAGER_PKI_SYNC_CERTIFICATE_NAMING.MAX_LENGTH + ); + }, + { + message: 
+ "Certificate name schema must include {{certificateId}} placeholder and result in names that contain only alphanumeric characters, underscores, and hyphens and be 1-512 characters long for AWS Secrets Manager." + } + ), + fieldMappings: AwsSecretsManagerFieldMappingsSchema.optional().default({ + certificate: "certificate", + privateKey: "private_key", + certificateChain: "certificate_chain", + caCertificate: "ca_certificate" + }) +}); + +export const AwsSecretsManagerPkiSyncSchema = PkiSyncSchema.extend({ + destination: z.literal(PkiSync.AwsSecretsManager), + destinationConfig: AwsSecretsManagerPkiSyncConfigSchema, + syncOptions: AwsSecretsManagerPkiSyncOptionsSchema +}); + +export const CreateAwsSecretsManagerPkiSyncSchema = z.object({ + name: z.string().trim().min(1).max(64), + description: z.string().optional(), + isAutoSyncEnabled: z.boolean().default(true), + destinationConfig: AwsSecretsManagerPkiSyncConfigSchema, + syncOptions: AwsSecretsManagerPkiSyncOptionsSchema.optional().default({}), + subscriberId: z.string().nullish(), + connectionId: z.string(), + projectId: z.string().trim().min(1), + certificateIds: z.array(z.string().uuid()).optional() +}); + +export const UpdateAwsSecretsManagerPkiSyncSchema = z.object({ + name: z.string().trim().min(1).max(64).optional(), + description: z.string().optional(), + isAutoSyncEnabled: z.boolean().optional(), + destinationConfig: AwsSecretsManagerPkiSyncConfigSchema.optional(), + syncOptions: AwsSecretsManagerPkiSyncOptionsSchema.optional(), + subscriberId: z.string().nullish(), + connectionId: z.string().optional() +}); + +export const AwsSecretsManagerPkiSyncListItemSchema = z.object({ + name: z.literal("AWS Secrets Manager"), + connection: z.literal(AppConnection.AWS), + destination: z.literal(PkiSync.AwsSecretsManager), + canImportCertificates: z.literal(false), + canRemoveCertificates: z.literal(true) +}); diff --git a/backend/src/services/pki-sync/aws-secrets-manager/aws-secrets-manager-pki-sync-types.ts 
b/backend/src/services/pki-sync/aws-secrets-manager/aws-secrets-manager-pki-sync-types.ts new file mode 100644 index 0000000000..7c4a5f8a14 --- /dev/null +++ b/backend/src/services/pki-sync/aws-secrets-manager/aws-secrets-manager-pki-sync-types.ts @@ -0,0 +1,59 @@ +import { z } from "zod"; + +import { TAwsConnection } from "@app/services/app-connection/aws/aws-connection-types"; + +import { + AwsSecretsManagerFieldMappingsSchema, + AwsSecretsManagerPkiSyncConfigSchema, + AwsSecretsManagerPkiSyncSchema, + CreateAwsSecretsManagerPkiSyncSchema, + UpdateAwsSecretsManagerPkiSyncSchema +} from "./aws-secrets-manager-pki-sync-schemas"; + +export type TAwsSecretsManagerPkiSyncConfig = z.infer; + +export type TAwsSecretsManagerFieldMappings = z.infer; + +export type TAwsSecretsManagerPkiSync = z.infer; + +export type TAwsSecretsManagerPkiSyncInput = z.infer; + +export type TAwsSecretsManagerPkiSyncUpdate = z.infer; + +export type TAwsSecretsManagerPkiSyncWithCredentials = TAwsSecretsManagerPkiSync & { + connection: TAwsConnection; + appConnectionName: string; + appConnectionApp: string; +}; + +export interface AwsSecretsManagerCertificateSecret { + [key: string]: string; +} + +export interface SyncCertificatesResult { + uploaded: number; + updated: number; + removed: number; + failedRemovals: number; + skipped: number; + details?: { + failedUploads?: Array<{ name: string; error: string }>; + failedRemovals?: Array<{ name: string; error: string }>; + validationErrors?: Array<{ name: string; error: string }>; + }; +} + +export interface RemoveCertificatesResult { + removed: number; + failed: number; + skipped: number; +} + +export interface CertificateImportRequest { + name: string; + certificate: string; + privateKey: string; + certificateChain?: string; + caCertificate?: string; + certificateId?: string; +} diff --git a/backend/src/services/pki-sync/aws-secrets-manager/index.ts b/backend/src/services/pki-sync/aws-secrets-manager/index.ts new file mode 100644 index 
0000000000..7f6e116649 --- /dev/null +++ b/backend/src/services/pki-sync/aws-secrets-manager/index.ts @@ -0,0 +1,4 @@ +export * from "./aws-secrets-manager-pki-sync-constants"; +export * from "./aws-secrets-manager-pki-sync-fns"; +export * from "./aws-secrets-manager-pki-sync-schemas"; +export * from "./aws-secrets-manager-pki-sync-types"; diff --git a/backend/src/services/pki-sync/azure-key-vault/azure-key-vault-pki-sync-schemas.ts b/backend/src/services/pki-sync/azure-key-vault/azure-key-vault-pki-sync-schemas.ts index 90f4a119bf..ab66d9b4aa 100644 --- a/backend/src/services/pki-sync/azure-key-vault/azure-key-vault-pki-sync-schemas.ts +++ b/backend/src/services/pki-sync/azure-key-vault/azure-key-vault-pki-sync-schemas.ts @@ -14,6 +14,7 @@ export const AzureKeyVaultPkiSyncConfigSchema = z.object({ const AzureKeyVaultPkiSyncOptionsSchema = z.object({ canImportCertificates: z.boolean().default(false), canRemoveCertificates: z.boolean().default(true), + includeRootCa: z.boolean().default(false), enableVersioning: z.boolean().default(true), certificateNameSchema: z .string() diff --git a/backend/src/services/pki-sync/chef/chef-pki-sync-constants.ts b/backend/src/services/pki-sync/chef/chef-pki-sync-constants.ts new file mode 100644 index 0000000000..c466bbd56e --- /dev/null +++ b/backend/src/services/pki-sync/chef/chef-pki-sync-constants.ts @@ -0,0 +1,23 @@ +import RE2 from "re2"; + +export const CHEF_PKI_SYNC_CERTIFICATE_NAMING = { + NAME_PATTERN: new RE2("^[a-zA-Z0-9_-]+$"), + FORBIDDEN_CHARACTERS: "[]{}()<>|\\:;\"'=+*&^%$#@!~`?/", + MIN_LENGTH: 1, + MAX_LENGTH: 255, + DEFAULT_SCHEMA: "{{certificateId}}" +}; + +export const CHEF_PKI_SYNC_DATA_BAG_NAMING = { + NAME_PATTERN: new RE2("^[a-zA-Z0-9_-]+$"), + FORBIDDEN_CHARACTERS: "[]{}()<>|\\:;\"'=+*&^%$#@!~`?/.", + MIN_LENGTH: 1, + MAX_LENGTH: 255 +}; + +export const CHEF_PKI_SYNC_DEFAULTS = { + CERTIFICATE_DATA_BAG: "ssl_certificates", + ITEM_NAME_TEMPLATE: "{{certificateId}}", + INFISICAL_PREFIX: "Infisical-", + 
DEFAULT_ENVIRONMENT: "global" +} as const; diff --git a/backend/src/services/pki-sync/chef/chef-pki-sync-fns.ts b/backend/src/services/pki-sync/chef/chef-pki-sync-fns.ts new file mode 100644 index 0000000000..bf740c6600 --- /dev/null +++ b/backend/src/services/pki-sync/chef/chef-pki-sync-fns.ts @@ -0,0 +1,595 @@ +/* eslint-disable no-continue */ +/* eslint-disable no-await-in-loop */ +import { TCertificateSyncs } from "@app/db/schemas"; +import { + createChefDataBagItem, + listChefDataBagItems, + removeChefDataBagItem, + updateChefDataBagItem +} from "@app/ee/services/app-connections/chef"; +import { TChefDataBagItemContent } from "@app/ee/services/secret-sync/chef"; +import { logger } from "@app/lib/logger"; +import { TCertificateDALFactory } from "@app/services/certificate/certificate-dal"; +import { TCertificateSyncDALFactory } from "@app/services/certificate-sync/certificate-sync-dal"; +import { CertificateSyncStatus } from "@app/services/certificate-sync/certificate-sync-enums"; +import { createConnectionQueue, RateLimitConfig } from "@app/services/connection-queue"; +import { matchesCertificateNameSchema } from "@app/services/pki-sync/pki-sync-fns"; +import { TCertificateMap, TPkiSyncWithCredentials } from "@app/services/pki-sync/pki-sync-types"; + +import { CHEF_PKI_SYNC_DEFAULTS } from "./chef-pki-sync-constants"; +import { ChefCertificateDataBagItem, SyncCertificatesResult, TChefPkiSyncWithCredentials } from "./chef-pki-sync-types"; + +const CHEF_RATE_LIMIT_CONFIG: RateLimitConfig = { + MAX_CONCURRENT_REQUESTS: 5, // Chef servers generally have lower rate limits + BASE_DELAY: 1500, + MAX_DELAY: 30000, + MAX_RETRIES: 3, + RATE_LIMIT_STATUS_CODES: [429, 503] +}; + +const chefConnectionQueue = createConnectionQueue(CHEF_RATE_LIMIT_CONFIG); +const { withRateLimitRetry } = chefConnectionQueue; + +const isInfisicalManagedCertificate = (certificateName: string, pkiSync: TPkiSyncWithCredentials): boolean => { + const syncOptions = pkiSync.syncOptions as { 
certificateNameSchema?: string } | undefined; + const certificateNameSchema = syncOptions?.certificateNameSchema; + + if (certificateNameSchema) { + const environment = CHEF_PKI_SYNC_DEFAULTS.DEFAULT_ENVIRONMENT; + return matchesCertificateNameSchema(certificateName, environment, certificateNameSchema); + } + + return certificateName.startsWith(CHEF_PKI_SYNC_DEFAULTS.INFISICAL_PREFIX); +}; + +const parseErrorMessage = (error: unknown): string => { + if (error instanceof Error) { + return error.message; + } + + if (typeof error === "string") { + return error; + } + + if (error && typeof error === "object" && "message" in error) { + const { message } = error as { message: unknown }; + if (typeof message === "string") { + return message; + } + } + + return "Unknown error occurred"; +}; + +type TChefPkiSyncFactoryDeps = { + certificateDAL: Pick; + certificateSyncDAL: Pick< + TCertificateSyncDALFactory, + | "removeCertificates" + | "addCertificates" + | "findByPkiSyncAndCertificate" + | "updateById" + | "findByPkiSyncId" + | "updateSyncStatus" + >; +}; + +export const chefPkiSyncFactory = ({ certificateDAL, certificateSyncDAL }: TChefPkiSyncFactoryDeps) => { + const $getChefDataBagItems = async ( + pkiSync: TChefPkiSyncWithCredentials, + syncId = "unknown" + ): Promise> => { + const { + connection, + destinationConfig: { dataBagName } + } = pkiSync; + const { serverUrl, userName, privateKey, orgName } = connection.credentials; + + const dataBagItems = await withRateLimitRetry( + () => + listChefDataBagItems( + { + credentials: { serverUrl, userName, privateKey, orgName } + } as Parameters[0], + dataBagName + ), + { + operation: "list-chef-data-bag-items", + syncId + } + ); + + const chefDataBagItems: Record = {}; + dataBagItems.forEach((item) => { + chefDataBagItems[item.name] = true; + }); + + return chefDataBagItems; + }; + + const syncCertificates = async ( + pkiSync: TPkiSyncWithCredentials, + certificateMap: TCertificateMap + ): Promise => { + const chefPkiSync = 
pkiSync as unknown as TChefPkiSyncWithCredentials; + const { + connection, + destinationConfig: { dataBagName } + } = chefPkiSync; + const { serverUrl, userName, privateKey, orgName } = connection.credentials; + + const chefDataBagItems = await $getChefDataBagItems(chefPkiSync, pkiSync.id); + + const existingSyncRecords = await certificateSyncDAL.findByPkiSyncId(pkiSync.id); + const syncRecordsByCertId = new Map(); + const syncRecordsByExternalId = new Map(); + + existingSyncRecords.forEach((record: TCertificateSyncs) => { + if (record.certificateId) { + syncRecordsByCertId.set(record.certificateId, record); + } + if (record.externalIdentifier) { + syncRecordsByExternalId.set(record.externalIdentifier, record); + } + }); + + type CertificateUploadData = { + key: string; + name: string; + cert: string; + privateKey: string; + certificateChain?: string; + caCertificate?: string; + certificateId: string; + isUpdate: boolean; + targetItemName: string; + oldCertificateIdToRemove?: string; + }; + + const setCertificates: CertificateUploadData[] = []; + + const validationErrors: Array<{ name: string; error: string }> = []; + + const syncOptions = pkiSync.syncOptions as + | { + canRemoveCertificates?: boolean; + preserveItemOnRenewal?: boolean; + fieldMappings?: { + certificate?: string; + privateKey?: string; + certificateChain?: string; + caCertificate?: string; + metadata?: string; + }; + } + | undefined; + const canRemoveCertificates = syncOptions?.canRemoveCertificates ?? true; + const preserveItemOnRenewal = syncOptions?.preserveItemOnRenewal ?? true; + + const fieldMappings = { + certificate: syncOptions?.fieldMappings?.certificate ?? "certificate", + privateKey: syncOptions?.fieldMappings?.privateKey ?? "private_key", + certificateChain: syncOptions?.fieldMappings?.certificateChain ?? "certificate_chain", + caCertificate: syncOptions?.fieldMappings?.caCertificate ?? 
"ca_certificate" + }; + + const activeExternalIdentifiers = new Set(); + + for (const [certName, certData] of Object.entries(certificateMap)) { + const { cert, privateKey: certPrivateKey, certificateChain, caCertificate, certificateId } = certData; + + if (!cert || cert.trim().length === 0) { + validationErrors.push({ + name: certName, + error: "Certificate content is empty or missing" + }); + continue; + } + + if (!certPrivateKey || certPrivateKey.trim().length === 0) { + validationErrors.push({ + name: certName, + error: "Private key content is empty or missing" + }); + continue; + } + + if (!certificateId || typeof certificateId !== "string") { + continue; + } + + const targetCertificateName = certName; + + const certificate = await certificateDAL.findById(certificateId); + + if (certificate?.renewedByCertificateId) { + continue; + } + + const syncRecordLookupId = certificate?.renewedFromCertificateId || certificateId; + const existingSyncRecord = syncRecordsByCertId.get(syncRecordLookupId); + + let shouldProcess = true; + let isUpdate = false; + let targetItemName = targetCertificateName; + + if (existingSyncRecord?.externalIdentifier) { + const existingChefItem = chefDataBagItems[existingSyncRecord.externalIdentifier]; + + if (existingChefItem) { + if (certificate?.renewedFromCertificateId && preserveItemOnRenewal) { + targetItemName = existingSyncRecord.externalIdentifier; + isUpdate = true; + } else if (!certificate?.renewedFromCertificateId) { + shouldProcess = false; + } + } + } + + if (!shouldProcess) { + continue; + } + + setCertificates.push({ + key: certName, + name: certName, + cert, + privateKey: certPrivateKey, + certificateChain, + caCertificate, + certificateId, + isUpdate, + targetItemName, + oldCertificateIdToRemove: + certificate?.renewedFromCertificateId && preserveItemOnRenewal + ? 
certificate.renewedFromCertificateId + : undefined + }); + + activeExternalIdentifiers.add(targetItemName); + } + + type UploadResult = + | { status: "fulfilled"; certificate: CertificateUploadData } + | { status: "rejected"; certificate: CertificateUploadData; error: unknown }; + + const uploadPromises = setCertificates.map(async (certificateData): Promise => { + const { + targetItemName, + cert, + privateKey: certPrivateKey, + certificateChain, + caCertificate, + certificateId + } = certificateData; + + try { + const chefDataBagItem: ChefCertificateDataBagItem = { + id: targetItemName, + [fieldMappings.certificate]: cert, + [fieldMappings.privateKey]: certPrivateKey, + ...(certificateChain && { [fieldMappings.certificateChain]: certificateChain }), + ...(caCertificate && { [fieldMappings.caCertificate]: caCertificate }) + }; + + const itemExists = chefDataBagItems[targetItemName] === true; + + if (itemExists) { + await withRateLimitRetry( + () => + updateChefDataBagItem({ + serverUrl, + userName, + privateKey, + orgName, + dataBagName, + dataBagItemName: targetItemName, + data: chefDataBagItem as unknown as TChefDataBagItemContent + }), + { + operation: "update-chef-data-bag-item", + syncId: pkiSync.id + } + ); + } else { + await withRateLimitRetry( + () => + createChefDataBagItem({ + serverUrl, + userName, + privateKey, + orgName, + dataBagName, + data: chefDataBagItem as unknown as TChefDataBagItemContent + }), + { + operation: "create-chef-data-bag-item", + syncId: pkiSync.id + } + ); + } + + return { status: "fulfilled" as const, certificate: certificateData }; + } catch (error) { + logger.error( + { + syncId: pkiSync.id, + certificateId, + targetItemName, + error: error instanceof Error ? 
error.message : String(error) + }, + "Failed to sync certificate to Chef" + ); + return { status: "rejected" as const, certificate: certificateData, error }; + } + }); + + const uploadResults = await Promise.allSettled(uploadPromises); + + const successfulUploads = uploadResults.filter( + (result): result is PromiseFulfilledResult => + result.status === "fulfilled" && result.value.status === "fulfilled" + ); + const failedUploads = uploadResults.filter( + ( + result + ): result is + | PromiseRejectedResult + | PromiseFulfilledResult<{ status: "rejected"; certificate: CertificateUploadData; error: unknown }> => + result.status === "rejected" || (result.status === "fulfilled" && result.value.status === "rejected") + ); + + let removedCount = 0; + let failedRemovals: Array<{ name: string; error: string }> = []; + + if (canRemoveCertificates) { + const itemsToRemove: string[] = []; + + Object.keys(chefDataBagItems).forEach((itemName) => { + if (!activeExternalIdentifiers.has(itemName) && isInfisicalManagedCertificate(itemName, pkiSync)) { + itemsToRemove.push(itemName); + } + }); + + if (itemsToRemove.length > 0) { + const removalPromises = itemsToRemove.map(async (itemName) => { + try { + await withRateLimitRetry( + () => + removeChefDataBagItem({ + serverUrl, + userName, + privateKey, + orgName, + dataBagName, + dataBagItemName: itemName + }), + { + operation: "remove-chef-data-bag-item", + syncId: pkiSync.id + } + ); + + const syncRecord = syncRecordsByExternalId.get(itemName); + if (syncRecord?.certificateId) { + await certificateSyncDAL.removeCertificates(pkiSync.id, [syncRecord.certificateId]); + } + + return { status: "fulfilled" as const, itemName }; + } catch (error) { + logger.error( + { + syncId: pkiSync.id, + itemName, + error: error instanceof Error ? 
error.message : String(error) + }, + "Failed to remove Chef data bag item" + ); + return { status: "rejected" as const, itemName, error }; + } + }); + + const removalResults = await Promise.allSettled(removalPromises); + + const successfulRemovals = removalResults.filter( + (result): result is PromiseFulfilledResult<{ status: "fulfilled"; itemName: string }> => + result.status === "fulfilled" && result.value.status === "fulfilled" + ); + removedCount = successfulRemovals.length; + + const failedRemovalPromises = removalResults.filter( + ( + result + ): result is + | PromiseRejectedResult + | PromiseFulfilledResult<{ status: "rejected"; itemName: string; error: unknown }> => + result.status === "rejected" || (result.status === "fulfilled" && result.value.status === "rejected") + ); + + failedRemovals = failedRemovalPromises.map((result) => { + if (result.status === "rejected") { + return { + name: "unknown", + error: parseErrorMessage(result.reason) + }; + } + const { itemName, error } = result.value; + return { + name: String(itemName), + error: parseErrorMessage(error) + }; + }); + } + } + + for (const result of successfulUploads) { + const { certificateId, targetItemName, oldCertificateIdToRemove } = result.value.certificate; + + if (certificateId && typeof certificateId === "string") { + const existingCertSync = await certificateSyncDAL.findByPkiSyncAndCertificate(pkiSync.id, certificateId); + if (existingCertSync) { + await certificateSyncDAL.updateById(existingCertSync.id, { + externalIdentifier: targetItemName, + syncStatus: CertificateSyncStatus.Succeeded, + lastSyncedAt: new Date(), + lastSyncMessage: "Certificate successfully synced to destination" + }); + } else { + await certificateSyncDAL.addCertificates(pkiSync.id, [ + { + certificateId, + externalIdentifier: targetItemName + } + ]); + + const newCertSync = await certificateSyncDAL.findByPkiSyncAndCertificate(pkiSync.id, certificateId); + if (newCertSync) { + await 
certificateSyncDAL.updateById(newCertSync.id, { + syncStatus: CertificateSyncStatus.Succeeded, + lastSyncedAt: new Date(), + lastSyncMessage: "Certificate successfully synced to destination" + }); + } + } + + if (oldCertificateIdToRemove) { + await certificateSyncDAL.removeCertificates(pkiSync.id, [oldCertificateIdToRemove]); + } + } + } + + await Promise.all( + failedUploads.map(async (result) => { + let certificateId: string; + let errorMessage: string; + + if (result.status === "rejected") { + certificateId = "unknown"; + errorMessage = result.reason instanceof Error ? result.reason.message : String(result.reason); + return; + } + + const { certificate, error } = result.value; + certificateId = certificate.certificateId; + errorMessage = error instanceof Error ? error.message : String(error); + + const existingSyncRecord = syncRecordsByCertId.get(certificateId); + if (existingSyncRecord) { + await certificateSyncDAL.updateSyncStatus( + pkiSync.id, + certificateId, + CertificateSyncStatus.Failed, + errorMessage + ); + } + }) + ); + + return { + uploaded: successfulUploads.filter((result) => !result.value.certificate.isUpdate).length, + updated: successfulUploads.filter((result) => result.value.certificate.isUpdate).length, + removed: removedCount, + failedRemovals: failedRemovals.length, + skipped: validationErrors.length, + details: { + failedUploads: failedUploads.map((result) => { + if (result.status === "rejected") { + return { + name: "unknown", + error: result.reason instanceof Error ? result.reason.message : String(result.reason) + }; + } + const { certificate, error } = result.value; + return { + name: certificate.name, + error: error instanceof Error ? 
error.message : String(error) + }; + }), + failedRemovals, + validationErrors + } + }; + }; + + const importCertificates = async (): Promise => { + throw new Error("Chef PKI Sync does not support importing certificates from Chef data bags"); + }; + + const removeCertificates = async ( + sync: TPkiSyncWithCredentials, + certificateNames: string[], + deps?: { certificateSyncDAL?: TCertificateSyncDALFactory; certificateMap?: TCertificateMap } + ): Promise => { + const chefPkiSync = sync as unknown as TChefPkiSyncWithCredentials; + const { + connection, + destinationConfig: { dataBagName } + } = chefPkiSync; + const { serverUrl, userName, privateKey, orgName } = connection.credentials; + + const existingSyncRecords = await certificateSyncDAL.findByPkiSyncId(sync.id); + const certificateIdsToRemove: string[] = []; + const itemsToRemove: string[] = []; + + for (const certName of certificateNames) { + const certificateData = deps?.certificateMap?.[certName]; + if (certificateData?.certificateId && typeof certificateData.certificateId === "string") { + const syncRecord = existingSyncRecords.find((record) => record.certificateId === certificateData.certificateId); + if (syncRecord) { + certificateIdsToRemove.push(certificateData.certificateId); + if (syncRecord.externalIdentifier) { + itemsToRemove.push(syncRecord.externalIdentifier); + } + } + } else { + const targetName = certName; + const syncRecord = existingSyncRecords.find((record) => record.externalIdentifier === targetName); + if (syncRecord && syncRecord.certificateId) { + certificateIdsToRemove.push(syncRecord.certificateId); + itemsToRemove.push(targetName); + } + } + } + + const removalPromises = itemsToRemove.map(async (itemName) => { + try { + await withRateLimitRetry( + () => + removeChefDataBagItem({ + serverUrl, + userName, + privateKey, + orgName, + dataBagName, + dataBagItemName: itemName + }), + { + operation: "remove-chef-data-bag-item", + syncId: sync.id + } + ); + } catch (error) { + logger.error( + { 
+ syncId: sync.id, + itemName, + error: error instanceof Error ? error.message : String(error) + }, + "Failed to remove Chef data bag item during certificate removal" + ); + } + }); + + await Promise.allSettled(removalPromises); + + if (certificateIdsToRemove.length > 0) { + await certificateSyncDAL.removeCertificates(sync.id, certificateIdsToRemove); + } + }; + + return { + syncCertificates, + importCertificates, + removeCertificates + }; +}; diff --git a/backend/src/services/pki-sync/chef/chef-pki-sync-list-constants.ts b/backend/src/services/pki-sync/chef/chef-pki-sync-list-constants.ts new file mode 100644 index 0000000000..1142da2ac6 --- /dev/null +++ b/backend/src/services/pki-sync/chef/chef-pki-sync-list-constants.ts @@ -0,0 +1,10 @@ +import { AppConnection } from "@app/services/app-connection/app-connection-enums"; +import { PkiSync } from "@app/services/pki-sync/pki-sync-enums"; + +export const CHEF_PKI_SYNC_LIST_OPTION = { + name: "Chef" as const, + connection: AppConnection.Chef, + destination: PkiSync.Chef, + canImportCertificates: false, + canRemoveCertificates: true +} as const; diff --git a/backend/src/services/pki-sync/chef/chef-pki-sync-schemas.ts b/backend/src/services/pki-sync/chef/chef-pki-sync-schemas.ts new file mode 100644 index 0000000000..d52a20408c --- /dev/null +++ b/backend/src/services/pki-sync/chef/chef-pki-sync-schemas.ts @@ -0,0 +1,113 @@ +import RE2 from "re2"; +import { z } from "zod"; + +import { AppConnection } from "@app/services/app-connection/app-connection-enums"; +import { PkiSync } from "@app/services/pki-sync/pki-sync-enums"; +import { PkiSyncSchema } from "@app/services/pki-sync/pki-sync-schemas"; + +import { CHEF_PKI_SYNC_CERTIFICATE_NAMING, CHEF_PKI_SYNC_DATA_BAG_NAMING } from "./chef-pki-sync-constants"; + +export const ChefPkiSyncConfigSchema = z.object({ + dataBagName: z + .string() + .trim() + .min(1, "Data bag name required") + .max(255, "Data bag name cannot exceed 255 characters") + .refine( + (name) => 
CHEF_PKI_SYNC_DATA_BAG_NAMING.NAME_PATTERN.test(name), + "Data bag name can only contain alphanumeric characters, underscores, and hyphens" + ) +}); + +const ChefFieldMappingsSchema = z.object({ + certificate: z.string().min(1, "Certificate field name is required").default("certificate"), + privateKey: z.string().min(1, "Private key field name is required").default("private_key"), + certificateChain: z.string().min(1, "Certificate chain field name is required").default("certificate_chain"), + caCertificate: z.string().min(1, "CA certificate field name is required").default("ca_certificate") +}); + +const ChefPkiSyncOptionsSchema = z.object({ + canImportCertificates: z.boolean().default(false), + canRemoveCertificates: z.boolean().default(true), + includeRootCa: z.boolean().default(false), + preserveItemOnRenewal: z.boolean().default(true), + updateExistingCertificates: z.boolean().default(true), + certificateNameSchema: z + .string() + .optional() + .refine( + (schema) => { + if (!schema) return true; + + if (!schema.includes("{{certificateId}}")) { + return false; + } + + const testName = schema + .replace(new RE2("\\{\\{certificateId\\}\\}", "g"), "test-cert-id") + .replace(new RE2("\\{\\{profileId\\}\\}", "g"), "test-profile-id") + .replace(new RE2("\\{\\{commonName\\}\\}", "g"), "test-common-name") + .replace(new RE2("\\{\\{friendlyName\\}\\}", "g"), "test-friendly-name") + .replace(new RE2("\\{\\{environment\\}\\}", "g"), "test-env"); + + const hasForbiddenChars = CHEF_PKI_SYNC_CERTIFICATE_NAMING.FORBIDDEN_CHARACTERS.split("").some((char) => + testName.includes(char) + ); + + return ( + CHEF_PKI_SYNC_CERTIFICATE_NAMING.NAME_PATTERN.test(testName) && + !hasForbiddenChars && + testName.length >= CHEF_PKI_SYNC_CERTIFICATE_NAMING.MIN_LENGTH && + testName.length <= CHEF_PKI_SYNC_CERTIFICATE_NAMING.MAX_LENGTH + ); + }, + { + message: + "Certificate item name schema must include {{certificateId}} placeholder and result in names that contain only alphanumeric 
characters, underscores, and hyphens and be 1-255 characters long for Chef data bag items." + } + ), + fieldMappings: ChefFieldMappingsSchema.optional().default({ + certificate: "certificate", + privateKey: "private_key", + certificateChain: "certificate_chain", + caCertificate: "ca_certificate" + }) +}); + +export const ChefPkiSyncSchema = PkiSyncSchema.extend({ + destination: z.literal(PkiSync.Chef), + destinationConfig: ChefPkiSyncConfigSchema, + syncOptions: ChefPkiSyncOptionsSchema +}); + +export const CreateChefPkiSyncSchema = z.object({ + name: z.string().trim().min(1).max(64), + description: z.string().optional(), + isAutoSyncEnabled: z.boolean().default(true), + destinationConfig: ChefPkiSyncConfigSchema, + syncOptions: ChefPkiSyncOptionsSchema.optional().default({}), + subscriberId: z.string().nullish(), + connectionId: z.string(), + projectId: z.string().trim().min(1), + certificateIds: z.array(z.string().uuid()).optional() +}); + +export const UpdateChefPkiSyncSchema = z.object({ + name: z.string().trim().min(1).max(64).optional(), + description: z.string().optional(), + isAutoSyncEnabled: z.boolean().optional(), + destinationConfig: ChefPkiSyncConfigSchema.optional(), + syncOptions: ChefPkiSyncOptionsSchema.optional(), + subscriberId: z.string().nullish(), + connectionId: z.string().optional() +}); + +export const ChefPkiSyncListItemSchema = z.object({ + name: z.literal("Chef"), + connection: z.literal(AppConnection.Chef), + destination: z.literal(PkiSync.Chef), + canImportCertificates: z.literal(false), + canRemoveCertificates: z.literal(true) +}); + +export { ChefFieldMappingsSchema }; diff --git a/backend/src/services/pki-sync/chef/chef-pki-sync-types.ts b/backend/src/services/pki-sync/chef/chef-pki-sync-types.ts new file mode 100644 index 0000000000..52ea83ea67 --- /dev/null +++ b/backend/src/services/pki-sync/chef/chef-pki-sync-types.ts @@ -0,0 +1,59 @@ +import { z } from "zod"; + +import { TChefConnection } from 
"@app/ee/services/app-connections/chef/chef-connection-types"; + +import { + ChefFieldMappingsSchema, + ChefPkiSyncConfigSchema, + ChefPkiSyncSchema, + CreateChefPkiSyncSchema, + UpdateChefPkiSyncSchema +} from "./chef-pki-sync-schemas"; + +export type TChefPkiSyncConfig = z.infer; + +export type TChefFieldMappings = z.infer; + +export type TChefPkiSync = z.infer; + +export type TChefPkiSyncInput = z.infer; + +export type TChefPkiSyncUpdate = z.infer; + +export type TChefPkiSyncWithCredentials = TChefPkiSync & { + connection: TChefConnection; +}; + +export interface ChefCertificateDataBagItem { + id: string; + [key: string]: string; +} + +export interface SyncCertificatesResult { + uploaded: number; + updated: number; + removed: number; + failedRemovals: number; + skipped: number; + details?: { + failedUploads?: Array<{ name: string; error: string }>; + failedRemovals?: Array<{ name: string; error: string }>; + validationErrors?: Array<{ name: string; error: string }>; + }; +} + +export interface RemoveCertificatesResult { + removed: number; + failed: number; + skipped: number; +} + +export interface CertificateImportRequest { + id: string; + name: string; + certificate: string; + privateKey: string; + certificateChain?: string; + alternativeNames?: string[]; + certificateId?: string; +} diff --git a/backend/src/services/pki-sync/chef/index.ts b/backend/src/services/pki-sync/chef/index.ts new file mode 100644 index 0000000000..0ccd62c707 --- /dev/null +++ b/backend/src/services/pki-sync/chef/index.ts @@ -0,0 +1,4 @@ +export * from "./chef-pki-sync-constants"; +export * from "./chef-pki-sync-fns"; +export * from "./chef-pki-sync-schemas"; +export * from "./chef-pki-sync-types"; diff --git a/backend/src/services/pki-sync/pki-sync-enums.ts b/backend/src/services/pki-sync/pki-sync-enums.ts index 46a7fc9756..bea444372a 100644 --- a/backend/src/services/pki-sync/pki-sync-enums.ts +++ b/backend/src/services/pki-sync/pki-sync-enums.ts @@ -1,6 +1,8 @@ export enum PkiSync { 
AzureKeyVault = "azure-key-vault", - AwsCertificateManager = "aws-certificate-manager" + AwsCertificateManager = "aws-certificate-manager", + AwsSecretsManager = "aws-secrets-manager", + Chef = "chef" } export enum PkiSyncStatus { diff --git a/backend/src/services/pki-sync/pki-sync-fns.ts b/backend/src/services/pki-sync/pki-sync-fns.ts index 961687f857..169cb328f2 100644 --- a/backend/src/services/pki-sync/pki-sync-fns.ts +++ b/backend/src/services/pki-sync/pki-sync-fns.ts @@ -10,8 +10,12 @@ import { TKmsServiceFactory } from "@app/services/kms/kms-service"; import { AWS_CERTIFICATE_MANAGER_PKI_SYNC_LIST_OPTION } from "./aws-certificate-manager/aws-certificate-manager-pki-sync-constants"; import { awsCertificateManagerPkiSyncFactory } from "./aws-certificate-manager/aws-certificate-manager-pki-sync-fns"; +import { AWS_SECRETS_MANAGER_PKI_SYNC_LIST_OPTION } from "./aws-secrets-manager/aws-secrets-manager-pki-sync-constants"; +import { awsSecretsManagerPkiSyncFactory } from "./aws-secrets-manager/aws-secrets-manager-pki-sync-fns"; import { AZURE_KEY_VAULT_PKI_SYNC_LIST_OPTION } from "./azure-key-vault/azure-key-vault-pki-sync-constants"; import { azureKeyVaultPkiSyncFactory } from "./azure-key-vault/azure-key-vault-pki-sync-fns"; +import { chefPkiSyncFactory } from "./chef/chef-pki-sync-fns"; +import { CHEF_PKI_SYNC_LIST_OPTION } from "./chef/chef-pki-sync-list-constants"; import { PkiSync } from "./pki-sync-enums"; import { TCertificateMap, TPkiSyncWithCredentials } from "./pki-sync-types"; @@ -19,7 +23,9 @@ const ENTERPRISE_PKI_SYNCS: PkiSync[] = []; const PKI_SYNC_LIST_OPTIONS = { [PkiSync.AzureKeyVault]: AZURE_KEY_VAULT_PKI_SYNC_LIST_OPTION, - [PkiSync.AwsCertificateManager]: AWS_CERTIFICATE_MANAGER_PKI_SYNC_LIST_OPTION + [PkiSync.AwsCertificateManager]: AWS_CERTIFICATE_MANAGER_PKI_SYNC_LIST_OPTION, + [PkiSync.AwsSecretsManager]: AWS_SECRETS_MANAGER_PKI_SYNC_LIST_OPTION, + [PkiSync.Chef]: CHEF_PKI_SYNC_LIST_OPTION }; export const enterprisePkiSyncCheck = async ( 
@@ -162,6 +168,8 @@ export const PkiSyncFns = { dependencies: { appConnectionDAL: Pick; kmsService: Pick; + certificateDAL: TCertificateDALFactory; + certificateSyncDAL: TCertificateSyncDALFactory; } ): Promise => { switch (pkiSync.destination) { @@ -175,6 +183,14 @@ export const PkiSyncFns = { "AWS Certificate Manager does not support importing certificates into Infisical (private keys cannot be extracted)" ); } + case PkiSync.AwsSecretsManager: { + throw new Error("AWS Secrets Manager does not support importing certificates into Infisical"); + } + case PkiSync.Chef: { + throw new Error( + "Chef does not support importing certificates into Infisical (private keys cannot be extracted securely)" + ); + } default: throw new Error(`Unsupported PKI sync destination: ${String(pkiSync.destination)}`); } @@ -203,7 +219,7 @@ export const PkiSyncFns = { }> => { switch (pkiSync.destination) { case PkiSync.AzureKeyVault: { - checkPkiSyncDestination(pkiSync, PkiSync.AzureKeyVault); + checkPkiSyncDestination(pkiSync, PkiSync.AzureKeyVault as PkiSync); const azureKeyVaultPkiSync = azureKeyVaultPkiSyncFactory({ appConnectionDAL: dependencies.appConnectionDAL, kmsService: dependencies.kmsService, @@ -213,7 +229,7 @@ export const PkiSyncFns = { return azureKeyVaultPkiSync.syncCertificates(pkiSync, certificateMap); } case PkiSync.AwsCertificateManager: { - checkPkiSyncDestination(pkiSync, PkiSync.AwsCertificateManager); + checkPkiSyncDestination(pkiSync, PkiSync.AwsCertificateManager as PkiSync); const awsCertificateManagerPkiSync = awsCertificateManagerPkiSyncFactory({ appConnectionDAL: dependencies.appConnectionDAL, kmsService: dependencies.kmsService, @@ -222,6 +238,22 @@ export const PkiSyncFns = { }); return awsCertificateManagerPkiSync.syncCertificates(pkiSync, certificateMap); } + case PkiSync.AwsSecretsManager: { + checkPkiSyncDestination(pkiSync, PkiSync.AwsSecretsManager as PkiSync); + const awsSecretsManagerPkiSync = awsSecretsManagerPkiSyncFactory({ + certificateDAL: 
dependencies.certificateDAL, + certificateSyncDAL: dependencies.certificateSyncDAL + }); + return awsSecretsManagerPkiSync.syncCertificates(pkiSync, certificateMap); + } + case PkiSync.Chef: { + checkPkiSyncDestination(pkiSync, PkiSync.Chef as PkiSync); + const chefPkiSync = chefPkiSyncFactory({ + certificateDAL: dependencies.certificateDAL, + certificateSyncDAL: dependencies.certificateSyncDAL + }); + return chefPkiSync.syncCertificates(pkiSync, certificateMap); + } default: throw new Error(`Unsupported PKI sync destination: ${String(pkiSync.destination)}`); } @@ -240,7 +272,7 @@ export const PkiSyncFns = { ): Promise => { switch (pkiSync.destination) { case PkiSync.AzureKeyVault: { - checkPkiSyncDestination(pkiSync, PkiSync.AzureKeyVault); + checkPkiSyncDestination(pkiSync, PkiSync.AzureKeyVault as PkiSync); const azureKeyVaultPkiSync = azureKeyVaultPkiSyncFactory({ appConnectionDAL: dependencies.appConnectionDAL, kmsService: dependencies.kmsService, @@ -254,7 +286,7 @@ export const PkiSyncFns = { break; } case PkiSync.AwsCertificateManager: { - checkPkiSyncDestination(pkiSync, PkiSync.AwsCertificateManager); + checkPkiSyncDestination(pkiSync, PkiSync.AwsCertificateManager as PkiSync); const awsCertificateManagerPkiSync = awsCertificateManagerPkiSyncFactory({ appConnectionDAL: dependencies.appConnectionDAL, kmsService: dependencies.kmsService, @@ -267,6 +299,27 @@ export const PkiSyncFns = { }); break; } + case PkiSync.AwsSecretsManager: { + checkPkiSyncDestination(pkiSync, PkiSync.AwsSecretsManager as PkiSync); + const awsSecretsManagerPkiSync = awsSecretsManagerPkiSyncFactory({ + certificateDAL: dependencies.certificateDAL, + certificateSyncDAL: dependencies.certificateSyncDAL + }); + await awsSecretsManagerPkiSync.removeCertificates(pkiSync, dependencies.certificateMap); + break; + } + case PkiSync.Chef: { + checkPkiSyncDestination(pkiSync, PkiSync.Chef as PkiSync); + const chefPkiSync = chefPkiSyncFactory({ + certificateDAL: dependencies.certificateDAL, + 
certificateSyncDAL: dependencies.certificateSyncDAL + }); + await chefPkiSync.removeCertificates(pkiSync, certificateNames, { + certificateSyncDAL: dependencies.certificateSyncDAL, + certificateMap: dependencies.certificateMap + }); + break; + } default: throw new Error(`Unsupported PKI sync destination: ${String(pkiSync.destination)}`); } diff --git a/backend/src/services/pki-sync/pki-sync-maps.ts b/backend/src/services/pki-sync/pki-sync-maps.ts index 5c416b5130..a7edcbc113 100644 --- a/backend/src/services/pki-sync/pki-sync-maps.ts +++ b/backend/src/services/pki-sync/pki-sync-maps.ts @@ -4,10 +4,14 @@ import { PkiSync } from "./pki-sync-enums"; export const PKI_SYNC_NAME_MAP: Record = { [PkiSync.AzureKeyVault]: "Azure Key Vault", - [PkiSync.AwsCertificateManager]: "AWS Certificate Manager" + [PkiSync.AwsCertificateManager]: "AWS Certificate Manager", + [PkiSync.AwsSecretsManager]: "AWS Secrets Manager", + [PkiSync.Chef]: "Chef" }; export const PKI_SYNC_CONNECTION_MAP: Record = { [PkiSync.AzureKeyVault]: AppConnection.AzureKeyVault, - [PkiSync.AwsCertificateManager]: AppConnection.AWS + [PkiSync.AwsCertificateManager]: AppConnection.AWS, + [PkiSync.AwsSecretsManager]: AppConnection.AWS, + [PkiSync.Chef]: AppConnection.Chef }; diff --git a/backend/src/services/pki-sync/pki-sync-queue.ts b/backend/src/services/pki-sync/pki-sync-queue.ts index 608162eadf..c264176f91 100644 --- a/backend/src/services/pki-sync/pki-sync-queue.ts +++ b/backend/src/services/pki-sync/pki-sync-queue.ts @@ -26,6 +26,7 @@ import { TCertificateSecretDALFactory } from "../certificate/certificate-secret- import { TCertificateAuthorityCertDALFactory } from "../certificate-authority/certificate-authority-cert-dal"; import { TCertificateAuthorityDALFactory } from "../certificate-authority/certificate-authority-dal"; import { getCaCertChain } from "../certificate-authority/certificate-authority-fns"; +import { extractRootCaFromChain, removeRootCaFromChain } from 
"../certificate-common/certificate-utils"; import { TCertificateSyncDALFactory } from "../certificate-sync/certificate-sync-dal"; import { CertificateSyncStatus } from "../certificate-sync/certificate-sync-enums"; import { TPkiSyncDALFactory } from "./pki-sync-dal"; @@ -180,11 +181,16 @@ export const pkiSyncQueueFactory = ({ (cert, index, self) => self.findIndex((c) => c.id === cert.id) === index ); - if (uniqueCertificates.length === 0) { + const activeCertificates = uniqueCertificates.filter((cert) => { + const typedCert = cert as TCertificates; + return !typedCert.renewedByCertificateId; + }); + + if (activeCertificates.length === 0) { return { certificateMap, certificateMetadata }; } - certificates = uniqueCertificates; + certificates = activeCertificates; for (const certificate of certificates) { const cert = certificate as TCertificates; @@ -231,13 +237,15 @@ export const pkiSyncQueueFactory = ({ } let certificateChain: string | undefined; + let caCertificate: string | undefined; try { if (certBody.encryptedCertificateChain) { const decryptedCertChain = await kmsDecryptor({ cipherTextBlob: certBody.encryptedCertificateChain }); certificateChain = decryptedCertChain.toString(); - } else if (certificate.caCertId) { + } + if (certificate.caCertId) { const { caCert, caCertChain } = await getCaCertChain({ caCertId: certificate.caCertId, certificateAuthorityDAL, @@ -245,7 +253,10 @@ export const pkiSyncQueueFactory = ({ projectDAL, kmsService }); - certificateChain = `${caCert}\n${caCertChain}`.trim(); + if (!certBody.encryptedCertificateChain) { + certificateChain = `${caCert}\n${caCertChain}`.trim(); + } + caCertificate = certificateChain ? 
extractRootCaFromChain(certificateChain) : caCert; } } catch (chainError) { logger.warn( @@ -254,10 +265,16 @@ export const pkiSyncQueueFactory = ({ ); // Continue without certificate chain certificateChain = undefined; + caCertificate = undefined; } let certificateName: string; - const syncOptions = pkiSync.syncOptions as { certificateNameSchema?: string } | undefined; + const syncOptions = pkiSync.syncOptions as + | { + certificateNameSchema?: string; + includeRootCa?: boolean; + } + | undefined; const certificateNameSchema = syncOptions?.certificateNameSchema; if (certificateNameSchema) { @@ -289,10 +306,16 @@ export const pkiSyncQueueFactory = ({ alternativeNames.push(originalLegacyName); } + let processedCertificateChain = certificateChain; + if (certificateChain && syncOptions?.includeRootCa === false) { + processedCertificateChain = removeRootCaFromChain(certificateChain); + } + certificateMap[certificateName] = { cert: certificatePem, privateKey: certPrivateKey || "", - certificateChain, + certificateChain: processedCertificateChain, + caCertificate, alternativeNames, certificateId: certificate.id }; diff --git a/backend/src/services/pki-sync/pki-sync-schemas.ts b/backend/src/services/pki-sync/pki-sync-schemas.ts index 95023002e7..635103a896 100644 --- a/backend/src/services/pki-sync/pki-sync-schemas.ts +++ b/backend/src/services/pki-sync/pki-sync-schemas.ts @@ -7,6 +7,7 @@ import { PkiSync } from "./pki-sync-enums"; export const PkiSyncOptionsSchema = z.object({ canImportCertificates: z.boolean(), canRemoveCertificates: z.boolean().optional(), + includeRootCa: z.boolean().optional().default(false), certificateNameSchema: z .string() .optional() diff --git a/backend/src/services/pki-sync/pki-sync-types.ts b/backend/src/services/pki-sync/pki-sync-types.ts index f42f64a1bf..fa76ddb553 100644 --- a/backend/src/services/pki-sync/pki-sync-types.ts +++ b/backend/src/services/pki-sync/pki-sync-types.ts @@ -73,7 +73,14 @@ export type TPkiSyncListItem = TPkiSync & { 
export type TCertificateMap = Record< string, - { cert: string; privateKey: string; certificateChain?: string; alternativeNames?: string[]; certificateId?: string } + { + cert: string; + privateKey: string; + certificateChain?: string; + caCertificate?: string; + alternativeNames?: string[]; + certificateId?: string; + } >; export type TCreatePkiSyncDTO = { diff --git a/backend/tsconfig.dev.json b/backend/tsconfig.dev.json new file mode 100644 index 0000000000..4bcbcd5e1c --- /dev/null +++ b/backend/tsconfig.dev.json @@ -0,0 +1,9 @@ +{ + "extends": "./tsconfig.json", + "compilerOptions": { + "paths": { + "@app/*": ["./src/*"], + "@bdd_routes/bdd-nock-router": ["./src/server/routes/bdd/bdd-nock-router.dev.ts"] + } + } +} diff --git a/backend/tsconfig.json b/backend/tsconfig.json index 523e6de5be..db076a30d6 100644 --- a/backend/tsconfig.json +++ b/backend/tsconfig.json @@ -24,7 +24,8 @@ "skipLibCheck": true, "baseUrl": ".", "paths": { - "@app/*": ["./src/*"] + "@app/*": ["./src/*"], + "@bdd_routes/*": ["./src/server/routes/bdd/*"] }, "jsx": "react-jsx" }, diff --git a/backend/tsup.config.js b/backend/tsup.config.js index e09a21ff23..80ec73a14f 100644 --- a/backend/tsup.config.js +++ b/backend/tsup.config.js @@ -2,8 +2,8 @@ import path from "node:path"; import fs from "fs/promises"; -import {replaceTscAliasPaths} from "tsc-alias"; -import {defineConfig} from "tsup"; +import { replaceTscAliasPaths } from "tsc-alias"; +import { defineConfig } from "tsup"; // Instead of using tsx or tsc for building, consider using tsup. // TSX serves as an alternative to Node.js, allowing you to build directly on the Node.js runtime. 
@@ -29,7 +29,7 @@ export default defineConfig({ external: ["../../../frontend/node_modules/next/dist/server/next-server.js"], outDir: "dist", tsconfig: "./tsconfig.json", - entry: ["./src"], + entry: ["./src", "!./src/**/*.dev.ts"], sourceMap: true, skipNodeModulesBundle: true, esbuildPlugins: [ @@ -45,22 +45,22 @@ export default defineConfig({ const isRelativePath = args.path.startsWith("."); const absPath = isRelativePath ? path.join(args.resolveDir, args.path) - : path.join(args.path.replace("@app", "./src")); + : path.join(args.path.replace("@app", "./src").replace("@bdd_routes", "./src/server/routes/bdd")); const isFile = await fs .stat(`${absPath}.ts`) .then((el) => el.isFile) - .catch(async (err) => { - if (err.code === "ENOTDIR") { - return true; - } + .catch(async (err) => { + if (err.code === "ENOTDIR") { + return true; + } - // If .ts file doesn't exist, try checking for .tsx file - return fs - .stat(`${absPath}.tsx`) - .then((el) => el.isFile) - .catch((err) => err.code === "ENOTDIR"); - }); + // If .ts file doesn't exist, try checking for .tsx file + return fs + .stat(`${absPath}.tsx`) + .then((el) => el.isFile) + .catch((err) => err.code === "ENOTDIR"); + }); return { path: isFile ? 
`${args.path}.mjs` : `${args.path}/index.mjs`, diff --git a/backend/vitest.e2e.config.mts b/backend/vitest.e2e.config.mts index 83554b818c..a37ca9518e 100644 --- a/backend/vitest.e2e.config.mts +++ b/backend/vitest.e2e.config.mts @@ -28,7 +28,8 @@ export default defineConfig({ }, resolve: { alias: { - "@app": path.resolve(__dirname, "./src") + "@app": path.resolve(__dirname, "./src"), + "@bdd_routes/bdd-nock-router": path.resolve(__dirname, "./src/server/routes/bdd/bdd-nock-router.dev.ts") } } }); diff --git a/backend/vitest.unit.config.mts b/backend/vitest.unit.config.mts index 97862d2884..aa56063a9c 100644 --- a/backend/vitest.unit.config.mts +++ b/backend/vitest.unit.config.mts @@ -11,7 +11,8 @@ export default defineConfig({ }, resolve: { alias: { - "@app": path.resolve(__dirname, "./src") + "@app": path.resolve(__dirname, "./src"), + "@bdd_routes/bdd-nock-router": path.resolve(__dirname, "./src/server/routes/bdd/bdd-nock-router.dev.ts") } } }); diff --git a/docker-compose.dev.yml b/docker-compose.dev.yml index e60ef1ba56..b75b6df221 100644 --- a/docker-compose.dev.yml +++ b/docker-compose.dev.yml @@ -71,6 +71,7 @@ services: ports: - 4000:4000 - 9464:9464 # for OTEL collection of Prometheus metrics + - 9229:9229 # For debugger access environment: - NODE_ENV=development - DB_CONNECTION_URI=postgres://infisical:infisical@db/infisical?sslmode=disable diff --git a/docs/api-reference/endpoints/pki/syncs/aws-secrets-manager/create.mdx b/docs/api-reference/endpoints/pki/syncs/aws-secrets-manager/create.mdx new file mode 100644 index 0000000000..802a6e6392 --- /dev/null +++ b/docs/api-reference/endpoints/pki/syncs/aws-secrets-manager/create.mdx @@ -0,0 +1,4 @@ +--- +title: "Create AWS Secrets Manager PKI Sync" +openapi: "POST /api/v1/pki/syncs/aws-secrets-manager" +--- \ No newline at end of file diff --git a/docs/api-reference/endpoints/pki/syncs/aws-secrets-manager/delete.mdx b/docs/api-reference/endpoints/pki/syncs/aws-secrets-manager/delete.mdx new file mode 100644 
index 0000000000..9912a9ee15 --- /dev/null +++ b/docs/api-reference/endpoints/pki/syncs/aws-secrets-manager/delete.mdx @@ -0,0 +1,4 @@ +--- +title: "Delete AWS Secrets Manager PKI Sync" +openapi: "DELETE /api/v1/pki/syncs/aws-secrets-manager/{pkiSyncId}" +--- \ No newline at end of file diff --git a/docs/api-reference/endpoints/pki/syncs/aws-secrets-manager/get-by-id.mdx b/docs/api-reference/endpoints/pki/syncs/aws-secrets-manager/get-by-id.mdx new file mode 100644 index 0000000000..9b678dcf58 --- /dev/null +++ b/docs/api-reference/endpoints/pki/syncs/aws-secrets-manager/get-by-id.mdx @@ -0,0 +1,4 @@ +--- +title: "Get AWS Secrets Manager PKI Sync by ID" +openapi: "GET /api/v1/pki/syncs/aws-secrets-manager/{pkiSyncId}" +--- \ No newline at end of file diff --git a/docs/api-reference/endpoints/pki/syncs/aws-secrets-manager/list.mdx b/docs/api-reference/endpoints/pki/syncs/aws-secrets-manager/list.mdx new file mode 100644 index 0000000000..f487770bbc --- /dev/null +++ b/docs/api-reference/endpoints/pki/syncs/aws-secrets-manager/list.mdx @@ -0,0 +1,4 @@ +--- +title: "List AWS Secrets Manager PKI Syncs" +openapi: "GET /api/v1/pki/syncs/aws-secrets-manager" +--- \ No newline at end of file diff --git a/docs/api-reference/endpoints/pki/syncs/aws-secrets-manager/remove-certificates.mdx b/docs/api-reference/endpoints/pki/syncs/aws-secrets-manager/remove-certificates.mdx new file mode 100644 index 0000000000..f049537ab2 --- /dev/null +++ b/docs/api-reference/endpoints/pki/syncs/aws-secrets-manager/remove-certificates.mdx @@ -0,0 +1,4 @@ +--- +title: "Remove Certificates from AWS Secrets Manager" +openapi: "POST /api/v1/pki/syncs/aws-secrets-manager/{pkiSyncId}/remove-certificates" +--- \ No newline at end of file diff --git a/docs/api-reference/endpoints/pki/syncs/aws-secrets-manager/sync-certificates.mdx b/docs/api-reference/endpoints/pki/syncs/aws-secrets-manager/sync-certificates.mdx new file mode 100644 index 0000000000..acecf1b83e --- /dev/null +++ 
b/docs/api-reference/endpoints/pki/syncs/aws-secrets-manager/sync-certificates.mdx @@ -0,0 +1,4 @@ +--- +title: "Sync Certificates to AWS Secrets Manager" +openapi: "POST /api/v1/pki/syncs/aws-secrets-manager/{pkiSyncId}/sync-certificates" +--- \ No newline at end of file diff --git a/docs/api-reference/endpoints/pki/syncs/aws-secrets-manager/update.mdx b/docs/api-reference/endpoints/pki/syncs/aws-secrets-manager/update.mdx new file mode 100644 index 0000000000..b123f39866 --- /dev/null +++ b/docs/api-reference/endpoints/pki/syncs/aws-secrets-manager/update.mdx @@ -0,0 +1,4 @@ +--- +title: "Update AWS Secrets Manager PKI Sync" +openapi: "PATCH /api/v1/pki/syncs/aws-secrets-manager/{pkiSyncId}" +--- \ No newline at end of file diff --git a/docs/api-reference/endpoints/pki/syncs/chef/create.mdx b/docs/api-reference/endpoints/pki/syncs/chef/create.mdx new file mode 100644 index 0000000000..64807de11d --- /dev/null +++ b/docs/api-reference/endpoints/pki/syncs/chef/create.mdx @@ -0,0 +1,4 @@ +--- +title: "Create Chef PKI Sync" +openapi: "POST /api/v1/pki/syncs/chef" +--- \ No newline at end of file diff --git a/docs/api-reference/endpoints/pki/syncs/chef/delete.mdx b/docs/api-reference/endpoints/pki/syncs/chef/delete.mdx new file mode 100644 index 0000000000..b22dbda830 --- /dev/null +++ b/docs/api-reference/endpoints/pki/syncs/chef/delete.mdx @@ -0,0 +1,4 @@ +--- +title: "Delete Chef PKI Sync" +openapi: "DELETE /api/v1/pki/syncs/chef/{pkiSyncId}" +--- \ No newline at end of file diff --git a/docs/api-reference/endpoints/pki/syncs/chef/get-by-id.mdx b/docs/api-reference/endpoints/pki/syncs/chef/get-by-id.mdx new file mode 100644 index 0000000000..ece07770ec --- /dev/null +++ b/docs/api-reference/endpoints/pki/syncs/chef/get-by-id.mdx @@ -0,0 +1,4 @@ +--- +title: "Get Chef PKI Sync by ID" +openapi: "GET /api/v1/pki/syncs/chef/{pkiSyncId}" +--- \ No newline at end of file diff --git a/docs/api-reference/endpoints/pki/syncs/chef/list.mdx 
b/docs/api-reference/endpoints/pki/syncs/chef/list.mdx new file mode 100644 index 0000000000..8e00bed46e --- /dev/null +++ b/docs/api-reference/endpoints/pki/syncs/chef/list.mdx @@ -0,0 +1,4 @@ +--- +title: "List Chef PKI Syncs" +openapi: "GET /api/v1/pki/syncs/chef" +--- \ No newline at end of file diff --git a/docs/api-reference/endpoints/pki/syncs/chef/remove-certificates.mdx b/docs/api-reference/endpoints/pki/syncs/chef/remove-certificates.mdx new file mode 100644 index 0000000000..f4bb6816a8 --- /dev/null +++ b/docs/api-reference/endpoints/pki/syncs/chef/remove-certificates.mdx @@ -0,0 +1,4 @@ +--- +title: "Remove Certificates from Chef" +openapi: "POST /api/v1/pki/syncs/chef/{pkiSyncId}/remove-certificates" +--- \ No newline at end of file diff --git a/docs/api-reference/endpoints/pki/syncs/chef/sync-certificates.mdx b/docs/api-reference/endpoints/pki/syncs/chef/sync-certificates.mdx new file mode 100644 index 0000000000..109248d8a4 --- /dev/null +++ b/docs/api-reference/endpoints/pki/syncs/chef/sync-certificates.mdx @@ -0,0 +1,4 @@ +--- +title: "Sync Certificates to Chef" +openapi: "POST /api/v1/pki/syncs/chef/{pkiSyncId}/sync" +--- \ No newline at end of file diff --git a/docs/api-reference/endpoints/pki/syncs/chef/update.mdx b/docs/api-reference/endpoints/pki/syncs/chef/update.mdx new file mode 100644 index 0000000000..2d08b40e9c --- /dev/null +++ b/docs/api-reference/endpoints/pki/syncs/chef/update.mdx @@ -0,0 +1,4 @@ +--- +title: "Update Chef PKI Sync" +openapi: "PATCH /api/v1/pki/syncs/chef/{pkiSyncId}" +--- \ No newline at end of file diff --git a/docs/cli/commands/gateway.mdx b/docs/cli/commands/gateway.mdx index 32612938a2..e7da7a74f6 100644 --- a/docs/cli/commands/gateway.mdx +++ b/docs/cli/commands/gateway.mdx @@ -40,12 +40,12 @@ sudo infisical gateway start --name= --auth-method= ``` - By default, the gateway automatically connects to the relay with the lowest latency. To target a specific relay, use the `--relay=` flag. 
+ By default, the gateway automatically connects to the relay with the lowest latency. To target a specific relay, use the `--target-relay-name=` flag. Once started, the gateway component will: -- Automatically connect to a healthy relay with the lowest latency (unless the `--relay` flag is specified) +- Automatically connect to a healthy relay with the lowest latency (unless the `--target-relay-name` flag is specified) - Establish outbound SSH reverse tunnel to relay server (no inbound firewall rules needed) - Authenticate using SSH certificates issued by Infisical - Automatically reconnect if the connection is lost @@ -252,14 +252,14 @@ The Gateway supports multiple authentication methods. Below are the available au ### Other Flags - + The name of the relay that this gateway should connect to. The relay must be running and registered before starting the gateway. If this flag is omitted, the gateway will automatically connect to a healthy relay with the lowest latency. ```bash # Example - sudo infisical gateway start --relay=my-relay --name=my-gateway --token= + sudo infisical gateway start --target-relay-name=my-relay --name=my-gateway --token= ``` **Note:** For Infisical Cloud users using instance relays, the relay infrastructure is already running and managed by Infisical. If using organization relays or self-hosted instance relays, you must first start a relay server. For more information on deploying relays, refer to the [Relay Deployment Guide](/documentation/platform/gateways/relay-deployment). @@ -336,14 +336,14 @@ sudo infisical gateway systemd install --token= --domain= --name= - + The name of the relay that this gateway should connect to. The relay must be running and registered before starting the gateway. If this flag is omitted, the gateway will automatically connect to a healthy relay with the lowest latency. 
```bash # Example - sudo infisical gateway systemd install --relay=my-relay --token= --name= + sudo infisical gateway systemd install --target-relay-name=my-relay --token= --name= ``` **Note:** For Infisical Cloud users using instance relays, the relay infrastructure is already running and managed by Infisical. If using organization relays or self-hosted instance relays, you must first start a relay server. For more information on deploying relays, refer to the [Relay Deployment Guide](/documentation/platform/gateways/relay-deployment). @@ -687,7 +687,7 @@ sudo systemctl disable infisical-gateway # Disable auto-start on boot - If the `--relay` flag is omitted, the gateway automatically selects the optimal relay. It first checks for healthy organization relays and connects to the one with the lowest latency. If no organization relays are available, it then performs the same latency-based selection among the available managed relays. + If the `--target-relay-name` flag is omitted, the gateway automatically selects the optimal relay. It first checks for healthy organization relays and connects to the one with the lowest latency. If no organization relays are available, it then performs the same latency-based selection among the available managed relays. No. The first time the gateway starts, it selects the optimal relay (based on latency) and caches that selection. On subsequent restarts, it will prioritize connecting to the cached relay. If it's unable to connect, it will then re-evaluate and connect to the next most optimal relay available. 
diff --git a/docs/docs.json b/docs/docs.json index 8bed5694c3..57f44596fd 100644 --- a/docs/docs.json +++ b/docs/docs.json @@ -766,7 +766,9 @@ "pages": [ "documentation/platform/pki/certificate-syncs/overview", "documentation/platform/pki/certificate-syncs/aws-certificate-manager", - "documentation/platform/pki/certificate-syncs/azure-key-vault" + "documentation/platform/pki/certificate-syncs/aws-secrets-manager", + "documentation/platform/pki/certificate-syncs/azure-key-vault", + "documentation/platform/pki/certificate-syncs/chef" ] }, { @@ -2617,6 +2619,18 @@ "api-reference/endpoints/pki/syncs/aws-certificate-manager/remove-certificates" ] }, + { + "group": "AWS Secrets Manager", + "pages": [ + "api-reference/endpoints/pki/syncs/aws-secrets-manager/list", + "api-reference/endpoints/pki/syncs/aws-secrets-manager/get-by-id", + "api-reference/endpoints/pki/syncs/aws-secrets-manager/create", + "api-reference/endpoints/pki/syncs/aws-secrets-manager/update", + "api-reference/endpoints/pki/syncs/aws-secrets-manager/delete", + "api-reference/endpoints/pki/syncs/aws-secrets-manager/sync-certificates", + "api-reference/endpoints/pki/syncs/aws-secrets-manager/remove-certificates" + ] + }, { "group": "Azure Key Vault", "pages": [ @@ -2628,6 +2642,18 @@ "api-reference/endpoints/pki/syncs/azure-key-vault/sync-certificates", "api-reference/endpoints/pki/syncs/azure-key-vault/remove-certificates" ] + }, + { + "group": "Chef", + "pages": [ + "api-reference/endpoints/pki/syncs/chef/list", + "api-reference/endpoints/pki/syncs/chef/get-by-id", + "api-reference/endpoints/pki/syncs/chef/create", + "api-reference/endpoints/pki/syncs/chef/update", + "api-reference/endpoints/pki/syncs/chef/delete", + "api-reference/endpoints/pki/syncs/chef/sync-certificates", + "api-reference/endpoints/pki/syncs/chef/remove-certificates" + ] } ] } diff --git a/docs/documentation/platform/gateways/gateway-deployment.mdx b/docs/documentation/platform/gateways/gateway-deployment.mdx index 
c7b8763c26..0d81367754 100644 --- a/docs/documentation/platform/gateways/gateway-deployment.mdx +++ b/docs/documentation/platform/gateways/gateway-deployment.mdx @@ -127,7 +127,7 @@ To successfully deploy an Infisical Gateway for use, follow these steps in order ``` - By default, the gateway connects to the most optimal relay. Use the `--relay` flag to manually specify a different relay server. + By default, the gateway connects to the most optimal relay. Use the `--target-relay-name` flag to manually specify a different relay server. @@ -177,7 +177,7 @@ To successfully deploy an Infisical Gateway for use, follow these steps in order ``` - By default, the gateway connects to the most optimal relay. Use the `--relay` flag to manually specify a different relay server. + By default, the gateway connects to the most optimal relay. Use the `--target-relay-name` flag to manually specify a different relay server. diff --git a/docs/documentation/platform/pki/certificate-syncs/aws-certificate-manager.mdx b/docs/documentation/platform/pki/certificate-syncs/aws-certificate-manager.mdx index e33f46f3ee..e2cad7498e 100644 --- a/docs/documentation/platform/pki/certificate-syncs/aws-certificate-manager.mdx +++ b/docs/documentation/platform/pki/certificate-syncs/aws-certificate-manager.mdx @@ -39,6 +39,7 @@ These permissions allow Infisical to list, import, tag, and manage certificates - **Enable Removal of Expired/Revoked Certificates**: If enabled, Infisical will remove certificates from the destination if they are no longer active in Infisical. - **Preserve ARN on Renewal**: If enabled, Infisical will sync renewed certificates to the destination under the same ARN as the original synced certificate instead of creating a new certificate with a new ARN. + - **Include Root CA**: If enabled, the Root CA certificate will be included in the certificate chain when syncing to AWS Certificate Manager. If disabled, only intermediate certificates will be included. 
- **Certificate Name Schema** (Optional): Customize how certificate tags are generated in AWS Certificate Manager. Must include `{{certificateId}}` as a placeholder for the certificate ID to ensure proper certificate identification and management. If not specified, defaults to `Infisical-{{certificateId}}`. - **Auto-Sync Enabled**: If enabled, certificates will automatically be synced when changes occur. Disable to enforce manual syncing only. @@ -86,6 +87,7 @@ These permissions allow Infisical to list, import, tag, and manage certificates "syncOptions": { "canRemoveCertificates": true, "preserveArnOnRenewal": true, + "includeRootCa": false, "certificateNameSchema": "myapp-{{certificateId}}" }, "destinationConfig": { @@ -110,6 +112,7 @@ These permissions allow Infisical to list, import, tag, and manage certificates "syncOptions": { "canRemoveCertificates": true, "preserveArnOnRenewal": true, + "includeRootCa": false, "certificateNameSchema": "myapp-{{certificateId}}" }, "projectId": "3c90c3cc-0d44-4b50-8888-8dd25736052a", diff --git a/docs/documentation/platform/pki/certificate-syncs/aws-secrets-manager.mdx b/docs/documentation/platform/pki/certificate-syncs/aws-secrets-manager.mdx new file mode 100644 index 0000000000..26472ce44f --- /dev/null +++ b/docs/documentation/platform/pki/certificate-syncs/aws-secrets-manager.mdx @@ -0,0 +1,251 @@ +--- +title: "AWS Secrets Manager" +description: "Learn how to configure an AWS Secrets Manager Certificate Sync for Infisical PKI." +--- + +**Prerequisites:** + +- Create an [AWS Connection](/integrations/app-connections/aws) +- Ensure your network security policies allow incoming requests from Infisical to this certificate sync provider, if network restrictions apply. 
+ + + The AWS Secrets Manager Certificate Sync requires the following permissions to be set on the AWS IAM user + for Infisical to sync certificates to AWS Secrets Manager: `secretsmanager:CreateSecret`, `secretsmanager:UpdateSecret`, + `secretsmanager:GetSecretValue`, `secretsmanager:DeleteSecret`, `secretsmanager:ListSecrets`. + +Any role with these permissions would work such as a custom policy with **SecretsManager** permissions. + + + + + Certificates synced to AWS Secrets Manager will be stored as JSON secrets, + preserving both the certificate and private key components as separate fields within the secret value. + + + + + 1. Navigate to **Project** > **Integrations** > **Certificate Syncs** and press **Add Sync**. + ![Certificate Syncs Tab](/images/platform/pki/certificate-syncs/general/create-certificate-sync.png) + + 2. Select the **AWS Secrets Manager** option. + ![Select AWS Secrets Manager](/images/platform/pki/certificate-syncs/aws-secrets-manager/select-aws-secrets-manager-option.png) + + 3. Configure the **Destination** to where certificates should be deployed, then click **Next**. + ![Configure Destination](/images/platform/pki/certificate-syncs/aws-secrets-manager/aws-secrets-manager-destination.png) + + - **AWS Connection**: The AWS Connection to authenticate with. + - **Region**: The AWS region where secrets will be stored. + + 4. Configure the **Sync Options** to specify how certificates should be synced, then click **Next**. + ![Configure Options](/images/platform/pki/certificate-syncs/aws-secrets-manager/aws-secrets-manager-options.png) + + - **Enable Removal of Expired/Revoked Certificates**: If enabled, Infisical will remove certificates from the destination if they are no longer active in Infisical. + - **Preserve Secret on Renewal**: Only applies to certificate renewals. When a certificate is renewed in Infisical, this option controls how the renewed certificate is handled. 
If enabled, the renewed certificate will update the existing secret, preserving the same secret name. If disabled, the renewed certificate will be created as a new secret with a new name. + - **Include Root CA**: If enabled, the Root CA certificate will be included in the certificate chain when syncing to AWS Secrets Manager. If disabled, only intermediate certificates will be included. + - **Certificate Name Schema** (Optional): Customize how secret names are generated in AWS Secrets Manager. Use `{{certificateId}}` as a placeholder for the certificate ID. + - **Auto-Sync Enabled**: If enabled, certificates will automatically be synced when changes occur. Disable to enforce manual syncing only. + + 5. Configure the **Field Mappings** to customize how certificate data is stored in AWS Secrets Manager secrets, then click **Next**. + ![Configure Field Mappings](/images/platform/pki/certificate-syncs/aws-secrets-manager/aws-secrets-manager-field-mappings.png) + + - **Certificate Field**: The field name where the certificate will be stored in the secret value (default: `certificate`) + - **Private Key Field**: The field name where the private key will be stored in the secret value (default: `private_key`) + - **Certificate Chain Field**: The field name where the full certificate chain excluding the root CA certificate will be stored (default: `certificate_chain`) + - **CA Certificate Field**: The field name where the root CA certificate will be stored (default: `ca_certificate`) + + + **AWS Secrets Manager Secret Structure**: Certificates are stored in AWS Secrets Manager as JSON secrets with the following structure (field names can be customized via field mappings): + ```json + { + "certificate": "-----BEGIN CERTIFICATE-----\n...", + "private_key": "-----BEGIN PRIVATE KEY-----\n...", + "certificate_chain": "-----BEGIN CERTIFICATE-----\n...", + "ca_certificate": "-----BEGIN CERTIFICATE-----\n..." 
+ } + ``` + + **Example with Custom Field Mappings**: + ```json + { + "ssl_cert": "-----BEGIN CERTIFICATE-----\n...", + "ssl_key": "-----BEGIN PRIVATE KEY-----\n...", + "ssl_chain": "-----BEGIN CERTIFICATE-----\n...", + "ssl_ca": "-----BEGIN CERTIFICATE-----\n..." + } + ``` + + + 6. Configure the **Details** of your AWS Secrets Manager Certificate Sync, then click **Next**. + ![Configure Details](/images/platform/pki/certificate-syncs/aws-secrets-manager/aws-secrets-manager-details.png) + + - **Name**: The name of your sync. Must be slug-friendly. + - **Description**: An optional description for your sync. + + 7. Select which certificates should be synced to AWS Secrets Manager. + ![Select Certificates](/images/platform/pki/certificate-syncs/aws-secrets-manager/aws-secrets-manager-certificates.png) + + 8. Review your AWS Secrets Manager Certificate Sync configuration, then click **Create Sync**. + ![Confirm Configuration](/images/platform/pki/certificate-syncs/aws-secrets-manager/aws-secrets-manager-review.png) + + 9. If enabled, your AWS Secrets Manager Certificate Sync will begin syncing your certificates to the destination endpoint. + ![Sync Certificates](/images/platform/pki/certificate-syncs/aws-secrets-manager/aws-secrets-manager-synced.png) + + + To create an **AWS Secrets Manager Certificate Sync**, make an API request to the [Create AWS Secrets Manager Certificate Sync](/api-reference/endpoints/pki/syncs/aws-secrets-manager/create) API endpoint. + + ### Sample request + + + You can optionally specify `certificateIds` during sync creation to immediately add certificates to the sync. + If not provided, you can add certificates later using the certificate management endpoints. 
+ + + ```bash Request + curl --request POST \ + --url https://app.infisical.com/api/v1/pki/syncs/aws-secrets-manager \ + --header 'Authorization: Bearer ' \ + --header 'Content-Type: application/json' \ + --data '{ + "name": "my-aws-secrets-manager-cert-sync", + "projectId": "3c90c3cc-0d44-4b50-8888-8dd25736052a", + "description": "an example certificate sync", + "connectionId": "3c90c3cc-0d44-4b50-8888-8dd25736052a", + "destination": "aws-secrets-manager", + "isAutoSyncEnabled": true, + "certificateIds": [ + "550e8400-e29b-41d4-a716-446655440000", + "660f1234-e29b-41d4-a716-446655440001" + ], + "syncOptions": { + "canRemoveCertificates": true, + "preserveSecretOnRenewal": true, + "canImportCertificates": false, + "includeRootCa": false, + "certificateNameSchema": "myapp-{{certificateId}}", + "fieldMappings": { + "certificate": "ssl_cert", + "privateKey": "ssl_key", + "certificateChain": "ssl_chain", + "caCertificate": "ssl_ca" + } + }, + "destinationConfig": { + "region": "us-east-1", + "keyId": "alias/my-kms-key" + } + }' + ``` + + ### Example with Default Field Mappings + + ```bash Request + curl --request POST \ + --url https://app.infisical.com/api/v1/pki/syncs/aws-secrets-manager \ + --header 'Authorization: Bearer ' \ + --header 'Content-Type: application/json' \ + --data '{ + "name": "my-aws-secrets-manager-cert-sync-default", + "projectId": "3c90c3cc-0d44-4b50-8888-8dd25736052a", + "description": "AWS Secrets Manager sync with default field mappings", + "connectionId": "3c90c3cc-0d44-4b50-8888-8dd25736052a", + "destination": "aws-secrets-manager", + "isAutoSyncEnabled": true, + "syncOptions": { + "canRemoveCertificates": true, + "preserveSecretOnRenewal": true, + "canImportCertificates": false, + "includeRootCa": false, + "certificateNameSchema": "infisical-{{certificateId}}", + "fieldMappings": { + "certificate": "certificate", + "privateKey": "private_key", + "certificateChain": "certificate_chain", + "caCertificate": "ca_certificate" + } + }, + 
"destinationConfig": { + "region": "us-west-2" + } + }' + ``` + + ### Sample response + + ```json Response + { + "pkiSync": { + "id": "3c90c3cc-0d44-4b50-8888-8dd25736052a", + "name": "my-aws-secrets-manager-cert-sync", + "description": "an example certificate sync", + "destination": "aws-secrets-manager", + "isAutoSyncEnabled": true, + "destinationConfig": { + "region": "us-east-1", + "keyId": "alias/my-kms-key" + }, + "syncOptions": { + "canRemoveCertificates": true, + "preserveSecretOnRenewal": true, + "canImportCertificates": false, + "includeRootCa": false, + "certificateNameSchema": "myapp-{{certificateId}}", + "fieldMappings": { + "certificate": "ssl_cert", + "privateKey": "ssl_key", + "certificateChain": "ssl_chain", + "caCertificate": "ssl_ca" + } + }, + "projectId": "3c90c3cc-0d44-4b50-8888-8dd25736052a", + "connectionId": "3c90c3cc-0d44-4b50-8888-8dd25736052a", + "createdAt": "2023-01-01T00:00:00.000Z", + "updatedAt": "2023-01-01T00:00:00.000Z" + } + } + ``` + + + + +## Certificate Management + +Your AWS Secrets Manager Certificate Sync will: + +- **Automatic Deployment**: Deploy certificates in Infisical to AWS Secrets Manager as JSON secrets with customizable field names +- **Certificate Updates**: Update certificates in AWS Secrets Manager when renewals occur +- **Expiration Handling**: Optionally remove expired certificates from AWS Secrets Manager (if enabled) +- **Format Preservation**: Maintain certificate format during sync operations +- **Field Customization**: Map certificate data to custom field names that match your application requirements +- **CA Certificate Support**: Include CA certificates in secrets for complete certificate chain management +- **KMS Encryption**: Optionally use custom KMS keys for secret encryption +- **Regional Deployment**: Deploy secrets to specific AWS regions + + + AWS Secrets Manager Certificate Syncs support both automatic and manual + synchronization modes. 
When auto-sync is enabled, certificates are + automatically deployed as they are issued or renewed. + + +## Manual Certificate Sync + +You can manually trigger certificate synchronization to AWS Secrets Manager using the sync certificates functionality. This is useful for: + +- Initial setup when you have existing certificates to deploy +- One-time sync of specific certificates +- Testing certificate sync configurations +- Force sync after making changes + +To manually sync certificates, use the [Sync Certificates](/api-reference/endpoints/pki/syncs/aws-secrets-manager/sync-certificates) API endpoint or the manual sync option in the Infisical UI. + + + AWS Secrets Manager does not support importing certificates back into Infisical + due to the nature of AWS Secrets Manager where certificates are stored as JSON secrets + rather than managed certificate objects. + + +## Secret Naming Constraints + +AWS Secrets Manager has specific naming requirements for secrets: + +- **Allowed Characters**: Letters, numbers, hyphens (-), and underscores (_) only +- **Length**: 1-512 characters diff --git a/docs/documentation/platform/pki/certificate-syncs/azure-key-vault.mdx b/docs/documentation/platform/pki/certificate-syncs/azure-key-vault.mdx index cfdbfe136c..135c741128 100644 --- a/docs/documentation/platform/pki/certificate-syncs/azure-key-vault.mdx +++ b/docs/documentation/platform/pki/certificate-syncs/azure-key-vault.mdx @@ -40,6 +40,7 @@ Any role with these permissions would work such as the **Key Vault Certificates - **Enable Removal of Expired/Revoked Certificates**: If enabled, Infisical will remove certificates from the destination if they are no longer active in Infisical. - **Enable Versioning on Renewal**: If enabled, Infisical will sync renewed certificates to the destination under a new version of the original synced certificate instead of creating a new certificate. 
+ - **Include Root CA**: If enabled, the Root CA certificate will be included in the certificate chain when syncing to Azure Key Vault. If disabled, only intermediate certificates will be included. - **Certificate Name Schema** (Optional): Customize how certificate names are generated in Azure Key Vault. Use `{{certificateId}}` as a placeholder for the certificate ID. If not specified, defaults to `Infisical-{{certificateId}}`. - **Auto-Sync Enabled**: If enabled, certificates will automatically be synced when changes occur. Disable to enforce manual syncing only. @@ -93,6 +94,7 @@ Any role with these permissions would work such as the **Key Vault Certificates "syncOptions": { "canRemoveCertificates": true, "enableVersioningOnRenewal": true, + "includeRootCa": false, "certificateNameSchema": "myapp-{{certificateId}}" }, "destinationConfig": { @@ -117,6 +119,7 @@ Any role with these permissions would work such as the **Key Vault Certificates "syncOptions": { "canRemoveCertificates": true, "enableVersioningOnRenewal": true, + "includeRootCa": false, "certificateNameSchema": "myapp-{{certificateId}}" }, "projectId": "3c90c3cc-0d44-4b50-8888-8dd25736052a", diff --git a/docs/documentation/platform/pki/certificate-syncs/chef.mdx b/docs/documentation/platform/pki/certificate-syncs/chef.mdx new file mode 100644 index 0000000000..506a2c76a5 --- /dev/null +++ b/docs/documentation/platform/pki/certificate-syncs/chef.mdx @@ -0,0 +1,241 @@ +--- +title: "Chef" +description: "Learn how to configure a Chef Certificate Sync for Infisical PKI." +--- + +**Prerequisites:** + +- Create a [Chef Connection](/integrations/app-connections/chef) +- Ensure your network security policies allow incoming requests from Infisical to this certificate sync provider, if network restrictions apply. 
+ + + The Chef Certificate Sync requires the following permissions to be set on the Chef user + for Infisical to sync certificates to Chef: `data bag read`, `data bag create`, `data bag update`, `data bag delete`. + +Any role with these permissions would work such as a custom role with **Data Bag** permissions. + + + + + Certificates synced to Chef will be stored as data bag items within the specified data bag, + preserving both the certificate and private key components as separate fields. + + + + + 1. Navigate to **Project** > **Integrations** > **Certificate Syncs** and press **Add Sync**. + ![Certificate Syncs Tab](/images/platform/pki/certificate-syncs/general/create-certificate-sync.png) + + 2. Select the **Chef** option. + ![Select Chef](/images/platform/pki/certificate-syncs/chef/select-chef-option.png) + + 3. Configure the **Destination** to where certificates should be deployed, then click **Next**. + ![Configure Destination](/images/platform/pki/certificate-syncs/chef/chef-destination.png) + + - **Chef Connection**: The Chef Connection to authenticate with. + - **Data Bag Name**: The name of the Chef data bag where certificates will be stored. + + 4. Configure the **Sync Options** to specify how certificates should be synced, then click **Next**. + ![Configure Options](/images/platform/pki/certificate-syncs/chef/chef-options.png) + + - **Enable Removal of Expired/Revoked Certificates**: If enabled, Infisical will remove certificates from the destination if they are no longer active in Infisical. + - **Preserve Data Bag Item on Renewal**: Only applies to certificate renewals. When a certificate is renewed in Infisical, this option controls how the renewed certificate is handled. If enabled, the renewed certificate will update the existing data bag item, preserving the same item name. If disabled, the renewed certificate will be created as a new data bag item with a new name. 
+ - **Include Root CA**: If enabled, the Root CA certificate will be included in the certificate chain when syncing to Chef data bags. If disabled, only intermediate certificates will be included. + - **Certificate Name Schema** (Optional): Customize how certificate item names are generated in Chef data bags. Use `{{certificateId}}` as a placeholder for the certificate ID. + - **Auto-Sync Enabled**: If enabled, certificates will automatically be synced when changes occur. Disable to enforce manual syncing only. + + 5. Configure the **Field Mappings** to customize how certificate data is stored in Chef data bag items, then click **Next**. + ![Configure Field Mappings](/images/platform/pki/certificate-syncs/chef/chef-field-mappings.png) + + - **Certificate Field**: The field name where the certificate will be stored in the data bag item (default: `certificate`) + - **Private Key Field**: The field name where the private key will be stored in the data bag item (default: `private_key`) + - **Certificate Chain Field**: The field name where the certificate chain will be stored; the root CA certificate is excluded unless **Include Root CA** is enabled (default: `certificate_chain`) + - **CA Certificate Field**: The field name where the root CA certificate will be stored (default: `ca_certificate`) + + + **Chef Data Bag Item Structure**: Certificates are stored in Chef data bags as items with the following structure (field names can be customized via field mappings): + ```json + { + "id": "certificate-item-name", + "certificate": "-----BEGIN CERTIFICATE-----\n...", + "private_key": "-----BEGIN PRIVATE KEY-----\n...", + "certificate_chain": "-----BEGIN CERTIFICATE-----\n...", + "ca_certificate": "-----BEGIN CERTIFICATE-----\n..."
+ } + ``` + + **Example with Custom Field Mappings**: + ```json + { + "id": "certificate-item-name", + "ssl_cert": "-----BEGIN CERTIFICATE-----\n...", + "ssl_key": "-----BEGIN PRIVATE KEY-----\n...", + "ssl_chain": "-----BEGIN CERTIFICATE-----\n...", + "ssl_ca": "-----BEGIN CERTIFICATE-----\n..." + } + ``` + + + 6. Configure the **Details** of your Chef Certificate Sync, then click **Next**. + ![Configure Details](/images/platform/pki/certificate-syncs/chef/chef-details.png) + + - **Name**: The name of your sync. Must be slug-friendly. + - **Description**: An optional description for your sync. + + 7. Select which certificates should be synced to Chef. + ![Select Certificates](/images/platform/pki/certificate-syncs/chef/chef-certificates.png) + + 8. Review your Chef Certificate Sync configuration, then click **Create Sync**. + ![Confirm Configuration](/images/platform/pki/certificate-syncs/chef/chef-review.png) + + 9. If enabled, your Chef Certificate Sync will begin syncing your certificates to the destination endpoint. + ![Sync Certificates](/images/platform/pki/certificate-syncs/chef/chef-synced.png) + + + To create a **Chef Certificate Sync**, make an API request to the [Create Chef Certificate Sync](/api-reference/endpoints/pki/syncs/chef/create) API endpoint. + + ### Sample request + + + You can optionally specify `certificateIds` during sync creation to immediately add certificates to the sync. + If not provided, you can add certificates later using the certificate management endpoints. 
+ + + ```bash Request + curl --request POST \ + --url https://app.infisical.com/api/v1/pki/syncs/chef \ + --header 'Authorization: Bearer <token>' \ + --header 'Content-Type: application/json' \ + --data '{ + "name": "my-chef-cert-sync", + "projectId": "3c90c3cc-0d44-4b50-8888-8dd25736052a", + "description": "an example certificate sync", + "connectionId": "3c90c3cc-0d44-4b50-8888-8dd25736052a", + "destination": "chef", + "isAutoSyncEnabled": true, + "certificateIds": [ + "550e8400-e29b-41d4-a716-446655440000", + "660f1234-e29b-41d4-a716-446655440001" + ], + "syncOptions": { + "canRemoveCertificates": true, + "preserveSecretOnRenewal": true, + "canImportCertificates": false, + "includeRootCa": false, + "certificateNameSchema": "myapp-{{certificateId}}", + "fieldMappings": { + "certificate": "ssl_cert", + "privateKey": "ssl_key", + "certificateChain": "ssl_chain", + "caCertificate": "ssl_ca" + } + }, + "destinationConfig": { + "dataBagName": "ssl_certificates" + } + }' + ``` + + ### Example with Default Field Mappings + + ```bash Request + curl --request POST \ + --url https://app.infisical.com/api/v1/pki/syncs/chef \ + --header 'Authorization: Bearer <token>' \ + --header 'Content-Type: application/json' \ + --data '{ + "name": "my-chef-cert-sync-default", + "projectId": "3c90c3cc-0d44-4b50-8888-8dd25736052a", + "description": "Chef sync with default field mappings", + "connectionId": "3c90c3cc-0d44-4b50-8888-8dd25736052a", + "destination": "chef", + "isAutoSyncEnabled": true, + "syncOptions": { + "canRemoveCertificates": true, + "preserveSecretOnRenewal": true, + "canImportCertificates": false, + "includeRootCa": false, + "certificateNameSchema": "{{commonName}}-{{certificateId}}", + "fieldMappings": { + "certificate": "certificate", + "privateKey": "private_key", + "certificateChain": "certificate_chain", + "caCertificate": "ca_certificate" + } + }, + "destinationConfig": { + "dataBagName": "certificates" + } + }' + ``` + + ### Sample response + + ```json Response + { + 
"pkiSync": { + "id": "3c90c3cc-0d44-4b50-8888-8dd25736052a", + "name": "my-chef-cert-sync", + "description": "an example certificate sync", + "destination": "chef", + "isAutoSyncEnabled": true, + "destinationConfig": { + "dataBagName": "ssl_certificates" + }, + "syncOptions": { + "canRemoveCertificates": true, + "preserveSecretOnRenewal": true, + "canImportCertificates": false, + "includeRootCa": false, + "certificateNameSchema": "myapp-{{certificateId}}", + "fieldMappings": { + "certificate": "ssl_cert", + "privateKey": "ssl_key", + "certificateChain": "ssl_chain", + "caCertificate": "ssl_ca" + } + }, + "projectId": "3c90c3cc-0d44-4b50-8888-8dd25736052a", + "connectionId": "3c90c3cc-0d44-4b50-8888-8dd25736052a", + "createdAt": "2023-01-01T00:00:00.000Z", + "updatedAt": "2023-01-01T00:00:00.000Z" + } + } + ``` + + + + +## Certificate Management + +Your Chef Certificate Sync will: + +- **Automatic Deployment**: Deploy certificates in Infisical to Chef data bags with customizable field names +- **Certificate Updates**: Update certificates in Chef data bags when renewals occur +- **Expiration Handling**: Optionally remove expired certificates from Chef data bags (if enabled) +- **Format Preservation**: Maintain certificate format during sync operations +- **Field Customization**: Map certificate data to custom field names that match your Chef cookbook requirements +- **CA Certificate Support**: Include CA certificates in data bag items for complete certificate chain management + + + Chef Certificate Syncs support both automatic and manual + synchronization modes. When auto-sync is enabled, certificates are + automatically deployed as they are issued or renewed. + + +## Manual Certificate Sync + +You can manually trigger certificate synchronization to Chef using the sync certificates functionality. 
This is useful for: + +- Initial setup when you have existing certificates to deploy +- One-time sync of specific certificates +- Testing certificate sync configurations +- Force sync after making changes + +To manually sync certificates, use the [Sync Certificates](/api-reference/endpoints/pki/syncs/chef/sync-certificates) API endpoint or the manual sync option in the Infisical UI. + + + Chef does not support importing certificates back into Infisical + due to the nature of Chef data bags where certificates are stored as data + rather than managed certificate objects. + \ No newline at end of file diff --git a/docs/documentation/platform/pki/certificate-syncs/overview.mdx b/docs/documentation/platform/pki/certificate-syncs/overview.mdx index d7931d8936..b72be71f90 100644 --- a/docs/documentation/platform/pki/certificate-syncs/overview.mdx +++ b/docs/documentation/platform/pki/certificate-syncs/overview.mdx @@ -83,6 +83,7 @@ should be synced. Follow these steps to start syncing: - Certificates: The certificates you wish to push to the destination. - Options: Customize how certificates should be synced, including: - Whether certificates should be removed from the destination when they expire. + - Whether to include the Root CA certificate in the certificate chain. - Certificate naming schema to control how certificate names are generated in the destination. 
diff --git a/docs/images/platform/pki/certificate-syncs/aws-certificate-manager/acm-options.png b/docs/images/platform/pki/certificate-syncs/aws-certificate-manager/acm-options.png index 03254ee9f1..f1f5f32664 100644 Binary files a/docs/images/platform/pki/certificate-syncs/aws-certificate-manager/acm-options.png and b/docs/images/platform/pki/certificate-syncs/aws-certificate-manager/acm-options.png differ diff --git a/docs/images/platform/pki/certificate-syncs/aws-secrets-manager/aws-secrets-manager-certificates.png b/docs/images/platform/pki/certificate-syncs/aws-secrets-manager/aws-secrets-manager-certificates.png new file mode 100644 index 0000000000..58eaae85fd Binary files /dev/null and b/docs/images/platform/pki/certificate-syncs/aws-secrets-manager/aws-secrets-manager-certificates.png differ diff --git a/docs/images/platform/pki/certificate-syncs/aws-secrets-manager/aws-secrets-manager-destination.png b/docs/images/platform/pki/certificate-syncs/aws-secrets-manager/aws-secrets-manager-destination.png new file mode 100644 index 0000000000..559d7ab5cd Binary files /dev/null and b/docs/images/platform/pki/certificate-syncs/aws-secrets-manager/aws-secrets-manager-destination.png differ diff --git a/docs/images/platform/pki/certificate-syncs/aws-secrets-manager/aws-secrets-manager-details.png b/docs/images/platform/pki/certificate-syncs/aws-secrets-manager/aws-secrets-manager-details.png new file mode 100644 index 0000000000..d3617bcd15 Binary files /dev/null and b/docs/images/platform/pki/certificate-syncs/aws-secrets-manager/aws-secrets-manager-details.png differ diff --git a/docs/images/platform/pki/certificate-syncs/aws-secrets-manager/aws-secrets-manager-field-mappings.png b/docs/images/platform/pki/certificate-syncs/aws-secrets-manager/aws-secrets-manager-field-mappings.png new file mode 100644 index 0000000000..04b158346c Binary files /dev/null and b/docs/images/platform/pki/certificate-syncs/aws-secrets-manager/aws-secrets-manager-field-mappings.png 
differ diff --git a/docs/images/platform/pki/certificate-syncs/aws-secrets-manager/aws-secrets-manager-options.png b/docs/images/platform/pki/certificate-syncs/aws-secrets-manager/aws-secrets-manager-options.png new file mode 100644 index 0000000000..b957bad8f6 Binary files /dev/null and b/docs/images/platform/pki/certificate-syncs/aws-secrets-manager/aws-secrets-manager-options.png differ diff --git a/docs/images/platform/pki/certificate-syncs/aws-secrets-manager/aws-secrets-manager-review.png b/docs/images/platform/pki/certificate-syncs/aws-secrets-manager/aws-secrets-manager-review.png new file mode 100644 index 0000000000..23733e1bfd Binary files /dev/null and b/docs/images/platform/pki/certificate-syncs/aws-secrets-manager/aws-secrets-manager-review.png differ diff --git a/docs/images/platform/pki/certificate-syncs/aws-secrets-manager/aws-secrets-manager-synced.png b/docs/images/platform/pki/certificate-syncs/aws-secrets-manager/aws-secrets-manager-synced.png new file mode 100644 index 0000000000..b230bc5548 Binary files /dev/null and b/docs/images/platform/pki/certificate-syncs/aws-secrets-manager/aws-secrets-manager-synced.png differ diff --git a/docs/images/platform/pki/certificate-syncs/aws-secrets-manager/select-aws-secrets-manager-option.png b/docs/images/platform/pki/certificate-syncs/aws-secrets-manager/select-aws-secrets-manager-option.png new file mode 100644 index 0000000000..fbc4115b74 Binary files /dev/null and b/docs/images/platform/pki/certificate-syncs/aws-secrets-manager/select-aws-secrets-manager-option.png differ diff --git a/docs/images/platform/pki/certificate-syncs/azure-key-vault/akv-options.png b/docs/images/platform/pki/certificate-syncs/azure-key-vault/akv-options.png index 69cd37d216..bea535d882 100644 Binary files a/docs/images/platform/pki/certificate-syncs/azure-key-vault/akv-options.png and b/docs/images/platform/pki/certificate-syncs/azure-key-vault/akv-options.png differ diff --git 
a/docs/images/platform/pki/certificate-syncs/chef/chef-certificates.png b/docs/images/platform/pki/certificate-syncs/chef/chef-certificates.png new file mode 100644 index 0000000000..c4d7022ea9 Binary files /dev/null and b/docs/images/platform/pki/certificate-syncs/chef/chef-certificates.png differ diff --git a/docs/images/platform/pki/certificate-syncs/chef/chef-destination.png b/docs/images/platform/pki/certificate-syncs/chef/chef-destination.png new file mode 100644 index 0000000000..b8cc5c3651 Binary files /dev/null and b/docs/images/platform/pki/certificate-syncs/chef/chef-destination.png differ diff --git a/docs/images/platform/pki/certificate-syncs/chef/chef-details.png b/docs/images/platform/pki/certificate-syncs/chef/chef-details.png new file mode 100644 index 0000000000..aa568b2adb Binary files /dev/null and b/docs/images/platform/pki/certificate-syncs/chef/chef-details.png differ diff --git a/docs/images/platform/pki/certificate-syncs/chef/chef-field-mappings.png b/docs/images/platform/pki/certificate-syncs/chef/chef-field-mappings.png new file mode 100644 index 0000000000..de580f849d Binary files /dev/null and b/docs/images/platform/pki/certificate-syncs/chef/chef-field-mappings.png differ diff --git a/docs/images/platform/pki/certificate-syncs/chef/chef-options.png b/docs/images/platform/pki/certificate-syncs/chef/chef-options.png new file mode 100644 index 0000000000..7955d0ae6f Binary files /dev/null and b/docs/images/platform/pki/certificate-syncs/chef/chef-options.png differ diff --git a/docs/images/platform/pki/certificate-syncs/chef/chef-review.png b/docs/images/platform/pki/certificate-syncs/chef/chef-review.png new file mode 100644 index 0000000000..43f294585d Binary files /dev/null and b/docs/images/platform/pki/certificate-syncs/chef/chef-review.png differ diff --git a/docs/images/platform/pki/certificate-syncs/chef/chef-synced.png b/docs/images/platform/pki/certificate-syncs/chef/chef-synced.png new file mode 100644 index 
0000000000..f4ed46add6 Binary files /dev/null and b/docs/images/platform/pki/certificate-syncs/chef/chef-synced.png differ diff --git a/docs/images/platform/pki/certificate-syncs/chef/select-chef-option.png b/docs/images/platform/pki/certificate-syncs/chef/select-chef-option.png new file mode 100644 index 0000000000..d7b8d9c9b2 Binary files /dev/null and b/docs/images/platform/pki/certificate-syncs/chef/select-chef-option.png differ diff --git a/docs/self-hosting/ee.mdx b/docs/self-hosting/ee.mdx index 83b2772e49..9c3e7d644c 100644 --- a/docs/self-hosting/ee.mdx +++ b/docs/self-hosting/ee.mdx @@ -14,14 +14,13 @@ This guide walks through how you can use these paid features on a self-hosted in Once purchased, you will be issued a license key. - Depending on whether or not the environment where Infisical is deployed has internet access, you may be issued a regular license or an offline license. + Set your license key as the value of the **LICENSE_KEY** environment variable within your Infisical instance. - - Assign the issued license key to the `LICENSE_KEY` environment variable in your Infisical instance. - - Your Infisical instance will need to communicate with the Infisical license server to validate the license key. + - Your Infisical instance will need to communicate with the Infisical license server to validate the license key. If you want to limit outgoing connections only to the Infisical license server, you can use the following IP addresses: `13.248.249.247` and `35.71.190.59` @@ -29,16 +28,18 @@ This guide walks through how you can use these paid features on a self-hosted in - - Assign the issued license key to the `LICENSE_KEY_OFFLINE` environment variable in your Infisical instance. + - Assign the issued offline license key to the `LICENSE_KEY` environment variable in your Infisical instance. + + - The system will automatically detect that it's an offline license based on the key format. 
- How you set the environment variable will depend on the deployment method you used. Please refer to the documentation of your deployment method for specific instructions. + While the LICENSE_KEY_OFFLINE environment variable continues to be supported for compatibility with existing configurations, we recommend transitioning to LICENSE_KEY for all license types going forward. - Once your instance starts up, the license key will be validated and you’ll be able to use the paid features. + Once your instance starts up, the license key will be validated and you'll be able to use the paid features. However, when the license expires, Infisical will continue to run, but EE features will be disabled until the license is renewed or a new one is purchased. - + diff --git a/frontend/src/components/pki-syncs/CreatePkiSyncModal.tsx b/frontend/src/components/pki-syncs/CreatePkiSyncModal.tsx index 0169b22965..5b38415a51 100644 --- a/frontend/src/components/pki-syncs/CreatePkiSyncModal.tsx +++ b/frontend/src/components/pki-syncs/CreatePkiSyncModal.tsx @@ -64,7 +64,7 @@ export const CreatePkiSyncModal = ({ "Add Sync" ) } - className="max-w-2xl" + className="max-w-3xl" bodyClassName="overflow-visible" subTitle={ selectedSync ? undefined : "Select a third-party service to sync certificates to." diff --git a/frontend/src/components/pki-syncs/EditPkiSyncModal.tsx b/frontend/src/components/pki-syncs/EditPkiSyncModal.tsx index db5745aad8..0fe3dabe60 100644 --- a/frontend/src/components/pki-syncs/EditPkiSyncModal.tsx +++ b/frontend/src/components/pki-syncs/EditPkiSyncModal.tsx @@ -15,11 +15,13 @@ type Props = { export const EditPkiSyncModal = ({ pkiSync, onOpenChange, fields, ...props }: Props) => { if (!pkiSync) return null; + const modalClassName = fields === PkiSyncEditFields.Mappings ? 
"max-w-4xl" : "max-w-2xl"; + return ( } - className="max-w-2xl" + className={modalClassName} bodyClassName="overflow-visible" > onOpenChange(false)} fields={fields} pkiSync={pkiSync} /> diff --git a/frontend/src/components/pki-syncs/forms/AwsSecretsManagerPkiSyncFields.tsx b/frontend/src/components/pki-syncs/forms/AwsSecretsManagerPkiSyncFields.tsx new file mode 100644 index 0000000000..fe9e52efb1 --- /dev/null +++ b/frontend/src/components/pki-syncs/forms/AwsSecretsManagerPkiSyncFields.tsx @@ -0,0 +1,50 @@ +import { Controller, useFormContext } from "react-hook-form"; + +import { FormControl, Select, SelectItem } from "@app/components/v2"; +import { AWS_REGIONS } from "@app/helpers/appConnections"; +import { PkiSync } from "@app/hooks/api/pkiSyncs"; + +import { TPkiSyncForm } from "./schemas/pki-sync-schema"; +import { PkiSyncConnectionField } from "./PkiSyncConnectionField"; + +export const AwsSecretsManagerPkiSyncFields = () => { + const { control, setValue } = useFormContext< + TPkiSyncForm & { destination: PkiSync.AwsSecretsManager } + >(); + + return ( + <> + { + setValue("destinationConfig.region", ""); + }} + /> + ( + + + + )} + /> + + ); +}; diff --git a/frontend/src/components/pki-syncs/forms/ChefPkiSyncFields.tsx b/frontend/src/components/pki-syncs/forms/ChefPkiSyncFields.tsx new file mode 100644 index 0000000000..ffd4de9074 --- /dev/null +++ b/frontend/src/components/pki-syncs/forms/ChefPkiSyncFields.tsx @@ -0,0 +1,35 @@ +import { Controller, useFormContext } from "react-hook-form"; + +import { FormControl, Input } from "@app/components/v2"; +import { PkiSync } from "@app/hooks/api/pkiSyncs"; + +import { TPkiSyncForm } from "./schemas/pki-sync-schema"; +import { PkiSyncConnectionField } from "./PkiSyncConnectionField"; + +export const ChefPkiSyncFields = () => { + const { control, setValue } = useFormContext(); + + return ( + <> + { + setValue("destinationConfig.dataBagName", ""); + }} + /> + ( + + + + )} + /> + + ); +}; diff --git 
a/frontend/src/components/pki-syncs/forms/CreatePkiSyncForm.tsx b/frontend/src/components/pki-syncs/forms/CreatePkiSyncForm.tsx index 1dee6eaa7f..068518a115 100644 --- a/frontend/src/components/pki-syncs/forms/CreatePkiSyncForm.tsx +++ b/frontend/src/components/pki-syncs/forms/CreatePkiSyncForm.tsx @@ -4,7 +4,6 @@ import { faInfoCircle } from "@fortawesome/free-solid-svg-icons"; import { FontAwesomeIcon } from "@fortawesome/react-fontawesome"; import { Tab } from "@headlessui/react"; import { zodResolver } from "@hookform/resolvers/zod"; -import { twMerge } from "tailwind-merge"; import { createNotification } from "@app/components/notifications"; import { Button, FormControl, Switch } from "@app/components/v2"; @@ -16,6 +15,7 @@ import { PkiSyncFormSchema, TPkiSyncForm } from "./schemas/pki-sync-schema"; import { PkiSyncCertificatesFields } from "./PkiSyncCertificatesFields"; import { PkiSyncDestinationFields } from "./PkiSyncDestinationFields"; import { PkiSyncDetailsFields } from "./PkiSyncDetailsFields"; +import { PkiSyncFieldMappingsFields } from "./PkiSyncFieldMappingsFields"; import { PkiSyncOptionsFields } from "./PkiSyncOptionsFields"; import { PkiSyncReviewFields } from "./PkiSyncReviewFields"; @@ -26,13 +26,38 @@ type Props = { initialData?: any; }; -const FORM_TABS: { name: string; key: string; fields: (keyof TPkiSyncForm)[] }[] = [ - { name: "Destination", key: "destination", fields: ["connection", "destinationConfig"] }, - { name: "Sync Options", key: "options", fields: ["syncOptions"] }, - { name: "Details", key: "details", fields: ["name", "description"] }, - { name: "Certificates", key: "certificates", fields: ["certificateIds"] }, - { name: "Review", key: "review", fields: [] } -]; +const getFormTabs = ( + destination: PkiSync +): { name: string; key: string; fields: (keyof TPkiSyncForm)[] }[] => { + const baseTabs = [ + { + name: "Destination", + key: "destination", + fields: ["connection", "destinationConfig"] as (keyof TPkiSyncForm)[] + }, + { 
name: "Sync Options", key: "options", fields: ["syncOptions"] as (keyof TPkiSyncForm)[] } + ]; + + if (destination === PkiSync.Chef || destination === PkiSync.AwsSecretsManager) { + baseTabs.push({ + name: "Mappings", + key: "mappings", + fields: ["syncOptions"] as (keyof TPkiSyncForm)[] + }); + } + + baseTabs.push( + { name: "Details", key: "details", fields: ["name", "description"] as (keyof TPkiSyncForm)[] }, + { + name: "Certificates", + key: "certificates", + fields: ["certificateIds"] as (keyof TPkiSyncForm)[] + }, + { name: "Review", key: "review", fields: [] as (keyof TPkiSyncForm)[] } + ); + + return baseTabs; +}; export const CreatePkiSyncForm = ({ destination, onComplete, onCancel, initialData }: Props) => { const createPkiSync = useCreatePkiSync(); @@ -42,6 +67,7 @@ export const CreatePkiSyncForm = ({ destination, onComplete, onCancel, initialDa const [showConfirmation, setShowConfirmation] = useState(false); const [selectedTabIndex, setSelectedTabIndex] = useState(0); + const FORM_TABS = getFormTabs(destination); const { syncOption } = usePkiSyncOption(destination); @@ -55,7 +81,19 @@ export const CreatePkiSyncForm = ({ destination, onComplete, onCancel, initialDa canImportCertificates: false, canRemoveCertificates: false, preserveArn: true, - certificateNameSchema: syncOption?.defaultCertificateNameSchema + certificateNameSchema: syncOption?.defaultCertificateNameSchema, + ...((destination === PkiSync.Chef || destination === PkiSync.AwsSecretsManager) && { + fieldMappings: { + certificate: "certificate", + privateKey: "private_key", + certificateChain: "certificate_chain", + caCertificate: "ca_certificate" + } + }), + ...(destination === PkiSync.AwsSecretsManager && { + preserveSecretOnRenewal: true, + updateExistingCertificates: true + }) }, ...initialData } as Partial, @@ -167,10 +205,10 @@ export const CreatePkiSyncForm = ({ destination, onComplete, onCancel, initialDa ); return ( -
+ - + {FORM_TABS.map((tab, index) => ( { @@ -191,11 +229,11 @@ export const CreatePkiSyncForm = ({ destination, onComplete, onCancel, initialDa ))} - - + + - + - + {(destination === PkiSync.Chef || destination === PkiSync.AwsSecretsManager) && ( + + + + )} + - + - + -
+
diff --git a/frontend/src/components/pki-syncs/forms/EditPkiSyncForm.tsx b/frontend/src/components/pki-syncs/forms/EditPkiSyncForm.tsx index 9041d32f25..380ba039e5 100644 --- a/frontend/src/components/pki-syncs/forms/EditPkiSyncForm.tsx +++ b/frontend/src/components/pki-syncs/forms/EditPkiSyncForm.tsx @@ -11,6 +11,7 @@ import { TPkiSync, useUpdatePkiSync } from "@app/hooks/api/pkiSyncs"; import { TUpdatePkiSyncForm, UpdatePkiSyncFormSchema } from "./schemas/pki-sync-schema"; import { PkiSyncDestinationFields } from "./PkiSyncDestinationFields"; import { PkiSyncDetailsFields } from "./PkiSyncDetailsFields"; +import { PkiSyncFieldMappingsFields } from "./PkiSyncFieldMappingsFields"; import { PkiSyncOptionsFields } from "./PkiSyncOptionsFields"; import { PkiSyncSourceFields } from "./PkiSyncSourceFields"; @@ -66,6 +67,9 @@ export const EditPkiSyncForm = ({ pkiSync, fields, onComplete }: Props) => { case PkiSyncEditFields.Options: Component = ; break; + case PkiSyncEditFields.Mappings: + Component = ; + break; case PkiSyncEditFields.Source: Component = ; break; diff --git a/frontend/src/components/pki-syncs/forms/PkiSyncDestinationFields.tsx b/frontend/src/components/pki-syncs/forms/PkiSyncDestinationFields.tsx index e7f90670db..56514c262a 100644 --- a/frontend/src/components/pki-syncs/forms/PkiSyncDestinationFields.tsx +++ b/frontend/src/components/pki-syncs/forms/PkiSyncDestinationFields.tsx @@ -4,7 +4,9 @@ import { PkiSync } from "@app/hooks/api/pkiSyncs"; import { TPkiSyncForm } from "./schemas/pki-sync-schema"; import { AwsCertificateManagerPkiSyncFields } from "./AwsCertificateManagerPkiSyncFields"; +import { AwsSecretsManagerPkiSyncFields } from "./AwsSecretsManagerPkiSyncFields"; import { AzureKeyVaultPkiSyncFields } from "./AzureKeyVaultPkiSyncFields"; +import { ChefPkiSyncFields } from "./ChefPkiSyncFields"; export const PkiSyncDestinationFields = () => { const { watch } = useFormContext(); @@ -16,6 +18,10 @@ export const PkiSyncDestinationFields = () => { 
return ; case PkiSync.AwsCertificateManager: return ; + case PkiSync.AwsSecretsManager: + return ; + case PkiSync.Chef: + return ; default: return (
diff --git a/frontend/src/components/pki-syncs/forms/PkiSyncFieldMappingsFields.tsx b/frontend/src/components/pki-syncs/forms/PkiSyncFieldMappingsFields.tsx new file mode 100644 index 0000000000..922ad8e6f1 --- /dev/null +++ b/frontend/src/components/pki-syncs/forms/PkiSyncFieldMappingsFields.tsx @@ -0,0 +1,103 @@ +import { Controller, useFormContext } from "react-hook-form"; + +import { FormControl, Input } from "@app/components/v2"; +import { PkiSync } from "@app/hooks/api/pkiSyncs"; + +import { TPkiSyncForm } from "./schemas/pki-sync-schema"; + +type Props = { + destination?: PkiSync; +}; + +export const PkiSyncFieldMappingsFields = ({ destination }: Props) => { + const { control, watch } = useFormContext(); + const currentDestination = destination || watch("destination"); + + if (currentDestination !== PkiSync.Chef && currentDestination !== PkiSync.AwsSecretsManager) { + return null; + } + + return ( + <> +

+ Configure how certificate fields are mapped to your{" "} + {currentDestination === PkiSync.Chef ? "Chef data bag items" : "AWS secrets"}. +

+ +
+ ( + + + + )} + /> + + ( + + + + )} + /> + + ( + + + + )} + /> + + ( + + + + )} + /> +
+ +
+

Preview JSON Structure

+
+          {`{
+  "id": "certificate-item-name",
+  "${watch("syncOptions.fieldMappings.certificate") || "certificate"}": "",
+  "${watch("syncOptions.fieldMappings.privateKey") || "private_key"}": "",
+  "${watch("syncOptions.fieldMappings.certificateChain") || "certificate_chain"}": "",
+  "${watch("syncOptions.fieldMappings.caCertificate") || "ca_certificate"}": ""
+}`}
+        
+
+ + ); +}; diff --git a/frontend/src/components/pki-syncs/forms/PkiSyncOptionsFields/PkiSyncOptionsFields.tsx b/frontend/src/components/pki-syncs/forms/PkiSyncOptionsFields/PkiSyncOptionsFields.tsx index c1313e684a..b1763d2d86 100644 --- a/frontend/src/components/pki-syncs/forms/PkiSyncOptionsFields/PkiSyncOptionsFields.tsx +++ b/frontend/src/components/pki-syncs/forms/PkiSyncOptionsFields/PkiSyncOptionsFields.tsx @@ -95,6 +95,48 @@ export const PkiSyncOptionsFields = ({ destination }: Props) => { )} /> + ( + + +

+ Include Root CA in Certificate Chain{" "} + +

+ When enabled, the full certificate chain including the root CA will be + synced to the destination. +

+

+ When disabled, the root CA will be excluded from the certificate chain + during sync operations, reducing the size of the synced certificate chain. +

+

+ Most applications and services work correctly with intermediate certificates + only, as they can validate the trust chain up to a root CA they already + trust. +

+ + } + > + + +

+
+
+ )} + /> + {currentDestination === PkiSync.AwsCertificateManager && ( { /> )} + {currentDestination === PkiSync.AwsSecretsManager && ( + ( + + +

+ Preserve Secret on Renewal{" "} + +

+ Only applies to certificate renewals: When a certificate + is renewed in Infisical, this option controls how the renewed certificate + is handled in AWS Secrets Manager. +

+

+ When enabled, the renewed certificate will update the existing secret, + preserving the same secret name and ARN. This allows consuming services to + continue using the same secret reference without requiring updates. +

+

+ When disabled, the renewed certificate will be created as a new secret + with a new name, and the old secret will be removed. +

+ + } + > + + +

+
+
+ )} + /> + )} + + {currentDestination === PkiSync.Chef && ( + ( + + +

+ Preserve Data Bag Item on Renewal{" "} + +

+ Only applies to certificate renewals: When a certificate + is renewed in Infisical, this option controls how the renewed certificate + is handled in Chef. +

+

+ When enabled, the renewed certificate will update the existing data bag + item, preserving the same item name. This allows consuming services to + continue using the same data bag item without requiring updates to Chef + cookbooks or recipes. +

+

+ When disabled, the renewed certificate will be created as a new data bag + item with a new name, and the old item will be removed. +

+ + } + > + + +

+
+
+ )} + /> + )} + { + if (!val) return true; + + const allowedOptionalPlaceholders = [ + "{{environment}}", + "{{profileId}}", + "{{commonName}}", + "{{friendlyName}}" + ]; + + const allowedPlaceholdersRegexPart = ["{{certificateId}}", ...allowedOptionalPlaceholders] + .map((p) => p.replace(/[-/\\^$*+?.()|[\]{}]/g, "\\$&")) + .join("|"); + + const allowedContentRegex = new RegExp( + `^([a-zA-Z0-9_\\-]|${allowedPlaceholdersRegexPart})*$` + ); + const contentIsValid = allowedContentRegex.test(val); + + if (val.trim()) { + const certificateIdRegex = /\{\{certificateId\}\}/; + const certificateIdIsPresent = certificateIdRegex.test(val); + return contentIsValid && certificateIdIsPresent; + } + + return contentIsValid; + }, + { + message: + "Certificate name schema must include exactly one {{certificateId}} placeholder. It can also include {{environment}}, {{profileId}}, {{commonName}}, or {{friendlyName}} placeholders. Only alphanumeric characters (a-z, A-Z, 0-9), hyphens (-), and underscores (_) are allowed besides the placeholders." 
+ } + ), + fieldMappings: AwsSecretsManagerFieldMappingsSchema.optional().default({ + certificate: "certificate", + privateKey: "private_key", + certificateChain: "certificate_chain", + caCertificate: "ca_certificate" + }) +}); + +export const AwsSecretsManagerPkiSyncDestinationSchema = BasePkiSyncSchema( + AwsSecretsManagerSyncOptionsSchema +).merge( + z.object({ + destination: z.literal(PkiSync.AwsSecretsManager), + destinationConfig: z.object({ + region: z.string().min(1, "AWS region is required") + }) + }) +); + +export const UpdateAwsSecretsManagerPkiSyncDestinationSchema = + AwsSecretsManagerPkiSyncDestinationSchema.partial().merge( + z.object({ + name: z + .string() + .trim() + .min(1, "Name is required") + .max(255, "Name must be less than 255 characters"), + destination: z.literal(PkiSync.AwsSecretsManager), + connection: z.object({ + id: z.string().uuid("Invalid connection ID format"), + name: z + .string() + .min(1, "Connection name is required") + .max(255, "Connection name must be less than 255 characters") + }) + }) + ); diff --git a/frontend/src/components/pki-syncs/forms/schemas/azure-key-vault-pki-sync-destination-schema.ts b/frontend/src/components/pki-syncs/forms/schemas/azure-key-vault-pki-sync-destination-schema.ts index 2a8f3ee53c..653c0fa0ed 100644 --- a/frontend/src/components/pki-syncs/forms/schemas/azure-key-vault-pki-sync-destination-schema.ts +++ b/frontend/src/components/pki-syncs/forms/schemas/azure-key-vault-pki-sync-destination-schema.ts @@ -7,6 +7,7 @@ import { BasePkiSyncSchema } from "./base-pki-sync-schema"; const AzureKeyVaultSyncOptionsSchema = z.object({ canImportCertificates: z.boolean().default(false), canRemoveCertificates: z.boolean().default(true), + includeRootCa: z.boolean().default(false), enableVersioning: z.boolean().default(true), certificateNameSchema: z .string() diff --git a/frontend/src/components/pki-syncs/forms/schemas/base-pki-sync-schema.ts 
b/frontend/src/components/pki-syncs/forms/schemas/base-pki-sync-schema.ts index 73da1f6af0..0ff6121cf4 100644 --- a/frontend/src/components/pki-syncs/forms/schemas/base-pki-sync-schema.ts +++ b/frontend/src/components/pki-syncs/forms/schemas/base-pki-sync-schema.ts @@ -6,6 +6,7 @@ export const BasePkiSyncSchema = { + if (!val) return true; + + const allowedOptionalPlaceholders = [ + "{{environment}}", + "{{profileId}}", + "{{commonName}}", + "{{friendlyName}}" + ]; + + const allowedPlaceholdersRegexPart = ["{{certificateId}}", ...allowedOptionalPlaceholders] + .map((p) => p.replace(/[-/\\^$*+?.()|[\]{}]/g, "\\$&")) + .join("|"); + + const allowedContentRegex = new RegExp( + `^([a-zA-Z0-9_\\-]|${allowedPlaceholdersRegexPart})*$` + ); + const contentIsValid = allowedContentRegex.test(val); + + if (val.trim()) { + const certificateIdRegex = /\{\{certificateId\}\}/; + const certificateIdIsPresent = certificateIdRegex.test(val); + return contentIsValid && certificateIdIsPresent; + } + + return contentIsValid; + }, + { + message: + "Certificate item name schema must include at least one {{certificateId}} placeholder. It can also include {{environment}}, {{profileId}}, {{commonName}}, or {{friendlyName}} placeholders. Only alphanumeric characters (a-z, A-Z, 0-9), hyphens (-), and underscores (_) are allowed besides the placeholders."
+ } + ), + fieldMappings: ChefFieldMappingsSchema.optional().default({ + certificate: "certificate", + privateKey: "private_key", + certificateChain: "certificate_chain", + caCertificate: "ca_certificate" + }) +}); + +export const ChefPkiSyncDestinationSchema = BasePkiSyncSchema(ChefSyncOptionsSchema).merge( + z.object({ + destination: z.literal(PkiSync.Chef), + destinationConfig: z.object({ + dataBagName: z + .string() + .min(1, "Data bag name is required") + .max(255, "Data bag name must be less than 255 characters") + .regex( + /^[a-zA-Z0-9_-]+$/, + "Data bag name can only contain alphanumeric characters, underscores, and hyphens" + ) + }) + }) +); + +export const UpdateChefPkiSyncDestinationSchema = ChefPkiSyncDestinationSchema.partial().merge( + z.object({ + name: z + .string() + .trim() + .min(1, "Name is required") + .max(255, "Name must be less than 255 characters"), + destination: z.literal(PkiSync.Chef), + connection: z.object({ + id: z.string().uuid("Invalid connection ID format"), + name: z + .string() + .min(1, "Connection name is required") + .max(255, "Connection name must be less than 255 characters") + }) + }) +); diff --git a/frontend/src/components/pki-syncs/forms/schemas/pki-sync-schema.ts b/frontend/src/components/pki-syncs/forms/schemas/pki-sync-schema.ts index 6efa5e24da..559fcdebc9 100644 --- a/frontend/src/components/pki-syncs/forms/schemas/pki-sync-schema.ts +++ b/frontend/src/components/pki-syncs/forms/schemas/pki-sync-schema.ts @@ -4,19 +4,31 @@ import { AwsCertificateManagerPkiSyncDestinationSchema, UpdateAwsCertificateManagerPkiSyncDestinationSchema } from "./aws-certificate-manager-pki-sync-destination-schema"; +import { + AwsSecretsManagerPkiSyncDestinationSchema, + UpdateAwsSecretsManagerPkiSyncDestinationSchema +} from "./aws-secrets-manager-pki-sync-destination-schema"; import { AzureKeyVaultPkiSyncDestinationSchema, UpdateAzureKeyVaultPkiSyncDestinationSchema } from "./azure-key-vault-pki-sync-destination-schema"; +import { + 
ChefPkiSyncDestinationSchema, + UpdateChefPkiSyncDestinationSchema +} from "./chef-pki-sync-destination-schema"; const PkiSyncUnionSchema = z.discriminatedUnion("destination", [ AzureKeyVaultPkiSyncDestinationSchema, - AwsCertificateManagerPkiSyncDestinationSchema + AwsCertificateManagerPkiSyncDestinationSchema, + AwsSecretsManagerPkiSyncDestinationSchema, + ChefPkiSyncDestinationSchema ]); const UpdatePkiSyncUnionSchema = z.discriminatedUnion("destination", [ UpdateAzureKeyVaultPkiSyncDestinationSchema, - UpdateAwsCertificateManagerPkiSyncDestinationSchema + UpdateAwsCertificateManagerPkiSyncDestinationSchema, + UpdateAwsSecretsManagerPkiSyncDestinationSchema, + UpdateChefPkiSyncDestinationSchema ]); export const PkiSyncFormSchema = PkiSyncUnionSchema; diff --git a/frontend/src/components/pki-syncs/types/index.ts b/frontend/src/components/pki-syncs/types/index.ts index 093d065ab0..954be4ff27 100644 --- a/frontend/src/components/pki-syncs/types/index.ts +++ b/frontend/src/components/pki-syncs/types/index.ts @@ -1,6 +1,7 @@ export enum PkiSyncEditFields { Details = "details", Options = "options", + Mappings = "mappings", Source = "source", Destination = "destination" } diff --git a/frontend/src/helpers/pkiSyncs.ts b/frontend/src/helpers/pkiSyncs.ts index d8c87853df..471a4da059 100644 --- a/frontend/src/helpers/pkiSyncs.ts +++ b/frontend/src/helpers/pkiSyncs.ts @@ -15,10 +15,20 @@ export const PKI_SYNC_MAP: Record< [PkiSync.AwsCertificateManager]: { name: "AWS Certificate Manager", image: "Amazon Web Services.png" + }, + [PkiSync.AwsSecretsManager]: { + name: "AWS Secrets Manager", + image: "Amazon Web Services.png" + }, + [PkiSync.Chef]: { + name: "Chef", + image: "Chef.png" } }; export const PKI_SYNC_CONNECTION_MAP: Record = { [PkiSync.AzureKeyVault]: AppConnection.AzureKeyVault, - [PkiSync.AwsCertificateManager]: AppConnection.AWS + [PkiSync.AwsCertificateManager]: AppConnection.AWS, + [PkiSync.AwsSecretsManager]: AppConnection.AWS, + [PkiSync.Chef]: 
AppConnection.Chef }; diff --git a/frontend/src/hooks/api/groups/index.tsx b/frontend/src/hooks/api/groups/index.tsx index c23a558328..eebe2ccc3a 100644 --- a/frontend/src/hooks/api/groups/index.tsx +++ b/frontend/src/hooks/api/groups/index.tsx @@ -5,4 +5,4 @@ export { useRemoveUserFromGroup, useUpdateGroup } from "./mutations"; -export { useGetGroupById, useListGroupUsers } from "./queries"; +export { useGetGroupById, useListGroupProjects, useListGroupUsers } from "./queries"; diff --git a/frontend/src/hooks/api/groups/queries.tsx b/frontend/src/hooks/api/groups/queries.tsx index ca524066f0..2e5b9f6841 100644 --- a/frontend/src/hooks/api/groups/queries.tsx +++ b/frontend/src/hooks/api/groups/queries.tsx @@ -2,7 +2,14 @@ import { useQuery } from "@tanstack/react-query"; import { apiRequest } from "@app/config/request"; -import { EFilterReturnedUsers, TGroup, TGroupUser } from "./types"; +import { OrderByDirection } from "../generic/types"; +import { + EFilterReturnedProjects, + EFilterReturnedUsers, + TGroup, + TGroupProject, + TGroupUser +} from "./types"; export const groupKeys = { getGroupById: (groupId: string) => [{ groupId }, "group"] as const, @@ -41,6 +48,29 @@ export const groupKeys = { ...groupKeys.forGroupUserMemberships(slug), projectId, { offset, limit, search, filter } + ] as const, + allGroupProjects: () => ["group-projects"] as const, + forGroupProjects: (groupId: string) => [...groupKeys.allGroupProjects(), groupId] as const, + specificGroupProjects: ({ + groupId, + offset, + limit, + search, + filter, + orderBy, + orderDirection + }: { + groupId: string; + offset: number; + limit: number; + search: string; + filter?: EFilterReturnedProjects; + orderBy?: string; + orderDirection?: OrderByDirection; + }) => + [ + ...groupKeys.forGroupProjects(groupId), + { offset, limit, search, filter, orderBy, orderDirection } ] as const }; @@ -148,3 +178,54 @@ export const useListProjectGroupUsers = ({ } }); }; + +export const useListGroupProjects = ({ + id, + 
offset = 0, + limit = 10, + search, + filter, + orderBy, + orderDirection +}: { + id: string; + offset: number; + limit: number; + search: string; + orderBy?: string; + orderDirection?: OrderByDirection; + filter?: EFilterReturnedProjects; +}) => { + return useQuery({ + queryKey: groupKeys.specificGroupProjects({ + groupId: id, + offset, + limit, + search, + filter, + orderBy, + orderDirection + }), + enabled: Boolean(id), + placeholderData: (previousData) => previousData, + queryFn: async () => { + const params = new URLSearchParams({ + offset: String(offset), + limit: String(limit), + search, + ...(filter && { filter }), + ...(orderBy && { orderBy }), + ...(orderDirection && { orderDirection }) + }); + + const { data } = await apiRequest.get<{ projects: TGroupProject[]; totalCount: number }>( + `/api/v1/groups/${id}/projects`, + { + params + } + ); + + return data; + } + }); +}; diff --git a/frontend/src/hooks/api/groups/types.ts b/frontend/src/hooks/api/groups/types.ts index 6bc82b39e0..1c16a331b6 100644 --- a/frontend/src/hooks/api/groups/types.ts +++ b/frontend/src/hooks/api/groups/types.ts @@ -52,7 +52,21 @@ export type TGroupUser = { joinedGroupAt: Date; }; +export type TGroupProject = { + id: string; + name: string; + slug: string; + description: string; + type: string; + joinedGroupAt: Date; +}; + export enum EFilterReturnedUsers { EXISTING_MEMBERS = "existingMembers", NON_MEMBERS = "nonMembers" } + +export enum EFilterReturnedProjects { + ASSIGNED_PROJECTS = "assignedProjects", + UNASSIGNED_PROJECTS = "unassignedProjects" +} diff --git a/frontend/src/hooks/api/pkiSyncs/enums.ts b/frontend/src/hooks/api/pkiSyncs/enums.ts index 0145162302..ee9af0c4f0 100644 --- a/frontend/src/hooks/api/pkiSyncs/enums.ts +++ b/frontend/src/hooks/api/pkiSyncs/enums.ts @@ -1,6 +1,8 @@ export enum PkiSync { AzureKeyVault = "azure-key-vault", - AwsCertificateManager = "aws-certificate-manager" + AwsCertificateManager = "aws-certificate-manager", + AwsSecretsManager = 
"aws-secrets-manager", + Chef = "chef" } export enum PkiSyncStatus { @@ -12,7 +14,7 @@ export enum PkiSyncStatus { export enum CertificateSyncStatus { Pending = "pending", - Syncing = "syncing", + Running = "running", Succeeded = "succeeded", Failed = "failed" } diff --git a/frontend/src/hooks/api/pkiSyncs/types/aws-secrets-manager-sync.ts b/frontend/src/hooks/api/pkiSyncs/types/aws-secrets-manager-sync.ts new file mode 100644 index 0000000000..1558678dcf --- /dev/null +++ b/frontend/src/hooks/api/pkiSyncs/types/aws-secrets-manager-sync.ts @@ -0,0 +1,29 @@ +import { AppConnection } from "@app/hooks/api/appConnections/enums"; + +import { PkiSync } from "../enums"; +import { TRootPkiSync } from "./common"; + +export type TAwsSecretsManagerFieldMappings = { + certificate: string; + privateKey: string; + certificateChain: string; + caCertificate: string; +}; + +export type TAwsSecretsManagerPkiSync = TRootPkiSync & { + destination: PkiSync.AwsSecretsManager; + destinationConfig: { + region: string; + keyId?: string; + }; + connection: { + app: AppConnection.AWS; + name: string; + id: string; + }; + syncOptions: TRootPkiSync["syncOptions"] & { + fieldMappings?: TAwsSecretsManagerFieldMappings; + preserveSecretOnRenewal?: boolean; + updateExistingCertificates?: boolean; + }; +}; diff --git a/frontend/src/hooks/api/pkiSyncs/types/chef-sync.ts b/frontend/src/hooks/api/pkiSyncs/types/chef-sync.ts new file mode 100644 index 0000000000..25c1c3f864 --- /dev/null +++ b/frontend/src/hooks/api/pkiSyncs/types/chef-sync.ts @@ -0,0 +1,16 @@ +import { AppConnection } from "@app/hooks/api/appConnections/enums"; + +import { PkiSync } from "../enums"; +import { TRootPkiSync } from "./common"; + +export type TChefPkiSync = TRootPkiSync & { + destination: PkiSync.Chef; + destinationConfig: { + dataBagName: string; + }; + connection: { + app: AppConnection.Chef; + name: string; + id: string; + }; +}; diff --git a/frontend/src/hooks/api/pkiSyncs/types/common.ts 
b/frontend/src/hooks/api/pkiSyncs/types/common.ts index 77dc785ee4..711a33a754 100644 --- a/frontend/src/hooks/api/pkiSyncs/types/common.ts +++ b/frontend/src/hooks/api/pkiSyncs/types/common.ts @@ -2,11 +2,23 @@ import { AppConnection } from "@app/hooks/api/appConnections/enums"; import { CertificateSyncStatus, PkiSyncStatus } from "../enums"; +export type TChefFieldMappings = { + certificate: string; + privateKey: string; + certificateChain: string; + caCertificate: string; +}; + export type RootPkiSyncOptions = { canImportCertificates: boolean; canRemoveCertificates: boolean; certificateNamePrefix?: string; certificateNameSchema?: string; + preserveArn?: boolean; + enableVersioning?: boolean; + preserveItemOnRenewal?: boolean; + updateExistingCertificates?: boolean; + fieldMappings?: TChefFieldMappings; }; export type TRootPkiSync = { diff --git a/frontend/src/hooks/api/pkiSyncs/types/index.ts b/frontend/src/hooks/api/pkiSyncs/types/index.ts index 69ec3ae6f7..53b910bf79 100644 --- a/frontend/src/hooks/api/pkiSyncs/types/index.ts +++ b/frontend/src/hooks/api/pkiSyncs/types/index.ts @@ -1,7 +1,9 @@ import { PkiSync } from "@app/hooks/api/pkiSyncs"; import { TAwsCertificateManagerPkiSync } from "./aws-certificate-manager-sync"; +import { TAwsSecretsManagerPkiSync } from "./aws-secrets-manager-sync"; import { TAzureKeyVaultPkiSync } from "./azure-key-vault-sync"; +import { TChefPkiSync } from "./chef-sync"; export type TPkiSyncOption = { name: string; @@ -16,7 +18,11 @@ export type TPkiSyncOption = { minCertificateNameLength?: number; }; -export type TPkiSync = TAzureKeyVaultPkiSync | TAwsCertificateManagerPkiSync; +export type TPkiSync = + | TAzureKeyVaultPkiSync + | TAwsCertificateManagerPkiSync + | TAwsSecretsManagerPkiSync + | TChefPkiSync; export type TListPkiSyncs = { pkiSyncs: TPkiSync[] }; @@ -31,6 +37,17 @@ type TCreatePkiSyncDTOBase = { canRemoveCertificates: boolean; certificateNamePrefix?: string; certificateNameSchema?: string; + preserveArn?: boolean; + 
enableVersioning?: boolean; + preserveItemOnRenewal?: boolean; + updateExistingCertificates?: boolean; + preserveSecretOnRenewal?: boolean; + fieldMappings?: { + certificate: string; + privateKey: string; + certificateChain: string; + caCertificate: string; + }; }; isAutoSyncEnabled: boolean; subscriberId?: string | null; @@ -43,6 +60,7 @@ export type TCreatePkiSyncDTO = TCreatePkiSyncDTOBase & { destinationConfig: { vaultBaseUrl?: string; region?: string; + dataBagName?: string; }; }; @@ -76,5 +94,7 @@ export type TTriggerPkiSyncRemoveCertificatesDTO = { }; export * from "./aws-certificate-manager-sync"; +export * from "./aws-secrets-manager-sync"; export * from "./azure-key-vault-sync"; +export * from "./chef-sync"; export * from "./common"; diff --git a/frontend/src/hooks/api/projects/mutations.tsx b/frontend/src/hooks/api/projects/mutations.tsx index e2a80ff536..4162877366 100644 --- a/frontend/src/hooks/api/projects/mutations.tsx +++ b/frontend/src/hooks/api/projects/mutations.tsx @@ -2,6 +2,7 @@ import { useMutation, useQueryClient } from "@tanstack/react-query"; import { apiRequest } from "@app/config/request"; +import { groupKeys } from "../groups/queries"; import { userKeys } from "../users/query-keys"; import { projectKeys } from "./query-keys"; import { @@ -30,10 +31,11 @@ export const useAddGroupToWorkspace = () => { return groupMembership; }, - onSuccess: (_, { projectId }) => { + onSuccess: (_, { projectId, groupId }) => { queryClient.invalidateQueries({ queryKey: projectKeys.getProjectGroupMemberships(projectId) }); + queryClient.invalidateQueries({ queryKey: groupKeys.forGroupProjects(groupId) }); } }); }; @@ -77,11 +79,13 @@ export const useDeleteGroupFromWorkspace = () => { } = await apiRequest.delete(`/api/v1/projects/${projectId}/groups/${groupId}`); return groupMembership; }, - onSuccess: (_, { projectId, username }) => { + onSuccess: (_, { projectId, username, groupId }) => { queryClient.invalidateQueries({ queryKey: 
projectKeys.getProjectGroupMemberships(projectId) }); + queryClient.invalidateQueries({ queryKey: groupKeys.forGroupProjects(groupId) }); + if (username) { queryClient.invalidateQueries({ queryKey: userKeys.listUserGroupMemberships(username) }); } diff --git a/frontend/src/pages/cert-manager/CertificatesPage/components/CertificateIssuanceModal.tsx b/frontend/src/pages/cert-manager/CertificatesPage/components/CertificateIssuanceModal.tsx index 81bc5461f9..49ffe34945 100644 --- a/frontend/src/pages/cert-manager/CertificatesPage/components/CertificateIssuanceModal.tsx +++ b/frontend/src/pages/cert-manager/CertificatesPage/components/CertificateIssuanceModal.tsx @@ -354,6 +354,7 @@ export const CertificateIssuanceModal = ({ popUp, handlePopUpToggle, profileId }

Certificate Details

Serial Number: {cert.serialNumber}

+

Certificate Id: {cert.id}

Common Name: {cert.commonName}

Status: {cert.status}

diff --git a/frontend/src/pages/cert-manager/PkiSyncDetailsByIDPage/PkiSyncDetailsByIDPage.tsx b/frontend/src/pages/cert-manager/PkiSyncDetailsByIDPage/PkiSyncDetailsByIDPage.tsx index b1b1b2836e..16b105bc6b 100644 --- a/frontend/src/pages/cert-manager/PkiSyncDetailsByIDPage/PkiSyncDetailsByIDPage.tsx +++ b/frontend/src/pages/cert-manager/PkiSyncDetailsByIDPage/PkiSyncDetailsByIDPage.tsx @@ -21,6 +21,7 @@ import { PkiSyncCertificatesSection, PkiSyncDestinationSection, PkiSyncDetailsSection, + PkiSyncFieldMappingsSection, PkiSyncOptionsSection } from "./components"; @@ -63,6 +64,7 @@ const PageContent = () => { const handleEditDetails = () => handlePopUpOpen("editSync", PkiSyncEditFields.Details); const handleEditOptions = () => handlePopUpOpen("editSync", PkiSyncEditFields.Options); + const handleEditMappings = () => handlePopUpOpen("editSync", PkiSyncEditFields.Mappings); const handleEditDestination = () => handlePopUpOpen("editSync", PkiSyncEditFields.Destination); return ( @@ -103,6 +105,7 @@ const PageContent = () => {
+
{ if (status === CertificateSyncStatus.Succeeded) return "success"; if (status === CertificateSyncStatus.Failed) return "danger"; - if (status === CertificateSyncStatus.Syncing) return "neutral"; + if (status === CertificateSyncStatus.Running) return "neutral"; return "project"; }; const getSyncStatusText = (status?: CertificateSyncStatus | null) => { if (status === CertificateSyncStatus.Succeeded) return "Synced"; if (status === CertificateSyncStatus.Failed) return "Failed"; - if (status === CertificateSyncStatus.Syncing) return "Syncing"; + if (status === CertificateSyncStatus.Running) return "Syncing"; if (status === CertificateSyncStatus.Pending) return "Pending"; return "Unknown"; }; diff --git a/frontend/src/pages/cert-manager/PkiSyncDetailsByIDPage/components/PkiSyncDestinationSection.tsx b/frontend/src/pages/cert-manager/PkiSyncDetailsByIDPage/components/PkiSyncDestinationSection.tsx index 1c6bb8a04b..c59d2df6dd 100644 --- a/frontend/src/pages/cert-manager/PkiSyncDetailsByIDPage/components/PkiSyncDestinationSection.tsx +++ b/frontend/src/pages/cert-manager/PkiSyncDetailsByIDPage/components/PkiSyncDestinationSection.tsx @@ -13,7 +13,9 @@ import { PkiSync, TPkiSync } from "@app/hooks/api/pkiSyncs"; import { AwsCertificateManagerPkiSyncDestinationSection, - AzureKeyVaultPkiSyncDestinationSection + AwsSecretsManagerPkiSyncDestinationSection, + AzureKeyVaultPkiSyncDestinationSection, + ChefPkiSyncDestinationSection } from "./PkiSyncDestinationSection/index"; const GenericFieldLabel = ({ label, children }: { label: string; children: React.ReactNode }) => ( @@ -38,9 +40,15 @@ export const PkiSyncDestinationSection = ({ pkiSync, onEditDestination }: Props) case PkiSync.AwsCertificateManager: DestinationComponents = ; break; + case PkiSync.AwsSecretsManager: + DestinationComponents = ; + break; case PkiSync.AzureKeyVault: DestinationComponents = ; break; + case PkiSync.Chef: + DestinationComponents = ; + break; default: // For future destinations, return null (no 
additional fields to show) DestinationComponents = null; diff --git a/frontend/src/pages/cert-manager/PkiSyncDetailsByIDPage/components/PkiSyncDestinationSection/AwsSecretsManagerPkiSyncDestinationSection.tsx b/frontend/src/pages/cert-manager/PkiSyncDetailsByIDPage/components/PkiSyncDestinationSection/AwsSecretsManagerPkiSyncDestinationSection.tsx new file mode 100644 index 0000000000..ba97b73cf7 --- /dev/null +++ b/frontend/src/pages/cert-manager/PkiSyncDetailsByIDPage/components/PkiSyncDestinationSection/AwsSecretsManagerPkiSyncDestinationSection.tsx @@ -0,0 +1,28 @@ +import { TAwsSecretsManagerPkiSync, TPkiSync } from "@app/hooks/api/pkiSyncs"; + +const GenericFieldLabel = ({ label, children }: { label: string; children: React.ReactNode }) => ( +
+

{label}

+
{children}
+
+); + +type Props = { + pkiSync: TPkiSync; +}; + +export const AwsSecretsManagerPkiSyncDestinationSection = ({ pkiSync }: Props) => { + const awsSecretsManagerPkiSync = pkiSync as TAwsSecretsManagerPkiSync; + const { destinationConfig } = awsSecretsManagerPkiSync; + + return ( + <> + + {destinationConfig.region || "us-east-1"} + + {destinationConfig.keyId && ( + {destinationConfig.keyId} + )} + + ); +}; diff --git a/frontend/src/pages/cert-manager/PkiSyncDetailsByIDPage/components/PkiSyncDestinationSection/ChefPkiSyncDestinationSection.tsx b/frontend/src/pages/cert-manager/PkiSyncDetailsByIDPage/components/PkiSyncDestinationSection/ChefPkiSyncDestinationSection.tsx new file mode 100644 index 0000000000..7d29c91c19 --- /dev/null +++ b/frontend/src/pages/cert-manager/PkiSyncDetailsByIDPage/components/PkiSyncDestinationSection/ChefPkiSyncDestinationSection.tsx @@ -0,0 +1,25 @@ +import { TPkiSync } from "@app/hooks/api/pkiSyncs"; + +const GenericFieldLabel = ({ label, children }: { label: string; children: React.ReactNode }) => ( +
+

{label}

+
{children}
+
+); + +type Props = { + pkiSync: TPkiSync; +}; + +export const ChefPkiSyncDestinationSection = ({ pkiSync }: Props) => { + const dataBagName = + pkiSync.destinationConfig && "dataBagName" in pkiSync.destinationConfig + ? pkiSync.destinationConfig.dataBagName + : undefined; + + return ( + + {dataBagName || "Not specified"} + + ); +}; diff --git a/frontend/src/pages/cert-manager/PkiSyncDetailsByIDPage/components/PkiSyncDestinationSection/index.ts b/frontend/src/pages/cert-manager/PkiSyncDetailsByIDPage/components/PkiSyncDestinationSection/index.ts index 4a1728f42a..c963f7284f 100644 --- a/frontend/src/pages/cert-manager/PkiSyncDetailsByIDPage/components/PkiSyncDestinationSection/index.ts +++ b/frontend/src/pages/cert-manager/PkiSyncDetailsByIDPage/components/PkiSyncDestinationSection/index.ts @@ -1,2 +1,4 @@ export { AwsCertificateManagerPkiSyncDestinationSection } from "./AwsCertificateManagerPkiSyncDestinationSection"; +export { AwsSecretsManagerPkiSyncDestinationSection } from "./AwsSecretsManagerPkiSyncDestinationSection"; export { AzureKeyVaultPkiSyncDestinationSection } from "./AzureKeyVaultPkiSyncDestinationSection"; +export { ChefPkiSyncDestinationSection } from "./ChefPkiSyncDestinationSection"; diff --git a/frontend/src/pages/cert-manager/PkiSyncDetailsByIDPage/components/PkiSyncFieldMappingsSection.tsx b/frontend/src/pages/cert-manager/PkiSyncDetailsByIDPage/components/PkiSyncFieldMappingsSection.tsx new file mode 100644 index 0000000000..66f0ea0761 --- /dev/null +++ b/frontend/src/pages/cert-manager/PkiSyncDetailsByIDPage/components/PkiSyncFieldMappingsSection.tsx @@ -0,0 +1,92 @@ +import { subject } from "@casl/ability"; +import { faEdit } from "@fortawesome/free-solid-svg-icons"; +import { FontAwesomeIcon } from "@fortawesome/react-fontawesome"; + +import { ProjectPermissionCan } from "@app/components/permissions"; +import { IconButton } from "@app/components/v2"; +import { Badge } from "@app/components/v3"; +import { ProjectPermissionSub } from 
"@app/context"; +import { ProjectPermissionPkiSyncActions } from "@app/context/ProjectPermissionContext/types"; +import { PkiSync, TPkiSync } from "@app/hooks/api/pkiSyncs"; + +const GenericFieldLabel = ({ + label, + children, + labelClassName +}: { + label: string; + children: React.ReactNode; + labelClassName?: string; +}) => ( +
+

{label}

+
{children}
+
+); + +type Props = { + pkiSync: TPkiSync; + onEditMappings: VoidFunction; +}; + +export const PkiSyncFieldMappingsSection = ({ pkiSync, onEditMappings }: Props) => { + if (pkiSync.destination !== PkiSync.Chef && pkiSync.destination !== PkiSync.AwsSecretsManager) { + return null; + } + + const fieldMappings = pkiSync.syncOptions?.fieldMappings; + + const permissionSubject = subject(ProjectPermissionSub.PkiSyncs, { + subscriberId: pkiSync.subscriberId || "" + }); + + return ( +
+
+
+

Field Mappings

+ + {(isAllowed) => ( + + + + )} + +
+
+
+ + + {fieldMappings?.certificate || "certificate"} + + + + + + {fieldMappings?.privateKey || "private_key"} + + + + + + {fieldMappings?.certificateChain || "certificate_chain"} + + + + + + {fieldMappings?.caCertificate || "ca_certificate"} + + +
+
+
+
+ ); +}; diff --git a/frontend/src/pages/cert-manager/PkiSyncDetailsByIDPage/components/index.ts b/frontend/src/pages/cert-manager/PkiSyncDetailsByIDPage/components/index.ts index 55a877bffd..5e6baa9736 100644 --- a/frontend/src/pages/cert-manager/PkiSyncDetailsByIDPage/components/index.ts +++ b/frontend/src/pages/cert-manager/PkiSyncDetailsByIDPage/components/index.ts @@ -3,5 +3,6 @@ export { PkiSyncAuditLogsSection } from "./PkiSyncAuditLogsSection"; export { PkiSyncCertificatesSection } from "./PkiSyncCertificatesSection"; export { PkiSyncDestinationSection } from "./PkiSyncDestinationSection"; export { PkiSyncDetailsSection } from "./PkiSyncDetailsSection"; +export { PkiSyncFieldMappingsSection } from "./PkiSyncFieldMappingsSection"; export { PkiSyncOptionsSection } from "./PkiSyncOptionsSection"; export { PkiSyncSourceSection } from "./PkiSyncSourceSection"; diff --git a/frontend/src/pages/organization/GroupDetailsByIDPage/GroupDetailsByIDPage.tsx b/frontend/src/pages/organization/GroupDetailsByIDPage/GroupDetailsByIDPage.tsx index e7c517d6ce..d925f20b07 100644 --- a/frontend/src/pages/organization/GroupDetailsByIDPage/GroupDetailsByIDPage.tsx +++ b/frontend/src/pages/organization/GroupDetailsByIDPage/GroupDetailsByIDPage.tsx @@ -27,6 +27,7 @@ import { usePopUp } from "@app/hooks/usePopUp"; import { GroupCreateUpdateModal } from "./components/GroupCreateUpdateModal"; import { GroupDetailsSection } from "./components/GroupDetailsSection"; import { GroupMembersSection } from "./components/GroupMembersSection"; +import { GroupProjectsSection } from "./components/GroupProjectsSection"; export enum TabSections { Member = "members", @@ -154,7 +155,10 @@ const Page = () => {
- +
+ + +
)} diff --git a/frontend/src/pages/organization/GroupDetailsByIDPage/components/AddGroupProjectModal.tsx b/frontend/src/pages/organization/GroupDetailsByIDPage/components/AddGroupProjectModal.tsx new file mode 100644 index 0000000000..c7040ec54a --- /dev/null +++ b/frontend/src/pages/organization/GroupDetailsByIDPage/components/AddGroupProjectModal.tsx @@ -0,0 +1,181 @@ +import { useState } from "react"; +import { faFolder, faMagnifyingGlass } from "@fortawesome/free-solid-svg-icons"; +import { FontAwesomeIcon } from "@fortawesome/react-fontawesome"; + +import { createNotification } from "@app/components/notifications"; +import { OrgPermissionCan } from "@app/components/permissions"; +import { + Button, + EmptyState, + Input, + Modal, + ModalContent, + Pagination, + Table, + TableContainer, + TableSkeleton, + TBody, + Td, + Th, + THead, + Tr +} from "@app/components/v2"; +import { OrgPermissionGroupActions, OrgPermissionSubjects } from "@app/context"; +import { getProjectTitle } from "@app/helpers/project"; +import { useDebounce, useResetPageHelper } from "@app/hooks"; +import { + useAddGroupToWorkspace as useAddProjectToGroup, + useListGroupProjects +} from "@app/hooks/api"; +import { EFilterReturnedProjects } from "@app/hooks/api/groups/types"; +import { ProjectType } from "@app/hooks/api/projects/types"; +import { UsePopUpState } from "@app/hooks/usePopUp"; + +type Props = { + popUp: UsePopUpState<["addGroupProjects"]>; + handlePopUpToggle: ( + popUpName: keyof UsePopUpState<["addGroupProjects"]>, + state?: boolean + ) => void; +}; + +export const AddGroupProjectModal = ({ popUp, handlePopUpToggle }: Props) => { + const [page, setPage] = useState(1); + const [perPage, setPerPage] = useState(10); + const [searchProjectFilter, setSearchProjectFilter] = useState(""); + const [debouncedSearch] = useDebounce(searchProjectFilter); + + const popUpData = popUp?.addGroupProjects?.data as { + groupId: string; + slug: string; + }; + + const offset = (page - 1) * perPage; + 
+ const { data, isPending } = useListGroupProjects({ + id: popUpData?.groupId, + offset, + limit: perPage, + search: debouncedSearch, + filter: EFilterReturnedProjects.UNASSIGNED_PROJECTS + }); + + const { totalCount = 0 } = data ?? {}; + + useResetPageHelper({ + totalCount, + offset, + setPage + }); + + const { mutateAsync: addProjectToGroupMutateAsync, isPending: isAdding } = useAddProjectToGroup(); + + const handleAddProject = async (projectId: string, projectName: string) => { + if (!popUpData?.groupId) { + createNotification({ + text: "Some data is missing, please refresh the page and try again", + type: "error" + }); + return; + } + + await addProjectToGroupMutateAsync({ + groupId: popUpData.groupId, + projectId + }); + + createNotification({ + text: `Successfully assigned the group to project ${projectName}`, + type: "success" + }); + }; + + return ( + { + handlePopUpToggle("addGroupProjects", isOpen); + }} + > + + setSearchProjectFilter(e.target.value)} + leftIcon={} + placeholder="Search projects..." + /> + + + + + + + + + + {isPending && } + {!isPending && + data?.projects?.map((project) => { + return ( + + + + + + ); + })} + +
ProjectType +
+

{project.name}

+ {project.description && ( +

{project.description}

+ )} +
+

{getProjectTitle(project.type as ProjectType)}

+
+ + {(isAllowed) => { + return ( + + ); + }} + +
+ {!isPending && totalCount > 0 && ( + setPage(newPage)} + onChangePerPage={(newPerPage) => setPerPage(newPerPage)} + /> + )} + {!isPending && !data?.projects?.length && ( + + )} +
+
+
+ ); +}; diff --git a/frontend/src/pages/organization/GroupDetailsByIDPage/components/GroupMembersSection/GroupMembersSection.tsx b/frontend/src/pages/organization/GroupDetailsByIDPage/components/GroupMembersSection/GroupMembersSection.tsx index c78b5404e7..024bc54d64 100644 --- a/frontend/src/pages/organization/GroupDetailsByIDPage/components/GroupMembersSection/GroupMembersSection.tsx +++ b/frontend/src/pages/organization/GroupDetailsByIDPage/components/GroupMembersSection/GroupMembersSection.tsx @@ -46,7 +46,7 @@ export const GroupMembersSection = ({ groupId, groupSlug }: Props) => { return (
-

Group Members

+

Members

{(isAllowed) => ( , + data?: object + ) => void; +}; + +export const GroupProjectRow = ({ project, handlePopUpOpen }: Props) => { + return ( + + +

{project.name}

+ + +

{getProjectTitle(project.type as ProjectType)}

+ + + +

{new Date(project.joinedGroupAt).toLocaleDateString()}

+
+ + + + + + + + + + + + {(isAllowed) => { + return ( + } + onClick={() => + handlePopUpOpen("removeProjectFromGroup", { + projectId: project.id, + projectName: project.name + }) + } + isDisabled={!isAllowed} + > + Remove group from project + + ); + }} + + + + + + + ); +}; diff --git a/frontend/src/pages/organization/GroupDetailsByIDPage/components/GroupProjectsSection/GroupProjectsSection.tsx b/frontend/src/pages/organization/GroupDetailsByIDPage/components/GroupProjectsSection/GroupProjectsSection.tsx new file mode 100644 index 0000000000..d6997c6cb7 --- /dev/null +++ b/frontend/src/pages/organization/GroupDetailsByIDPage/components/GroupProjectsSection/GroupProjectsSection.tsx @@ -0,0 +1,90 @@ +import { faPlus } from "@fortawesome/free-solid-svg-icons"; +import { FontAwesomeIcon } from "@fortawesome/react-fontawesome"; + +import { createNotification } from "@app/components/notifications"; +import { OrgPermissionCan } from "@app/components/permissions"; +import { DeleteActionModal, IconButton } from "@app/components/v2"; +import { OrgPermissionGroupActions, OrgPermissionSubjects } from "@app/context"; +import { useDeleteGroupFromWorkspace as useRemoveProjectFromGroup } from "@app/hooks/api"; +import { usePopUp } from "@app/hooks/usePopUp"; + +import { AddGroupProjectModal } from "../AddGroupProjectModal"; +import { GroupProjectsTable } from "./GroupProjectsTable"; + +type Props = { + groupId: string; + groupSlug: string; +}; + +export const GroupProjectsSection = ({ groupId, groupSlug }: Props) => { + const { popUp, handlePopUpOpen, handlePopUpToggle } = usePopUp([ + "addGroupProjects", + "removeProjectFromGroup" + ] as const); + + const { mutateAsync: removeProjectFromGroupMutateAsync } = useRemoveProjectFromGroup(); + + const handleRemoveProjectFromGroup = async (projectId: string, projectName: string) => { + await removeProjectFromGroupMutateAsync({ + groupId, + projectId + }); + + createNotification({ + text: `Successfully removed the group from project 
${projectName}`, + type: "success" + }); + + handlePopUpToggle("removeProjectFromGroup", false); + }; + + return ( +
+
+

Projects

+ + {(isAllowed) => ( + { + handlePopUpOpen("addGroupProjects", { + groupId, + slug: groupSlug + }); + }} + > + + + )} + +
+
+ +
+ + handlePopUpToggle("removeProjectFromGroup", isOpen)} + deleteKey="confirm" + onDeleteApproved={() => { + const projectData = popUp?.removeProjectFromGroup?.data as { + projectId: string; + projectName: string; + }; + + return handleRemoveProjectFromGroup(projectData.projectId, projectData.projectName); + }} + /> +
+ ); +}; diff --git a/frontend/src/pages/organization/GroupDetailsByIDPage/components/GroupProjectsSection/GroupProjectsTable.tsx b/frontend/src/pages/organization/GroupDetailsByIDPage/components/GroupProjectsSection/GroupProjectsTable.tsx new file mode 100644 index 0000000000..688e5a6923 --- /dev/null +++ b/frontend/src/pages/organization/GroupDetailsByIDPage/components/GroupProjectsSection/GroupProjectsTable.tsx @@ -0,0 +1,183 @@ +import { + faArrowDown, + faArrowUp, + faFolder, + faMagnifyingGlass, + faSearch +} from "@fortawesome/free-solid-svg-icons"; +import { FontAwesomeIcon } from "@fortawesome/react-fontawesome"; + +import { OrgPermissionCan } from "@app/components/permissions"; +import { + Button, + EmptyState, + IconButton, + Input, + Pagination, + Table, + TableContainer, + TableSkeleton, + TBody, + Th, + THead, + Tr +} from "@app/components/v2"; +import { OrgPermissionGroupActions, OrgPermissionSubjects } from "@app/context"; +import { + getUserTablePreference, + PreferenceKey, + setUserTablePreference +} from "@app/helpers/userTablePreferences"; +import { usePagination, useResetPageHelper } from "@app/hooks"; +import { useListGroupProjects } from "@app/hooks/api"; +import { OrderByDirection } from "@app/hooks/api/generic/types"; +import { EFilterReturnedProjects } from "@app/hooks/api/groups/types"; +import { UsePopUpState } from "@app/hooks/usePopUp"; + +import { GroupProjectRow } from "./GroupProjectRow"; + +type Props = { + groupId: string; + groupSlug: string; + handlePopUpOpen: ( + popUpName: keyof UsePopUpState<["removeProjectFromGroup", "addGroupProjects"]>, + data?: object + ) => void; +}; + +enum GroupProjectsOrderBy { + Name = "name" +} + +export const GroupProjectsTable = ({ groupId, groupSlug, handlePopUpOpen }: Props) => { + const { + search, + debouncedSearch, + setSearch, + setPage, + page, + perPage, + setPerPage, + offset, + orderDirection, + orderBy, + toggleOrderDirection + } = usePagination(GroupProjectsOrderBy.Name, { + 
initPerPage: getUserTablePreference("groupProjectsTable", PreferenceKey.PerPage, 20) + }); + + const handlePerPageChange = (newPerPage: number) => { + setPerPage(newPerPage); + setUserTablePreference("groupProjectsTable", PreferenceKey.PerPage, newPerPage); + }; + + const { data: groupMemberships, isPending } = useListGroupProjects({ + id: groupId, + offset, + limit: perPage, + search: debouncedSearch, + orderBy, + orderDirection, + filter: EFilterReturnedProjects.ASSIGNED_PROJECTS + }); + + const totalCount = groupMemberships?.totalCount ?? 0; + const isEmpty = !isPending && totalCount === 0; + const projects = groupMemberships?.projects ?? []; + + useResetPageHelper({ + totalCount, + offset, + setPage + }); + + return ( +
+ setSearch(e.target.value)} + leftIcon={} + placeholder="Search projects..." + /> + + + + + + + + + + + {isPending && } + {!isPending && + projects.map((project) => { + return ( + + ); + })} + +
+
+ Name + + + +
+
TypeAdded On +
+ {!isEmpty && ( + + )} + {isEmpty && ( + + )} + {isEmpty && ( + + {(isAllowed) => ( +
+ +
+ )} +
+ )} +
+
+ ); +}; diff --git a/frontend/src/pages/organization/GroupDetailsByIDPage/components/GroupProjectsSection/index.tsx b/frontend/src/pages/organization/GroupDetailsByIDPage/components/GroupProjectsSection/index.tsx new file mode 100644 index 0000000000..d61ad6124e --- /dev/null +++ b/frontend/src/pages/organization/GroupDetailsByIDPage/components/GroupProjectsSection/index.tsx @@ -0,0 +1 @@ +export { GroupProjectsSection } from "./GroupProjectsSection"; diff --git a/frontend/src/pages/organization/GroupDetailsByIDPage/components/index.tsx b/frontend/src/pages/organization/GroupDetailsByIDPage/components/index.tsx index 003c479108..7a1d714542 100644 --- a/frontend/src/pages/organization/GroupDetailsByIDPage/components/index.tsx +++ b/frontend/src/pages/organization/GroupDetailsByIDPage/components/index.tsx @@ -1 +1,2 @@ export { GroupDetailsSection } from "./GroupDetailsSection"; +export { GroupProjectsSection } from "./GroupProjectsSection"; diff --git a/frontend/src/pages/organization/NetworkingPage/components/GatewayTab/components/GatewayCliDeploymentMethod.tsx b/frontend/src/pages/organization/NetworkingPage/components/GatewayTab/components/GatewayCliDeploymentMethod.tsx index 680ba901fb..4942bf2e22 100644 --- a/frontend/src/pages/organization/NetworkingPage/components/GatewayTab/components/GatewayCliDeploymentMethod.tsx +++ b/frontend/src/pages/organization/NetworkingPage/components/GatewayTab/components/GatewayCliDeploymentMethod.tsx @@ -183,7 +183,7 @@ export const GatewayCliDeploymentMethod = () => { }; const command = useMemo(() => { - const relayPart = relay?.id !== "_auto" ? ` --relay=${relay?.name || ""}` : ""; + const relayPart = relay?.id !== "_auto" ? 
` --target-relay-name=${relay?.name || ""}` : ""; return `sudo infisical gateway start --name=${name}${relayPart} --domain=${siteURL} --token=${identityToken}`; }, [name, relay, identityToken, siteURL]); diff --git a/frontend/src/pages/organization/NetworkingPage/components/GatewayTab/components/GatewayCliSystemdDeploymentMethod.tsx b/frontend/src/pages/organization/NetworkingPage/components/GatewayTab/components/GatewayCliSystemdDeploymentMethod.tsx index 8b90719876..860590ffa5 100644 --- a/frontend/src/pages/organization/NetworkingPage/components/GatewayTab/components/GatewayCliSystemdDeploymentMethod.tsx +++ b/frontend/src/pages/organization/NetworkingPage/components/GatewayTab/components/GatewayCliSystemdDeploymentMethod.tsx @@ -183,7 +183,7 @@ export const GatewayCliSystemdDeploymentMethod = () => { }; const installCommand = useMemo(() => { - const relayPart = relay?.id !== "_auto" ? ` --relay=${relay?.name || ""}` : ""; + const relayPart = relay?.id !== "_auto" ? ` --target-relay-name=${relay?.name || ""}` : ""; return `sudo infisical gateway systemd install --name=${name}${relayPart} --domain=${siteURL} --token=${identityToken}`; }, [name, relay, identityToken, siteURL]);