Merge branch 'main' into fix/use-root-key-as-encryption-key
.gitignore (vendored, 1 change)
@@ -74,3 +74,4 @@ cli/test/infisical-merge
+backend/bdd/.bdd-infisical-bootstrap-result.json
 /npm/bin
 __pycache__
@@ -54,4 +54,6 @@ k8-operator/config/samples/universalAuthIdentitySecret.yaml:generic-api-key:8
 docs/integrations/app-connections/redis.mdx:generic-api-key:80
+backend/src/ee/services/app-connections/chef/chef-connection-fns.ts:private-key:42
 docs/documentation/platform/pki/enrollment-methods/api.mdx:generic-api-key:93
 docs/documentation/platform/pki/enrollment-methods/api.mdx:private-key:139
 docs/documentation/platform/pki/certificate-syncs/aws-secrets-manager.mdx:private-key:62
+docs/documentation/platform/pki/certificate-syncs/chef.mdx:private-key:61
@@ -3,6 +3,7 @@ import os
 import pathlib
 import typing
+from copy import deepcopy

 import httpx
 from behave.runner import Context
@@ -185,28 +186,33 @@ def bootstrap_infisical(context: Context):


 def before_all(context: Context):
+    base_vars = {
+        "BASE_URL": BASE_URL,
+        "PEBBLE_URL": PEBBLE_URL,
+    }
     if BOOTSTRAP_INFISICAL:
         details = bootstrap_infisical(context)
-        context.vars = {
-            "BASE_URL": BASE_URL,
-            "PEBBLE_URL": PEBBLE_URL,
+        vars = base_vars | {
             "PROJECT_ID": details["project"]["id"],
             "CERT_CA_ID": details["ca"]["id"],
             "CERT_TEMPLATE_ID": details["cert_template"]["id"],
             "AUTH_TOKEN": details["auth_token"],
         }
     else:
-        context.vars = {
-            "BASE_URL": BASE_URL,
-            "PEBBLE_URL": PEBBLE_URL,
+        vars = base_vars | {
             "PROJECT_ID": PROJECT_ID,
             "CERT_CA_ID": CERT_CA_ID,
             "CERT_TEMPLATE_ID": CERT_TEMPLATE_ID,
             "AUTH_TOKEN": AUTH_TOKEN,
         }
+    context._initial_vars = vars
     context.http_client = httpx.Client(base_url=BASE_URL)


 def before_scenario(context: Context, scenario: typing.Any):
+    context.vars = deepcopy(context._initial_vars)


 def after_scenario(context: Context, scenario: typing.Any):
     if hasattr(context, "web_server"):
         context.web_server.shutdown_and_server_close()
@@ -221,7 +221,6 @@ Feature: Access Control
       | order | .authorizations[0].uri                    | auth_uri      | {auth_uri}      |    |
       | order | .authorizations[0].body.challenges[0].url | challenge_uri | {challenge_uri} | {} |

   Scenario Outline: URL mismatch
     Given I have an ACME cert profile as "acme_profile"
     When I have an ACME client connecting to "{BASE_URL}/api/v1/pki/acme/profiles/{acme_profile.id}/directory"
@@ -271,3 +270,52 @@ Feature: Access Control
       | order | .authorizations[0].uri                    | auth_uri      | {auth_uri}      | https://example.com/acmes/auths/FOOBAR      | URL mismatch in the protected header |
       | order | .authorizations[0].body.challenges[0].url | challenge_uri | {challenge_uri} | BAD                                         | Invalid URL in the protected header  |
       | order | .authorizations[0].body.challenges[0].url | challenge_uri | {challenge_uri} | https://example.com/acmes/challenges/FOOBAR | URL mismatch in the protected header |
+
+  Scenario Outline: Send KID and JWK at the same time
+    Given I have an ACME cert profile as "acme_profile"
+    When I have an ACME client connecting to "{BASE_URL}/api/v1/pki/acme/profiles/{acme_profile.id}/directory"
+    Then I register a new ACME account with email fangpen@infisical.com and EAB key id "{acme_profile.eab_kid}" with secret "{acme_profile.eab_secret}" as acme_account
+    And I memorize acme_account.uri with jq "capture("/(?<id>[^/]+)$") | .id" as account_id
+    When I create certificate signing request as csr
+    Then I add names to certificate signing request csr
+      """
+      {
+        "COMMON_NAME": "localhost"
+      }
+      """
+    Then I create a RSA private key pair as cert_key
+    And I sign the certificate signing request csr with private key cert_key and output it as csr_pem in PEM format
+    And I submit the certificate signing request PEM csr_pem certificate order to the ACME server as order
+    And I peak and memorize the next nonce as nonce_value
+    And I memorize <src_var> with jq "<jq>" as <dest_var>
+    When I send a raw ACME request to "<url>"
+      """
+      {
+        "protected": {
+          "alg": "RS256",
+          "nonce": "{nonce_value}",
+          "url": "<url>",
+          "kid": "{acme_account.uri}",
+          "jwk": {
+            "n": "mmEWxUv2lUYDZe_M2FXJ_WDXgHoEG7PVvg-dfz1STzyMwx0qvM66KMenXSyVA0r-_Ssb6p8VexSWGOFKskM4ryKUihn2KNH5e8nXZBqzqYeKQ8vqaCdaWzTxFI1dg0xhk0CWptkZHxpRpLalztFJ1Pq7L2qvQOM2YT7wPYbwQhpaSiVNXAb1W4FwAPyC04v1mHehvST-esaDT7j_5-eU5cCcmyi4_g5nBawcinOjj5o3VCg4X8UjK--AjhAyYHx1nRMr-7xk4x-0VIpQ_OODjLB3WzN8s1YEb0Jx5Bv1JyeCw35zahqs3fAFyRje-p5ENk9NCxfz5x9ZGkszkkNt0Q",
+            "e": "AQAB",
+            "kty": "RSA"
+          }
+        },
+        "payload": {}
+      }
+      """
+    Then the value response.status_code should be equal to 400
+    And the value response with jq ".status" should be equal to 400
+    And the value response with jq ".type" should be equal to "urn:ietf:params:acme:error:malformed"
+    And the value response with jq ".detail" should be equal to "Both JWK and KID are provided in the protected header"
+
+    Examples: Endpoints
+      | src_var | jq                                        | dest_var      | url                                                                                 |
+      | order   | .                                         | not_used      | {BASE_URL}/api/v1/pki/acme/profiles/{acme_profile.id}/accounts/{account_id}/orders |
+      | order   | .                                         | not_used      | {BASE_URL}/api/v1/pki/acme/profiles/{acme_profile.id}/new-order                    |
+      | order   | .                                         | not_used      | {order.uri}                                                                         |
+      | order   | .                                         | not_used      | {order.uri}/finalize                                                                |
+      | order   | .                                         | not_used      | {order.uri}/certificate                                                             |
+      | order   | .authorizations[0].uri                    | auth_uri      | {auth_uri}                                                                          |
+      | order   | .authorizations[0].body.challenges[0].url | challenge_uri | {challenge_uri}                                                                     |
@@ -6,13 +6,32 @@ Feature: Account
     Then I register a new ACME account with email fangpen@infisical.com and EAB key id "{acme_profile.eab_kid}" with secret "{acme_profile.eab_secret}" as acme_account
     And the value acme_account.uri with jq "." should match pattern {BASE_URL}/api/v1/pki/acme/profiles/{acme_profile.id}/accounts/(.+)

   Scenario: Create a new account with the same key pair twice
     Given I have an ACME cert profile as "acme_profile"
     When I have an ACME client connecting to "{BASE_URL}/api/v1/pki/acme/profiles/{acme_profile.id}/directory"
     Then I register a new ACME account with email fangpen@infisical.com and EAB key id "{acme_profile.eab_kid}" with secret "{acme_profile.eab_secret}" as acme_account
     And I memorize acme_account.uri as kid
     And I register a new ACME account with email fangpen@infisical.com and EAB key id "{acme_profile.eab_kid}" with secret "{acme_profile.eab_secret}" as acme_account2
     And the value error.__class__.__name__ should be equal to "ConflictError"
     And the value error.location should be equal to "{kid}"

   Scenario: Find an existing account
     Given I have an ACME cert profile as "acme_profile"
     When I have an ACME client connecting to "{BASE_URL}/api/v1/pki/acme/profiles/{acme_profile.id}/directory"
     Then I register a new ACME account with email fangpen@infisical.com and EAB key id "{acme_profile.eab_kid}" with secret "{acme_profile.eab_secret}" as acme_account
     And I memorize acme_account.uri as account_uri
-    And I find the existing ACME account with email fangpen@infisical.com and EAB key id "{acme_profile.eab_kid}" with secret "{acme_profile.eab_secret}" as acme_account
-    And the value acme_account.uri should be equal to "{account_uri}"
+    And I find the existing ACME account with email fangpen@infisical.com and EAB key id "{acme_profile.eab_kid}" with secret "{acme_profile.eab_secret}" as retrieved_account
+    And the value retrieved_account.uri should be equal to "{account_uri}"

   # Note: This is a very special case for cert-manager.
   Scenario: Create a new account with EAB then retrieve it without EAB
     Given I have an ACME cert profile as "acme_profile"
     When I have an ACME client connecting to "{BASE_URL}/api/v1/pki/acme/profiles/{acme_profile.id}/directory"
     Then I register a new ACME account with email fangpen@infisical.com and EAB key id "{acme_profile.eab_kid}" with secret "{acme_profile.eab_secret}" as acme_account
     And I memorize acme_account.uri as account_uri
     And I find the existing ACME account without EAB as retrieved_account
     And the value error should be absent
     And the value retrieved_account.uri should be equal to "{account_uri}"

   Scenario: Create a new account without EAB
     Given I have an ACME cert profile as "acme_profile"
@@ -9,6 +9,9 @@ Feature: Directory
       {
         "newNonce": "{BASE_URL}/api/v1/pki/acme/profiles/{acme_profile.id}/new-nonce",
         "newAccount": "{BASE_URL}/api/v1/pki/acme/profiles/{acme_profile.id}/new-account",
-        "newOrder": "{BASE_URL}/api/v1/pki/acme/profiles/{acme_profile.id}/new-order"
+        "newOrder": "{BASE_URL}/api/v1/pki/acme/profiles/{acme_profile.id}/new-order",
+        "meta": {
+          "externalAccountRequired": true
+        }
       }
       """
@@ -387,6 +387,9 @@ def register_account_with_eab(
 ):
     acme_client = context.acme_client
     account_public_key = acme_client.net.key.public_key()
+    if not only_return_existing:
+        # clear the account in case we want to register twice
+        acme_client.net.account = None
     if hasattr(context, "alt_eab_url"):
         eab_directory = messages.Directory.from_json(
             {"newAccount": context.alt_eab_url}
@@ -406,8 +409,14 @@ def register_account_with_eab(
         only_return_existing=only_return_existing,
     )
     try:
-        context.vars[account_var] = acme_client.new_account(registration)
+        if not only_return_existing:
+            context.vars[account_var] = acme_client.new_account(registration)
+        else:
+            context.vars[account_var] = acme_client.query_registration(
+                acme_client.net.account
+            )
     except Exception as exp:
         logger.error(f"Failed to register: {exp}", exc_info=True)
         context.vars["error"] = exp
@@ -434,6 +443,17 @@ def step_impl(context: Context, email: str, kid: str, secret: str, account_var:
     )


+@then("I find the existing ACME account without EAB as {account_var}")
+def step_impl(context: Context, account_var: str):
+    acme_client = context.acme_client
+    # registration = messages.RegistrationResource.from_json(dict(uri=""))
+    registration = acme_client.net.account
+    try:
+        context.vars[account_var] = acme_client.query_registration(registration)
+    except Exception as exp:
+        context.vars["error"] = exp


 @then("I register a new ACME account with email {email} without EAB")
 def step_impl(context: Context, email: str):
     acme_client = context.acme_client
@@ -600,6 +620,19 @@ def step_impl(context: Context, var_path: str, jq_query: str):
     )


+@then("the value {var_path} should be absent")
+def step_impl(context: Context, var_path: str):
+    try:
+        value = eval_var(context, var_path)
+    except Exception as exp:
+        if isinstance(exp, KeyError):
+            return
+        raise
+    assert False, (
+        f"value at {var_path!r} should be absent, but we got this instead: {value!r}"
+    )


 @then('the value {var_path} with jq "{jq_query}" should be equal to {expected}')
 def step_impl(context: Context, var_path: str, jq_query: str, expected: str):
     value, result = apply_value_with_jq(
@@ -615,13 +648,14 @@ def step_impl(context: Context, var_path: str, jq_query: str, expected: str):

 @then('the value {var_path} with jq "{jq_query}" should match pattern {regex}')
 def step_impl(context: Context, var_path: str, jq_query: str, regex: str):
+    actual_regex = replace_vars(regex, context.vars)
     value, result = apply_value_with_jq(
         context=context,
         var_path=var_path,
         jq_query=jq_query,
     )
-    assert re.match(replace_vars(regex, context.vars), result), (
-        f"{json.dumps(value)!r} with jq {jq_query!r}, the result {json.dumps(result)!r} does not match {regex!r}"
+    assert re.match(actual_regex, result), (
+        f"{json.dumps(value)!r} with jq {jq_query!r}, the result {json.dumps(result)!r} does not match {actual_regex!r}"
     )
@@ -15,6 +15,7 @@ from josepy import JSONObjectWithFields

 ACC_KEY_BITS = 2048
 ACC_KEY_PUBLIC_EXPONENT = 65537
+NOCK_API_PREFIX = "/api/__bdd_nock__"
 logger = logging.getLogger(__name__)
 faker = Faker()
@@ -265,7 +266,7 @@ def x509_cert_to_dict(cert: x509.Certificate) -> dict:
 def define_nock(context: Context, definitions: list[dict]):
     jwt_token = context.vars["AUTH_TOKEN"]
     response = context.http_client.post(
-        "/api/v1/bdd-nock/define",
+        f"{NOCK_API_PREFIX}/define",
         headers=dict(authorization="Bearer {}".format(jwt_token)),
         json=dict(definitions=definitions),
     )

@@ -275,7 +276,7 @@ def define_nock(context: Context, definitions: list[dict]):
 def restore_nock(context: Context):
     jwt_token = context.vars["AUTH_TOKEN"]
     response = context.http_client.post(
-        "/api/v1/bdd-nock/restore",
+        f"{NOCK_API_PREFIX}/restore",
         headers=dict(authorization="Bearer {}".format(jwt_token)),
         json=dict(),
     )

@@ -285,7 +286,7 @@ def restore_nock(context: Context):
 def clean_all_nock(context: Context):
     jwt_token = context.vars["AUTH_TOKEN"]
     response = context.http_client.post(
-        "/api/v1/bdd-nock/clean-all",
+        f"{NOCK_API_PREFIX}/clean-all",
         headers=dict(authorization="Bearer {}".format(jwt_token)),
         json=dict(),
     )
@@ -1,7 +1,12 @@
 import { seedData1 } from "@app/db/seed-data";
 import { ApproverType } from "@app/ee/services/access-approval-policy/access-approval-policy-types";

-const createPolicy = async (dto: { name: string; secretPath: string; approvers: {type: ApproverType.User, id: string}[]; approvals: number }) => {
+const createPolicy = async (dto: {
+  name: string;
+  secretPath: string;
+  approvers: { type: ApproverType.User; id: string }[];
+  approvals: number;
+}) => {
   const res = await testServer.inject({
     method: "POST",
     url: `/api/v1/secret-approvals`,

@@ -27,7 +32,7 @@ describe("Secret approval policy router", async () => {
     const policy = await createPolicy({
       secretPath: "/",
       approvals: 1,
-      approvers: [{id:seedData1.id, type: ApproverType.User}],
+      approvers: [{ id: seedData1.id, type: ApproverType.User }],
       name: "test-policy"
     });
@@ -1,6 +1,8 @@
 {
-  "watch": ["src"],
+  "watch": [
+    "src"
+  ],
   "ext": ".ts,.js",
   "ignore": [],
-  "exec": "tsx ./src/main.ts | pino-pretty --colorize --colorizeObjects --singleLine"
+  "exec": "tsx --tsconfig=./tsconfig.dev.json --inspect=0.0.0.0:9229 ./src/main.ts | pino-pretty --colorize --colorizeObjects --singleLine"
 }
@@ -32,7 +32,7 @@
     "binary:clean": "rm -rf ./dist && rm -rf ./binary",
     "binary:rename-imports": "ts-node ./scripts/rename-mjs.ts",
     "test": "echo \"Error: no test specified\" && exit 1",
-    "dev": "tsx watch --clear-screen=false ./src/main.ts | pino-pretty --colorize --colorizeObjects --singleLine",
+    "dev": "tsx watch --clear-screen=false ./src/main.ts --config tsconfig.dev.json | pino-pretty --colorize --colorizeObjects --singleLine",
     "dev:docker": "nodemon",
     "build": "tsup --sourcemap",
     "build:frontend": "npm run build --prefix ../frontend",
@@ -266,4 +266,4 @@
     "zod": "^3.22.4",
     "zod-to-json-schema": "^3.24.5"
   }
 }
@@ -2,7 +2,7 @@
 import { execSync } from "child_process";
 import path from "path";
 import promptSync from "prompt-sync";
-import slugify from "@sindresorhus/slugify"
+import slugify from "@sindresorhus/slugify";

 const prompt = promptSync({ sigint: true });
@@ -14,13 +14,16 @@ export async function up(knex: Knex): Promise<void> {
   if (rows.length > 0) {
     for (let i = 0; i < rows.length; i += BATCH_SIZE) {
       const batch = rows.slice(i, i + BATCH_SIZE);
+      const ids = batch.map((row) => row.id);
       // eslint-disable-next-line no-await-in-loop
-      await knex(TableName.SecretApprovalPolicy)
-        .whereIn(
-          "id",
-          batch.map((row) => row.id)
-        )
-        .update({ shouldCheckSecretPermission: true });
+      await knex.raw(
+        `
+        UPDATE ??
+        SET ?? = true
+        WHERE ?? IN (${ids.map(() => "?").join(",")})
+      `,
+        [TableName.SecretApprovalPolicy, "shouldCheckSecretPermission", "id", ids]
+      );
     }
   }
 }
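For context: in knex's `raw`, `??` placeholders bind identifiers (table and column names) while `?` placeholders bind values, so every dynamic piece of the rewritten UPDATE stays parameterized. A minimal standalone sketch of the same pattern (hypothetical table and ids; here each generated `?` receives one id by spreading the array):

    import { knex } from "knex";

    const db = knex({ client: "pg", connection: process.env.DATABASE_URL });
    const ids = ["a1", "b2", "c3"];
    // ?? -> identifier binding, ? -> value binding; one "?" is generated per id
    await db.raw(
      `UPDATE ?? SET ?? = true WHERE ?? IN (${ids.map(() => "?").join(",")})`,
      ["secret_approval_policies", "shouldCheckSecretPermission", "id", ...ids]
    );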
@@ -0,0 +1,32 @@
import { Knex } from "knex";

import { dropConstraintIfExists } from "@app/db/migrations/utils/dropConstraintIfExists";
import { TableName } from "@app/db/schemas";

const CONSTRAINT_NAME = "unique_pki_acme_account_public_key_and_profile_id";

export async function up(knex: Knex): Promise<void> {
  if (await knex.schema.hasTable(TableName.PkiAcmeAccount)) {
    const hasProfileId = await knex.schema.hasColumn(TableName.PkiAcmeAccount, "profileId");
    const hasPublicKeyThumbprint = await knex.schema.hasColumn(TableName.PkiAcmeAccount, "publicKeyThumbprint");

    if (hasProfileId && hasPublicKeyThumbprint) {
      await knex.schema.alterTable(TableName.PkiAcmeAccount, (table) => {
        table.unique(["profileId", "publicKeyThumbprint"], { indexName: CONSTRAINT_NAME });
      });
    }
  }
}

export async function down(knex: Knex): Promise<void> {
  if (await knex.schema.hasTable(TableName.PkiAcmeAccount)) {
    const hasProfileId = await knex.schema.hasColumn(TableName.PkiAcmeAccount, "profileId");
    const hasPublicKeyThumbprint = await knex.schema.hasColumn(TableName.PkiAcmeAccount, "publicKeyThumbprint");

    await knex.schema.alterTable(TableName.PkiAcmeAccount, async () => {
      if (hasProfileId && hasPublicKeyThumbprint) {
        await dropConstraintIfExists(TableName.PkiAcmeAccount, CONSTRAINT_NAME, knex);
      }
    });
  }
}
@@ -0,0 +1,19 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  if (await knex.schema.hasColumn(TableName.SecretApprovalPolicy, "shouldCheckSecretPermission")) {
    await knex.schema.alterTable(TableName.SecretApprovalPolicy, (t) => {
      t.dropColumn("shouldCheckSecretPermission");
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  if (!(await knex.schema.hasColumn(TableName.SecretApprovalPolicy, "shouldCheckSecretPermission"))) {
    await knex.schema.alterTable(TableName.SecretApprovalPolicy, (t) => {
      t.boolean("shouldCheckSecretPermission").nullable();
    });
  }
}
@@ -17,8 +17,7 @@ export const SecretApprovalPoliciesSchema = z.object({
   updatedAt: z.date(),
   enforcementLevel: z.string().default("hard"),
   deletedAt: z.date().nullable().optional(),
-  allowedSelfApprovals: z.boolean().default(true),
-  shouldCheckSecretPermission: z.boolean().nullable().optional()
+  allowedSelfApprovals: z.boolean().default(true)
 });

 export type TSecretApprovalPolicies = z.infer<typeof SecretApprovalPoliciesSchema>;
@@ -1,8 +1,14 @@
 import { z } from "zod";

-import { GroupsSchema, OrgMembershipRole, UsersSchema } from "@app/db/schemas";
-import { EFilterReturnedUsers } from "@app/ee/services/group/group-types";
+import { GroupsSchema, OrgMembershipRole, ProjectsSchema, UsersSchema } from "@app/db/schemas";
+import {
+  EFilterReturnedProjects,
+  EFilterReturnedUsers,
+  EGroupProjectsOrderBy
+} from "@app/ee/services/group/group-types";
 import { ApiDocsTags, GROUPS } from "@app/lib/api-docs";
+import { OrderByDirection } from "@app/lib/types";
 import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
 import { slugSchema } from "@app/server/lib/schemas";
 import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
 import { AuthMode } from "@app/services/auth/auth-type";
@@ -11,6 +17,9 @@ export const registerGroupRouter = async (server: FastifyZodProvider) => {
   server.route({
     url: "/",
     method: "POST",
+    config: {
+      rateLimit: writeLimit
+    },
     onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
     schema: {
       hide: false,

@@ -40,6 +49,9 @@ export const registerGroupRouter = async (server: FastifyZodProvider) => {
   server.route({
     url: "/:id",
     method: "GET",
+    config: {
+      rateLimit: readLimit
+    },
     onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
     schema: {
       hide: false,

@@ -69,6 +81,9 @@ export const registerGroupRouter = async (server: FastifyZodProvider) => {
   server.route({
     url: "/",
     method: "GET",
+    config: {
+      rateLimit: readLimit
+    },
     onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
     schema: {
       hide: false,

@@ -93,6 +108,9 @@ export const registerGroupRouter = async (server: FastifyZodProvider) => {
   server.route({
     url: "/:id",
     method: "PATCH",
+    config: {
+      rateLimit: writeLimit
+    },
     onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
     schema: {
       hide: false,

@@ -128,6 +146,9 @@ export const registerGroupRouter = async (server: FastifyZodProvider) => {
   server.route({
     url: "/:id",
     method: "DELETE",
+    config: {
+      rateLimit: writeLimit
+    },
     onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
     schema: {
       hide: false,

@@ -155,6 +176,9 @@ export const registerGroupRouter = async (server: FastifyZodProvider) => {
   server.route({
     method: "GET",
     url: "/:id/users",
+    config: {
+      rateLimit: readLimit
+    },
     onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
     schema: {
       hide: false,
@@ -163,7 +187,7 @@ export const registerGroupRouter = async (server: FastifyZodProvider) => {
         id: z.string().trim().describe(GROUPS.LIST_USERS.id)
       }),
       querystring: z.object({
-        offset: z.coerce.number().min(0).max(100).default(0).describe(GROUPS.LIST_USERS.offset),
+        offset: z.coerce.number().min(0).default(0).describe(GROUPS.LIST_USERS.offset),
         limit: z.coerce.number().min(1).max(100).default(10).describe(GROUPS.LIST_USERS.limit),
         username: z.string().trim().optional().describe(GROUPS.LIST_USERS.username),
         search: z.string().trim().optional().describe(GROUPS.LIST_USERS.search),
@@ -203,9 +227,72 @@ export const registerGroupRouter = async (server: FastifyZodProvider) => {
     }
   });

+  server.route({
+    method: "GET",
+    url: "/:id/projects",
+    config: {
+      rateLimit: readLimit
+    },
+    onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
+    schema: {
+      hide: false,
+      tags: [ApiDocsTags.Groups],
+      params: z.object({
+        id: z.string().trim().describe(GROUPS.LIST_PROJECTS.id)
+      }),
+      querystring: z.object({
+        offset: z.coerce.number().min(0).default(0).describe(GROUPS.LIST_PROJECTS.offset),
+        limit: z.coerce.number().min(1).max(100).default(10).describe(GROUPS.LIST_PROJECTS.limit),
+        search: z.string().trim().optional().describe(GROUPS.LIST_PROJECTS.search),
+        filter: z.nativeEnum(EFilterReturnedProjects).optional().describe(GROUPS.LIST_PROJECTS.filterProjects),
+        orderBy: z
+          .nativeEnum(EGroupProjectsOrderBy)
+          .default(EGroupProjectsOrderBy.Name)
+          .describe(GROUPS.LIST_PROJECTS.orderBy),
+        orderDirection: z
+          .nativeEnum(OrderByDirection)
+          .default(OrderByDirection.ASC)
+          .describe(GROUPS.LIST_PROJECTS.orderDirection)
+      }),
+      response: {
+        200: z.object({
+          projects: ProjectsSchema.pick({
+            id: true,
+            name: true,
+            slug: true,
+            description: true,
+            type: true
+          })
+            .merge(
+              z.object({
+                joinedGroupAt: z.date().nullable()
+              })
+            )
+            .array(),
+          totalCount: z.number()
+        })
+      }
+    },
+    handler: async (req) => {
+      const { projects, totalCount } = await server.services.group.listGroupProjects({
+        id: req.params.id,
+        actor: req.permission.type,
+        actorId: req.permission.id,
+        actorAuthMethod: req.permission.authMethod,
+        actorOrgId: req.permission.orgId,
+        ...req.query
+      });
+
+      return { projects, totalCount };
+    }
+  });
+
   server.route({
     method: "POST",
     url: "/:id/users/:username",
     config: {
       rateLimit: writeLimit
     },
     onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
     schema: {
       hide: false,

@@ -241,6 +328,9 @@ export const registerGroupRouter = async (server: FastifyZodProvider) => {
   server.route({
     method: "DELETE",
     url: "/:id/users/:username",
+    config: {
+      rateLimit: writeLimit
+    },
     onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
     schema: {
       hide: false,
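For context, a sketch of how a client might call the new endpoint (hypothetical host and token; the exact mount point depends on where this router is registered, likely under /api/v1/groups):

    const res = await fetch(
      "https://app.example.com/api/v1/groups/<group-id>/projects?limit=10&filter=assignedProjects&orderBy=name&orderDirection=asc",
      { headers: { authorization: `Bearer ${token}` } }
    );
    // the response carries both the requested page and the total match count
    const { projects, totalCount } = (await res.json()) as {
      projects: { id: string; name: string; slug: string; joinedGroupAt: string | null }[];
      totalCount: number;
    };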
@@ -305,8 +305,7 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
             secretPath: z.string().optional().nullable(),
             enforcementLevel: z.string(),
             deletedAt: z.date().nullish(),
-            allowedSelfApprovals: z.boolean(),
-            shouldCheckSecretPermission: z.boolean().nullable().optional()
+            allowedSelfApprovals: z.boolean()
           }),
           environment: z.string(),
           statusChangedByUser: approvalRequestUser.optional(),
@@ -27,6 +27,17 @@ export const getChefServerUrl = async (serverUrl?: string) => {
   return chefServerUrl;
 };

+const buildSecureUrl = (baseUrl: string, path: string): string => {
+  try {
+    const url = new URL(path, baseUrl);
+    return url.toString();
+  } catch (error) {
+    throw new BadRequestError({
+      message: "Invalid URL construction parameters"
+    });
+  }
+};
+
 // Helper to ensure private key is in proper PEM format
 const formatPrivateKey = (key: string): string => {
   let formattedKey = key.trim();
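One property of `new URL(path, base)` worth noting (illustrative, not part of the patch): relative paths resolve against the base, but an absolute URL passed as the first argument replaces the base entirely, so callers should only feed it server-side path fragments:

    // relative path: resolved against the base
    new URL("/organizations/acme/data/creds", "https://chef.example.com").toString();
    // => "https://chef.example.com/organizations/acme/data/creds"

    // absolute "path": the base is ignored
    new URL("https://attacker.example/x", "https://chef.example.com").toString();
    // => "https://attacker.example/x"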
@@ -138,7 +149,8 @@ export const validateChefConnectionCredentials = async (config: TChefConnectionC

     const headers = getChefAuthHeaders("GET", path, "", inputCredentials.userName, inputCredentials.privateKey);

-    await request.get(`${hostServerUrl}${path}`, {
+    const secureUrl = buildSecureUrl(hostServerUrl, path);
+    await request.get(secureUrl, {
       headers
     });
   } catch (error: unknown) {

@@ -168,7 +180,8 @@ export const listChefDataBags = async (appConnection: TChefConnection): Promise<

     const headers = getChefAuthHeaders("GET", path, body, userName, privateKey);

-    const res = await request.get<Record<string, string>>(`${hostServerUrl}${path}`, {
+    const secureUrl = buildSecureUrl(hostServerUrl, path);
+    const res = await request.get<Record<string, string>>(secureUrl, {
       headers
     });

@@ -203,7 +216,8 @@ export const listChefDataBagItems = async (

     const headers = getChefAuthHeaders("GET", path, body, userName, privateKey);

-    const res = await request.get<Record<string, string>>(`${hostServerUrl}${path}`, {
+    const secureUrl = buildSecureUrl(hostServerUrl, path);
+    const res = await request.get<Record<string, string>>(secureUrl, {
       headers
     });

@@ -238,7 +252,8 @@ export const getChefDataBagItem = async ({

     const headers = getChefAuthHeaders("GET", path, body, userName, privateKey);

-    const res = await request.get<TChefDataBagItemContent>(`${hostServerUrl}${path}`, {
+    const secureUrl = buildSecureUrl(hostServerUrl, path);
+    const res = await request.get<TChefDataBagItemContent>(secureUrl, {
       headers
     });
@@ -255,6 +270,38 @@ export const getChefDataBagItem = async ({
   }
 };

+export const createChefDataBagItem = async ({
+  serverUrl,
+  userName,
+  privateKey,
+  orgName,
+  dataBagName,
+  data
+}: Omit<TUpdateChefDataBagItem, "dataBagItemName">): Promise<void> => {
+  try {
+    const path = `/organizations/${orgName}/data/${dataBagName}`;
+    const body = JSON.stringify(data);
+
+    const hostServerUrl = await getChefServerUrl(serverUrl);
+
+    const headers = getChefAuthHeaders("POST", path, body, userName, privateKey);
+
+    const secureUrl = buildSecureUrl(hostServerUrl, path);
+    await request.post(secureUrl, data, {
+      headers
+    });
+  } catch (error) {
+    if (error instanceof AxiosError) {
+      throw new BadRequestError({
+        message: `Failed to create Chef data bag item: ${error.message || "Unknown error"}`
+      });
+    }
+    throw new BadRequestError({
+      message: "Unable to create Chef data bag item"
+    });
+  }
+};
+
 export const updateChefDataBagItem = async ({
   serverUrl,
   userName,

@@ -272,7 +319,8 @@ export const updateChefDataBagItem = async ({

     const headers = getChefAuthHeaders("PUT", path, body, userName, privateKey);

-    await request.put(`${hostServerUrl}${path}`, data, {
+    const secureUrl = buildSecureUrl(hostServerUrl, path);
+    await request.put(secureUrl, data, {
       headers
     });
   } catch (error) {
@@ -286,3 +334,35 @@ export const updateChefDataBagItem = async ({
     });
   }
 };
+
+export const removeChefDataBagItem = async ({
+  serverUrl,
+  userName,
+  privateKey,
+  orgName,
+  dataBagName,
+  dataBagItemName
+}: Omit<TUpdateChefDataBagItem, "data">): Promise<void> => {
+  try {
+    const path = `/organizations/${orgName}/data/${dataBagName}/${dataBagItemName}`;
+    const body = "";
+
+    const hostServerUrl = await getChefServerUrl(serverUrl);
+
+    const headers = getChefAuthHeaders("DELETE", path, body, userName, privateKey);
+
+    const secureUrl = buildSecureUrl(hostServerUrl, path);
+    await request.delete(secureUrl, {
+      headers
+    });
+  } catch (error) {
+    if (error instanceof AxiosError) {
+      throw new BadRequestError({
+        message: `Failed to remove Chef data bag item: ${error.message || "Unknown error"}`
+      });
+    }
+    throw new BadRequestError({
+      message: "Unable to remove Chef data bag item"
+    });
+  }
+};
@@ -4,8 +4,9 @@ import { TDbClient } from "@app/db";
 import { AccessScope, TableName, TGroups } from "@app/db/schemas";
 import { DatabaseError } from "@app/lib/errors";
 import { buildFindFilter, ormify, selectAllTableCols, TFindFilter, TFindOpt } from "@app/lib/knex";
+import { OrderByDirection } from "@app/lib/types";

-import { EFilterReturnedUsers } from "./group-types";
+import { EFilterReturnedProjects, EFilterReturnedUsers, EGroupProjectsOrderBy } from "./group-types";

 export type TGroupDALFactory = ReturnType<typeof groupDALFactory>;
@@ -166,6 +167,89 @@ export const groupDALFactory = (db: TDbClient) => {
     }
   };

+  const findAllGroupProjects = async ({
+    orgId,
+    groupId,
+    offset,
+    limit,
+    search,
+    filter,
+    orderBy,
+    orderDirection
+  }: {
+    orgId: string;
+    groupId: string;
+    offset?: number;
+    limit?: number;
+    search?: string;
+    filter?: EFilterReturnedProjects;
+    orderBy?: EGroupProjectsOrderBy;
+    orderDirection?: OrderByDirection;
+  }) => {
+    try {
+      const query = db
+        .replicaNode()(TableName.Project)
+        .where(`${TableName.Project}.orgId`, orgId)
+        .leftJoin(TableName.Membership, (bd) => {
+          bd.on(`${TableName.Project}.id`, "=", `${TableName.Membership}.scopeProjectId`)
+            .andOn(`${TableName.Membership}.actorGroupId`, "=", db.raw("?", [groupId]))
+            .andOn(`${TableName.Membership}.scope`, "=", db.raw("?", [AccessScope.Project]));
+        })
+        .select(
+          db.ref("id").withSchema(TableName.Project),
+          db.ref("name").withSchema(TableName.Project),
+          db.ref("slug").withSchema(TableName.Project),
+          db.ref("description").withSchema(TableName.Project),
+          db.ref("type").withSchema(TableName.Project),
+          db.ref("createdAt").withSchema(TableName.Membership).as("joinedGroupAt"),
+          db.raw(`count(*) OVER() as "totalCount"`)
+        )
+        .offset(offset ?? 0);
+
+      if (orderBy) {
+        void query.orderByRaw(
+          `LOWER(${TableName.Project}.??) ${orderDirection === OrderByDirection.ASC ? "asc" : "desc"}`,
+          [orderBy]
+        );
+      }
+
+      if (limit) {
+        void query.limit(limit);
+      }
+
+      if (search) {
+        void query.andWhereRaw(
+          `CONCAT_WS(' ', "${TableName.Project}"."name", "${TableName.Project}"."slug", "${TableName.Project}"."description") ilike ?`,
+          [`%${search}%`]
+        );
+      }
+
+      switch (filter) {
+        case EFilterReturnedProjects.ASSIGNED_PROJECTS:
+          void query.whereNotNull(`${TableName.Membership}.id`);
+          break;
+        case EFilterReturnedProjects.UNASSIGNED_PROJECTS:
+          void query.whereNull(`${TableName.Membership}.id`);
+          break;
+        default:
+          break;
+      }
+
+      const projects = await query;
+
+      return {
+        projects: projects.map(({ joinedGroupAt, ...project }) => ({
+          ...project,
+          joinedGroupAt
+        })),
+        // @ts-expect-error col select is raw and not strongly typed
+        totalCount: Number(projects?.[0]?.totalCount ?? 0)
+      };
+    } catch (error) {
+      throw new DatabaseError({ error, name: "Find all group projects" });
+    }
+  };
+
   const findGroupsByProjectId = async (projectId: string, tx?: Knex) => {
     try {
       const docs = await (tx || db.replicaNode())(TableName.Groups)

@@ -230,6 +314,7 @@ export const groupDALFactory = (db: TDbClient) => {
     findGroups,
     findByOrgId,
     findAllGroupPossibleMembers,
+    findAllGroupProjects,
     findGroupsByProjectId,
     findById,
     findOne
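The `count(*) OVER()` selection piggybacks the total matching-row count onto every returned row, so one query serves both the page of results and the pagination total; the window function is evaluated after filtering but before LIMIT/OFFSET. The same pattern in isolation (hypothetical knex instance and table):

    const rows = await db("projects")
      .select("id", "name", db.raw(`count(*) OVER() as "totalCount"`))
      .offset(0)
      .limit(10);
    // every row carries the same pre-LIMIT total
    const totalCount = Number(rows[0]?.totalCount ?? 0);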
@@ -24,6 +24,7 @@ import {
   TCreateGroupDTO,
   TDeleteGroupDTO,
   TGetGroupByIdDTO,
+  TListGroupProjectsDTO,
   TListGroupUsersDTO,
   TRemoveUserFromGroupDTO,
   TUpdateGroupDTO

@@ -34,7 +35,14 @@ type TGroupServiceFactoryDep = {
   userDAL: Pick<TUserDALFactory, "find" | "findUserEncKeyByUserIdsBatch" | "transaction" | "findUserByUsername">;
   groupDAL: Pick<
     TGroupDALFactory,
-    "create" | "findOne" | "update" | "delete" | "findAllGroupPossibleMembers" | "findById" | "transaction"
+    | "create"
+    | "findOne"
+    | "update"
+    | "delete"
+    | "findAllGroupPossibleMembers"
+    | "findById"
+    | "transaction"
+    | "findAllGroupProjects"
   >;
   membershipGroupDAL: Pick<TMembershipGroupDALFactory, "find" | "findOne" | "create">;
   membershipRoleDAL: Pick<TMembershipRoleDALFactory, "create" | "delete">;
@@ -367,6 +375,55 @@ export const groupServiceFactory = ({
     return { users: members, totalCount };
   };

+  const listGroupProjects = async ({
+    id,
+    offset,
+    limit,
+    search,
+    filter,
+    orderBy,
+    orderDirection,
+    actor,
+    actorId,
+    actorAuthMethod,
+    actorOrgId
+  }: TListGroupProjectsDTO) => {
+    if (!actorOrgId) throw new UnauthorizedError({ message: "No organization ID provided in request" });
+
+    const { permission } = await permissionService.getOrgPermission({
+      scope: OrganizationActionScope.Any,
+      actor,
+      actorId,
+      orgId: actorOrgId,
+      actorAuthMethod,
+      actorOrgId
+    });
+    ForbiddenError.from(permission).throwUnlessCan(OrgPermissionGroupActions.Read, OrgPermissionSubjects.Groups);
+
+    const group = await groupDAL.findOne({
+      orgId: actorOrgId,
+      id
+    });
+
+    if (!group)
+      throw new NotFoundError({
+        message: `Failed to find group with ID ${id}`
+      });
+
+    const { projects, totalCount } = await groupDAL.findAllGroupProjects({
+      orgId: group.orgId,
+      groupId: group.id,
+      offset,
+      limit,
+      search,
+      filter,
+      orderBy,
+      orderDirection
+    });
+
+    return { projects, totalCount };
+  };
+
   const addUserToGroup = async ({ id, username, actor, actorId, actorAuthMethod, actorOrgId }: TAddUserToGroupDTO) => {
     if (!actorOrgId) throw new UnauthorizedError({ message: "No organization ID provided in request" });

@@ -535,6 +592,7 @@ export const groupServiceFactory = ({
     updateGroup,
     deleteGroup,
     listGroupUsers,
+    listGroupProjects,
     addUserToGroup,
     removeUserFromGroup,
     getGroupById
@@ -2,7 +2,7 @@ import { Knex } from "knex";

 import { TGroups } from "@app/db/schemas";
 import { TUserGroupMembershipDALFactory } from "@app/ee/services/group/user-group-membership-dal";
-import { TGenericPermission } from "@app/lib/types";
+import { OrderByDirection, TGenericPermission } from "@app/lib/types";
 import { TMembershipGroupDALFactory } from "@app/services/membership-group/membership-group-dal";
 import { TOrgDALFactory } from "@app/services/org/org-dal";
 import { TProjectDALFactory } from "@app/services/project/project-dal";

@@ -42,6 +42,16 @@ export type TListGroupUsersDTO = {
   filter?: EFilterReturnedUsers;
 } & TGenericPermission;

+export type TListGroupProjectsDTO = {
+  id: string;
+  offset: number;
+  limit: number;
+  search?: string;
+  filter?: EFilterReturnedProjects;
+  orderBy?: EGroupProjectsOrderBy;
+  orderDirection?: OrderByDirection;
+} & TGenericPermission;
+
 export type TListProjectGroupUsersDTO = TListGroupUsersDTO & {
   projectId: string;
 };
@@ -111,3 +121,12 @@ export enum EFilterReturnedUsers {
   EXISTING_MEMBERS = "existingMembers",
   NON_MEMBERS = "nonMembers"
 }
+
+export enum EFilterReturnedProjects {
+  ASSIGNED_PROJECTS = "assignedProjects",
+  UNASSIGNED_PROJECTS = "unassignedProjects"
+}
+
+export enum EGroupProjectsOrderBy {
+  Name = "name"
+}
@@ -39,3 +39,9 @@ export const getDefaultOnPremFeatures = () => {
 };

 export const setupLicenseRequestWithStore = () => {};
+
+export const getLicenseKeyConfig = () => {
+  return {
+    isValid: false
+  };
+};
@@ -1,13 +1,56 @@
 import axios, { AxiosError } from "axios";

 import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
-import { getConfig } from "@app/lib/config/env";
+import { getConfig, TEnvConfig } from "@app/lib/config/env";
 import { request } from "@app/lib/config/request";
 import { BadRequestError } from "@app/lib/errors";
 import { logger } from "@app/lib/logger";
 import { UserAliasType } from "@app/services/user-alias/user-alias-types";

-import { TFeatureSet } from "./license-types";
+import { LicenseType, TFeatureSet, TLicenseKeyConfig, TOfflineLicenseContents } from "./license-types";
+
+export const isOfflineLicenseKey = (licenseKey: string): boolean => {
+  try {
+    const contents = JSON.parse(Buffer.from(licenseKey, "base64").toString("utf8")) as TOfflineLicenseContents;
+
+    return "signature" in contents && "license" in contents;
+  } catch (error) {
+    return false;
+  }
+};
+
+export const getLicenseKeyConfig = (
+  config?: Pick<TEnvConfig, "LICENSE_KEY" | "LICENSE_KEY_OFFLINE">
+): TLicenseKeyConfig => {
+  const cfg = config || getConfig();
+
+  if (!cfg) {
+    return { isValid: false };
+  }
+
+  const licenseKey = cfg.LICENSE_KEY;
+
+  if (licenseKey) {
+    if (isOfflineLicenseKey(licenseKey)) {
+      return { isValid: true, licenseKey, type: LicenseType.Offline };
+    }
+
+    return { isValid: true, licenseKey, type: LicenseType.Online };
+  }
+
+  const offlineLicenseKey = cfg.LICENSE_KEY_OFFLINE;
+
+  // backwards compatibility
+  if (offlineLicenseKey) {
+    if (isOfflineLicenseKey(offlineLicenseKey)) {
+      return { isValid: true, licenseKey: offlineLicenseKey, type: LicenseType.Offline };
+    }
+
+    return { isValid: false };
+  }
+
+  return { isValid: false };
+};

 export const getDefaultOnPremFeatures = (): TFeatureSet => ({
   _id: null,
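`getLicenseKeyConfig` returns `TLicenseKeyConfig`, a union discriminated on `isValid`, so callers get compile-time narrowing instead of juggling two env variables; a usage sketch:

    const cfg = getLicenseKeyConfig();
    if (cfg.isValid) {
      // on this branch TypeScript knows `licenseKey` and `type` exist
      console.log(`license configured (${cfg.type})`, cfg.licenseKey.length);
    } else {
      console.log("no license key configured");
    }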
@@ -22,9 +22,10 @@ import { OrgPermissionBillingActions, OrgPermissionSubjects } from "../permissio
 import { TPermissionServiceFactory } from "../permission/permission-service-types";
 import { BillingPlanRows, BillingPlanTableHead } from "./licence-enums";
 import { TLicenseDALFactory } from "./license-dal";
-import { getDefaultOnPremFeatures, setupLicenseRequestWithStore } from "./license-fns";
+import { getDefaultOnPremFeatures, getLicenseKeyConfig, setupLicenseRequestWithStore } from "./license-fns";
 import {
   InstanceType,
+  LicenseType,
   TAddOrgPmtMethodDTO,
   TAddOrgTaxIdDTO,
   TCreateOrgPortalSession,

@@ -77,6 +78,7 @@ export const licenseServiceFactory = ({
   let instanceType = InstanceType.OnPrem;
   let onPremFeatures: TFeatureSet = getDefaultOnPremFeatures();
   let selfHostedLicense: TOfflineLicense | null = null;
+  const licenseKeyConfig = getLicenseKeyConfig(envConfig);

   const licenseServerCloudApi = setupLicenseRequestWithStore(
     envConfig.LICENSE_SERVER_URL || "",

@@ -85,10 +87,13 @@ export const licenseServiceFactory = ({
     envConfig.INTERNAL_REGION
   );

+  const onlineLicenseKey =
+    licenseKeyConfig.isValid && licenseKeyConfig.type === LicenseType.Online ? licenseKeyConfig.licenseKey : "";
+
   const licenseServerOnPremApi = setupLicenseRequestWithStore(
     envConfig.LICENSE_SERVER_URL || "",
     LICENSE_SERVER_ON_PREM_LOGIN,
-    envConfig.LICENSE_KEY || "",
+    onlineLicenseKey,
     envConfig.INTERNAL_REGION
   );

@@ -131,7 +136,7 @@ export const licenseServiceFactory = ({
       return;
     }

-    if (envConfig.LICENSE_KEY) {
+    if (licenseKeyConfig.isValid && licenseKeyConfig.type === LicenseType.Online) {
       const token = await licenseServerOnPremApi.refreshLicense();
       if (token) {
         await syncLicenseKeyOnPremFeatures(true);

@@ -142,10 +147,10 @@ export const licenseServiceFactory = ({
       return;
     }

-    if (envConfig.LICENSE_KEY_OFFLINE) {
+    if (licenseKeyConfig.isValid && licenseKeyConfig.type === LicenseType.Offline) {
       let isValidOfflineLicense = true;
       const contents: TOfflineLicenseContents = JSON.parse(
-        Buffer.from(envConfig.LICENSE_KEY_OFFLINE, "base64").toString("utf8")
+        Buffer.from(licenseKeyConfig.licenseKey, "base64").toString("utf8")
       );
       const isVerified = await verifyOfflineLicense(JSON.stringify(contents.license), contents.signature);

@@ -184,7 +189,7 @@ export const licenseServiceFactory = ({
   };

   const initializeBackgroundSync = async () => {
-    if (envConfig.LICENSE_KEY) {
+    if (licenseKeyConfig?.isValid && licenseKeyConfig?.type === LicenseType.Online) {
       logger.info("Setting up background sync process for refresh onPremFeatures");
       const job = new CronJob("*/10 * * * *", syncLicenseKeyOnPremFeatures);
       job.start();
@@ -136,3 +136,18 @@ export type TDelOrgTaxIdDTO = TOrgPermission & { taxId: string };
 export type TOrgInvoiceDTO = TOrgPermission;

 export type TOrgLicensesDTO = TOrgPermission;
+
+export enum LicenseType {
+  Offline = "offline",
+  Online = "online"
+}
+
+export type TLicenseKeyConfig =
+  | {
+      isValid: false;
+    }
+  | {
+      isValid: true;
+      licenseKey: string;
+      type: LicenseType;
+    };
@@ -74,7 +74,12 @@ export const pkiAcmeChallengeServiceFactory = ({
       // Note: we are inside a transaction, and ideally we should not hold it open while performing a
       // long-running operation. But assuming we are not running tons of challenge validations at the
       // same time, this should be fine.
-      const challengeResponse = await fetch(challengeUrl, { signal: AbortSignal.timeout(timeoutMs) });
+      const challengeResponse = await fetch(challengeUrl, {
+        // If we override the host in development mode, still send the original host in the header
+        // so the upstream server can validate the request
+        headers: { Host: host },
+        signal: AbortSignal.timeout(timeoutMs)
+      });
       if (challengeResponse.status !== 200) {
         throw new AcmeIncorrectResponseError({
           message: `ACME challenge response is not 200: ${challengeResponse.status}`
@@ -58,7 +58,15 @@ export const GetAcmeDirectoryResponseSchema = z.object({
   newNonce: z.string(),
   newAccount: z.string(),
   newOrder: z.string(),
-  revokeCert: z.string().optional()
+  revokeCert: z.string().optional(),
+  meta: z
+    .object({
+      termsOfService: z.string().optional(),
+      website: z.string().optional(),
+      caaIdentities: z.array(z.string()).optional(),
+      externalAccountRequired: z.boolean().optional()
+    })
+    .optional()
 });

 // New Account payload schema
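The optional `meta` object mirrors the directory metadata fields defined in RFC 8555 section 7.1.1 (`termsOfService`, `website`, `caaIdentities`, `externalAccountRequired`). An illustrative document that now passes the extended schema (URLs are made up):

    const directory = GetAcmeDirectoryResponseSchema.parse({
      newNonce: "https://ca.example/acme/new-nonce",
      newAccount: "https://ca.example/acme/new-account",
      newOrder: "https://ca.example/acme/new-order",
      // advertises that clients must present external account binding on registration
      meta: { externalAccountRequired: true }
    });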
@@ -206,6 +206,9 @@ export const pkiAcmeServiceFactory = ({
     const { protectedHeader: rawProtectedHeader, payload: rawPayload } = result;
     try {
       const protectedHeader = ProtectedHeaderSchema.parse(rawProtectedHeader);
+      if (protectedHeader.jwk && protectedHeader.kid) {
+        throw new AcmeMalformedError({ message: "Both JWK and KID are provided in the protected header" });
+      }
       const parsedUrl = (() => {
         try {
           return new URL(protectedHeader.url);

@@ -288,6 +291,7 @@ export const pkiAcmeServiceFactory = ({
       url,
       rawJwsPayload,
       getJWK: async (protectedHeader) => {
+        // resolve the account's JWK from the KID, since only KID-based requests reach this handler
         if (!protectedHeader.kid) {
           throw new AcmeMalformedError({ message: "KID is required in the protected header" });
         }
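RFC 8555 section 6.2 requires a JWS protected header to carry either `jwk` or `kid`, never both, which is what this guard enforces. The same rule could also be expressed declaratively in the schema itself; a hypothetical zod sketch (not how the patch does it):

    const HeaderKeySelection = z
      .object({ jwk: z.record(z.unknown()).optional(), kid: z.string().optional() })
      .refine((h) => !(h.jwk && h.kid), {
        message: "Both JWK and KID are provided in the protected header"
      });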
@@ -353,7 +357,10 @@ export const pkiAcmeServiceFactory = ({
     return {
       newNonce: buildUrl(profile.id, "/new-nonce"),
       newAccount: buildUrl(profile.id, "/new-account"),
-      newOrder: buildUrl(profile.id, "/new-order")
+      newOrder: buildUrl(profile.id, "/new-order"),
+      meta: {
+        externalAccountRequired: true
+      }
     };
   };
@@ -386,11 +393,61 @@ export const pkiAcmeServiceFactory = ({
     payload: TCreateAcmeAccountPayload;
   }): Promise<TAcmeResponse<TCreateAcmeAccountResponse>> => {
     const profile = await validateAcmeProfile(profileId);
+    const publicKeyThumbprint = await calculateJwkThumbprint(jwk, "sha256");
+
+    const existingAccount: TPkiAcmeAccounts | null = await acmeAccountDAL.findByProfileIdAndPublicKeyThumbprintAndAlg(
+      profileId,
+      alg,
+      publicKeyThumbprint
+    );
+    if (onlyReturnExisting) {
+      if (!existingAccount) {
+        throw new AcmeAccountDoesNotExistError({ message: "ACME account not found" });
+      }
+      return {
+        status: 200,
+        body: {
+          status: "valid",
+          contact: existingAccount.emails,
+          orders: buildUrl(profile.id, `/accounts/${existingAccount.id}/orders`)
+        },
+        headers: {
+          Location: buildUrl(profile.id, `/accounts/${existingAccount.id}`),
+          Link: `<${buildUrl(profile.id, "/directory")}>;rel="index"`
+        }
+      };
+    }
+
+    // Note: we only check EAB for new-account requests. This is a very special case for cert-manager.
+    // There's a bug in their ACME client implementation: they don't keep the account KID value they already
+    // have and instead rely on a '{"onlyReturnExisting": true}' new-account request to discover their KID.
+    // The problem is that this new-account request doesn't come with EAB. And when the get-existing-account
+    // operation fails, they just discard the error and proceed to request a new order. Since no KID is
+    // provided, their ACME client sends a JWK instead. As a result, we were seeing "KID not provided in
+    // header" errors on the new-order endpoint.
+    //
+    // To solve the problem, we loosen the EAB check for the onlyReturnExisting new-account request.
+    // This should be fine: we already checked EAB when the account was created, and private key
+    // ownership indicates it is the same user.
+    // ref: https://github.com/cert-manager/cert-manager/issues/7388#issuecomment-3535630925
     if (!externalAccountBinding) {
       throw new AcmeExternalAccountRequiredError({ message: "External account binding is required" });
     }
+    if (existingAccount) {
+      return {
+        status: 200,
+        body: {
+          status: "valid",
+          contact: existingAccount.emails,
+          orders: buildUrl(profile.id, `/accounts/${existingAccount.id}/orders`)
+        },
+        headers: {
+          Location: buildUrl(profile.id, `/accounts/${existingAccount.id}`),
+          Link: `<${buildUrl(profile.id, "/directory")}>;rel="index"`
+        }
+      };
+    }

-    const publicKeyThumbprint = await calculateJwkThumbprint(jwk, "sha256");
     const certificateManagerKmsId = await getProjectKmsCertificateKeyId({
       projectId: profile.projectId,
       projectDAL,

@@ -441,30 +498,7 @@ export const pkiAcmeServiceFactory = ({
       });
     }

-    const existingAccount: TPkiAcmeAccounts | null = await acmeAccountDAL.findByProfileIdAndPublicKeyThumbprintAndAlg(
-      profileId,
-      alg,
-      publicKeyThumbprint
-    );
-    if (onlyReturnExisting && !existingAccount) {
-      throw new AcmeAccountDoesNotExistError({ message: "ACME account not found" });
-    }
-    if (existingAccount) {
-      // With the same public key, we found an existing account, just return it
-      return {
-        status: 200,
-        body: {
-          status: "valid",
-          contact: existingAccount.emails,
-          orders: buildUrl(profile.id, `/accounts/${existingAccount.id}/orders`)
-        },
-        headers: {
-          Location: buildUrl(profile.id, `/accounts/${existingAccount.id}`),
-          Link: `<${buildUrl(profile.id, "/directory")}>;rel="index"`
-        }
-      };
-    }
-
     // TODO: handle unique constraint violation error, should be very very rare
     const newAccount = await acmeAccountDAL.create({
       profileId: profile.id,
       alg,
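`calculateJwkThumbprint(jwk, "sha256")` computes the RFC 7638 thumbprint (the jose library exposes a helper with this name and signature); it hashes only the required members of the key type, so the same key always yields the same value, which is what makes it safe as a uniqueness lookup key. A standalone sketch with a made-up key:

    import { calculateJwkThumbprint } from "jose";

    // for RSA keys only e, kty and n are hashed, in lexicographic order,
    // so member order and extra JWK fields do not change the result
    const thumbprint = await calculateJwkThumbprint({ kty: "RSA", e: "AQAB", n: "mmEWxUv2..." }, "sha256");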
@@ -181,11 +181,7 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
       tx.ref("enforcementLevel").withSchema(TableName.SecretApprovalPolicy).as("policyEnforcementLevel"),
       tx.ref("allowedSelfApprovals").withSchema(TableName.SecretApprovalPolicy).as("policyAllowedSelfApprovals"),
       tx.ref("approvals").withSchema(TableName.SecretApprovalPolicy).as("policyApprovals"),
-      tx.ref("deletedAt").withSchema(TableName.SecretApprovalPolicy).as("policyDeletedAt"),
-      tx
-        .ref("shouldCheckSecretPermission")
-        .withSchema(TableName.SecretApprovalPolicy)
-        .as("policySecretReadAccessCompat")
+      tx.ref("deletedAt").withSchema(TableName.SecretApprovalPolicy).as("policyDeletedAt")
     );

   const findById = async (id: string, tx?: Knex) => {

@@ -225,8 +221,7 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
           enforcementLevel: el.policyEnforcementLevel,
           envId: el.policyEnvId,
           deletedAt: el.policyDeletedAt,
-          allowedSelfApprovals: el.policyAllowedSelfApprovals,
-          shouldCheckSecretPermission: el.policySecretReadAccessCompat
+          allowedSelfApprovals: el.policyAllowedSelfApprovals
         }
       }),
       childrenMapper: [
@@ -106,6 +106,16 @@ export const GROUPS = {
     filterUsers:
       "Whether to filter the list of returned users. 'existingMembers' will only return existing users in the group, 'nonMembers' will only return users not in the group, undefined will return all users in the organization."
   },
+  LIST_PROJECTS: {
+    id: "The ID of the group to list projects for.",
+    offset: "The offset to start from. If you enter 10, it will start from the 10th project.",
+    limit: "The number of projects to return.",
+    search: "The text string that project name or slug will be filtered by.",
+    filterProjects:
+      "Whether to filter the list of returned projects. 'assignedProjects' will only return projects assigned to the group, 'unassignedProjects' will only return projects not assigned to the group, undefined will return all projects in the organization.",
+    orderBy: "The column to order projects by.",
+    orderDirection: "The direction to order projects in."
+  },
   ADD_USER: {
     id: "The ID of the group to add the user to.",
     username: "The username of the user to add to the group."
@@ -400,7 +400,7 @@ const envSchema = z
     isAcmeDevelopmentMode: data.NODE_ENV === "development" && data.ACME_DEVELOPMENT_MODE,
     isProductionMode: data.NODE_ENV === "production" || IS_PACKAGED,
     isRedisSentinelMode: Boolean(data.REDIS_SENTINEL_HOSTS),
-    isBddNockApiEnabled: data.NODE_ENV === "development" && data.BDD_NOCK_API_ENABLED,
+    isBddNockApiEnabled: data.NODE_ENV !== "production" && data.BDD_NOCK_API_ENABLED,
     REDIS_SENTINEL_HOSTS: data.REDIS_SENTINEL_HOSTS?.trim()
       ?.split(",")
       .map((el) => {
backend/src/server/routes/bdd/bdd-nock-router.dev.ts (new file, 104 lines)
@@ -0,0 +1,104 @@
import type { Definition } from "nock";
import { z } from "zod";

import { getConfig } from "@app/lib/config/env";
import { ForbiddenRequestError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";

// When running in production, we don't want to even import nock, because it's not needed and it increases memory usage a lot.
// It once caused an outage in the production environment.
// This is why we would rather crash the app if it's not in development mode (in that case, Kubernetes should stop it from rolling out).
if (process.env.NODE_ENV === "production") {
  throw new Error("BDD Nock API can only be enabled in development or test mode");
}

export const registerBddNockRouter = async (server: FastifyZodProvider) => {
  const appCfg = getConfig();
  const importNock = async () => {
    // eslint-disable-next-line import/no-extraneous-dependencies
    const { default: nock } = await import("nock");
    return nock;
  };

  const checkIfBddNockApiEnabled = () => {
    // Note: this API is only available in development mode and only for BDD tests.
    // This endpoint should NEVER BE ENABLED IN PRODUCTION!
    if (appCfg.NODE_ENV === "production" || !appCfg.isBddNockApiEnabled) {
      throw new ForbiddenRequestError({ message: "BDD Nock API is not enabled" });
    }
  };

  server.route({
    method: "POST",
    url: "/define",
    schema: {
      body: z.object({ definitions: z.unknown().array() }),
      response: {
        200: z.object({ status: z.string() })
      }
    },
    onRequest: verifyAuth([AuthMode.JWT]),
    handler: async (req) => {
      checkIfBddNockApiEnabled();
      const { body } = req;
      const { definitions } = body;
      logger.info(definitions, "Defining nock");
      const processedDefinitions = definitions.map((definition: unknown) => {
        const { path, ...rest } = definition as Definition;
        return {
          ...rest,
          path:
            path !== undefined && typeof path === "string"
              ? path
              : new RegExp((path as unknown as { regex: string }).regex ?? "")
        } as Definition;
      });

      const nock = await importNock();
      nock.define(processedDefinitions);
      // Ensure we are activating the nocks, because we could have called `nock.restore()` before this call.
      if (!nock.isActive()) {
        nock.activate();
      }
      return { status: "ok" };
    }
  });

  server.route({
    method: "POST",
    url: "/clean-all",
    schema: {
      response: {
        200: z.object({ status: z.string() })
      }
    },
    onRequest: verifyAuth([AuthMode.JWT]),
    handler: async () => {
      checkIfBddNockApiEnabled();
      logger.info("Cleaning all nocks");
      const nock = await importNock();
      nock.cleanAll();
      return { status: "ok" };
    }
  });

  server.route({
    method: "POST",
    url: "/restore",
    schema: {
      response: {
        200: z.object({ status: z.string() })
      }
    },
    onRequest: verifyAuth([AuthMode.JWT]),
    handler: async () => {
      checkIfBddNockApiEnabled();
      logger.info("Restore network requests from nock");
      const nock = await importNock();
      nock.restore();
      return { status: "ok" };
    }
  });
};
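For context, a BDD harness might drive the /define endpoint above like this; the host, port, and token are illustrative, and the payload shape follows the zod schema and path handling shown in the handler:

// Hypothetical client call for the /define endpoint; URL prefix and token are illustrative.
const defineNocks = async (authToken: string) => {
  const res = await fetch("http://localhost:8080/api/__bdd_nock__/define", {
    method: "POST",
    headers: { "Content-Type": "application/json", Authorization: `Bearer ${authToken}` },
    body: JSON.stringify({
      definitions: [
        // A literal string path is passed through to nock as-is...
        { scope: "https://example.com", method: "GET", path: "/health", status: 200, response: { ok: true } },
        // ...while an object with a `regex` field is turned into a RegExp by the handler.
        { scope: "https://example.com", method: "GET", path: { regex: "^/users/\\d+$" }, status: 200, response: {} }
      ]
    })
  });
  if (!res.ok) throw new Error(`define failed: ${res.status}`);
  return (await res.json()) as { status: string };
};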
6
backend/src/server/routes/bdd/bdd-nock-router.ts
Normal file
@@ -0,0 +1,6 @@
export const registerBddNockRouter = async () => {
  // This route is only available in development or test mode.
  // The actual implementation is in the dev.ts file; the import is aliased to that file in development or test mode.
  // If it is somehow enabled in production, we throw an error.
  throw new Error("BDD Nock should not be enabled in production");
};
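The aliasing mechanism itself is not part of this diff. As an assumption-labeled sketch, a bundler alias could perform the swap the comment describes:

// Hypothetical build configuration (an assumption, not shown in this commit): an esbuild
// alias resolves "@bdd_routes/bdd-nock-router" to the dev implementation outside production,
// and to this throwing stub otherwise.
import { build } from "esbuild";

const isDevOrTest = process.env.NODE_ENV !== "production";

void build({
  entryPoints: ["src/main.ts"],
  bundle: true,
  platform: "node",
  alias: {
    "@bdd_routes/bdd-nock-router": isDevOrTest
      ? "./src/server/routes/bdd/bdd-nock-router.dev.ts"
      : "./src/server/routes/bdd/bdd-nock-router.ts"
  },
  outfile: "dist/main.js"
});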
@@ -1,3 +1,4 @@
import { registerBddNockRouter } from "@bdd_routes/bdd-nock-router";
import { CronJob } from "cron";
import { Knex } from "knex";
import { monitorEventLoopDelay } from "perf_hooks";
@@ -2431,6 +2432,7 @@ export const registerRoutes = async (
    }
  }

  await kmsService.startService(hsmStatus);
  await telemetryQueue.startTelemetryCheck();
  await telemetryQueue.startAggregatedEventsJob();
  await dailyResourceCleanUp.init();
@@ -2443,7 +2445,6 @@ export const registerRoutes = async (
  await pkiSubscriberQueue.startDailyAutoRenewalJob();
  await pkiAlertV2Queue.init();
  await certificateV3Queue.init();
  await kmsService.startService(hsmStatus);
  await microsoftTeamsService.start();
  await dynamicSecretQueueService.init();
  await eventBusService.init();
@@ -2698,6 +2699,12 @@ export const registerRoutes = async (
  await server.register(registerV3Routes, { prefix: "/api/v3" });
  await server.register(registerV4Routes, { prefix: "/api/v4" });

  // Note: This is a special route for BDD tests. It's only available in development mode and only for BDD tests.
  // This route should NEVER BE ENABLED IN PRODUCTION!
  if (getConfig().isBddNockApiEnabled) {
    await server.register(registerBddNockRouter, { prefix: "/api/__bdd_nock__" });
  }

  server.addHook("onClose", async () => {
    cronJobs.forEach((job) => job.stop());
    await telemetryService.flushAll();
@@ -9,6 +9,8 @@ import {
  SuperAdminSchema,
  UsersSchema
} from "@app/db/schemas";
import { getLicenseKeyConfig } from "@app/ee/services/license/license-fns";
import { LicenseType } from "@app/ee/services/license/license-types";
import { getConfig, overridableKeys } from "@app/lib/config/env";
import { crypto } from "@app/lib/crypto/cryptography";
import { BadRequestError } from "@app/lib/errors";
@@ -65,6 +67,9 @@ export const registerAdminRouter = async (server: FastifyZodProvider) => {
      const config = await getServerCfg();
      const serverEnvs = getConfig();

      const licenseKeyConfig = getLicenseKeyConfig();
      const hasOfflineLicense = licenseKeyConfig.isValid && licenseKeyConfig.type === LicenseType.Offline;

      return {
        config: {
          ...config,
@@ -73,7 +78,7 @@ export const registerAdminRouter = async (server: FastifyZodProvider) => {
          isSecretScanningDisabled: serverEnvs.DISABLE_SECRET_SCANNING,
          kubernetesAutoFetchServiceAccountToken: serverEnvs.KUBERNETES_AUTO_FETCH_SERVICE_ACCOUNT_TOKEN,
          paramsFolderSecretDetectionEnabled: serverEnvs.PARAMS_FOLDER_SECRET_DETECTION_ENABLED,
          isOfflineUsageReportsEnabled: !!serverEnvs.LICENSE_KEY_OFFLINE
          isOfflineUsageReportsEnabled: hasOfflineLicense
        }
      };
    }
@@ -1,87 +0,0 @@
// import { z } from "zod";

// import { getConfig } from "@app/lib/config/env";
// import { ForbiddenRequestError } from "@app/lib/errors";
// import { logger } from "@app/lib/logger";
// import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
// import { AuthMode } from "@app/services/auth/auth-type";

// export const registerBddNockRouter = async (server: FastifyZodProvider) => {
//   const checkIfBddNockApiEnabled = () => {
//     const appCfg = getConfig();
//     // Note: Please note that this API is only available in development mode and only for BDD tests.
//     // This endpoint should NEVER BE ENABLED IN PRODUCTION!
//     if (appCfg.NODE_ENV !== "development" || !appCfg.isBddNockApiEnabled) {
//       throw new ForbiddenRequestError({ message: "BDD Nock API is not enabled" });
//     }
//   };

//   server.route({
//     method: "POST",
//     url: "/define",
//     schema: {
//       body: z.object({ definitions: z.unknown().array() }),
//       response: {
//         200: z.object({ status: z.string() })
//       }
//     },
//     onRequest: verifyAuth([AuthMode.JWT]),
//     handler: async (req) => {
//       checkIfBddNockApiEnabled();
//       const { body } = req;
//       const { definitions } = body;
//       logger.info(definitions, "Defining nock");
//       const processedDefinitions = definitions.map((definition: unknown) => {
//         const { path, ...rest } = definition as Definition;
//         return {
//           ...rest,
//           path:
//             path !== undefined && typeof path === "string"
//               ? path
//               : new RegExp((path as unknown as { regex: string }).regex ?? "")
//         } as Definition;
//       });

//       nock.define(processedDefinitions);
//       // Ensure we are activating the nocks, because we could have called `nock.restore()` before this call.
//       if (!nock.isActive()) {
//         nock.activate();
//       }
//       return { status: "ok" };
//     }
//   });

//   server.route({
//     method: "POST",
//     url: "/clean-all",
//     schema: {
//       response: {
//         200: z.object({ status: z.string() })
//       }
//     },
//     onRequest: verifyAuth([AuthMode.JWT]),
//     handler: async () => {
//       checkIfBddNockApiEnabled();
//       logger.info("Cleaning all nocks");
//       nock.cleanAll();
//       return { status: "ok" };
//     }
//   });

//   server.route({
//     method: "POST",
//     url: "/restore",
//     schema: {
//       response: {
//         200: z.object({ status: z.string() })
//       }
//     },
//     onRequest: verifyAuth([AuthMode.JWT]),
//     handler: async () => {
//       checkIfBddNockApiEnabled();
//       logger.info("Restore network requests from nock");
//       nock.restore();
//       return { status: "ok" };
//     }
//   });
// };
@@ -8,7 +8,6 @@ import { registerSecretSyncRouter, SECRET_SYNC_REGISTER_ROUTER_MAP } from "@app/

import { registerAdminRouter } from "./admin-router";
import { registerAuthRoutes } from "./auth-router";
// import { registerBddNockRouter } from "./bdd-nock-router";
import { registerProjectBotRouter } from "./bot-router";
import { registerCaRouter } from "./certificate-authority-router";
import { CERTIFICATE_AUTHORITY_REGISTER_ROUTER_MAP } from "./certificate-authority-routers";
@@ -238,10 +237,4 @@ export const registerV1Routes = async (server: FastifyZodProvider) => {

  await server.register(registerEventRouter, { prefix: "/events" });
  await server.register(registerUpgradePathRouter, { prefix: "/upgrade-path" });

  // Note: This is a special route for BDD tests. It's only available in development mode and only for BDD tests.
  // This route should NEVER BE ENABLED IN PRODUCTION!
  // if (getConfig().isBddNockApiEnabled) {
  //   await server.register(registerBddNockRouter, { prefix: "/bdd-nock" });
  // }
};
@@ -0,0 +1,22 @@
import {
  AWS_SECRETS_MANAGER_PKI_SYNC_LIST_OPTION,
  AwsSecretsManagerPkiSyncSchema,
  CreateAwsSecretsManagerPkiSyncSchema,
  UpdateAwsSecretsManagerPkiSyncSchema
} from "@app/services/pki-sync/aws-secrets-manager";
import { PkiSync } from "@app/services/pki-sync/pki-sync-enums";

import { registerSyncPkiEndpoints } from "./pki-sync-endpoints";

export const registerAwsSecretsManagerPkiSyncRouter = async (server: FastifyZodProvider) =>
  registerSyncPkiEndpoints({
    destination: PkiSync.AwsSecretsManager,
    server,
    responseSchema: AwsSecretsManagerPkiSyncSchema,
    createSchema: CreateAwsSecretsManagerPkiSyncSchema,
    updateSchema: UpdateAwsSecretsManagerPkiSyncSchema,
    syncOptions: {
      canImportCertificates: AWS_SECRETS_MANAGER_PKI_SYNC_LIST_OPTION.canImportCertificates,
      canRemoveCertificates: AWS_SECRETS_MANAGER_PKI_SYNC_LIST_OPTION.canRemoveCertificates
    }
  });
@@ -0,0 +1,17 @@
import { ChefPkiSyncSchema, CreateChefPkiSyncSchema, UpdateChefPkiSyncSchema } from "@app/services/pki-sync/chef";
import { PkiSync } from "@app/services/pki-sync/pki-sync-enums";

import { registerSyncPkiEndpoints } from "./pki-sync-endpoints";

export const registerChefPkiSyncRouter = async (server: FastifyZodProvider) =>
  registerSyncPkiEndpoints({
    destination: PkiSync.Chef,
    server,
    responseSchema: ChefPkiSyncSchema,
    createSchema: CreateChefPkiSyncSchema,
    updateSchema: UpdateChefPkiSyncSchema,
    syncOptions: {
      canImportCertificates: false,
      canRemoveCertificates: true
    }
  });
@@ -1,11 +1,15 @@
import { PkiSync } from "@app/services/pki-sync/pki-sync-enums";

import { registerAwsCertificateManagerPkiSyncRouter } from "./aws-certificate-manager-pki-sync-router";
import { registerAwsSecretsManagerPkiSyncRouter } from "./aws-secrets-manager-pki-sync-router";
import { registerAzureKeyVaultPkiSyncRouter } from "./azure-key-vault-pki-sync-router";
import { registerChefPkiSyncRouter } from "./chef-pki-sync-router";

export * from "./pki-sync-router";

export const PKI_SYNC_REGISTER_ROUTER_MAP: Record<PkiSync, (server: FastifyZodProvider) => Promise<void>> = {
  [PkiSync.AzureKeyVault]: registerAzureKeyVaultPkiSyncRouter,
  [PkiSync.AwsCertificateManager]: registerAwsCertificateManagerPkiSyncRouter
  [PkiSync.AwsCertificateManager]: registerAwsCertificateManagerPkiSyncRouter,
  [PkiSync.AwsSecretsManager]: registerAwsSecretsManagerPkiSyncRouter,
  [PkiSync.Chef]: registerChefPkiSyncRouter
};
@@ -23,6 +23,8 @@ import { mapEnumsForValidation } from "@app/services/certificate-common/certific
import { EnrollmentType } from "@app/services/certificate-profile/certificate-profile-types";
import { validateTemplateRegexField } from "@app/services/certificate-template/certificate-template-validators";

import { booleanSchema } from "../sanitizedSchemas";

interface CertificateRequestForService {
  commonName?: string;
  keyUsages?: CertKeyUsageType[];
@@ -87,7 +89,8 @@ export const registerCertificatesRouter = async (server: FastifyZodProvider) =>
            )
            .optional(),
          signatureAlgorithm: z.nativeEnum(CertSignatureAlgorithm),
          keyAlgorithm: z.nativeEnum(CertKeyAlgorithm)
          keyAlgorithm: z.nativeEnum(CertKeyAlgorithm),
          removeRootsFromChain: booleanSchema.default(false).optional()
        })
        .refine(validateTtlAndDateFields, {
          message:
@@ -131,7 +134,8 @@ export const registerCertificatesRouter = async (server: FastifyZodProvider) =>
        actorAuthMethod: req.permission.authMethod,
        actorOrgId: req.permission.orgId,
        profileId: req.body.profileId,
        certificateRequest: mappedCertificateRequest
        certificateRequest: mappedCertificateRequest,
        removeRootsFromChain: req.body.removeRootsFromChain
      });

      await server.services.auditLog.createAuditLog({
@@ -171,7 +175,8 @@ export const registerCertificatesRouter = async (server: FastifyZodProvider) =>
            .min(1, "TTL cannot be empty")
            .refine((val) => ms(val) > 0, "TTL must be a positive number"),
          notBefore: validateCaDateField.optional(),
          notAfter: validateCaDateField.optional()
          notAfter: validateCaDateField.optional(),
          removeRootsFromChain: booleanSchema.default(false).optional()
        })
        .refine(validateTtlAndDateFields, {
          message:
@@ -206,7 +211,8 @@ export const registerCertificatesRouter = async (server: FastifyZodProvider) =>
        },
        notBefore: req.body.notBefore ? new Date(req.body.notBefore) : undefined,
        notAfter: req.body.notAfter ? new Date(req.body.notAfter) : undefined,
        enrollmentType: EnrollmentType.API
        enrollmentType: EnrollmentType.API,
        removeRootsFromChain: req.body.removeRootsFromChain
      });

      await server.services.auditLog.createAuditLog({
@@ -262,7 +268,8 @@ export const registerCertificatesRouter = async (server: FastifyZodProvider) =>
          notAfter: validateCaDateField.optional(),
          commonName: validateTemplateRegexField.optional(),
          signatureAlgorithm: z.nativeEnum(CertSignatureAlgorithm),
          keyAlgorithm: z.nativeEnum(CertKeyAlgorithm)
          keyAlgorithm: z.nativeEnum(CertKeyAlgorithm),
          removeRootsFromChain: booleanSchema.default(false).optional()
        })
        .refine(validateTtlAndDateFields, {
          message:
@@ -325,7 +332,8 @@ export const registerCertificatesRouter = async (server: FastifyZodProvider) =>
          notAfter: req.body.notAfter ? new Date(req.body.notAfter) : undefined,
          signatureAlgorithm: req.body.signatureAlgorithm,
          keyAlgorithm: req.body.keyAlgorithm
        }
        },
        removeRootsFromChain: req.body.removeRootsFromChain
      });

      await server.services.auditLog.createAuditLog({
@@ -357,6 +365,11 @@ export const registerCertificatesRouter = async (server: FastifyZodProvider) =>
        params: z.object({
          certificateId: z.string().uuid()
        }),
        body: z
          .object({
            removeRootsFromChain: booleanSchema.default(false).optional()
          })
          .optional(),
        response: {
          200: z.object({
            certificate: z.string().trim(),
@@ -375,7 +388,8 @@ export const registerCertificatesRouter = async (server: FastifyZodProvider) =>
        actorId: req.permission.id,
        actorAuthMethod: req.permission.authMethod,
        actorOrgId: req.permission.orgId,
        certificateId: req.params.certificateId
        certificateId: req.params.certificateId,
        removeRootsFromChain: req.body?.removeRootsFromChain
      });

      await server.services.auditLog.createAuditLog({
@@ -170,7 +170,8 @@ const PKI_APP_CONNECTIONS = [
  AppConnection.AWS,
  AppConnection.Cloudflare,
  AppConnection.AzureADCS,
  AppConnection.AzureKeyVault
  AppConnection.AzureKeyVault,
  AppConnection.Chef
];

export const listAppConnectionOptions = (projectType?: ProjectType) => {
@@ -196,3 +196,62 @@ export const convertExtendedKeyUsageArrayToLegacy = (
): CertExtendedKeyUsage[] | undefined => {
  return usages?.map(convertToLegacyExtendedKeyUsage);
};

/**
 * Parses a PEM-formatted certificate chain and returns individual certificates
 * @param certificateChain - PEM-formatted certificate chain
 * @returns Array of individual PEM certificates
 */
const parseCertificateChain = (certificateChain: string): string[] => {
  if (!certificateChain || typeof certificateChain !== "string") {
    return [];
  }

  const certRegex = new RE2(/-----BEGIN CERTIFICATE-----[\s\S]*?-----END CERTIFICATE-----/g);
  const certificates = certificateChain.match(certRegex);

  return certificates ? certificates.map((cert) => cert.trim()) : [];
};

/**
 * Removes the root CA certificate from a certificate chain, leaving only intermediate certificates.
 * If the chain contains only the root CA certificate, returns an empty string.
 *
 * @param certificateChain - PEM-formatted certificate chain containing leaf + intermediates + root CA
 * @returns PEM-formatted certificate chain with only intermediate certificates (no root CA)
 */
export const removeRootCaFromChain = (certificateChain?: string): string => {
  if (!certificateChain || typeof certificateChain !== "string") {
    return "";
  }

  const certificates = parseCertificateChain(certificateChain);

  if (certificates.length === 0) {
    return "";
  }

  const intermediateCerts = certificates.slice(0, -1);

  return intermediateCerts.join("\n");
};

/**
 * Extracts the root CA certificate from a certificate chain.
 *
 * @param certificateChain - PEM-formatted certificate chain containing leaf + intermediates + root CA
 * @returns PEM-formatted root CA certificate, or empty string if not found
 */
export const extractRootCaFromChain = (certificateChain?: string): string => {
  if (!certificateChain || typeof certificateChain !== "string") {
    return "";
  }

  const certificates = parseCertificateChain(certificateChain);

  if (certificates.length === 0) {
    return "";
  }

  return certificates[certificates.length - 1];
};
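A quick usage sketch of the two helpers above (PEM bodies elided): given a leaf-first, root-last chain, removeRootCaFromChain drops the final certificate and extractRootCaFromChain returns it:

import { extractRootCaFromChain, removeRootCaFromChain } from "../certificate-common/certificate-utils";

// Placeholder PEM blocks; real certificates would carry full base64 bodies.
const leaf = "-----BEGIN CERTIFICATE-----\nMIIB...leaf\n-----END CERTIFICATE-----";
const intermediate = "-----BEGIN CERTIFICATE-----\nMIIB...int\n-----END CERTIFICATE-----";
const root = "-----BEGIN CERTIFICATE-----\nMIIB...root\n-----END CERTIFICATE-----";

const chain = [leaf, intermediate, root].join("\n");

removeRootCaFromChain(chain); // leaf + intermediate only
extractRootCaFromChain(chain); // the root certificate
removeRootCaFromChain(root); // "" (a root-only chain yields an empty string)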
@@ -428,7 +428,13 @@ describe("CertificateProfileService", () => {
        service.createProfile({
          ...mockActor,
          projectId: "project-123",
          data: validProfileData
          data: {
            ...validProfileData,
            enrollmentType: EnrollmentType.ACME,
            acmeConfig: {},
            apiConfig: undefined,
            estConfig: undefined
          }
        })
      ).rejects.toThrowError(
        new BadRequestError({
@@ -47,7 +47,8 @@ import {
  convertKeyUsageArrayFromLegacy,
  convertKeyUsageArrayToLegacy,
  mapEnumsForValidation,
  normalizeDateForApi
  normalizeDateForApi,
  removeRootCaFromChain
} from "../certificate-common/certificate-utils";
import { TCertificateSyncDALFactory } from "../certificate-sync/certificate-sync-dal";
import { TPkiSyncDALFactory } from "../pki-sync/pki-sync-dal";
@@ -366,7 +367,8 @@ export const certificateV3ServiceFactory = ({
    actor,
    actorId,
    actorAuthMethod,
    actorOrgId
    actorOrgId,
    removeRootsFromChain
  }: TIssueCertificateFromProfileDTO): Promise<TCertificateFromProfileResponse> => {
    const profile = await validateProfileAndPermissions(
      profileId,
@@ -480,10 +482,15 @@
      renewBeforeDays: finalRenewBeforeDays
    });

    let finalCertificateChain = bufferToString(certificateChain);
    if (removeRootsFromChain) {
      finalCertificateChain = removeRootCaFromChain(finalCertificateChain);
    }

    return {
      certificate: bufferToString(certificate),
      issuingCaCertificate: bufferToString(issuingCaCertificate),
      certificateChain: bufferToString(certificateChain),
      certificateChain: finalCertificateChain,
      privateKey: bufferToString(privateKey),
      serialNumber,
      certificateId: cert.id,
@@ -503,7 +510,8 @@ export const certificateV3ServiceFactory = ({
    actorId,
    actorAuthMethod,
    actorOrgId,
    enrollmentType
    enrollmentType,
    removeRootsFromChain
  }: TSignCertificateFromProfileDTO): Promise<Omit<TCertificateFromProfileResponse, "privateKey">> => {
    const profile = await validateProfileAndPermissions(
      profileId,
@@ -590,7 +598,10 @@ export const certificateV3ServiceFactory = ({
    });

    const certificateString = extractCertificateFromBuffer(certificate as unknown as Buffer);
    const certificateChainString = extractCertificateFromBuffer(certificateChain as unknown as Buffer);
    let certificateChainString = extractCertificateFromBuffer(certificateChain as unknown as Buffer);
    if (removeRootsFromChain) {
      certificateChainString = removeRootCaFromChain(certificateChainString);
    }

    return {
      certificate: certificateString,
@@ -610,7 +621,8 @@ export const certificateV3ServiceFactory = ({
    actor,
    actorId,
    actorAuthMethod,
    actorOrgId
    actorOrgId,
    removeRootsFromChain
  }: TOrderCertificateFromProfileDTO): Promise<TCertificateOrderResponse> => {
    const profile = await validateProfileAndPermissions(
      profileId,
@@ -665,7 +677,8 @@ export const certificateV3ServiceFactory = ({
      actor,
      actorId,
      actorAuthMethod,
      actorOrgId
      actorOrgId,
      removeRootsFromChain
    });

    const orderId = randomUUID();
@@ -703,7 +716,8 @@ export const certificateV3ServiceFactory = ({
    actorId,
    actorAuthMethod,
    actorOrgId,
    internal = false
    internal = false,
    removeRootsFromChain
  }: TRenewCertificateDTO & { internal?: boolean }): Promise<TCertificateFromProfileResponse> => {
    const renewalResult = await certificateDAL.transaction(async (tx) => {
      const originalCert = await certificateDAL.findById(certificateId, tx);
@@ -929,10 +943,14 @@ export const certificateV3ServiceFactory = ({
      pkiSyncQueue
    });

    let finalCertificateChain = renewalResult.certificateChain;
    if (removeRootsFromChain) {
      finalCertificateChain = removeRootCaFromChain(finalCertificateChain);
    }
    return {
      certificate: renewalResult.certificate,
      issuingCaCertificate: renewalResult.issuingCaCertificate,
      certificateChain: renewalResult.certificateChain,
      certificateChain: finalCertificateChain,
      serialNumber: renewalResult.serialNumber,
      certificateId: renewalResult.newCert.id,
      projectId: renewalResult.profile.projectId,
@@ -26,6 +26,7 @@ export type TIssueCertificateFromProfileDTO = {
    signatureAlgorithm?: string;
    keyAlgorithm?: string;
  };
  removeRootsFromChain?: boolean;
} & Omit<TProjectPermission, "projectId">;

export type TSignCertificateFromProfileDTO = {
@@ -37,6 +38,7 @@ export type TSignCertificateFromProfileDTO = {
  notBefore?: Date;
  notAfter?: Date;
  enrollmentType: EnrollmentType;
  removeRootsFromChain?: boolean;
} & Omit<TProjectPermission, "projectId">;

export type TOrderCertificateFromProfileDTO = {
@@ -57,6 +59,7 @@ export type TOrderCertificateFromProfileDTO = {
    signatureAlgorithm?: string;
    keyAlgorithm?: string;
  };
  removeRootsFromChain?: boolean;
} & Omit<TProjectPermission, "projectId">;

export type TCertificateFromProfileResponse = {
@@ -101,6 +104,7 @@ export type TCertificateOrderResponse = {

export type TRenewCertificateDTO = {
  certificateId: string;
  removeRootsFromChain?: boolean;
} & Omit<TProjectPermission, "projectId">;

export type TUpdateRenewalConfigDTO = {
@@ -1,7 +1,8 @@
import crypto from "crypto";

import { getLicenseKeyConfig } from "@app/ee/services/license/license-fns";
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { getConfig } from "@app/lib/config/env";
import { LicenseType } from "@app/ee/services/license/license-types";
import { BadRequestError } from "@app/lib/errors";

import { TOfflineUsageReportDALFactory } from "./offline-usage-report-dal";
@@ -30,10 +31,13 @@ export const offlineUsageReportServiceFactory = ({
  };

  const generateUsageReportCSV = async () => {
    const cfg = getConfig();
    if (!cfg.LICENSE_KEY_OFFLINE) {
    const licenseKeyConfig = getLicenseKeyConfig();
    const hasOfflineLicense = licenseKeyConfig.isValid && licenseKeyConfig.type === LicenseType.Offline;

    if (!hasOfflineLicense) {
      throw new BadRequestError({
        message: "Offline usage reports are not enabled. LICENSE_KEY_OFFLINE must be configured."
        message:
          "Offline usage reports are not enabled. Usage reports are only available for self-hosted offline instances"
      });
    }
@@ -14,6 +14,7 @@ export const AwsCertificateManagerPkiSyncConfigSchema = z.object({
const AwsCertificateManagerPkiSyncOptionsSchema = z.object({
  canImportCertificates: z.boolean().default(false),
  canRemoveCertificates: z.boolean().default(true),
  includeRootCa: z.boolean().default(false),
  preserveArn: z.boolean().default(true),
  certificateNameSchema: z
    .string()
@@ -0,0 +1,71 @@
import RE2 from "re2";

import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import { PkiSync } from "@app/services/pki-sync/pki-sync-enums";

/**
 * AWS Secrets Manager naming constraints for secrets
 */
export const AWS_SECRETS_MANAGER_PKI_SYNC_CERTIFICATE_NAMING = {
  /**
   * Regular expression pattern for valid AWS Secrets Manager secret names
   * Must contain only alphanumeric characters, hyphens, and underscores
   * Must be 1-512 characters long
   */
  NAME_PATTERN: new RE2("^[\\w-]+$"),

  /**
   * String of characters that are forbidden in AWS Secrets Manager secret names
   */
  FORBIDDEN_CHARACTERS: " @#$%^&*()+=[]{}|;':\"<>?,./",

  /**
   * Minimum length for secret names in AWS Secrets Manager
   */
  MIN_LENGTH: 1,

  /**
   * Maximum length for secret names in AWS Secrets Manager
   */
  MAX_LENGTH: 512,

  /**
   * String representation of the allowed character pattern (for UI display)
   */
  ALLOWED_CHARACTER_PATTERN: "^[\\w-]+$"
} as const;

export const AWS_SECRETS_MANAGER_PKI_SYNC_DEFAULTS = {
  INFISICAL_PREFIX: "infisical-",
  DEFAULT_ENVIRONMENT: "production",
  DEFAULT_CERTIFICATE_NAME_SCHEMA: "infisical-{{certificateId}}",
  DEFAULT_FIELD_MAPPINGS: {
    certificate: "certificate",
    privateKey: "private_key",
    certificateChain: "certificate_chain",
    caCertificate: "ca_certificate"
  }
};

export const AWS_SECRETS_MANAGER_PKI_SYNC_OPTIONS = {
  DEFAULT_CAN_REMOVE_CERTIFICATES: true,
  DEFAULT_PRESERVE_SECRET_ON_RENEWAL: true,
  DEFAULT_UPDATE_EXISTING_CERTIFICATES: true,
  DEFAULT_CAN_IMPORT_CERTIFICATES: false
};

/**
 * AWS Secrets Manager PKI Sync list option configuration
 */
export const AWS_SECRETS_MANAGER_PKI_SYNC_LIST_OPTION = {
  name: "AWS Secrets Manager" as const,
  connection: AppConnection.AWS,
  destination: PkiSync.AwsSecretsManager,
  canImportCertificates: false,
  canRemoveCertificates: true,
  defaultCertificateNameSchema: "infisical-{{certificateId}}",
  forbiddenCharacters: AWS_SECRETS_MANAGER_PKI_SYNC_CERTIFICATE_NAMING.FORBIDDEN_CHARACTERS,
  allowedCharacterPattern: AWS_SECRETS_MANAGER_PKI_SYNC_CERTIFICATE_NAMING.ALLOWED_CHARACTER_PATTERN,
  maxCertificateNameLength: AWS_SECRETS_MANAGER_PKI_SYNC_CERTIFICATE_NAMING.MAX_LENGTH,
  minCertificateNameLength: AWS_SECRETS_MANAGER_PKI_SYNC_CERTIFICATE_NAMING.MIN_LENGTH
} as const;
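As an illustration (the helper below is not code from this commit), the naming constants compose into a simple validity check for a candidate secret name:

import { AWS_SECRETS_MANAGER_PKI_SYNC_CERTIFICATE_NAMING } from "./aws-secrets-manager-pki-sync-constants";

// Hypothetical helper combining the pattern and length constraints above.
const isValidSecretName = (name: string): boolean =>
  name.length >= AWS_SECRETS_MANAGER_PKI_SYNC_CERTIFICATE_NAMING.MIN_LENGTH &&
  name.length <= AWS_SECRETS_MANAGER_PKI_SYNC_CERTIFICATE_NAMING.MAX_LENGTH &&
  AWS_SECRETS_MANAGER_PKI_SYNC_CERTIFICATE_NAMING.NAME_PATTERN.test(name);

isValidSecretName("infisical-3f2a1b9c"); // true: letters, digits, and hyphens only
isValidSecretName("infisical/cert#1"); // false: '/' and '#' are forbidden characters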
@@ -0,0 +1,555 @@
/* eslint-disable no-continue */
/* eslint-disable no-await-in-loop */
import {
  CreateSecretCommand,
  DeleteSecretCommand,
  ListSecretsCommand,
  SecretsManagerClient,
  UpdateSecretCommand
} from "@aws-sdk/client-secrets-manager";
import RE2 from "re2";

import { TCertificateSyncs } from "@app/db/schemas";
import { CustomAWSHasher } from "@app/lib/aws/hashing";
import { crypto } from "@app/lib/crypto";
import { logger } from "@app/lib/logger";
import { AWSRegion } from "@app/services/app-connection/app-connection-enums";
import { getAwsConnectionConfig } from "@app/services/app-connection/aws/aws-connection-fns";
import { TAwsConnectionConfig } from "@app/services/app-connection/aws/aws-connection-types";
import { TCertificateDALFactory } from "@app/services/certificate/certificate-dal";
import { TCertificateSyncDALFactory } from "@app/services/certificate-sync/certificate-sync-dal";
import { CertificateSyncStatus } from "@app/services/certificate-sync/certificate-sync-enums";
import { createConnectionQueue, RateLimitConfig } from "@app/services/connection-queue";
import { matchesCertificateNameSchema } from "@app/services/pki-sync/pki-sync-fns";
import { TCertificateMap, TPkiSyncWithCredentials } from "@app/services/pki-sync/pki-sync-types";

import { AWS_SECRETS_MANAGER_PKI_SYNC_DEFAULTS } from "./aws-secrets-manager-pki-sync-constants";
import {
  AwsSecretsManagerCertificateSecret,
  SyncCertificatesResult,
  TAwsSecretsManagerPkiSyncWithCredentials
} from "./aws-secrets-manager-pki-sync-types";

const AWS_SECRETS_MANAGER_RATE_LIMIT_CONFIG: RateLimitConfig = {
  MAX_CONCURRENT_REQUESTS: 10,
  BASE_DELAY: 1000,
  MAX_DELAY: 30000,
  MAX_RETRIES: 3,
  RATE_LIMIT_STATUS_CODES: [429, 503]
};

const awsSecretsManagerConnectionQueue = createConnectionQueue(AWS_SECRETS_MANAGER_RATE_LIMIT_CONFIG);
const { withRateLimitRetry } = awsSecretsManagerConnectionQueue;

const MAX_RETRIES = 10;

const sleep = async () =>
  new Promise((resolve) => {
    setTimeout(resolve, 1000);
  });

const isInfisicalManagedCertificate = (secretName: string, pkiSync: TPkiSyncWithCredentials): boolean => {
  const syncOptions = pkiSync.syncOptions as { certificateNameSchema?: string } | undefined;
  const certificateNameSchema = syncOptions?.certificateNameSchema;

  if (certificateNameSchema) {
    const environment = AWS_SECRETS_MANAGER_PKI_SYNC_DEFAULTS.DEFAULT_ENVIRONMENT;
    return matchesCertificateNameSchema(secretName, environment, certificateNameSchema);
  }

  return secretName.startsWith(AWS_SECRETS_MANAGER_PKI_SYNC_DEFAULTS.INFISICAL_PREFIX);
};

const parseErrorMessage = (error: unknown): string => {
  if (error instanceof Error) {
    return error.message;
  }

  if (typeof error === "string") {
    return error;
  }

  if (error && typeof error === "object" && "message" in error) {
    const { message } = error as { message: unknown };
    if (typeof message === "string") {
      return message;
    }
  }

  return "Unknown error occurred";
};

const getSecretsManagerClient = async (pkiSync: TAwsSecretsManagerPkiSyncWithCredentials) => {
  const { destinationConfig, connection } = pkiSync;

  const config = await getAwsConnectionConfig(
    connection as TAwsConnectionConfig,
    destinationConfig.region as AWSRegion
  );

  if (!config.credentials) {
    throw new Error("AWS credentials not found in connection configuration");
  }

  const secretsManagerClient = new SecretsManagerClient({
    region: config.region,
    useFipsEndpoint: crypto.isFipsModeEnabled(),
    sha256: CustomAWSHasher,
    credentials: config.credentials
  });

  return secretsManagerClient;
};

type TAwsSecretsManagerPkiSyncFactoryDeps = {
  certificateDAL: Pick<TCertificateDALFactory, "findById">;
  certificateSyncDAL: Pick<
    TCertificateSyncDALFactory,
    | "removeCertificates"
    | "addCertificates"
    | "findByPkiSyncAndCertificate"
    | "updateById"
    | "findByPkiSyncId"
    | "updateSyncStatus"
  >;
};

export const awsSecretsManagerPkiSyncFactory = ({
  certificateDAL,
  certificateSyncDAL
}: TAwsSecretsManagerPkiSyncFactoryDeps) => {
  const $getSecretsManagerSecrets = async (
    pkiSync: TAwsSecretsManagerPkiSyncWithCredentials,
    syncId = "unknown"
  ): Promise<Record<string, string>> => {
    const client = await getSecretsManagerClient(pkiSync);
    const secrets: Record<string, string> = {};
    let hasNext = true;
    let nextToken: string | undefined;
    let attempt = 0;

    while (hasNext) {
      try {
        const currentToken = nextToken;
        const output = await withRateLimitRetry(
          () => client.send(new ListSecretsCommand({ NextToken: currentToken })),
          {
            operation: "list-secrets-manager-secrets",
            syncId
          }
        );

        attempt = 0;

        if (output.SecretList) {
          output.SecretList.forEach((secretEntry) => {
            if (
              secretEntry.Name &&
              isInfisicalManagedCertificate(secretEntry.Name, pkiSync as unknown as TPkiSyncWithCredentials)
            ) {
              secrets[secretEntry.Name] = secretEntry.ARN || secretEntry.Name;
            }
          });
        }

        hasNext = Boolean(output.NextToken);
        nextToken = output.NextToken;
      } catch (e) {
        if (
          e &&
          typeof e === "object" &&
          "name" in e &&
          (e as { name: string }).name === "ThrottlingException" &&
          attempt < MAX_RETRIES
        ) {
          attempt += 1;
          await sleep();
          continue;
        }
        throw e;
      }
    }

    return secrets;
  };

  const syncCertificates = async (
    pkiSync: TPkiSyncWithCredentials,
    certificateMap: TCertificateMap
  ): Promise<SyncCertificatesResult> => {
    const awsPkiSync = pkiSync as unknown as TAwsSecretsManagerPkiSyncWithCredentials;
    const client = await getSecretsManagerClient(awsPkiSync);

    const existingSecrets = await $getSecretsManagerSecrets(awsPkiSync, pkiSync.id);

    const existingSyncRecords = await certificateSyncDAL.findByPkiSyncId(pkiSync.id);
    const syncRecordsByCertId = new Map<string, TCertificateSyncs>();
    const syncRecordsByExternalId = new Map<string, TCertificateSyncs>();

    existingSyncRecords.forEach((record: TCertificateSyncs) => {
      if (record.certificateId) {
        syncRecordsByCertId.set(record.certificateId, record);
      }
      if (record.externalIdentifier) {
        syncRecordsByExternalId.set(record.externalIdentifier, record);
      }
    });

    type CertificateUploadData = {
      secretName: string;
      certificateData: AwsSecretsManagerCertificateSecret;
      certificateId: string;
      isUpdate: boolean;
      targetSecretName: string;
      oldCertificateIdToRemove?: string;
    };

    const setCertificates: CertificateUploadData[] = [];
    const validationErrors: Array<{ name: string; error: string }> = [];

    const syncOptions = pkiSync.syncOptions as
      | {
          canRemoveCertificates?: boolean;
          preserveSecretOnRenewal?: boolean;
          fieldMappings?: {
            certificate?: string;
            privateKey?: string;
            certificateChain?: string;
            caCertificate?: string;
          };
          certificateNameSchema?: string;
        }
      | undefined;

    const canRemoveCertificates = syncOptions?.canRemoveCertificates ?? true;
    const preserveSecretOnRenewal = syncOptions?.preserveSecretOnRenewal ?? true;

    const fieldMappings = {
      certificate: syncOptions?.fieldMappings?.certificate ?? "certificate",
      privateKey: syncOptions?.fieldMappings?.privateKey ?? "private_key",
      certificateChain: syncOptions?.fieldMappings?.certificateChain ?? "certificate_chain",
      caCertificate: syncOptions?.fieldMappings?.caCertificate ?? "ca_certificate"
    };

    const activeExternalIdentifiers = new Set<string>();

    for (const [certName, certData] of Object.entries(certificateMap)) {
      const { cert, privateKey: certPrivateKey, certificateChain, caCertificate, certificateId } = certData;

      if (!cert || cert.trim().length === 0) {
        validationErrors.push({
          name: certName,
          error: "Certificate content is empty or missing"
        });
        continue;
      }

      if (!certPrivateKey || certPrivateKey.trim().length === 0) {
        validationErrors.push({
          name: certName,
          error: "Private key content is empty or missing"
        });
        continue;
      }

      if (!certificateId || typeof certificateId !== "string") {
        continue;
      }

      const certificateData: AwsSecretsManagerCertificateSecret = {
        [fieldMappings.certificate]: cert,
        [fieldMappings.privateKey]: certPrivateKey
      };

      if (certificateChain && certificateChain.trim().length > 0) {
        certificateData[fieldMappings.certificateChain] = certificateChain;
      }

      if (caCertificate && typeof caCertificate === "string" && caCertificate.trim().length > 0) {
        certificateData[fieldMappings.caCertificate] = caCertificate;
      }

      let targetSecretName = certName;
      if (syncOptions?.certificateNameSchema) {
        const extendedCertData = certData as Record<string, unknown>;
        const safeCommonName = typeof extendedCertData.commonName === "string" ? extendedCertData.commonName : "";

        targetSecretName = syncOptions.certificateNameSchema
          .replace(new RE2("\\{\\{certificateId\\}\\}", "g"), certificateId)
          .replace(new RE2("\\{\\{commonName\\}\\}", "g"), safeCommonName);
      } else {
        targetSecretName = `${AWS_SECRETS_MANAGER_PKI_SYNC_DEFAULTS.INFISICAL_PREFIX}${certificateId}`;
      }

      const certificate = await certificateDAL.findById(certificateId);

      if (certificate?.renewedByCertificateId) {
        continue;
      }

      const syncRecordLookupId = certificate?.renewedFromCertificateId || certificateId;
      const existingRecord = syncRecordsByCertId.get(syncRecordLookupId);

      let shouldProcess = true;
      let isUpdate = false;

      if (existingRecord?.externalIdentifier) {
        const existingSecret = existingSecrets[existingRecord.externalIdentifier];

        if (existingSecret) {
          if (certificate?.renewedFromCertificateId && preserveSecretOnRenewal) {
            targetSecretName = existingRecord.externalIdentifier;
            isUpdate = true;
          } else if (certificate?.renewedFromCertificateId && !preserveSecretOnRenewal) {
            activeExternalIdentifiers.add(existingRecord.externalIdentifier);
          } else if (!certificate?.renewedFromCertificateId) {
            activeExternalIdentifiers.add(existingRecord.externalIdentifier);
            shouldProcess = false;
          }
        }
      }

      if (!shouldProcess) {
        continue;
      }

      if (existingSecrets[targetSecretName]) {
        isUpdate = true;
      }

      activeExternalIdentifiers.add(targetSecretName);

      setCertificates.push({
        secretName: certName,
        certificateData,
        certificateId,
        isUpdate,
        targetSecretName,
        oldCertificateIdToRemove:
          certificate?.renewedFromCertificateId && preserveSecretOnRenewal
            ? certificate.renewedFromCertificateId
            : undefined
      });
    }

    const result: SyncCertificatesResult = {
      uploaded: 0,
      updated: 0,
      removed: 0,
      failedRemovals: 0,
      skipped: 0,
      details: {
        failedUploads: [],
        failedRemovals: [],
        validationErrors
      }
    };

    for (const certData of setCertificates) {
      const { secretName, certificateData, certificateId, isUpdate, targetSecretName, oldCertificateIdToRemove } =
        certData;

      try {
        const secretValue = JSON.stringify(certificateData);
        const configKeyId: unknown = awsPkiSync.destinationConfig.keyId;
        const keyId: string = typeof configKeyId === "string" ? configKeyId : "alias/aws/secretsmanager";

        if (isUpdate) {
          await withRateLimitRetry(
            () =>
              client.send(
                new UpdateSecretCommand({
                  SecretId: targetSecretName,
                  SecretString: secretValue,
                  KmsKeyId: keyId
                })
              ),
            {
              operation: "update-secret",
              syncId: pkiSync.id
            }
          );
          result.updated += 1;
        } else {
          await withRateLimitRetry(
            () =>
              client.send(
                new CreateSecretCommand({
                  Name: targetSecretName,
                  SecretString: secretValue,
                  KmsKeyId: keyId,
                  Description: `Certificate managed by Infisical`
                })
              ),
            {
              operation: "create-secret",
              syncId: pkiSync.id
            }
          );
          result.uploaded += 1;
        }

        const existingRecord = syncRecordsByCertId.get(certificateId);
        if (existingRecord?.id) {
          await certificateSyncDAL.updateById(existingRecord.id, {
            externalIdentifier: targetSecretName,
            syncStatus: CertificateSyncStatus.Succeeded,
            lastSyncedAt: new Date(),
            lastSyncMessage: "Certificate successfully synced to AWS Secrets Manager"
          });

          if (oldCertificateIdToRemove && oldCertificateIdToRemove !== certificateId) {
            await certificateSyncDAL.removeCertificates(pkiSync.id, [oldCertificateIdToRemove]);
          }
        } else {
          await certificateSyncDAL.addCertificates(pkiSync.id, [
            {
              certificateId,
              externalIdentifier: targetSecretName
            }
          ]);

          const newCertSync = await certificateSyncDAL.findByPkiSyncAndCertificate(pkiSync.id, certificateId);
          if (newCertSync?.id) {
            await certificateSyncDAL.updateById(newCertSync.id, {
              syncStatus: CertificateSyncStatus.Succeeded,
              lastSyncedAt: new Date(),
              lastSyncMessage: "Certificate successfully synced to AWS Secrets Manager"
            });
          }
        }
      } catch (error) {
        result.details?.failedUploads?.push({
          name: secretName,
          error: parseErrorMessage(error)
        });
        logger.error(
          {
            secretName,
            certificateId,
            error: parseErrorMessage(error),
            pkiSyncId: pkiSync.id
          },
          "Failed to sync certificate"
        );

        const existingRecord = syncRecordsByCertId.get(certificateId);
        if (existingRecord?.id) {
          await certificateSyncDAL.updateById(existingRecord.id, {
            syncStatus: CertificateSyncStatus.Failed,
            lastSyncMessage: parseErrorMessage(error)
          });
        }
      }
    }

    if (canRemoveCertificates) {
      for (const [secretName] of Object.entries(existingSecrets)) {
        if (!activeExternalIdentifiers.has(secretName)) {
          try {
            await withRateLimitRetry(
              () =>
                client.send(
                  new DeleteSecretCommand({
                    SecretId: secretName,
                    ForceDeleteWithoutRecovery: true
                  })
                ),
              {
                operation: "delete-secret",
                syncId: pkiSync.id
              }
            );

            result.removed += 1;
          } catch (error) {
            result.failedRemovals += 1;
            result.details?.failedRemovals?.push({
              name: secretName,
              error: parseErrorMessage(error)
            });
            logger.error(
              {
                secretName,
                error: parseErrorMessage(error),
                pkiSyncId: pkiSync.id
              },
              "Failed to remove certificate secret"
            );
          }
        }
      }
    }

    return result;
  };

  const removeCertificates = async (
    pkiSync: TPkiSyncWithCredentials,
    certificateMap: TCertificateMap
  ): Promise<{ removed: number; failed: number }> => {
    const awsPkiSync = pkiSync as unknown as TAwsSecretsManagerPkiSyncWithCredentials;
    const client = await getSecretsManagerClient(awsPkiSync);

    const existingSecrets = await $getSecretsManagerSecrets(awsPkiSync, pkiSync.id);
    const existingSyncRecords = await certificateSyncDAL.findByPkiSyncId(pkiSync.id);

    let removed = 0;
    let failed = 0;

    for (const [, certData] of Object.entries(certificateMap)) {
      if (!certData.certificateId) continue;

      const syncRecord = existingSyncRecords.find((record) => record.certificateId === certData.certificateId);
      if (!syncRecord?.externalIdentifier) continue;

      const secretName = syncRecord.externalIdentifier;

      if (existingSecrets[secretName]) {
        try {
          await withRateLimitRetry(
            () =>
              client.send(
                new DeleteSecretCommand({
                  SecretId: secretName,
                  ForceDeleteWithoutRecovery: true
                })
              ),
            {
              operation: "delete-secret",
              syncId: pkiSync.id
            }
          );

          if (syncRecord.id) {
            await certificateSyncDAL.updateById(syncRecord.id, {
              syncStatus: CertificateSyncStatus.Failed
            });
          }

          removed += 1;
        } catch (error) {
          failed += 1;
          logger.error(
            {
              secretName,
              certificateId: certData.certificateId,
              error: parseErrorMessage(error),
              pkiSyncId: pkiSync.id
            },
            "Failed to remove certificate secret"
          );
        }
      }
    }

    return { removed, failed };
  };

  return {
    syncCertificates,
    removeCertificates
  };
};

export type TAwsSecretsManagerPkiSyncFactory = ReturnType<typeof awsSecretsManagerPkiSyncFactory>;
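A hypothetical wiring sketch for the factory above; the DAL instances and the sync inputs would come from the service container and the PKI sync queue respectively:

import { logger } from "@app/lib/logger";
import { TCertificateMap, TPkiSyncWithCredentials } from "@app/services/pki-sync/pki-sync-types";

import { awsSecretsManagerPkiSyncFactory } from "./aws-secrets-manager-pki-sync-fns";

// `deps` mirrors TAwsSecretsManagerPkiSyncFactoryDeps (narrowed DAL picks) without re-exporting it.
const runAwsSecretsManagerSync = async (
  deps: Parameters<typeof awsSecretsManagerPkiSyncFactory>[0],
  pkiSync: TPkiSyncWithCredentials,
  certificateMap: TCertificateMap
) => {
  const { syncCertificates } = awsSecretsManagerPkiSyncFactory(deps);
  const result = await syncCertificates(pkiSync, certificateMap);
  logger.info(
    { uploaded: result.uploaded, updated: result.updated, removed: result.removed, failedRemovals: result.failedRemovals },
    "AWS Secrets Manager PKI sync finished"
  );
  return result;
};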
@@ -0,0 +1,104 @@
import RE2 from "re2";
import { z } from "zod";

import { AppConnection, AWSRegion } from "@app/services/app-connection/app-connection-enums";
import { PkiSync } from "@app/services/pki-sync/pki-sync-enums";
import { PkiSyncSchema } from "@app/services/pki-sync/pki-sync-schemas";

import { AWS_SECRETS_MANAGER_PKI_SYNC_CERTIFICATE_NAMING } from "./aws-secrets-manager-pki-sync-constants";

export const AwsSecretsManagerPkiSyncConfigSchema = z.object({
  region: z.nativeEnum(AWSRegion),
  keyId: z.string().trim().optional()
});

export const AwsSecretsManagerFieldMappingsSchema = z.object({
  certificate: z.string().min(1, "Certificate field name is required").default("certificate"),
  privateKey: z.string().min(1, "Private key field name is required").default("private_key"),
  certificateChain: z.string().min(1, "Certificate chain field name is required").default("certificate_chain"),
  caCertificate: z.string().min(1, "CA certificate field name is required").default("ca_certificate")
});

const AwsSecretsManagerPkiSyncOptionsSchema = z.object({
  canImportCertificates: z.boolean().default(false),
  canRemoveCertificates: z.boolean().default(true),
  includeRootCa: z.boolean().default(false),
  preserveSecretOnRenewal: z.boolean().default(true),
  updateExistingCertificates: z.boolean().default(true),
  certificateNameSchema: z
    .string()
    .optional()
    .refine(
      (schema) => {
        if (!schema) return true;

        if (!schema.includes("{{certificateId}}")) {
          return false;
        }

        const testName = schema
          .replace(new RE2("\\{\\{certificateId\\}\\}", "g"), "test-cert-id")
          .replace(new RE2("\\{\\{profileId\\}\\}", "g"), "test-profile-id")
          .replace(new RE2("\\{\\{commonName\\}\\}", "g"), "test-common-name")
          .replace(new RE2("\\{\\{friendlyName\\}\\}", "g"), "test-friendly-name")
          .replace(new RE2("\\{\\{environment\\}\\}", "g"), "test-env");

        const hasForbiddenChars = AWS_SECRETS_MANAGER_PKI_SYNC_CERTIFICATE_NAMING.FORBIDDEN_CHARACTERS.split("").some(
          (char) => testName.includes(char)
        );

        return (
          AWS_SECRETS_MANAGER_PKI_SYNC_CERTIFICATE_NAMING.NAME_PATTERN.test(testName) &&
          !hasForbiddenChars &&
          testName.length >= AWS_SECRETS_MANAGER_PKI_SYNC_CERTIFICATE_NAMING.MIN_LENGTH &&
          testName.length <= AWS_SECRETS_MANAGER_PKI_SYNC_CERTIFICATE_NAMING.MAX_LENGTH
        );
      },
      {
        message:
          "Certificate name schema must include {{certificateId}} placeholder and result in names that contain only alphanumeric characters, underscores, and hyphens and be 1-512 characters long for AWS Secrets Manager."
      }
    ),
  fieldMappings: AwsSecretsManagerFieldMappingsSchema.optional().default({
    certificate: "certificate",
    privateKey: "private_key",
    certificateChain: "certificate_chain",
    caCertificate: "ca_certificate"
  })
});

export const AwsSecretsManagerPkiSyncSchema = PkiSyncSchema.extend({
  destination: z.literal(PkiSync.AwsSecretsManager),
  destinationConfig: AwsSecretsManagerPkiSyncConfigSchema,
  syncOptions: AwsSecretsManagerPkiSyncOptionsSchema
});

export const CreateAwsSecretsManagerPkiSyncSchema = z.object({
  name: z.string().trim().min(1).max(64),
  description: z.string().optional(),
  isAutoSyncEnabled: z.boolean().default(true),
  destinationConfig: AwsSecretsManagerPkiSyncConfigSchema,
  syncOptions: AwsSecretsManagerPkiSyncOptionsSchema.optional().default({}),
  subscriberId: z.string().nullish(),
  connectionId: z.string(),
  projectId: z.string().trim().min(1),
  certificateIds: z.array(z.string().uuid()).optional()
});

export const UpdateAwsSecretsManagerPkiSyncSchema = z.object({
  name: z.string().trim().min(1).max(64).optional(),
  description: z.string().optional(),
  isAutoSyncEnabled: z.boolean().optional(),
  destinationConfig: AwsSecretsManagerPkiSyncConfigSchema.optional(),
  syncOptions: AwsSecretsManagerPkiSyncOptionsSchema.optional(),
  subscriberId: z.string().nullish(),
  connectionId: z.string().optional()
});

export const AwsSecretsManagerPkiSyncListItemSchema = z.object({
  name: z.literal("AWS Secrets Manager"),
  connection: z.literal(AppConnection.AWS),
  destination: z.literal(PkiSync.AwsSecretsManager),
  canImportCertificates: z.literal(false),
  canRemoveCertificates: z.literal(true)
});
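As a hedged example of the create schema in use (IDs are placeholders, and the AWSRegion enum value is assumed to be the plain region string):

import { AWSRegion } from "@app/services/app-connection/app-connection-enums";

import { CreateAwsSecretsManagerPkiSyncSchema } from "./aws-secrets-manager-pki-sync-schemas";

const parsed = CreateAwsSecretsManagerPkiSyncSchema.safeParse({
  name: "prod-certs",
  connectionId: "connection-id-placeholder",
  projectId: "project-id-placeholder",
  // The exact enum member name is assumed; its value is taken to be the AWS region string.
  destinationConfig: { region: "us-east-1" as AWSRegion },
  syncOptions: { certificateNameSchema: "infisical-{{certificateId}}" }
});

if (!parsed.success) {
  // The refine() above reports name schemas that would violate AWS Secrets Manager constraints.
  console.error(parsed.error.flatten());
}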
@@ -0,0 +1,59 @@
import { z } from "zod";

import { TAwsConnection } from "@app/services/app-connection/aws/aws-connection-types";

import {
  AwsSecretsManagerFieldMappingsSchema,
  AwsSecretsManagerPkiSyncConfigSchema,
  AwsSecretsManagerPkiSyncSchema,
  CreateAwsSecretsManagerPkiSyncSchema,
  UpdateAwsSecretsManagerPkiSyncSchema
} from "./aws-secrets-manager-pki-sync-schemas";

export type TAwsSecretsManagerPkiSyncConfig = z.infer<typeof AwsSecretsManagerPkiSyncConfigSchema>;

export type TAwsSecretsManagerFieldMappings = z.infer<typeof AwsSecretsManagerFieldMappingsSchema>;

export type TAwsSecretsManagerPkiSync = z.infer<typeof AwsSecretsManagerPkiSyncSchema>;

export type TAwsSecretsManagerPkiSyncInput = z.infer<typeof CreateAwsSecretsManagerPkiSyncSchema>;

export type TAwsSecretsManagerPkiSyncUpdate = z.infer<typeof UpdateAwsSecretsManagerPkiSyncSchema>;

export type TAwsSecretsManagerPkiSyncWithCredentials = TAwsSecretsManagerPkiSync & {
  connection: TAwsConnection;
  appConnectionName: string;
  appConnectionApp: string;
};

export interface AwsSecretsManagerCertificateSecret {
  [key: string]: string;
}

export interface SyncCertificatesResult {
  uploaded: number;
  updated: number;
  removed: number;
  failedRemovals: number;
  skipped: number;
  details?: {
    failedUploads?: Array<{ name: string; error: string }>;
    failedRemovals?: Array<{ name: string; error: string }>;
    validationErrors?: Array<{ name: string; error: string }>;
  };
}

export interface RemoveCertificatesResult {
  removed: number;
  failed: number;
  skipped: number;
}

export interface CertificateImportRequest {
  name: string;
  certificate: string;
  privateKey: string;
  certificateChain?: string;
  caCertificate?: string;
  certificateId?: string;
}
@@ -0,0 +1,4 @@
export * from "./aws-secrets-manager-pki-sync-constants";
export * from "./aws-secrets-manager-pki-sync-fns";
export * from "./aws-secrets-manager-pki-sync-schemas";
export * from "./aws-secrets-manager-pki-sync-types";
@@ -14,6 +14,7 @@ export const AzureKeyVaultPkiSyncConfigSchema = z.object({
const AzureKeyVaultPkiSyncOptionsSchema = z.object({
  canImportCertificates: z.boolean().default(false),
  canRemoveCertificates: z.boolean().default(true),
  includeRootCa: z.boolean().default(false),
  enableVersioning: z.boolean().default(true),
  certificateNameSchema: z
    .string()
@@ -0,0 +1,23 @@
|
||||
import RE2 from "re2";
|
||||
|
||||
export const CHEF_PKI_SYNC_CERTIFICATE_NAMING = {
|
||||
NAME_PATTERN: new RE2("^[a-zA-Z0-9_-]+$"),
|
||||
FORBIDDEN_CHARACTERS: "[]{}()<>|\\:;\"'=+*&^%$#@!~`?/",
|
||||
MIN_LENGTH: 1,
|
||||
MAX_LENGTH: 255,
|
||||
DEFAULT_SCHEMA: "{{certificateId}}"
|
||||
};
|
||||
|
||||
export const CHEF_PKI_SYNC_DATA_BAG_NAMING = {
|
||||
NAME_PATTERN: new RE2("^[a-zA-Z0-9_-]+$"),
|
||||
FORBIDDEN_CHARACTERS: "[]{}()<>|\\:;\"'=+*&^%$#@!~`?/.",
|
||||
MIN_LENGTH: 1,
|
||||
MAX_LENGTH: 255
|
||||
};
|
||||
|
||||
export const CHEF_PKI_SYNC_DEFAULTS = {
|
||||
CERTIFICATE_DATA_BAG: "ssl_certificates",
|
||||
ITEM_NAME_TEMPLATE: "{{certificateId}}",
|
||||
INFISICAL_PREFIX: "Infisical-",
|
||||
DEFAULT_ENVIRONMENT: "global"
|
||||
} as const;
|
||||
595
backend/src/services/pki-sync/chef/chef-pki-sync-fns.ts
Normal file
595
backend/src/services/pki-sync/chef/chef-pki-sync-fns.ts
Normal file
@@ -0,0 +1,595 @@
|
||||
/* eslint-disable no-continue */
|
||||
/* eslint-disable no-await-in-loop */
|
||||
import { TCertificateSyncs } from "@app/db/schemas";
|
||||
import {
|
||||
createChefDataBagItem,
|
||||
listChefDataBagItems,
|
||||
removeChefDataBagItem,
|
||||
updateChefDataBagItem
|
||||
} from "@app/ee/services/app-connections/chef";
|
||||
import { TChefDataBagItemContent } from "@app/ee/services/secret-sync/chef";
|
||||
import { logger } from "@app/lib/logger";
|
||||
import { TCertificateDALFactory } from "@app/services/certificate/certificate-dal";
|
||||
import { TCertificateSyncDALFactory } from "@app/services/certificate-sync/certificate-sync-dal";
|
||||
import { CertificateSyncStatus } from "@app/services/certificate-sync/certificate-sync-enums";
|
||||
import { createConnectionQueue, RateLimitConfig } from "@app/services/connection-queue";
|
||||
import { matchesCertificateNameSchema } from "@app/services/pki-sync/pki-sync-fns";
|
||||
import { TCertificateMap, TPkiSyncWithCredentials } from "@app/services/pki-sync/pki-sync-types";
|
||||
|
||||
import { CHEF_PKI_SYNC_DEFAULTS } from "./chef-pki-sync-constants";
|
||||
import { ChefCertificateDataBagItem, SyncCertificatesResult, TChefPkiSyncWithCredentials } from "./chef-pki-sync-types";
|
||||
|
||||
const CHEF_RATE_LIMIT_CONFIG: RateLimitConfig = {
|
||||
MAX_CONCURRENT_REQUESTS: 5, // Chef servers generally have lower rate limits
|
||||
BASE_DELAY: 1500,
|
||||
MAX_DELAY: 30000,
|
||||
MAX_RETRIES: 3,
|
||||
RATE_LIMIT_STATUS_CODES: [429, 503]
|
||||
};
|
||||
|
||||
const chefConnectionQueue = createConnectionQueue(CHEF_RATE_LIMIT_CONFIG);
|
||||
const { withRateLimitRetry } = chefConnectionQueue;
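
// The Chef server calls below are funneled through withRateLimitRetry, which (per the
// config above) retries requests that fail with the configured rate-limit status codes
// (429, 503), waiting between BASE_DELAY and MAX_DELAY for at most MAX_RETRIES attempts.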

const isInfisicalManagedCertificate = (certificateName: string, pkiSync: TPkiSyncWithCredentials): boolean => {
const syncOptions = pkiSync.syncOptions as { certificateNameSchema?: string } | undefined;
const certificateNameSchema = syncOptions?.certificateNameSchema;

if (certificateNameSchema) {
const environment = CHEF_PKI_SYNC_DEFAULTS.DEFAULT_ENVIRONMENT;
return matchesCertificateNameSchema(certificateName, environment, certificateNameSchema);
}

return certificateName.startsWith(CHEF_PKI_SYNC_DEFAULTS.INFISICAL_PREFIX);
};

const parseErrorMessage = (error: unknown): string => {
if (error instanceof Error) {
return error.message;
}

if (typeof error === "string") {
return error;
}

if (error && typeof error === "object" && "message" in error) {
const { message } = error as { message: unknown };
if (typeof message === "string") {
return message;
}
}

return "Unknown error occurred";
};

type TChefPkiSyncFactoryDeps = {
certificateDAL: Pick<TCertificateDALFactory, "findById">;
certificateSyncDAL: Pick<
TCertificateSyncDALFactory,
| "removeCertificates"
| "addCertificates"
| "findByPkiSyncAndCertificate"
| "updateById"
| "findByPkiSyncId"
| "updateSyncStatus"
>;
};

export const chefPkiSyncFactory = ({ certificateDAL, certificateSyncDAL }: TChefPkiSyncFactoryDeps) => {
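// Builds a map of data bag item name -> true for the items currently in the target
// data bag; used to choose between create and update calls and to detect stale
// Infisical-managed items during removal.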
const $getChefDataBagItems = async (
pkiSync: TChefPkiSyncWithCredentials,
syncId = "unknown"
): Promise<Record<string, boolean>> => {
const {
connection,
destinationConfig: { dataBagName }
} = pkiSync;
const { serverUrl, userName, privateKey, orgName } = connection.credentials;

const dataBagItems = await withRateLimitRetry(
() =>
listChefDataBagItems(
{
credentials: { serverUrl, userName, privateKey, orgName }
} as Parameters<typeof listChefDataBagItems>[0],
dataBagName
),
{
operation: "list-chef-data-bag-items",
syncId
}
);

const chefDataBagItems: Record<string, boolean> = {};
dataBagItems.forEach((item) => {
chefDataBagItems[item.name] = true;
});

return chefDataBagItems;
};

const syncCertificates = async (
pkiSync: TPkiSyncWithCredentials,
certificateMap: TCertificateMap
): Promise<SyncCertificatesResult> => {
const chefPkiSync = pkiSync as unknown as TChefPkiSyncWithCredentials;
const {
connection,
destinationConfig: { dataBagName }
} = chefPkiSync;
const { serverUrl, userName, privateKey, orgName } = connection.credentials;

const chefDataBagItems = await $getChefDataBagItems(chefPkiSync, pkiSync.id);

const existingSyncRecords = await certificateSyncDAL.findByPkiSyncId(pkiSync.id);
const syncRecordsByCertId = new Map<string, TCertificateSyncs>();
const syncRecordsByExternalId = new Map<string, TCertificateSyncs>();

existingSyncRecords.forEach((record: TCertificateSyncs) => {
if (record.certificateId) {
syncRecordsByCertId.set(record.certificateId, record);
}
if (record.externalIdentifier) {
syncRecordsByExternalId.set(record.externalIdentifier, record);
}
});

type CertificateUploadData = {
key: string;
name: string;
cert: string;
privateKey: string;
certificateChain?: string;
caCertificate?: string;
certificateId: string;
isUpdate: boolean;
targetItemName: string;
oldCertificateIdToRemove?: string;
};

const setCertificates: CertificateUploadData[] = [];

const validationErrors: Array<{ name: string; error: string }> = [];

const syncOptions = pkiSync.syncOptions as
| {
canRemoveCertificates?: boolean;
preserveItemOnRenewal?: boolean;
fieldMappings?: {
certificate?: string;
privateKey?: string;
certificateChain?: string;
caCertificate?: string;
metadata?: string;
};
}
| undefined;
const canRemoveCertificates = syncOptions?.canRemoveCertificates ?? true;
const preserveItemOnRenewal = syncOptions?.preserveItemOnRenewal ?? true;

const fieldMappings = {
certificate: syncOptions?.fieldMappings?.certificate ?? "certificate",
privateKey: syncOptions?.fieldMappings?.privateKey ?? "private_key",
certificateChain: syncOptions?.fieldMappings?.certificateChain ?? "certificate_chain",
caCertificate: syncOptions?.fieldMappings?.caCertificate ?? "ca_certificate"
};

const activeExternalIdentifiers = new Set<string>();

for (const [certName, certData] of Object.entries(certificateMap)) {
const { cert, privateKey: certPrivateKey, certificateChain, caCertificate, certificateId } = certData;

if (!cert || cert.trim().length === 0) {
validationErrors.push({
name: certName,
error: "Certificate content is empty or missing"
});
continue;
}

if (!certPrivateKey || certPrivateKey.trim().length === 0) {
validationErrors.push({
name: certName,
error: "Private key content is empty or missing"
});
continue;
}

if (!certificateId || typeof certificateId !== "string") {
continue;
}

const targetCertificateName = certName;

const certificate = await certificateDAL.findById(certificateId);

if (certificate?.renewedByCertificateId) {
continue;
}

const syncRecordLookupId = certificate?.renewedFromCertificateId || certificateId;
const existingSyncRecord = syncRecordsByCertId.get(syncRecordLookupId);

let shouldProcess = true;
let isUpdate = false;
let targetItemName = targetCertificateName;
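
// Renewal handling: when this certificate renewed an older one (tracked via
// renewedFromCertificateId) and preserveItemOnRenewal is enabled, the old sync
// record's data bag item name is reused so the item name stays stable across renewals.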

if (existingSyncRecord?.externalIdentifier) {
const existingChefItem = chefDataBagItems[existingSyncRecord.externalIdentifier];

if (existingChefItem) {
if (certificate?.renewedFromCertificateId && preserveItemOnRenewal) {
targetItemName = existingSyncRecord.externalIdentifier;
isUpdate = true;
} else if (!certificate?.renewedFromCertificateId) {
shouldProcess = false;
}
}
}

if (!shouldProcess) {
continue;
}

setCertificates.push({
key: certName,
name: certName,
cert,
privateKey: certPrivateKey,
certificateChain,
caCertificate,
certificateId,
isUpdate,
targetItemName,
oldCertificateIdToRemove:
certificate?.renewedFromCertificateId && preserveItemOnRenewal
? certificate.renewedFromCertificateId
: undefined
});

activeExternalIdentifiers.add(targetItemName);
}

type UploadResult =
| { status: "fulfilled"; certificate: CertificateUploadData }
| { status: "rejected"; certificate: CertificateUploadData; error: unknown };

const uploadPromises = setCertificates.map(async (certificateData): Promise<UploadResult> => {
const {
targetItemName,
cert,
privateKey: certPrivateKey,
certificateChain,
caCertificate,
certificateId
} = certificateData;

try {
const chefDataBagItem: ChefCertificateDataBagItem = {
id: targetItemName,
[fieldMappings.certificate]: cert,
[fieldMappings.privateKey]: certPrivateKey,
...(certificateChain && { [fieldMappings.certificateChain]: certificateChain }),
...(caCertificate && { [fieldMappings.caCertificate]: caCertificate })
};

const itemExists = chefDataBagItems[targetItemName] === true;

if (itemExists) {
await withRateLimitRetry(
() =>
updateChefDataBagItem({
serverUrl,
userName,
privateKey,
orgName,
dataBagName,
dataBagItemName: targetItemName,
data: chefDataBagItem as unknown as TChefDataBagItemContent
}),
{
operation: "update-chef-data-bag-item",
syncId: pkiSync.id
}
);
} else {
await withRateLimitRetry(
() =>
createChefDataBagItem({
serverUrl,
userName,
privateKey,
orgName,
dataBagName,
data: chefDataBagItem as unknown as TChefDataBagItemContent
}),
{
operation: "create-chef-data-bag-item",
syncId: pkiSync.id
}
);
}

return { status: "fulfilled" as const, certificate: certificateData };
} catch (error) {
logger.error(
{
syncId: pkiSync.id,
certificateId,
targetItemName,
error: error instanceof Error ? error.message : String(error)
},
"Failed to sync certificate to Chef"
);
return { status: "rejected" as const, certificate: certificateData, error };
}
});

const uploadResults = await Promise.allSettled(uploadPromises);

const successfulUploads = uploadResults.filter(
(result): result is PromiseFulfilledResult<UploadResult> =>
result.status === "fulfilled" && result.value.status === "fulfilled"
);
const failedUploads = uploadResults.filter(
(
result
): result is
| PromiseRejectedResult
| PromiseFulfilledResult<{ status: "rejected"; certificate: CertificateUploadData; error: unknown }> =>
result.status === "rejected" || (result.status === "fulfilled" && result.value.status === "rejected")
);

let removedCount = 0;
let failedRemovals: Array<{ name: string; error: string }> = [];

if (canRemoveCertificates) {
const itemsToRemove: string[] = [];

Object.keys(chefDataBagItems).forEach((itemName) => {
if (!activeExternalIdentifiers.has(itemName) && isInfisicalManagedCertificate(itemName, pkiSync)) {
itemsToRemove.push(itemName);
}
});

if (itemsToRemove.length > 0) {
const removalPromises = itemsToRemove.map(async (itemName) => {
try {
await withRateLimitRetry(
() =>
removeChefDataBagItem({
serverUrl,
userName,
privateKey,
orgName,
dataBagName,
dataBagItemName: itemName
}),
{
operation: "remove-chef-data-bag-item",
syncId: pkiSync.id
}
);

const syncRecord = syncRecordsByExternalId.get(itemName);
if (syncRecord?.certificateId) {
await certificateSyncDAL.removeCertificates(pkiSync.id, [syncRecord.certificateId]);
}

return { status: "fulfilled" as const, itemName };
} catch (error) {
logger.error(
{
syncId: pkiSync.id,
itemName,
error: error instanceof Error ? error.message : String(error)
},
"Failed to remove Chef data bag item"
);
return { status: "rejected" as const, itemName, error };
}
});

const removalResults = await Promise.allSettled(removalPromises);

const successfulRemovals = removalResults.filter(
(result): result is PromiseFulfilledResult<{ status: "fulfilled"; itemName: string }> =>
result.status === "fulfilled" && result.value.status === "fulfilled"
);
removedCount = successfulRemovals.length;

const failedRemovalPromises = removalResults.filter(
(
result
): result is
| PromiseRejectedResult
| PromiseFulfilledResult<{ status: "rejected"; itemName: string; error: unknown }> =>
result.status === "rejected" || (result.status === "fulfilled" && result.value.status === "rejected")
);

failedRemovals = failedRemovalPromises.map((result) => {
if (result.status === "rejected") {
return {
name: "unknown",
error: parseErrorMessage(result.reason)
};
}
const { itemName, error } = result.value;
return {
name: String(itemName),
error: parseErrorMessage(error)
};
});
}
}

for (const result of successfulUploads) {
const { certificateId, targetItemName, oldCertificateIdToRemove } = result.value.certificate;

if (certificateId && typeof certificateId === "string") {
const existingCertSync = await certificateSyncDAL.findByPkiSyncAndCertificate(pkiSync.id, certificateId);
if (existingCertSync) {
await certificateSyncDAL.updateById(existingCertSync.id, {
externalIdentifier: targetItemName,
syncStatus: CertificateSyncStatus.Succeeded,
lastSyncedAt: new Date(),
lastSyncMessage: "Certificate successfully synced to destination"
});
} else {
await certificateSyncDAL.addCertificates(pkiSync.id, [
{
certificateId,
externalIdentifier: targetItemName
}
]);

const newCertSync = await certificateSyncDAL.findByPkiSyncAndCertificate(pkiSync.id, certificateId);
if (newCertSync) {
await certificateSyncDAL.updateById(newCertSync.id, {
syncStatus: CertificateSyncStatus.Succeeded,
lastSyncedAt: new Date(),
lastSyncMessage: "Certificate successfully synced to destination"
});
}
}

if (oldCertificateIdToRemove) {
await certificateSyncDAL.removeCertificates(pkiSync.id, [oldCertificateIdToRemove]);
}
}
}

await Promise.all(
failedUploads.map(async (result) => {
let certificateId: string;
let errorMessage: string;

if (result.status === "rejected") {
certificateId = "unknown";
errorMessage = result.reason instanceof Error ? result.reason.message : String(result.reason);
return;
}

const { certificate, error } = result.value;
certificateId = certificate.certificateId;
errorMessage = error instanceof Error ? error.message : String(error);

const existingSyncRecord = syncRecordsByCertId.get(certificateId);
if (existingSyncRecord) {
await certificateSyncDAL.updateSyncStatus(
pkiSync.id,
certificateId,
CertificateSyncStatus.Failed,
errorMessage
);
}
})
);

return {
uploaded: successfulUploads.filter((result) => !result.value.certificate.isUpdate).length,
updated: successfulUploads.filter((result) => result.value.certificate.isUpdate).length,
removed: removedCount,
failedRemovals: failedRemovals.length,
skipped: validationErrors.length,
details: {
failedUploads: failedUploads.map((result) => {
if (result.status === "rejected") {
return {
name: "unknown",
error: result.reason instanceof Error ? result.reason.message : String(result.reason)
};
}
const { certificate, error } = result.value;
return {
name: certificate.name,
error: error instanceof Error ? error.message : String(error)
};
}),
failedRemovals,
validationErrors
}
};
};

const importCertificates = async (): Promise<SyncCertificatesResult> => {
throw new Error("Chef PKI Sync does not support importing certificates from Chef data bags");
};

const removeCertificates = async (
sync: TPkiSyncWithCredentials,
certificateNames: string[],
deps?: { certificateSyncDAL?: TCertificateSyncDALFactory; certificateMap?: TCertificateMap }
): Promise<void> => {
const chefPkiSync = sync as unknown as TChefPkiSyncWithCredentials;
const {
connection,
destinationConfig: { dataBagName }
} = chefPkiSync;
const { serverUrl, userName, privateKey, orgName } = connection.credentials;

const existingSyncRecords = await certificateSyncDAL.findByPkiSyncId(sync.id);
const certificateIdsToRemove: string[] = [];
const itemsToRemove: string[] = [];

for (const certName of certificateNames) {
const certificateData = deps?.certificateMap?.[certName];
if (certificateData?.certificateId && typeof certificateData.certificateId === "string") {
const syncRecord = existingSyncRecords.find((record) => record.certificateId === certificateData.certificateId);
if (syncRecord) {
certificateIdsToRemove.push(certificateData.certificateId);
if (syncRecord.externalIdentifier) {
itemsToRemove.push(syncRecord.externalIdentifier);
}
}
} else {
const targetName = certName;
const syncRecord = existingSyncRecords.find((record) => record.externalIdentifier === targetName);
if (syncRecord && syncRecord.certificateId) {
certificateIdsToRemove.push(syncRecord.certificateId);
itemsToRemove.push(targetName);
}
}
}

const removalPromises = itemsToRemove.map(async (itemName) => {
try {
await withRateLimitRetry(
() =>
removeChefDataBagItem({
serverUrl,
userName,
privateKey,
orgName,
dataBagName,
dataBagItemName: itemName
}),
{
operation: "remove-chef-data-bag-item",
syncId: sync.id
}
);
} catch (error) {
logger.error(
{
syncId: sync.id,
itemName,
error: error instanceof Error ? error.message : String(error)
},
"Failed to remove Chef data bag item during certificate removal"
);
}
});

await Promise.allSettled(removalPromises);

if (certificateIdsToRemove.length > 0) {
await certificateSyncDAL.removeCertificates(sync.id, certificateIdsToRemove);
}
};

return {
syncCertificates,
importCertificates,
removeCertificates
};
};
@@ -0,0 +1,10 @@
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import { PkiSync } from "@app/services/pki-sync/pki-sync-enums";

export const CHEF_PKI_SYNC_LIST_OPTION = {
name: "Chef" as const,
connection: AppConnection.Chef,
destination: PkiSync.Chef,
canImportCertificates: false,
canRemoveCertificates: true
} as const;
113
backend/src/services/pki-sync/chef/chef-pki-sync-schemas.ts
Normal file
@@ -0,0 +1,113 @@
import RE2 from "re2";
import { z } from "zod";

import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import { PkiSync } from "@app/services/pki-sync/pki-sync-enums";
import { PkiSyncSchema } from "@app/services/pki-sync/pki-sync-schemas";

import { CHEF_PKI_SYNC_CERTIFICATE_NAMING, CHEF_PKI_SYNC_DATA_BAG_NAMING } from "./chef-pki-sync-constants";

export const ChefPkiSyncConfigSchema = z.object({
dataBagName: z
.string()
.trim()
.min(1, "Data bag name required")
.max(255, "Data bag name cannot exceed 255 characters")
.refine(
(name) => CHEF_PKI_SYNC_DATA_BAG_NAMING.NAME_PATTERN.test(name),
"Data bag name can only contain alphanumeric characters, underscores, and hyphens"
)
});

const ChefFieldMappingsSchema = z.object({
certificate: z.string().min(1, "Certificate field name is required").default("certificate"),
privateKey: z.string().min(1, "Private key field name is required").default("private_key"),
certificateChain: z.string().min(1, "Certificate chain field name is required").default("certificate_chain"),
caCertificate: z.string().min(1, "CA certificate field name is required").default("ca_certificate")
});
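
// Chef data bag item names may only contain alphanumeric characters, underscores, and
// hyphens; the certificateNameSchema refine below substitutes sample values for each
// supported placeholder and validates the resulting name against those rules.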

const ChefPkiSyncOptionsSchema = z.object({
canImportCertificates: z.boolean().default(false),
canRemoveCertificates: z.boolean().default(true),
includeRootCa: z.boolean().default(false),
preserveItemOnRenewal: z.boolean().default(true),
updateExistingCertificates: z.boolean().default(true),
certificateNameSchema: z
.string()
.optional()
.refine(
(schema) => {
if (!schema) return true;

if (!schema.includes("{{certificateId}}")) {
return false;
}

const testName = schema
.replace(new RE2("\\{\\{certificateId\\}\\}", "g"), "test-cert-id")
.replace(new RE2("\\{\\{profileId\\}\\}", "g"), "test-profile-id")
.replace(new RE2("\\{\\{commonName\\}\\}", "g"), "test-common-name")
.replace(new RE2("\\{\\{friendlyName\\}\\}", "g"), "test-friendly-name")
.replace(new RE2("\\{\\{environment\\}\\}", "g"), "test-env");

const hasForbiddenChars = CHEF_PKI_SYNC_CERTIFICATE_NAMING.FORBIDDEN_CHARACTERS.split("").some((char) =>
testName.includes(char)
);

return (
CHEF_PKI_SYNC_CERTIFICATE_NAMING.NAME_PATTERN.test(testName) &&
!hasForbiddenChars &&
testName.length >= CHEF_PKI_SYNC_CERTIFICATE_NAMING.MIN_LENGTH &&
testName.length <= CHEF_PKI_SYNC_CERTIFICATE_NAMING.MAX_LENGTH
);
},
{
message:
"Certificate item name schema must include {{certificateId}} placeholder and result in names that contain only alphanumeric characters, underscores, and hyphens and be 1-255 characters long for Chef data bag items."
}
),
fieldMappings: ChefFieldMappingsSchema.optional().default({
certificate: "certificate",
privateKey: "private_key",
certificateChain: "certificate_chain",
caCertificate: "ca_certificate"
})
});

export const ChefPkiSyncSchema = PkiSyncSchema.extend({
destination: z.literal(PkiSync.Chef),
destinationConfig: ChefPkiSyncConfigSchema,
syncOptions: ChefPkiSyncOptionsSchema
});

export const CreateChefPkiSyncSchema = z.object({
name: z.string().trim().min(1).max(64),
description: z.string().optional(),
isAutoSyncEnabled: z.boolean().default(true),
destinationConfig: ChefPkiSyncConfigSchema,
syncOptions: ChefPkiSyncOptionsSchema.optional().default({}),
subscriberId: z.string().nullish(),
connectionId: z.string(),
projectId: z.string().trim().min(1),
certificateIds: z.array(z.string().uuid()).optional()
});

export const UpdateChefPkiSyncSchema = z.object({
name: z.string().trim().min(1).max(64).optional(),
description: z.string().optional(),
isAutoSyncEnabled: z.boolean().optional(),
destinationConfig: ChefPkiSyncConfigSchema.optional(),
syncOptions: ChefPkiSyncOptionsSchema.optional(),
subscriberId: z.string().nullish(),
connectionId: z.string().optional()
});

export const ChefPkiSyncListItemSchema = z.object({
name: z.literal("Chef"),
connection: z.literal(AppConnection.Chef),
destination: z.literal(PkiSync.Chef),
canImportCertificates: z.literal(false),
canRemoveCertificates: z.literal(true)
});

export { ChefFieldMappingsSchema };
59
backend/src/services/pki-sync/chef/chef-pki-sync-types.ts
Normal file
@@ -0,0 +1,59 @@
import { z } from "zod";

import { TChefConnection } from "@app/ee/services/app-connections/chef/chef-connection-types";

import {
ChefFieldMappingsSchema,
ChefPkiSyncConfigSchema,
ChefPkiSyncSchema,
CreateChefPkiSyncSchema,
UpdateChefPkiSyncSchema
} from "./chef-pki-sync-schemas";

export type TChefPkiSyncConfig = z.infer<typeof ChefPkiSyncConfigSchema>;

export type TChefFieldMappings = z.infer<typeof ChefFieldMappingsSchema>;

export type TChefPkiSync = z.infer<typeof ChefPkiSyncSchema>;

export type TChefPkiSyncInput = z.infer<typeof CreateChefPkiSyncSchema>;

export type TChefPkiSyncUpdate = z.infer<typeof UpdateChefPkiSyncSchema>;

export type TChefPkiSyncWithCredentials = TChefPkiSync & {
connection: TChefConnection;
};

export interface ChefCertificateDataBagItem {
id: string;
[key: string]: string;
}

export interface SyncCertificatesResult {
uploaded: number;
updated: number;
removed: number;
failedRemovals: number;
skipped: number;
details?: {
failedUploads?: Array<{ name: string; error: string }>;
failedRemovals?: Array<{ name: string; error: string }>;
validationErrors?: Array<{ name: string; error: string }>;
};
}

export interface RemoveCertificatesResult {
removed: number;
failed: number;
skipped: number;
}

export interface CertificateImportRequest {
id: string;
name: string;
certificate: string;
privateKey: string;
certificateChain?: string;
alternativeNames?: string[];
certificateId?: string;
}
4
backend/src/services/pki-sync/chef/index.ts
Normal file
@@ -0,0 +1,4 @@
export * from "./chef-pki-sync-constants";
export * from "./chef-pki-sync-fns";
export * from "./chef-pki-sync-schemas";
export * from "./chef-pki-sync-types";
@@ -1,6 +1,8 @@
export enum PkiSync {
AzureKeyVault = "azure-key-vault",
AwsCertificateManager = "aws-certificate-manager"
AwsCertificateManager = "aws-certificate-manager",
AwsSecretsManager = "aws-secrets-manager",
Chef = "chef"
}

export enum PkiSyncStatus {

@@ -10,8 +10,12 @@ import { TKmsServiceFactory } from "@app/services/kms/kms-service";

import { AWS_CERTIFICATE_MANAGER_PKI_SYNC_LIST_OPTION } from "./aws-certificate-manager/aws-certificate-manager-pki-sync-constants";
import { awsCertificateManagerPkiSyncFactory } from "./aws-certificate-manager/aws-certificate-manager-pki-sync-fns";
import { AWS_SECRETS_MANAGER_PKI_SYNC_LIST_OPTION } from "./aws-secrets-manager/aws-secrets-manager-pki-sync-constants";
import { awsSecretsManagerPkiSyncFactory } from "./aws-secrets-manager/aws-secrets-manager-pki-sync-fns";
import { AZURE_KEY_VAULT_PKI_SYNC_LIST_OPTION } from "./azure-key-vault/azure-key-vault-pki-sync-constants";
import { azureKeyVaultPkiSyncFactory } from "./azure-key-vault/azure-key-vault-pki-sync-fns";
import { chefPkiSyncFactory } from "./chef/chef-pki-sync-fns";
import { CHEF_PKI_SYNC_LIST_OPTION } from "./chef/chef-pki-sync-list-constants";
import { PkiSync } from "./pki-sync-enums";
import { TCertificateMap, TPkiSyncWithCredentials } from "./pki-sync-types";

@@ -19,7 +23,9 @@ const ENTERPRISE_PKI_SYNCS: PkiSync[] = [];

const PKI_SYNC_LIST_OPTIONS = {
[PkiSync.AzureKeyVault]: AZURE_KEY_VAULT_PKI_SYNC_LIST_OPTION,
[PkiSync.AwsCertificateManager]: AWS_CERTIFICATE_MANAGER_PKI_SYNC_LIST_OPTION
[PkiSync.AwsCertificateManager]: AWS_CERTIFICATE_MANAGER_PKI_SYNC_LIST_OPTION,
[PkiSync.AwsSecretsManager]: AWS_SECRETS_MANAGER_PKI_SYNC_LIST_OPTION,
[PkiSync.Chef]: CHEF_PKI_SYNC_LIST_OPTION
};

export const enterprisePkiSyncCheck = async (

@@ -162,6 +168,8 @@ export const PkiSyncFns = {
dependencies: {
appConnectionDAL: Pick<TAppConnectionDALFactory, "findById" | "updateById">;
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
certificateDAL: TCertificateDALFactory;
certificateSyncDAL: TCertificateSyncDALFactory;
}
): Promise<TCertificateMap> => {
switch (pkiSync.destination) {
@@ -175,6 +183,14 @@
"AWS Certificate Manager does not support importing certificates into Infisical (private keys cannot be extracted)"
);
}
case PkiSync.AwsSecretsManager: {
throw new Error("AWS Secrets Manager does not support importing certificates into Infisical");
}
case PkiSync.Chef: {
throw new Error(
"Chef does not support importing certificates into Infisical (private keys cannot be extracted securely)"
);
}
default:
throw new Error(`Unsupported PKI sync destination: ${String(pkiSync.destination)}`);
}
@@ -203,7 +219,7 @@ export const PkiSyncFns = {
}> => {
switch (pkiSync.destination) {
case PkiSync.AzureKeyVault: {
checkPkiSyncDestination(pkiSync, PkiSync.AzureKeyVault);
checkPkiSyncDestination(pkiSync, PkiSync.AzureKeyVault as PkiSync);
const azureKeyVaultPkiSync = azureKeyVaultPkiSyncFactory({
appConnectionDAL: dependencies.appConnectionDAL,
kmsService: dependencies.kmsService,
@@ -213,7 +229,7 @@
return azureKeyVaultPkiSync.syncCertificates(pkiSync, certificateMap);
}
case PkiSync.AwsCertificateManager: {
checkPkiSyncDestination(pkiSync, PkiSync.AwsCertificateManager);
checkPkiSyncDestination(pkiSync, PkiSync.AwsCertificateManager as PkiSync);
const awsCertificateManagerPkiSync = awsCertificateManagerPkiSyncFactory({
appConnectionDAL: dependencies.appConnectionDAL,
kmsService: dependencies.kmsService,
@@ -222,6 +238,22 @@
});
return awsCertificateManagerPkiSync.syncCertificates(pkiSync, certificateMap);
}
case PkiSync.AwsSecretsManager: {
checkPkiSyncDestination(pkiSync, PkiSync.AwsSecretsManager as PkiSync);
const awsSecretsManagerPkiSync = awsSecretsManagerPkiSyncFactory({
certificateDAL: dependencies.certificateDAL,
certificateSyncDAL: dependencies.certificateSyncDAL
});
return awsSecretsManagerPkiSync.syncCertificates(pkiSync, certificateMap);
}
case PkiSync.Chef: {
checkPkiSyncDestination(pkiSync, PkiSync.Chef as PkiSync);
const chefPkiSync = chefPkiSyncFactory({
certificateDAL: dependencies.certificateDAL,
certificateSyncDAL: dependencies.certificateSyncDAL
});
return chefPkiSync.syncCertificates(pkiSync, certificateMap);
}
default:
throw new Error(`Unsupported PKI sync destination: ${String(pkiSync.destination)}`);
}
@@ -240,7 +272,7 @@
): Promise<void> => {
switch (pkiSync.destination) {
case PkiSync.AzureKeyVault: {
checkPkiSyncDestination(pkiSync, PkiSync.AzureKeyVault);
checkPkiSyncDestination(pkiSync, PkiSync.AzureKeyVault as PkiSync);
const azureKeyVaultPkiSync = azureKeyVaultPkiSyncFactory({
appConnectionDAL: dependencies.appConnectionDAL,
kmsService: dependencies.kmsService,
@@ -254,7 +286,7 @@
break;
}
case PkiSync.AwsCertificateManager: {
checkPkiSyncDestination(pkiSync, PkiSync.AwsCertificateManager);
checkPkiSyncDestination(pkiSync, PkiSync.AwsCertificateManager as PkiSync);
const awsCertificateManagerPkiSync = awsCertificateManagerPkiSyncFactory({
appConnectionDAL: dependencies.appConnectionDAL,
kmsService: dependencies.kmsService,
@@ -267,6 +299,27 @@
});
break;
}
case PkiSync.AwsSecretsManager: {
checkPkiSyncDestination(pkiSync, PkiSync.AwsSecretsManager as PkiSync);
const awsSecretsManagerPkiSync = awsSecretsManagerPkiSyncFactory({
certificateDAL: dependencies.certificateDAL,
certificateSyncDAL: dependencies.certificateSyncDAL
});
await awsSecretsManagerPkiSync.removeCertificates(pkiSync, dependencies.certificateMap);
break;
}
case PkiSync.Chef: {
checkPkiSyncDestination(pkiSync, PkiSync.Chef as PkiSync);
const chefPkiSync = chefPkiSyncFactory({
certificateDAL: dependencies.certificateDAL,
certificateSyncDAL: dependencies.certificateSyncDAL
});
await chefPkiSync.removeCertificates(pkiSync, certificateNames, {
certificateSyncDAL: dependencies.certificateSyncDAL,
certificateMap: dependencies.certificateMap
});
break;
}
default:
throw new Error(`Unsupported PKI sync destination: ${String(pkiSync.destination)}`);
}

@@ -4,10 +4,14 @@ import { PkiSync } from "./pki-sync-enums";

export const PKI_SYNC_NAME_MAP: Record<PkiSync, string> = {
[PkiSync.AzureKeyVault]: "Azure Key Vault",
[PkiSync.AwsCertificateManager]: "AWS Certificate Manager"
[PkiSync.AwsCertificateManager]: "AWS Certificate Manager",
[PkiSync.AwsSecretsManager]: "AWS Secrets Manager",
[PkiSync.Chef]: "Chef"
};

export const PKI_SYNC_CONNECTION_MAP: Record<PkiSync, AppConnection> = {
[PkiSync.AzureKeyVault]: AppConnection.AzureKeyVault,
[PkiSync.AwsCertificateManager]: AppConnection.AWS
[PkiSync.AwsCertificateManager]: AppConnection.AWS,
[PkiSync.AwsSecretsManager]: AppConnection.AWS,
[PkiSync.Chef]: AppConnection.Chef
};

@@ -26,6 +26,7 @@ import { TCertificateSecretDALFactory } from "../certificate/certificate-secret-
import { TCertificateAuthorityCertDALFactory } from "../certificate-authority/certificate-authority-cert-dal";
import { TCertificateAuthorityDALFactory } from "../certificate-authority/certificate-authority-dal";
import { getCaCertChain } from "../certificate-authority/certificate-authority-fns";
import { extractRootCaFromChain, removeRootCaFromChain } from "../certificate-common/certificate-utils";
import { TCertificateSyncDALFactory } from "../certificate-sync/certificate-sync-dal";
import { CertificateSyncStatus } from "../certificate-sync/certificate-sync-enums";
import { TPkiSyncDALFactory } from "./pki-sync-dal";
@@ -180,11 +181,16 @@ export const pkiSyncQueueFactory = ({
(cert, index, self) => self.findIndex((c) => c.id === cert.id) === index
);

if (uniqueCertificates.length === 0) {
const activeCertificates = uniqueCertificates.filter((cert) => {
const typedCert = cert as TCertificates;
return !typedCert.renewedByCertificateId;
});

if (activeCertificates.length === 0) {
return { certificateMap, certificateMetadata };
}

certificates = uniqueCertificates;
certificates = activeCertificates;

for (const certificate of certificates) {
const cert = certificate as TCertificates;
@@ -231,13 +237,15 @@
}

let certificateChain: string | undefined;
let caCertificate: string | undefined;
try {
if (certBody.encryptedCertificateChain) {
const decryptedCertChain = await kmsDecryptor({
cipherTextBlob: certBody.encryptedCertificateChain
});
certificateChain = decryptedCertChain.toString();
} else if (certificate.caCertId) {
}
if (certificate.caCertId) {
const { caCert, caCertChain } = await getCaCertChain({
caCertId: certificate.caCertId,
certificateAuthorityDAL,
@@ -245,7 +253,10 @@
projectDAL,
kmsService
});
certificateChain = `${caCert}\n${caCertChain}`.trim();
if (!certBody.encryptedCertificateChain) {
certificateChain = `${caCert}\n${caCertChain}`.trim();
}
caCertificate = certificateChain ? extractRootCaFromChain(certificateChain) : caCert;
}
} catch (chainError) {
logger.warn(
@@ -254,10 +265,16 @@
);
// Continue without certificate chain
certificateChain = undefined;
caCertificate = undefined;
}

let certificateName: string;
const syncOptions = pkiSync.syncOptions as { certificateNameSchema?: string } | undefined;
const syncOptions = pkiSync.syncOptions as
| {
certificateNameSchema?: string;
includeRootCa?: boolean;
}
| undefined;
const certificateNameSchema = syncOptions?.certificateNameSchema;

if (certificateNameSchema) {
@@ -289,10 +306,16 @@
alternativeNames.push(originalLegacyName);
}
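
// When includeRootCa is disabled, the root CA is stripped from the chain that gets
// synced; the root is still surfaced separately via the caCertificate field populated above.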

let processedCertificateChain = certificateChain;
if (certificateChain && syncOptions?.includeRootCa === false) {
processedCertificateChain = removeRootCaFromChain(certificateChain);
}

certificateMap[certificateName] = {
cert: certificatePem,
privateKey: certPrivateKey || "",
certificateChain,
certificateChain: processedCertificateChain,
caCertificate,
alternativeNames,
certificateId: certificate.id
};

@@ -7,6 +7,7 @@ import { PkiSync } from "./pki-sync-enums";
export const PkiSyncOptionsSchema = z.object({
canImportCertificates: z.boolean(),
canRemoveCertificates: z.boolean().optional(),
includeRootCa: z.boolean().optional().default(false),
certificateNameSchema: z
.string()
.optional()

@@ -73,7 +73,14 @@ export type TPkiSyncListItem = TPkiSync & {

export type TCertificateMap = Record<
string,
{ cert: string; privateKey: string; certificateChain?: string; alternativeNames?: string[]; certificateId?: string }
{
cert: string;
privateKey: string;
certificateChain?: string;
caCertificate?: string;
alternativeNames?: string[];
certificateId?: string;
}
>;

export type TCreatePkiSyncDTO = {

9
backend/tsconfig.dev.json
Normal file
@@ -0,0 +1,9 @@
{
"extends": "./tsconfig.json",
"compilerOptions": {
"paths": {
"@app/*": ["./src/*"],
"@bdd_routes/bdd-nock-router": ["./src/server/routes/bdd/bdd-nock-router.dev.ts"]
}
}
}
@@ -24,7 +24,8 @@
"skipLibCheck": true,
"baseUrl": ".",
"paths": {
"@app/*": ["./src/*"]
"@app/*": ["./src/*"],
"@bdd_routes/*": ["./src/server/routes/bdd/*"]
},
"jsx": "react-jsx"
},

@@ -2,8 +2,8 @@
import path from "node:path";

import fs from "fs/promises";
import {replaceTscAliasPaths} from "tsc-alias";
import {defineConfig} from "tsup";
import { replaceTscAliasPaths } from "tsc-alias";
import { defineConfig } from "tsup";

// Instead of using tsx or tsc for building, consider using tsup.
// TSX serves as an alternative to Node.js, allowing you to build directly on the Node.js runtime.
@@ -29,7 +29,7 @@ export default defineConfig({
external: ["../../../frontend/node_modules/next/dist/server/next-server.js"],
outDir: "dist",
tsconfig: "./tsconfig.json",
entry: ["./src"],
entry: ["./src", "!./src/**/*.dev.ts"],
sourceMap: true,
skipNodeModulesBundle: true,
esbuildPlugins: [
@@ -45,22 +45,22 @@ export default defineConfig({
const isRelativePath = args.path.startsWith(".");
const absPath = isRelativePath
? path.join(args.resolveDir, args.path)
: path.join(args.path.replace("@app", "./src"));
: path.join(args.path.replace("@app", "./src").replace("@bdd_routes", "./src/server/routes/bdd"));

const isFile = await fs
.stat(`${absPath}.ts`)
.then((el) => el.isFile)
.catch(async (err) => {
if (err.code === "ENOTDIR") {
return true;
}
.catch(async (err) => {
if (err.code === "ENOTDIR") {
return true;
}

// If .ts file doesn't exist, try checking for .tsx file
return fs
.stat(`${absPath}.tsx`)
.then((el) => el.isFile)
.catch((err) => err.code === "ENOTDIR");
});
// If .ts file doesn't exist, try checking for .tsx file
return fs
.stat(`${absPath}.tsx`)
.then((el) => el.isFile)
.catch((err) => err.code === "ENOTDIR");
});

return {
path: isFile ? `${args.path}.mjs` : `${args.path}/index.mjs`,

@@ -28,7 +28,8 @@ export default defineConfig({
},
resolve: {
alias: {
"@app": path.resolve(__dirname, "./src")
"@app": path.resolve(__dirname, "./src"),
"@bdd_routes/bdd-nock-router": path.resolve(__dirname, "./src/server/routes/bdd/bdd-nock-router.dev.ts")
}
}
});

@@ -11,7 +11,8 @@ export default defineConfig({
},
resolve: {
alias: {
"@app": path.resolve(__dirname, "./src")
"@app": path.resolve(__dirname, "./src"),
"@bdd_routes/bdd-nock-router": path.resolve(__dirname, "./src/server/routes/bdd/bdd-nock-router.dev.ts")
}
}
});

@@ -71,6 +71,7 @@ services:
ports:
- 4000:4000
- 9464:9464 # for OTEL collection of Prometheus metrics
- 9229:9229 # For debugger access
environment:
- NODE_ENV=development
- DB_CONNECTION_URI=postgres://infisical:infisical@db/infisical?sslmode=disable

@@ -0,0 +1,4 @@
---
title: "Create AWS Secrets Manager PKI Sync"
openapi: "POST /api/v1/pki/syncs/aws-secrets-manager"
---
@@ -0,0 +1,4 @@
---
title: "Delete AWS Secrets Manager PKI Sync"
openapi: "DELETE /api/v1/pki/syncs/aws-secrets-manager/{pkiSyncId}"
---
@@ -0,0 +1,4 @@
---
title: "Get AWS Secrets Manager PKI Sync by ID"
openapi: "GET /api/v1/pki/syncs/aws-secrets-manager/{pkiSyncId}"
---
@@ -0,0 +1,4 @@
---
title: "List AWS Secrets Manager PKI Syncs"
openapi: "GET /api/v1/pki/syncs/aws-secrets-manager"
---
@@ -0,0 +1,4 @@
---
title: "Remove Certificates from AWS Secrets Manager"
openapi: "POST /api/v1/pki/syncs/aws-secrets-manager/{pkiSyncId}/remove-certificates"
---
@@ -0,0 +1,4 @@
---
title: "Sync Certificates to AWS Secrets Manager"
openapi: "POST /api/v1/pki/syncs/aws-secrets-manager/{pkiSyncId}/sync-certificates"
---
@@ -0,0 +1,4 @@
---
title: "Update AWS Secrets Manager PKI Sync"
openapi: "PATCH /api/v1/pki/syncs/aws-secrets-manager/{pkiSyncId}"
---
4
docs/api-reference/endpoints/pki/syncs/chef/create.mdx
Normal file
@@ -0,0 +1,4 @@
---
title: "Create Chef PKI Sync"
openapi: "POST /api/v1/pki/syncs/chef"
---
4
docs/api-reference/endpoints/pki/syncs/chef/delete.mdx
Normal file
@@ -0,0 +1,4 @@
---
title: "Delete Chef PKI Sync"
openapi: "DELETE /api/v1/pki/syncs/chef/{pkiSyncId}"
---
@@ -0,0 +1,4 @@
---
title: "Get Chef PKI Sync by ID"
openapi: "GET /api/v1/pki/syncs/chef/{pkiSyncId}"
---
4
docs/api-reference/endpoints/pki/syncs/chef/list.mdx
Normal file
@@ -0,0 +1,4 @@
---
title: "List Chef PKI Syncs"
openapi: "GET /api/v1/pki/syncs/chef"
---
@@ -0,0 +1,4 @@
---
title: "Remove Certificates from Chef"
openapi: "POST /api/v1/pki/syncs/chef/{pkiSyncId}/remove-certificates"
---
@@ -0,0 +1,4 @@
---
title: "Sync Certificates to Chef"
openapi: "POST /api/v1/pki/syncs/chef/{pkiSyncId}/sync"
---
4
docs/api-reference/endpoints/pki/syncs/chef/update.mdx
Normal file
@@ -0,0 +1,4 @@
---
title: "Update Chef PKI Sync"
openapi: "PATCH /api/v1/pki/syncs/chef/{pkiSyncId}"
---
@@ -40,12 +40,12 @@ sudo infisical gateway start --name=<name> --auth-method=<auth-method>
```

<Info>
By default, the gateway automatically connects to the relay with the lowest latency. To target a specific relay, use the `--relay=<relay-name>` flag.
By default, the gateway automatically connects to the relay with the lowest latency. To target a specific relay, use the `--target-relay-name=<relay-name>` flag.
</Info>

Once started, the gateway component will:

- Automatically connect to a healthy relay with the lowest latency (unless the `--relay` flag is specified)
- Automatically connect to a healthy relay with the lowest latency (unless the `--target-relay-name` flag is specified)
- Establish outbound SSH reverse tunnel to relay server (no inbound firewall rules needed)
- Authenticate using SSH certificates issued by Infisical
- Automatically reconnect if the connection is lost
@@ -252,14 +252,14 @@ The Gateway supports multiple authentication methods. Below are the available au
### Other Flags

<AccordionGroup>
<Accordion title="--relay">
<Accordion title="--target-relay-name">
The name of the relay that this gateway should connect to. The relay must be running and registered before starting the gateway.

If this flag is omitted, the gateway will automatically connect to a healthy relay with the lowest latency.

```bash
# Example
sudo infisical gateway start --relay=my-relay --name=my-gateway --token=<token>
sudo infisical gateway start --target-relay-name=my-relay --name=my-gateway --token=<token>
```

**Note:** For Infisical Cloud users using instance relays, the relay infrastructure is already running and managed by Infisical. If using organization relays or self-hosted instance relays, you must first start a relay server. For more information on deploying relays, refer to the [Relay Deployment Guide](/documentation/platform/gateways/relay-deployment).
@@ -336,14 +336,14 @@ sudo infisical gateway systemd install --token=<token> --domain=<domain> --name=

</Accordion>

<Accordion title="--relay">
<Accordion title="--target-relay-name">
The name of the relay that this gateway should connect to. The relay must be running and registered before starting the gateway.

If this flag is omitted, the gateway will automatically connect to a healthy relay with the lowest latency.

```bash
# Example
sudo infisical gateway systemd install --relay=my-relay --token=<token> --name=<name>
sudo infisical gateway systemd install --target-relay-name=my-relay --token=<token> --name=<name>
```

**Note:** For Infisical Cloud users using instance relays, the relay infrastructure is already running and managed by Infisical. If using organization relays or self-hosted instance relays, you must first start a relay server. For more information on deploying relays, refer to the [Relay Deployment Guide](/documentation/platform/gateways/relay-deployment).
@@ -687,7 +687,7 @@ sudo systemctl disable infisical-gateway # Disable auto-start on boot

<AccordionGroup>
<Accordion title="How are relays auto-selected?">
If the `--relay` flag is omitted, the gateway automatically selects the optimal relay. It first checks for healthy organization relays and connects to the one with the lowest latency. If no organization relays are available, it then performs the same latency-based selection among the available managed relays.
If the `--target-relay-name` flag is omitted, the gateway automatically selects the optimal relay. It first checks for healthy organization relays and connects to the one with the lowest latency. If no organization relays are available, it then performs the same latency-based selection among the available managed relays.
</Accordion>
<Accordion title="When restarting the gateway without a relay flag, does it select a new relay every time?">
No. The first time the gateway starts, it selects the optimal relay (based on latency) and caches that selection. On subsequent restarts, it will prioritize connecting to the cached relay. If it's unable to connect, it will then re-evaluate and connect to the next most optimal relay available.

@@ -766,7 +766,9 @@
"pages": [
"documentation/platform/pki/certificate-syncs/overview",
"documentation/platform/pki/certificate-syncs/aws-certificate-manager",
"documentation/platform/pki/certificate-syncs/azure-key-vault"
"documentation/platform/pki/certificate-syncs/aws-secrets-manager",
"documentation/platform/pki/certificate-syncs/azure-key-vault",
"documentation/platform/pki/certificate-syncs/chef"
]
},
{
@@ -2617,6 +2619,18 @@
"api-reference/endpoints/pki/syncs/aws-certificate-manager/remove-certificates"
]
},
{
"group": "AWS Secrets Manager",
"pages": [
"api-reference/endpoints/pki/syncs/aws-secrets-manager/list",
"api-reference/endpoints/pki/syncs/aws-secrets-manager/get-by-id",
"api-reference/endpoints/pki/syncs/aws-secrets-manager/create",
"api-reference/endpoints/pki/syncs/aws-secrets-manager/update",
"api-reference/endpoints/pki/syncs/aws-secrets-manager/delete",
"api-reference/endpoints/pki/syncs/aws-secrets-manager/sync-certificates",
"api-reference/endpoints/pki/syncs/aws-secrets-manager/remove-certificates"
]
},
{
"group": "Azure Key Vault",
"pages": [
@@ -2628,6 +2642,18 @@
"api-reference/endpoints/pki/syncs/azure-key-vault/sync-certificates",
"api-reference/endpoints/pki/syncs/azure-key-vault/remove-certificates"
]
},
{
"group": "Chef",
"pages": [
"api-reference/endpoints/pki/syncs/chef/list",
"api-reference/endpoints/pki/syncs/chef/get-by-id",
"api-reference/endpoints/pki/syncs/chef/create",
"api-reference/endpoints/pki/syncs/chef/update",
"api-reference/endpoints/pki/syncs/chef/delete",
"api-reference/endpoints/pki/syncs/chef/sync-certificates",
"api-reference/endpoints/pki/syncs/chef/remove-certificates"
]
}
]
}

@@ -127,7 +127,7 @@ To successfully deploy an Infisical Gateway for use, follow these steps in order
```

<Info>
By default, the gateway connects to the most optimal relay. Use the `--relay` flag to manually specify a different relay server.
By default, the gateway connects to the most optimal relay. Use the `--target-relay-name` flag to manually specify a different relay server.
</Info>

<Warning>
@@ -177,7 +177,7 @@ To successfully deploy an Infisical Gateway for use, follow these steps in order
```

<Info>
By default, the gateway connects to the most optimal relay. Use the `--relay` flag to manually specify a different relay server.
By default, the gateway connects to the most optimal relay. Use the `--target-relay-name` flag to manually specify a different relay server.
</Info>
</Tab>
</Tabs>

@@ -39,6 +39,7 @@ These permissions allow Infisical to list, import, tag, and manage certificates
|
||||
|
||||
- **Enable Removal of Expired/Revoked Certificates**: If enabled, Infisical will remove certificates from the destination if they are no longer active in Infisical.
|
||||
- **Preserve ARN on Renewal**: If enabled, Infisical will sync renewed certificates to the destination under the same ARN as the original synced certificate instead of creating a new certificate with a new ARN.
|
||||
- **Include Root CA**: If enabled, the Root CA certificate will be included in the certificate chain when syncing to AWS Certificate Manager. If disabled, only intermediate certificates will be included.
|
||||
- **Certificate Name Schema** (Optional): Customize how certificate tags are generated in AWS Certificate Manager. Must include `{{certificateId}}` as a placeholder for the certificate ID to ensure proper certificate identification and management. If not specified, defaults to `Infisical-{{certificateId}}`.
|
||||
- **Auto-Sync Enabled**: If enabled, certificates will automatically be synced when changes occur. Disable to enforce manual syncing only.
|
||||
|
@@ -86,6 +87,7 @@ These permissions allow Infisical to list, import, tag, and manage certificates
"syncOptions": {
  "canRemoveCertificates": true,
  "preserveArnOnRenewal": true,
  "includeRootCa": false,
  "certificateNameSchema": "myapp-{{certificateId}}"
},
"destinationConfig": {
@@ -110,6 +112,7 @@ These permissions allow Infisical to list, import, tag, and manage certificates
"syncOptions": {
  "canRemoveCertificates": true,
  "preserveArnOnRenewal": true,
  "includeRootCa": false,
  "certificateNameSchema": "myapp-{{certificateId}}"
},
"projectId": "3c90c3cc-0d44-4b50-8888-8dd25736052a",
@@ -0,0 +1,251 @@
---
title: "AWS Secrets Manager"
description: "Learn how to configure an AWS Secrets Manager Certificate Sync for Infisical PKI."
---

**Prerequisites:**

- Create an [AWS Connection](/integrations/app-connections/aws)
- If network restrictions apply, ensure your network security policies allow incoming requests from Infisical to this certificate sync provider.

<Note>
The AWS Secrets Manager Certificate Sync requires the following permissions to be set on the AWS IAM user
for Infisical to sync certificates to AWS Secrets Manager: `secretsmanager:CreateSecret`, `secretsmanager:UpdateSecret`,
`secretsmanager:GetSecretValue`, `secretsmanager:DeleteSecret`, `secretsmanager:ListSecrets`.

Any role with these permissions will work, such as one granted through a custom policy with **SecretsManager** permissions.
</Note>

<Note>
Certificates synced to AWS Secrets Manager are stored as JSON secrets,
preserving both the certificate and private key components as separate fields within the secret value.
</Note>

<Tabs>
<Tab title="Infisical UI">
1. Navigate to **Project** > **Integrations** > **Certificate Syncs** and press **Add Sync**.


2. Select the **AWS Secrets Manager** option.


3. Configure the **Destination** to which certificates should be deployed, then click **Next**.


- **AWS Connection**: The AWS Connection to authenticate with.
- **Region**: The AWS region where secrets will be stored.

4. Configure the **Sync Options** to specify how certificates should be synced, then click **Next**.


- **Enable Removal of Expired/Revoked Certificates**: If enabled, Infisical will remove certificates from the destination if they are no longer active in Infisical.
- **Preserve Secret on Renewal**: Only applies to certificate renewals. When a certificate is renewed in Infisical, this option controls how the renewed certificate is handled. If enabled, the renewed certificate updates the existing secret, preserving the same secret name. If disabled, the renewed certificate is created as a new secret with a new name.
- **Include Root CA**: If enabled, the Root CA certificate will be included in the certificate chain when syncing to AWS Secrets Manager. If disabled, only intermediate certificates will be included.
- **Certificate Name Schema** (Optional): Customize how secret names are generated in AWS Secrets Manager. Use `{{certificateId}}` as a placeholder for the certificate ID.
- **Auto-Sync Enabled**: If enabled, certificates will automatically be synced when changes occur. Disable to enforce manual syncing only.

5. Configure the **Field Mappings** to customize how certificate data is stored in AWS Secrets Manager secrets, then click **Next**.


- **Certificate Field**: The field name where the certificate will be stored in the secret value (default: `certificate`)
- **Private Key Field**: The field name where the private key will be stored in the secret value (default: `private_key`)
- **Certificate Chain Field**: The field name where the full certificate chain, excluding the root CA certificate, will be stored (default: `certificate_chain`)
- **CA Certificate Field**: The field name where the root CA certificate will be stored (default: `ca_certificate`)

<Tip>
**AWS Secrets Manager Secret Structure**: Certificates are stored in AWS Secrets Manager as JSON secrets with the following structure (field names can be customized via field mappings):
```json
{
  "certificate": "-----BEGIN CERTIFICATE-----\n...",
  "private_key": "-----BEGIN PRIVATE KEY-----\n...",
  "certificate_chain": "-----BEGIN CERTIFICATE-----\n...",
  "ca_certificate": "-----BEGIN CERTIFICATE-----\n..."
}
```

**Example with Custom Field Mappings**:
```json
{
  "ssl_cert": "-----BEGIN CERTIFICATE-----\n...",
  "ssl_key": "-----BEGIN PRIVATE KEY-----\n...",
  "ssl_chain": "-----BEGIN CERTIFICATE-----\n...",
  "ssl_ca": "-----BEGIN CERTIFICATE-----\n..."
}
```
</Tip>
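Once synced, applications can consume the secret like any other JSON secret. A minimal consumer sketch, assuming `boto3` is installed, AWS credentials are configured, and the secret was created under a `Infisical-{{certificateId}}`-style schema (the secret name below is illustrative):

```python
import json

import boto3

client = boto3.client("secretsmanager", region_name="us-east-1")

# Illustrative secret name following an Infisical-{{certificateId}} schema.
response = client.get_secret_value(SecretId="Infisical-550e8400-e29b-41d4-a716-446655440000")
secret = json.loads(response["SecretString"])

certificate = secret["certificate"]              # or your custom field name, e.g. "ssl_cert"
private_key = secret["private_key"]              # or "ssl_key"
certificate_chain = secret["certificate_chain"]  # or "ssl_chain"
```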

6. Configure the **Details** of your AWS Secrets Manager Certificate Sync, then click **Next**.


- **Name**: The name of your sync. Must be slug-friendly.
- **Description**: An optional description for your sync.

7. Select which certificates should be synced to AWS Secrets Manager.


8. Review your AWS Secrets Manager Certificate Sync configuration, then click **Create Sync**.


9. If enabled, your AWS Secrets Manager Certificate Sync will begin syncing your certificates to the destination endpoint.

</Tab>
<Tab title="API">
To create an **AWS Secrets Manager Certificate Sync**, make an API request to the [Create AWS Secrets Manager Certificate Sync](/api-reference/endpoints/pki/syncs/aws-secrets-manager/create) API endpoint.

### Sample request

<Note>
You can optionally specify `certificateIds` during sync creation to immediately add certificates to the sync.
If not provided, you can add certificates later using the certificate management endpoints.
</Note>

```bash Request
curl --request POST \
  --url https://app.infisical.com/api/v1/pki/syncs/aws-secrets-manager \
  --header 'Authorization: Bearer <access-token>' \
  --header 'Content-Type: application/json' \
  --data '{
    "name": "my-aws-secrets-manager-cert-sync",
    "projectId": "3c90c3cc-0d44-4b50-8888-8dd25736052a",
    "description": "an example certificate sync",
    "connectionId": "3c90c3cc-0d44-4b50-8888-8dd25736052a",
    "destination": "aws-secrets-manager",
    "isAutoSyncEnabled": true,
    "certificateIds": [
      "550e8400-e29b-41d4-a716-446655440000",
      "660f1234-e29b-41d4-a716-446655440001"
    ],
    "syncOptions": {
      "canRemoveCertificates": true,
      "preserveSecretOnRenewal": true,
      "canImportCertificates": false,
      "includeRootCa": false,
      "certificateNameSchema": "myapp-{{certificateId}}",
      "fieldMappings": {
        "certificate": "ssl_cert",
        "privateKey": "ssl_key",
        "certificateChain": "ssl_chain",
        "caCertificate": "ssl_ca"
      }
    },
    "destinationConfig": {
      "region": "us-east-1",
      "keyId": "alias/my-kms-key"
    }
  }'
```

### Example with Default Field Mappings

```bash Request
curl --request POST \
  --url https://app.infisical.com/api/v1/pki/syncs/aws-secrets-manager \
  --header 'Authorization: Bearer <access-token>' \
  --header 'Content-Type: application/json' \
  --data '{
    "name": "my-aws-secrets-manager-cert-sync-default",
    "projectId": "3c90c3cc-0d44-4b50-8888-8dd25736052a",
    "description": "AWS Secrets Manager sync with default field mappings",
    "connectionId": "3c90c3cc-0d44-4b50-8888-8dd25736052a",
    "destination": "aws-secrets-manager",
    "isAutoSyncEnabled": true,
    "syncOptions": {
      "canRemoveCertificates": true,
      "preserveSecretOnRenewal": true,
      "canImportCertificates": false,
      "includeRootCa": false,
      "certificateNameSchema": "infisical-{{certificateId}}",
      "fieldMappings": {
        "certificate": "certificate",
        "privateKey": "private_key",
        "certificateChain": "certificate_chain",
        "caCertificate": "ca_certificate"
      }
    },
    "destinationConfig": {
      "region": "us-west-2"
    }
  }'
```

### Sample response

```json Response
{
  "pkiSync": {
    "id": "3c90c3cc-0d44-4b50-8888-8dd25736052a",
    "name": "my-aws-secrets-manager-cert-sync",
    "description": "an example certificate sync",
    "destination": "aws-secrets-manager",
    "isAutoSyncEnabled": true,
    "destinationConfig": {
      "region": "us-east-1",
      "keyId": "alias/my-kms-key"
    },
    "syncOptions": {
      "canRemoveCertificates": true,
      "preserveSecretOnRenewal": true,
      "canImportCertificates": false,
      "includeRootCa": false,
      "certificateNameSchema": "myapp-{{certificateId}}",
      "fieldMappings": {
        "certificate": "ssl_cert",
        "privateKey": "ssl_key",
        "certificateChain": "ssl_chain",
        "caCertificate": "ssl_ca"
      }
    },
    "projectId": "3c90c3cc-0d44-4b50-8888-8dd25736052a",
    "connectionId": "3c90c3cc-0d44-4b50-8888-8dd25736052a",
    "createdAt": "2023-01-01T00:00:00.000Z",
    "updatedAt": "2023-01-01T00:00:00.000Z"
  }
}
```
</Tab>

</Tabs>

## Certificate Management

Your AWS Secrets Manager Certificate Sync will:

- **Automatic Deployment**: Deploy certificates in Infisical to AWS Secrets Manager as JSON secrets with customizable field names
- **Certificate Updates**: Update certificates in AWS Secrets Manager when renewals occur
- **Expiration Handling**: Optionally remove expired certificates from AWS Secrets Manager (if enabled)
- **Format Preservation**: Maintain certificate format during sync operations
- **Field Customization**: Map certificate data to custom field names that match your application requirements
- **CA Certificate Support**: Include CA certificates in secrets for complete certificate chain management
- **KMS Encryption**: Optionally use custom KMS keys for secret encryption
- **Regional Deployment**: Deploy secrets to specific AWS regions

<Note>
AWS Secrets Manager Certificate Syncs support both automatic and manual
synchronization modes. When auto-sync is enabled, certificates are
automatically deployed as they are issued or renewed.
</Note>

## Manual Certificate Sync

You can manually trigger certificate synchronization to AWS Secrets Manager using the sync certificates functionality. This is useful for:

- Initial setup when you have existing certificates to deploy
- One-time syncs of specific certificates
- Testing certificate sync configurations
- Forcing a sync after making changes

To manually sync certificates, use the [Sync Certificates](/api-reference/endpoints/pki/syncs/aws-secrets-manager/sync-certificates) API endpoint or the manual sync option in the Infisical UI.
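A manual sync can also be triggered from a script. A minimal sketch using `httpx`; the request path below is an assumption extrapolated from the create endpoint's URL pattern, so verify it against the linked API reference before relying on it:

```python
import httpx

# Assumed path, derived from the create endpoint's pattern; confirm it against
# the Sync Certificates API reference. <access-token> is a placeholder.
sync_id = "3c90c3cc-0d44-4b50-8888-8dd25736052a"
response = httpx.post(
    f"https://app.infisical.com/api/v1/pki/syncs/aws-secrets-manager/{sync_id}/sync-certificates",
    headers={"Authorization": "Bearer <access-token>"},
)
response.raise_for_status()
print(response.json())
```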

<Note>
AWS Secrets Manager does not support importing certificates back into Infisical
because certificates are stored there as JSON secrets
rather than as managed certificate objects.
</Note>

## Secret Naming Constraints

AWS Secrets Manager has specific naming requirements for secrets:

- **Allowed Characters**: Letters, numbers, hyphens (-), and underscores (_) only
- **Length**: 1-512 characters
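Certificate IDs are UUIDs, so a `{{certificateId}}`-based schema already satisfies these constraints; if you derive names from other inputs, a hypothetical sanitizer like the one below keeps them within the rules stated above:

```python
import re


def to_valid_secret_name(raw: str) -> str:
    """Hypothetical helper: coerce a string to the constraints above
    (letters, numbers, hyphens, underscores; 1-512 characters)."""
    name = re.sub(r"[^A-Za-z0-9_-]", "-", raw)[:512]
    if not name:
        raise ValueError("secret name must be at least 1 character long")
    return name


print(to_valid_secret_name("myapp/web cert #1"))  # myapp-web-cert--1
```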

@@ -40,6 +40,7 @@ Any role with these permissions would work such as the **Key Vault Certificates

- **Enable Removal of Expired/Revoked Certificates**: If enabled, Infisical will remove certificates from the destination if they are no longer active in Infisical.
- **Enable Versioning on Renewal**: If enabled, Infisical will sync renewed certificates to the destination under a new version of the original synced certificate instead of creating a new certificate.
- **Include Root CA**: If enabled, the Root CA certificate will be included in the certificate chain when syncing to Azure Key Vault. If disabled, only intermediate certificates will be included.
- **Certificate Name Schema** (Optional): Customize how certificate names are generated in Azure Key Vault. Use `{{certificateId}}` as a placeholder for the certificate ID. If not specified, defaults to `Infisical-{{certificateId}}`.
- **Auto-Sync Enabled**: If enabled, certificates will automatically be synced when changes occur. Disable to enforce manual syncing only.

@@ -93,6 +94,7 @@ Any role with these permissions would work such as the **Key Vault Certificates
"syncOptions": {
  "canRemoveCertificates": true,
  "enableVersioningOnRenewal": true,
  "includeRootCa": false,
  "certificateNameSchema": "myapp-{{certificateId}}"
},
"destinationConfig": {
@@ -117,6 +119,7 @@ Any role with these permissions would work such as the **Key Vault Certificates
"syncOptions": {
  "canRemoveCertificates": true,
  "enableVersioningOnRenewal": true,
  "includeRootCa": false,
  "certificateNameSchema": "myapp-{{certificateId}}"
},
"projectId": "3c90c3cc-0d44-4b50-8888-8dd25736052a",
241
docs/documentation/platform/pki/certificate-syncs/chef.mdx
Normal file
@@ -0,0 +1,241 @@
---
title: "Chef"
description: "Learn how to configure a Chef Certificate Sync for Infisical PKI."
---

**Prerequisites:**

- Create a [Chef Connection](/integrations/app-connections/chef)
- If network restrictions apply, ensure your network security policies allow incoming requests from Infisical to this certificate sync provider.

<Note>
The Chef Certificate Sync requires the following permissions to be set on the Chef user
for Infisical to sync certificates to Chef: `data bag read`, `data bag create`, `data bag update`, `data bag delete`.

Any role with these permissions will work, such as a custom role with **Data Bag** permissions.
</Note>

<Note>
Certificates synced to Chef are stored as data bag items within the specified data bag,
preserving both the certificate and private key components as separate fields.
</Note>

<Tabs>
<Tab title="Infisical UI">
1. Navigate to **Project** > **Integrations** > **Certificate Syncs** and press **Add Sync**.


2. Select the **Chef** option.


3. Configure the **Destination** to which certificates should be deployed, then click **Next**.


- **Chef Connection**: The Chef Connection to authenticate with.
- **Data Bag Name**: The name of the Chef data bag where certificates will be stored.

4. Configure the **Sync Options** to specify how certificates should be synced, then click **Next**.


- **Enable Removal of Expired/Revoked Certificates**: If enabled, Infisical will remove certificates from the destination if they are no longer active in Infisical.
- **Preserve Data Bag Item on Renewal**: Only applies to certificate renewals. When a certificate is renewed in Infisical, this option controls how the renewed certificate is handled. If enabled, the renewed certificate updates the existing data bag item, preserving the same item name. If disabled, the renewed certificate is created as a new data bag item with a new name.
- **Include Root CA**: If enabled, the Root CA certificate will be included in the certificate chain when syncing to Chef data bags. If disabled, only intermediate certificates will be included.
- **Certificate Name Schema** (Optional): Customize how certificate item names are generated in Chef data bags. Use `{{certificateId}}` as a placeholder for the certificate ID.
- **Auto-Sync Enabled**: If enabled, certificates will automatically be synced when changes occur. Disable to enforce manual syncing only.

5. Configure the **Field Mappings** to customize how certificate data is stored in Chef data bag items, then click **Next**.


- **Certificate Field**: The field name where the certificate will be stored in the data bag item (default: `certificate`)
- **Private Key Field**: The field name where the private key will be stored in the data bag item (default: `private_key`)
- **Certificate Chain Field**: The field name where the full certificate chain, excluding the root CA certificate, will be stored (default: `certificate_chain`)
- **CA Certificate Field**: The field name where the root CA certificate will be stored (default: `ca_certificate`)

<Tip>
**Chef Data Bag Item Structure**: Certificates are stored in Chef data bags as items with the following structure (field names can be customized via field mappings):
```json
{
  "id": "certificate-item-name",
  "certificate": "-----BEGIN CERTIFICATE-----\n...",
  "private_key": "-----BEGIN PRIVATE KEY-----\n...",
  "certificate_chain": "-----BEGIN CERTIFICATE-----\n...",
  "ca_certificate": "-----BEGIN CERTIFICATE-----\n..."
}
```

**Example with Custom Field Mappings**:
```json
{
  "id": "certificate-item-name",
  "ssl_cert": "-----BEGIN CERTIFICATE-----\n...",
  "ssl_key": "-----BEGIN PRIVATE KEY-----\n...",
  "ssl_chain": "-----BEGIN CERTIFICATE-----\n...",
  "ssl_ca": "-----BEGIN CERTIFICATE-----\n..."
}
```
</Tip>
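Chef recipes can read these fields from the data bag item directly. Outside of a Chef run, a minimal sketch for splitting an exported item into PEM files, assuming the item was first exported with `knife` (bag and item names below are illustrative):

```python
import json
import pathlib

# Assumes the data bag item was exported beforehand, e.g.:
#   knife data bag show ssl_certificates certificate-item-name -F json > item.json
item = json.loads(pathlib.Path("item.json").read_text())

pathlib.Path("server.crt").write_text(item["certificate"])       # or custom field, e.g. "ssl_cert"
pathlib.Path("server.key").write_text(item["private_key"])       # or "ssl_key"
pathlib.Path("chain.pem").write_text(item["certificate_chain"])  # or "ssl_chain"
```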

6. Configure the **Details** of your Chef Certificate Sync, then click **Next**.


- **Name**: The name of your sync. Must be slug-friendly.
- **Description**: An optional description for your sync.

7. Select which certificates should be synced to Chef.


8. Review your Chef Certificate Sync configuration, then click **Create Sync**.


9. If enabled, your Chef Certificate Sync will begin syncing your certificates to the destination endpoint.

</Tab>
<Tab title="API">
To create a **Chef Certificate Sync**, make an API request to the [Create Chef Certificate Sync](/api-reference/endpoints/pki/syncs/chef/create) API endpoint.

### Sample request

<Note>
You can optionally specify `certificateIds` during sync creation to immediately add certificates to the sync.
If not provided, you can add certificates later using the certificate management endpoints.
</Note>

```bash Request
curl --request POST \
  --url https://app.infisical.com/api/v1/pki/syncs/chef \
  --header 'Authorization: Bearer <access-token>' \
  --header 'Content-Type: application/json' \
  --data '{
    "name": "my-chef-cert-sync",
    "projectId": "3c90c3cc-0d44-4b50-8888-8dd25736052a",
    "description": "an example certificate sync",
    "connectionId": "3c90c3cc-0d44-4b50-8888-8dd25736052a",
    "destination": "chef",
    "isAutoSyncEnabled": true,
    "certificateIds": [
      "550e8400-e29b-41d4-a716-446655440000",
      "660f1234-e29b-41d4-a716-446655440001"
    ],
    "syncOptions": {
      "canRemoveCertificates": true,
      "preserveSecretOnRenewal": true,
      "canImportCertificates": false,
      "includeRootCa": false,
      "certificateNameSchema": "myapp-{{certificateId}}",
      "fieldMappings": {
        "certificate": "ssl_cert",
        "privateKey": "ssl_key",
        "certificateChain": "ssl_chain",
        "caCertificate": "ssl_ca"
      }
    },
    "destinationConfig": {
      "dataBagName": "ssl_certificates"
    }
  }'
```

### Example with Default Field Mappings

```bash Request
curl --request POST \
  --url https://app.infisical.com/api/v1/pki/syncs/chef \
  --header 'Authorization: Bearer <access-token>' \
  --header 'Content-Type: application/json' \
  --data '{
    "name": "my-chef-cert-sync-default",
    "projectId": "3c90c3cc-0d44-4b50-8888-8dd25736052a",
    "description": "Chef sync with default field mappings",
    "connectionId": "3c90c3cc-0d44-4b50-8888-8dd25736052a",
    "destination": "chef",
    "isAutoSyncEnabled": true,
    "syncOptions": {
      "canRemoveCertificates": true,
      "preserveSecretOnRenewal": true,
      "canImportCertificates": false,
      "includeRootCa": false,
      "certificateNameSchema": "{{commonName}}-{{certificateId}}",
      "fieldMappings": {
        "certificate": "certificate",
        "privateKey": "private_key",
        "certificateChain": "certificate_chain",
        "caCertificate": "ca_certificate"
      }
    },
    "destinationConfig": {
      "dataBagName": "certificates"
    }
  }'
```

### Sample response

```json Response
{
  "pkiSync": {
    "id": "3c90c3cc-0d44-4b50-8888-8dd25736052a",
    "name": "my-chef-cert-sync",
    "description": "an example certificate sync",
    "destination": "chef",
    "isAutoSyncEnabled": true,
    "destinationConfig": {
      "dataBagName": "ssl_certificates"
    },
    "syncOptions": {
      "canRemoveCertificates": true,
      "preserveSecretOnRenewal": true,
      "canImportCertificates": false,
      "includeRootCa": false,
      "certificateNameSchema": "myapp-{{certificateId}}",
      "fieldMappings": {
        "certificate": "ssl_cert",
        "privateKey": "ssl_key",
        "certificateChain": "ssl_chain",
        "caCertificate": "ssl_ca"
      }
    },
    "projectId": "3c90c3cc-0d44-4b50-8888-8dd25736052a",
    "connectionId": "3c90c3cc-0d44-4b50-8888-8dd25736052a",
    "createdAt": "2023-01-01T00:00:00.000Z",
    "updatedAt": "2023-01-01T00:00:00.000Z"
  }
}
```
</Tab>

</Tabs>

## Certificate Management

Your Chef Certificate Sync will:

- **Automatic Deployment**: Deploy certificates in Infisical to Chef data bags with customizable field names
- **Certificate Updates**: Update certificates in Chef data bags when renewals occur
- **Expiration Handling**: Optionally remove expired certificates from Chef data bags (if enabled)
- **Format Preservation**: Maintain certificate format during sync operations
- **Field Customization**: Map certificate data to custom field names that match your Chef cookbook requirements
- **CA Certificate Support**: Include CA certificates in data bag items for complete certificate chain management

<Note>
Chef Certificate Syncs support both automatic and manual
synchronization modes. When auto-sync is enabled, certificates are
automatically deployed as they are issued or renewed.
</Note>

## Manual Certificate Sync

You can manually trigger certificate synchronization to Chef using the sync certificates functionality. This is useful for:

- Initial setup when you have existing certificates to deploy
- One-time syncs of specific certificates
- Testing certificate sync configurations
- Forcing a sync after making changes

To manually sync certificates, use the [Sync Certificates](/api-reference/endpoints/pki/syncs/chef/sync-certificates) API endpoint or the manual sync option in the Infisical UI.

<Note>
Chef does not support importing certificates back into Infisical
because data bags store certificates as plain data
rather than as managed certificate objects.
</Note>
@@ -83,6 +83,7 @@ should be synced. Follow these steps to start syncing:
- <strong>Certificates:</strong> The certificates you wish to push to the destination.
- <strong>Options:</strong> Customize how certificates should be synced, including:
  - Whether certificates should be removed from the destination when they expire.
  - Whether to include the Root CA certificate in the certificate chain.
  - Certificate naming schema to control how certificate names are generated in
    the destination.
Binary file not shown.
Before: 420 KiB | After: 378 KiB
Binary file not shown.
After: 318 KiB
Binary file not shown.
After: 303 KiB
Binary file not shown.
After: 309 KiB
Binary file not shown.
After: 362 KiB
Some files were not shown because too many files have changed in this diff.