diff --git a/.gitignore b/.gitignore index b4e9a07c2f..0ad950da30 100644 --- a/.gitignore +++ b/.gitignore @@ -74,3 +74,4 @@ cli/test/infisical-merge backend/bdd/.bdd-infisical-bootstrap-result.json /npm/bin +__pycache__ diff --git a/backend/bdd/features/environment.py b/backend/bdd/features/environment.py index 9a2e9f90b0..976998c72b 100644 --- a/backend/bdd/features/environment.py +++ b/backend/bdd/features/environment.py @@ -3,6 +3,7 @@ import os import pathlib import typing +from copy import deepcopy import httpx from behave.runner import Context @@ -185,28 +186,33 @@ def bootstrap_infisical(context: Context): def before_all(context: Context): + base_vars = { + "BASE_URL": BASE_URL, + "PEBBLE_URL": PEBBLE_URL, + } if BOOTSTRAP_INFISICAL: details = bootstrap_infisical(context) - context.vars = { - "BASE_URL": BASE_URL, - "PEBBLE_URL": PEBBLE_URL, + vars = base_vars | { "PROJECT_ID": details["project"]["id"], "CERT_CA_ID": details["ca"]["id"], "CERT_TEMPLATE_ID": details["cert_template"]["id"], "AUTH_TOKEN": details["auth_token"], } else: - context.vars = { - "BASE_URL": BASE_URL, - "PEBBLE_URL": PEBBLE_URL, + vars = base_vars | { "PROJECT_ID": PROJECT_ID, "CERT_CA_ID": CERT_CA_ID, "CERT_TEMPLATE_ID": CERT_TEMPLATE_ID, "AUTH_TOKEN": AUTH_TOKEN, } + context._initial_vars = vars context.http_client = httpx.Client(base_url=BASE_URL) +def before_scenario(context: Context, scenario: typing.Any): + context.vars = deepcopy(context._initial_vars) + + def after_scenario(context: Context, scenario: typing.Any): if hasattr(context, "web_server"): context.web_server.shutdown_and_server_close() diff --git a/backend/bdd/features/pki/acme/access-control.feature b/backend/bdd/features/pki/acme/access-control.feature index 6615d00f80..50588be765 100644 --- a/backend/bdd/features/pki/acme/access-control.feature +++ b/backend/bdd/features/pki/acme/access-control.feature @@ -221,7 +221,6 @@ Feature: Access Control | order | .authorizations[0].uri | auth_uri | {auth_uri} | | | order 
| .authorizations[0].body.challenges[0].url | challenge_uri | {challenge_uri} | {} | - Scenario Outline: URL mismatch Given I have an ACME cert profile as "acme_profile" When I have an ACME client connecting to "{BASE_URL}/api/v1/pki/acme/profiles/{acme_profile.id}/directory" @@ -271,3 +270,52 @@ Feature: Access Control | order | .authorizations[0].uri | auth_uri | {auth_uri} | https://example.com/acmes/auths/FOOBAR | URL mismatch in the protected header | | order | .authorizations[0].body.challenges[0].url | challenge_uri | {challenge_uri} | BAD | Invalid URL in the protected header | | order | .authorizations[0].body.challenges[0].url | challenge_uri | {challenge_uri} | https://example.com/acmes/challenges/FOOBAR | URL mismatch in the protected header | + + Scenario Outline: Send KID and JWK in the same time + Given I have an ACME cert profile as "acme_profile" + When I have an ACME client connecting to "{BASE_URL}/api/v1/pki/acme/profiles/{acme_profile.id}/directory" + Then I register a new ACME account with email fangpen@infisical.com and EAB key id "{acme_profile.eab_kid}" with secret "{acme_profile.eab_secret}" as acme_account + And I memorize acme_account.uri with jq "capture("/(?[^/]+)$") | .id" as account_id + When I create certificate signing request as csr + Then I add names to certificate signing request csr + """ + { + "COMMON_NAME": "localhost" + } + """ + Then I create a RSA private key pair as cert_key + And I sign the certificate signing request csr with private key cert_key and output it as csr_pem in PEM format + And I submit the certificate signing request PEM csr_pem certificate order to the ACME server as order + And I peak and memorize the next nonce as nonce_value + And I memorize with jq "" as + When I send a raw ACME request to "" + """ + { + "protected": { + "alg": "RS256", + "nonce": "{nonce_value}", + "url": "", + "kid": "{acme_account.uri}", + "jwk": { + "n": 
"mmEWxUv2lUYDZe_M2FXJ_WDXgHoEG7PVvg-dfz1STzyMwx0qvM66KMenXSyVA0r-_Ssb6p8VexSWGOFKskM4ryKUihn2KNH5e8nXZBqzqYeKQ8vqaCdaWzTxFI1dg0xhk0CWptkZHxpRpLalztFJ1Pq7L2qvQOM2YT7wPYbwQhpaSiVNXAb1W4FwAPyC04v1mHehvST-esaDT7j_5-eU5cCcmyi4_g5nBawcinOjj5o3VCg4X8UjK--AjhAyYHx1nRMr-7xk4x-0VIpQ_OODjLB3WzN8s1YEb0Jx5Bv1JyeCw35zahqs3fAFyRje-p5ENk9NCxfz5x9ZGkszkkNt0Q", + "e": "AQAB", + "kty": "RSA" + } + }, + "payload": {} + } + """ + Then the value response.status_code should be equal to 400 + And the value response with jq ".status" should be equal to 400 + And the value response with jq ".type" should be equal to "urn:ietf:params:acme:error:malformed" + And the value response with jq ".detail" should be equal to "Both JWK and KID are provided in the protected header" + + Examples: Endpoints + | src_var | jq | dest_var | url | + | order | . | not_used | {BASE_URL}/api/v1/pki/acme/profiles/{acme_profile.id}/accounts/{account_id}/orders | + | order | . | not_used | {BASE_URL}/api/v1/pki/acme/profiles/{acme_profile.id}/new-order | + | order | . | not_used | {order.uri} | + | order | . | not_used | {order.uri}/finalize | + | order | . | not_used | {order.uri}/certificate | + | order | .authorizations[0].uri | auth_uri | {auth_uri} | + | order | .authorizations[0].body.challenges[0].url | challenge_uri | {challenge_uri} | diff --git a/backend/bdd/features/pki/acme/account.feature b/backend/bdd/features/pki/acme/account.feature index 589c5ab244..14e304c6ca 100644 --- a/backend/bdd/features/pki/acme/account.feature +++ b/backend/bdd/features/pki/acme/account.feature @@ -6,13 +6,32 @@ Feature: Account Then I register a new ACME account with email fangpen@infisical.com and EAB key id "{acme_profile.eab_kid}" with secret "{acme_profile.eab_secret}" as acme_account And the value acme_account.uri with jq "." 
should match pattern {BASE_URL}/api/v1/pki/acme/profiles/{acme_profile.id}/accounts/(.+) + Scenario: Create a new account with the same key pair twice + Given I have an ACME cert profile as "acme_profile" + When I have an ACME client connecting to "{BASE_URL}/api/v1/pki/acme/profiles/{acme_profile.id}/directory" + Then I register a new ACME account with email fangpen@infisical.com and EAB key id "{acme_profile.eab_kid}" with secret "{acme_profile.eab_secret}" as acme_account + And I memorize acme_account.uri as kid + And I register a new ACME account with email fangpen@infisical.com and EAB key id "{acme_profile.eab_kid}" with secret "{acme_profile.eab_secret}" as acme_account2 + And the value error.__class__.__name__ should be equal to "ConflictError" + And the value error.location should be equal to "{kid}" + Scenario: Find an existing account Given I have an ACME cert profile as "acme_profile" When I have an ACME client connecting to "{BASE_URL}/api/v1/pki/acme/profiles/{acme_profile.id}/directory" Then I register a new ACME account with email fangpen@infisical.com and EAB key id "{acme_profile.eab_kid}" with secret "{acme_profile.eab_secret}" as acme_account And I memorize acme_account.uri as account_uri - And I find the existing ACME account with email fangpen@infisical.com and EAB key id "{acme_profile.eab_kid}" with secret "{acme_profile.eab_secret}" as acme_account - And the value acme_account.uri should be equal to "{account_uri}" + And I find the existing ACME account with email fangpen@infisical.com and EAB key id "{acme_profile.eab_kid}" with secret "{acme_profile.eab_secret}" as retrieved_account + And the value retrieved_account.uri should be equal to "{account_uri}" + + # Note: This is a very special case for cert-manager. 
+ Scenario: Create a new account with EAB then retrieve it without EAB + Given I have an ACME cert profile as "acme_profile" + When I have an ACME client connecting to "{BASE_URL}/api/v1/pki/acme/profiles/{acme_profile.id}/directory" + Then I register a new ACME account with email fangpen@infisical.com and EAB key id "{acme_profile.eab_kid}" with secret "{acme_profile.eab_secret}" as acme_account + And I memorize acme_account.uri as account_uri + And I find the existing ACME account without EAB as retrieved_account + And the value error with should be absent + And the value retrieved_account.uri should be equal to "{account_uri}" Scenario: Create a new account without EAB Given I have an ACME cert profile as "acme_profile" diff --git a/backend/bdd/features/pki/acme/dicrectory.feature b/backend/bdd/features/pki/acme/directory.feature similarity index 86% rename from backend/bdd/features/pki/acme/dicrectory.feature rename to backend/bdd/features/pki/acme/directory.feature index 664ff7457d..53084a6817 100644 --- a/backend/bdd/features/pki/acme/dicrectory.feature +++ b/backend/bdd/features/pki/acme/directory.feature @@ -9,6 +9,9 @@ Feature: Directory { "newNonce": "{BASE_URL}/api/v1/pki/acme/profiles/{acme_profile.id}/new-nonce", "newAccount": "{BASE_URL}/api/v1/pki/acme/profiles/{acme_profile.id}/new-account", - "newOrder": "{BASE_URL}/api/v1/pki/acme/profiles/{acme_profile.id}/new-order" + "newOrder": "{BASE_URL}/api/v1/pki/acme/profiles/{acme_profile.id}/new-order", + "meta": { + "externalAccountRequired": true + } } """ diff --git a/backend/bdd/features/steps/pki_acme.py b/backend/bdd/features/steps/pki_acme.py index 46b10c13e0..353ec942da 100644 --- a/backend/bdd/features/steps/pki_acme.py +++ b/backend/bdd/features/steps/pki_acme.py @@ -387,6 +387,9 @@ def register_account_with_eab( ): acme_client = context.acme_client account_public_key = acme_client.net.key.public_key() + if not only_return_existing: + # clear the account in case if we want to register twice + 
acme_client.net.account = None if hasattr(context, "alt_eab_url"): eab_directory = messages.Directory.from_json( {"newAccount": context.alt_eab_url} @@ -406,8 +409,14 @@ def register_account_with_eab( only_return_existing=only_return_existing, ) try: - context.vars[account_var] = acme_client.new_account(registration) + if not only_return_existing: + context.vars[account_var] = acme_client.new_account(registration) + else: + context.vars[account_var] = acme_client.query_registration( + acme_client.net.account + ) except Exception as exp: + logger.error(f"Failed to register: {exp}", exc_info=True) context.vars["error"] = exp @@ -434,6 +443,17 @@ def step_impl(context: Context, email: str, kid: str, secret: str, account_var: ) +@then("I find the existing ACME account without EAB as {account_var}") +def step_impl(context: Context, account_var: str): + acme_client = context.acme_client + # registration = messages.RegistrationResource.from_json(dict(uri="")) + registration = acme_client.net.account + try: + context.vars[account_var] = acme_client.query_registration(registration) + except Exception as exp: + context.vars["error"] = exp + + @then("I register a new ACME account with email {email} without EAB") def step_impl(context: Context, email: str): acme_client = context.acme_client @@ -600,6 +620,19 @@ def step_impl(context: Context, var_path: str, jq_query: str): ) +@then("the value {var_path} with should be absent") +def step_impl(context: Context, var_path: str): + try: + value = eval_var(context, var_path) + except Exception as exp: + if isinstance(exp, KeyError): + return + raise + assert False, ( + f"value at {var_path!r} should be absent, but we got this instead: {value!r}" + ) + + @then('the value {var_path} with jq "{jq_query}" should be equal to {expected}') def step_impl(context: Context, var_path: str, jq_query: str, expected: str): value, result = apply_value_with_jq( @@ -615,13 +648,14 @@ def step_impl(context: Context, var_path: str, jq_query: str, 
expected: str): @then('the value {var_path} with jq "{jq_query}" should match pattern {regex}') def step_impl(context: Context, var_path: str, jq_query: str, regex: str): + actual_regex = replace_vars(regex, context.vars) value, result = apply_value_with_jq( context=context, var_path=var_path, jq_query=jq_query, ) - assert re.match(replace_vars(regex, context.vars), result), ( - f"{json.dumps(value)!r} with jq {jq_query!r}, the result {json.dumps(result)!r} does not match {regex!r}" + assert re.match(actual_regex, result), ( + f"{json.dumps(value)!r} with jq {jq_query!r}, the result {json.dumps(result)!r} does not match {actual_regex!r}" ) diff --git a/backend/bdd/features/steps/utils.py b/backend/bdd/features/steps/utils.py index 4ee7c8921d..93269d8bcb 100644 --- a/backend/bdd/features/steps/utils.py +++ b/backend/bdd/features/steps/utils.py @@ -15,6 +15,7 @@ from josepy import JSONObjectWithFields ACC_KEY_BITS = 2048 ACC_KEY_PUBLIC_EXPONENT = 65537 +NOCK_API_PREFIX = "/api/__bdd_nock__" logger = logging.getLogger(__name__) faker = Faker() @@ -265,7 +266,7 @@ def x509_cert_to_dict(cert: x509.Certificate) -> dict: def define_nock(context: Context, definitions: list[dict]): jwt_token = context.vars["AUTH_TOKEN"] response = context.http_client.post( - "/api/v1/bdd-nock/define", + f"{NOCK_API_PREFIX}/define", headers=dict(authorization="Bearer {}".format(jwt_token)), json=dict(definitions=definitions), ) @@ -275,7 +276,7 @@ def define_nock(context: Context, definitions: list[dict]): def restore_nock(context: Context): jwt_token = context.vars["AUTH_TOKEN"] response = context.http_client.post( - "/api/v1/bdd-nock/restore", + f"{NOCK_API_PREFIX}/restore", headers=dict(authorization="Bearer {}".format(jwt_token)), json=dict(), ) @@ -285,7 +286,7 @@ def restore_nock(context: Context): def clean_all_nock(context: Context): jwt_token = context.vars["AUTH_TOKEN"] response = context.http_client.post( - "/api/v1/bdd-nock/clean-all", + f"{NOCK_API_PREFIX}/clean-all", 
headers=dict(authorization="Bearer {}".format(jwt_token)), json=dict(), ) diff --git a/backend/nodemon.json b/backend/nodemon.json index 856f9ee51c..2542bca4dc 100644 --- a/backend/nodemon.json +++ b/backend/nodemon.json @@ -1,6 +1,8 @@ { - "watch": ["src"], + "watch": [ + "src" + ], "ext": ".ts,.js", "ignore": [], - "exec": "tsx ./src/main.ts | pino-pretty --colorize --colorizeObjects --singleLine" -} + "exec": "tsx --tsconfig=./tsconfig.dev.json --inspect=0.0.0.0:9229 ./src/main.ts | pino-pretty --colorize --colorizeObjects --singleLine" +} \ No newline at end of file diff --git a/backend/package-lock.json b/backend/package-lock.json index a871c38b16..808cf3204e 100644 --- a/backend/package-lock.json +++ b/backend/package-lock.json @@ -128,6 +128,7 @@ "sjcl": "^1.0.8", "smee-client": "^2.0.0", "snowflake-sdk": "^1.14.0", + "ssh2": "^1.17.0", "tedious": "^18.2.1", "tweetnacl": "^1.0.3", "tweetnacl-util": "^0.15.1", @@ -164,6 +165,7 @@ "@types/resolve": "^1.20.6", "@types/safe-regex": "^1.1.6", "@types/sjcl": "^1.0.34", + "@types/ssh2": "^1.15.5", "@types/uuid": "^9.0.7", "@typescript-eslint/eslint-plugin": "^6.20.0", "@typescript-eslint/parser": "^6.20.0", @@ -15634,6 +15636,33 @@ "dev": true, "license": "MIT" }, + "node_modules/@types/ssh2": { + "version": "1.15.5", + "resolved": "https://registry.npmjs.org/@types/ssh2/-/ssh2-1.15.5.tgz", + "integrity": "sha512-N1ASjp/nXH3ovBHddRJpli4ozpk6UdDYIX4RJWFa9L1YKnzdhTlVmiGHm4DZnj/jLbqZpes4aeR30EFGQtvhQQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/node": "^18.11.18" + } + }, + "node_modules/@types/ssh2/node_modules/@types/node": { + "version": "18.19.130", + "resolved": "https://registry.npmjs.org/@types/node/-/node-18.19.130.tgz", + "integrity": "sha512-GRaXQx6jGfL8sKfaIDD6OupbIHBr9jv7Jnaml9tB7l4v068PAOXqfcujMMo5PhbIs6ggR1XODELqahT2R8v0fg==", + "dev": true, + "license": "MIT", + "dependencies": { + "undici-types": "~5.26.4" + } + }, + "node_modules/@types/ssh2/node_modules/undici-types": { + 
"version": "5.26.5", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-5.26.5.tgz", + "integrity": "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==", + "dev": true, + "license": "MIT" + }, "node_modules/@types/sshpk": { "version": "1.10.3", "resolved": "https://registry.npmjs.org/@types/sshpk/-/sshpk-1.10.3.tgz", @@ -18061,6 +18090,15 @@ "dev": true, "license": "MIT" }, + "node_modules/buildcheck": { + "version": "0.0.6", + "resolved": "https://registry.npmjs.org/buildcheck/-/buildcheck-0.0.6.tgz", + "integrity": "sha512-8f9ZJCUXyT1M35Jx7MkBgmBMo3oHTTBIPLiY9xyL0pl3T5RwcPEY8cUHr5LBNfu/fk6c2T4DJZuVM/8ZZT2D2A==", + "optional": true, + "engines": { + "node": ">=10.0.0" + } + }, "node_modules/bullmq": { "version": "5.4.2", "resolved": "https://registry.npmjs.org/bullmq/-/bullmq-5.4.2.tgz", @@ -18901,6 +18939,20 @@ "node": ">= 0.10" } }, + "node_modules/cpu-features": { + "version": "0.0.10", + "resolved": "https://registry.npmjs.org/cpu-features/-/cpu-features-0.0.10.tgz", + "integrity": "sha512-9IkYqtX3YHPCzoVg1Py+o9057a3i0fp7S530UWokCSaFVTc7CwXPRiOjRjBQQ18ZCNafx78YfnG+HALxtVmOGA==", + "hasInstallScript": true, + "optional": true, + "dependencies": { + "buildcheck": "~0.0.6", + "nan": "^2.19.0" + }, + "engines": { + "node": ">=10.0.0" + } + }, "node_modules/create-hash": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/create-hash/-/create-hash-1.2.0.tgz", @@ -24996,9 +25048,9 @@ } }, "node_modules/nan": { - "version": "2.22.2", - "resolved": "https://registry.npmjs.org/nan/-/nan-2.22.2.tgz", - "integrity": "sha512-DANghxFkS1plDdRsX0X9pm0Z6SJNN6gBdtXfanwoZ8hooC5gosGFSBGRYHUVPz1asKA/kMRqDRdHrluZ61SpBQ==", + "version": "2.23.1", + "resolved": "https://registry.npmjs.org/nan/-/nan-2.23.1.tgz", + "integrity": "sha512-r7bBUGKzlqk8oPBDYxt6Z0aEdF1G1rwlMcLk8LCOMbOzf0mG+JUfUzG4fIMWwHWP0iyaLWEQZJmtB7nOHEm/qw==", "license": "MIT" }, "node_modules/nanoid": { @@ -31492,6 +31544,23 @@ "node": ">= 0.6" } 
}, + "node_modules/ssh2": { + "version": "1.17.0", + "resolved": "https://registry.npmjs.org/ssh2/-/ssh2-1.17.0.tgz", + "integrity": "sha512-wPldCk3asibAjQ/kziWQQt1Wh3PgDFpC0XpwclzKcdT1vql6KeYxf5LIt4nlFkUeR8WuphYMKqUA56X4rjbfgQ==", + "hasInstallScript": true, + "dependencies": { + "asn1": "^0.2.6", + "bcrypt-pbkdf": "^1.0.2" + }, + "engines": { + "node": ">=10.16.0" + }, + "optionalDependencies": { + "cpu-features": "~0.0.10", + "nan": "^2.23.0" + } + }, "node_modules/sshpk": { "version": "1.16.1", "resolved": "https://registry.npmjs.org/sshpk/-/sshpk-1.16.1.tgz", diff --git a/backend/package.json b/backend/package.json index aa97de2ed4..0e17bb2b73 100644 --- a/backend/package.json +++ b/backend/package.json @@ -32,7 +32,7 @@ "binary:clean": "rm -rf ./dist && rm -rf ./binary", "binary:rename-imports": "ts-node ./scripts/rename-mjs.ts", "test": "echo \"Error: no test specified\" && exit 1", - "dev": "tsx watch --clear-screen=false ./src/main.ts | pino-pretty --colorize --colorizeObjects --singleLine", + "dev": "tsx watch --clear-screen=false ./src/main.ts --config tsconfig.dev.json | pino-pretty --colorize --colorizeObjects --singleLine", "dev:docker": "nodemon", "build": "tsup --sourcemap", "build:frontend": "npm run build --prefix ../frontend", @@ -110,6 +110,7 @@ "@types/resolve": "^1.20.6", "@types/safe-regex": "^1.1.6", "@types/sjcl": "^1.0.34", + "@types/ssh2": "^1.15.5", "@types/uuid": "^9.0.7", "@typescript-eslint/eslint-plugin": "^6.20.0", "@typescript-eslint/parser": "^6.20.0", @@ -257,6 +258,7 @@ "sjcl": "^1.0.8", "smee-client": "^2.0.0", "snowflake-sdk": "^1.14.0", + "ssh2": "^1.17.0", "tedious": "^18.2.1", "tweetnacl": "^1.0.3", "tweetnacl-util": "^0.15.1", @@ -264,4 +266,4 @@ "zod": "^3.22.4", "zod-to-json-schema": "^3.24.5" } -} +} \ No newline at end of file diff --git a/backend/src/db/migrations/20251119025017_add-unique-constraint-for-pki-acme-account-public-key-and-profile-id.ts 
b/backend/src/db/migrations/20251119025017_add-unique-constraint-for-pki-acme-account-public-key-and-profile-id.ts new file mode 100644 index 0000000000..5bc4601e33 --- /dev/null +++ b/backend/src/db/migrations/20251119025017_add-unique-constraint-for-pki-acme-account-public-key-and-profile-id.ts @@ -0,0 +1,32 @@ +import { Knex } from "knex"; + +import { dropConstraintIfExists } from "@app/db/migrations/utils/dropConstraintIfExists"; +import { TableName } from "@app/db/schemas"; + +const CONSTRAINT_NAME = "unique_pki_acme_account_public_key_and_profile_id"; + +export async function up(knex: Knex): Promise { + if (await knex.schema.hasTable(TableName.PkiAcmeAccount)) { + const hasProfileId = await knex.schema.hasColumn(TableName.PkiAcmeAccount, "profileId"); + const hasPublicKeyThumbprint = await knex.schema.hasColumn(TableName.PkiAcmeAccount, "publicKeyThumbprint"); + + if (hasProfileId && hasPublicKeyThumbprint) { + await knex.schema.alterTable(TableName.PkiAcmeAccount, (table) => { + table.unique(["profileId", "publicKeyThumbprint"], { indexName: CONSTRAINT_NAME }); + }); + } + } +} + +export async function down(knex: Knex): Promise { + if (await knex.schema.hasTable(TableName.PkiAcmeAccount)) { + const hasProfileId = await knex.schema.hasColumn(TableName.PkiAcmeAccount, "profileId"); + const hasPublicKeyThumbprint = await knex.schema.hasColumn(TableName.PkiAcmeAccount, "publicKeyThumbprint"); + + await knex.schema.alterTable(TableName.PkiAcmeAccount, async () => { + if (hasProfileId && hasPublicKeyThumbprint) { + await dropConstraintIfExists(TableName.PkiAcmeAccount, CONSTRAINT_NAME, knex); + } + }); + } +} diff --git a/backend/src/ee/routes/v1/pam-account-routers/index.ts b/backend/src/ee/routes/v1/pam-account-routers/index.ts index 60d6214671..d3aadd5a41 100644 --- a/backend/src/ee/routes/v1/pam-account-routers/index.ts +++ b/backend/src/ee/routes/v1/pam-account-routers/index.ts @@ -9,6 +9,11 @@ import { SanitizedPostgresAccountWithResourceSchema, 
UpdatePostgresAccountSchema } from "@app/ee/services/pam-resource/postgres/postgres-resource-schemas"; +import { + CreateSSHAccountSchema, + SanitizedSSHAccountWithResourceSchema, + UpdateSSHAccountSchema +} from "@app/ee/services/pam-resource/ssh/ssh-resource-schemas"; import { registerPamResourceEndpoints } from "./pam-account-endpoints"; @@ -30,5 +35,14 @@ export const PAM_ACCOUNT_REGISTER_ROUTER_MAP: Record { + registerPamResourceEndpoints({ + server, + resourceType: PamResource.SSH, + accountResponseSchema: SanitizedSSHAccountWithResourceSchema, + createAccountSchema: CreateSSHAccountSchema, + updateAccountSchema: UpdateSSHAccountSchema + }); } }; diff --git a/backend/src/ee/routes/v1/pam-account-routers/pam-account-router.ts b/backend/src/ee/routes/v1/pam-account-routers/pam-account-router.ts index 286e0896fa..0b7f89b6ce 100644 --- a/backend/src/ee/routes/v1/pam-account-routers/pam-account-router.ts +++ b/backend/src/ee/routes/v1/pam-account-routers/pam-account-router.ts @@ -2,16 +2,21 @@ import { z } from "zod"; import { PamFoldersSchema } from "@app/db/schemas"; import { EventType } from "@app/ee/services/audit-log/audit-log-types"; +import { PamAccountOrderBy, PamAccountView } from "@app/ee/services/pam-account/pam-account-enums"; import { SanitizedMySQLAccountWithResourceSchema } from "@app/ee/services/pam-resource/mysql/mysql-resource-schemas"; import { PamResource } from "@app/ee/services/pam-resource/pam-resource-enums"; import { SanitizedPostgresAccountWithResourceSchema } from "@app/ee/services/pam-resource/postgres/postgres-resource-schemas"; +import { SanitizedSSHAccountWithResourceSchema } from "@app/ee/services/pam-resource/ssh/ssh-resource-schemas"; import { BadRequestError } from "@app/lib/errors"; +import { removeTrailingSlash } from "@app/lib/fn"; import { ms } from "@app/lib/ms"; +import { OrderByDirection } from "@app/lib/types"; import { readLimit, writeLimit } from "@app/server/config/rateLimiter"; import { verifyAuth } from 
"@app/server/plugins/auth/verify-auth"; import { AuthMode } from "@app/services/auth/auth-type"; const SanitizedAccountSchema = z.union([ + SanitizedSSHAccountWithResourceSchema, // ORDER MATTERS SanitizedPostgresAccountWithResourceSchema, SanitizedMySQLAccountWithResourceSchema ]); @@ -26,33 +31,69 @@ export const registerPamAccountRouter = async (server: FastifyZodProvider) => { schema: { description: "List PAM accounts", querystring: z.object({ - projectId: z.string().uuid() + projectId: z.string().uuid(), + accountPath: z.string().trim().default("/").transform(removeTrailingSlash), + accountView: z.nativeEnum(PamAccountView).default(PamAccountView.Flat), + offset: z.coerce.number().min(0).default(0), + limit: z.coerce.number().min(1).max(100).default(100), + orderBy: z.nativeEnum(PamAccountOrderBy).default(PamAccountOrderBy.Name), + orderDirection: z.nativeEnum(OrderByDirection).default(OrderByDirection.ASC), + search: z.string().trim().optional(), + filterResourceIds: z + .string() + .transform((val) => + val + .split(",") + .map((s) => s.trim()) + .filter(Boolean) + ) + .optional() }), response: { 200: z.object({ accounts: SanitizedAccountSchema.array(), - folders: PamFoldersSchema.array() + folders: PamFoldersSchema.array(), + totalCount: z.number().default(0), + folderId: z.string().optional(), + folderPaths: z.record(z.string(), z.string()) }) } }, onRequest: verifyAuth([AuthMode.JWT]), handler: async (req) => { - const response = await server.services.pamAccount.list(req.query.projectId, req.permission); + const { projectId, accountPath, accountView, limit, offset, search, orderBy, orderDirection, filterResourceIds } = + req.query; + + const { accounts, folders, totalCount, folderId, folderPaths } = await server.services.pamAccount.list({ + actorId: req.permission.id, + actor: req.permission.type, + actorAuthMethod: req.permission.authMethod, + actorOrgId: req.permission.orgId, + projectId, + accountPath, + accountView, + limit, + offset, + search, + 
orderBy, + orderDirection, + filterResourceIds + }); await server.services.auditLog.createAuditLog({ ...req.auditLogInfo, orgId: req.permission.orgId, - projectId: req.query.projectId, + projectId, event: { type: EventType.PAM_ACCOUNT_LIST, metadata: { - accountCount: response.accounts.length, - folderCount: response.folders.length + accountCount: accounts.length, + folderCount: folders.length } } }); - return response; + return { accounts, folders, totalCount, folderId, folderPaths }; } }); @@ -93,7 +134,7 @@ export const registerPamAccountRouter = async (server: FastifyZodProvider) => { gatewayClientPrivateKey: z.string(), gatewayServerCertificateChain: z.string(), relayHost: z.string(), - metadata: z.record(z.string(), z.string()).optional() + metadata: z.record(z.string(), z.string().optional()).optional() }) } }, diff --git a/backend/src/ee/routes/v1/pam-resource-routers/index.ts b/backend/src/ee/routes/v1/pam-resource-routers/index.ts index 8215325981..5dae317da2 100644 --- a/backend/src/ee/routes/v1/pam-resource-routers/index.ts +++ b/backend/src/ee/routes/v1/pam-resource-routers/index.ts @@ -9,6 +9,11 @@ import { SanitizedPostgresResourceSchema, UpdatePostgresResourceSchema } from "@app/ee/services/pam-resource/postgres/postgres-resource-schemas"; +import { + CreateSSHResourceSchema, + SanitizedSSHResourceSchema, + UpdateSSHResourceSchema +} from "@app/ee/services/pam-resource/ssh/ssh-resource-schemas"; import { registerPamResourceEndpoints } from "./pam-resource-endpoints"; @@ -30,5 +35,14 @@ export const PAM_RESOURCE_REGISTER_ROUTER_MAP: Record { + registerPamResourceEndpoints({ + server, + resourceType: PamResource.SSH, + resourceResponseSchema: SanitizedSSHResourceSchema, + createResourceSchema: CreateSSHResourceSchema, + updateResourceSchema: UpdateSSHResourceSchema + }); } }; diff --git a/backend/src/ee/routes/v1/pam-resource-routers/pam-resource-router.ts b/backend/src/ee/routes/v1/pam-resource-routers/pam-resource-router.ts index 
6563c86c7d..3536e7a99d 100644 --- a/backend/src/ee/routes/v1/pam-resource-routers/pam-resource-router.ts +++ b/backend/src/ee/routes/v1/pam-resource-routers/pam-resource-router.ts @@ -5,19 +5,30 @@ import { MySQLResourceListItemSchema, SanitizedMySQLResourceSchema } from "@app/ee/services/pam-resource/mysql/mysql-resource-schemas"; +import { PamResourceOrderBy } from "@app/ee/services/pam-resource/pam-resource-enums"; import { PostgresResourceListItemSchema, SanitizedPostgresResourceSchema } from "@app/ee/services/pam-resource/postgres/postgres-resource-schemas"; +import { + SanitizedSSHResourceSchema, + SSHResourceListItemSchema +} from "@app/ee/services/pam-resource/ssh/ssh-resource-schemas"; +import { OrderByDirection } from "@app/lib/types"; import { readLimit } from "@app/server/config/rateLimiter"; import { verifyAuth } from "@app/server/plugins/auth/verify-auth"; import { AuthMode } from "@app/services/auth/auth-type"; -const SanitizedResourceSchema = z.union([SanitizedPostgresResourceSchema, SanitizedMySQLResourceSchema]); +const SanitizedResourceSchema = z.union([ + SanitizedPostgresResourceSchema, + SanitizedMySQLResourceSchema, + SanitizedSSHResourceSchema +]); const ResourceOptionsSchema = z.discriminatedUnion("resource", [ PostgresResourceListItemSchema, - MySQLResourceListItemSchema + MySQLResourceListItemSchema, + SSHResourceListItemSchema ]); export const registerPamResourceRouter = async (server: FastifyZodProvider) => { @@ -52,17 +63,46 @@ export const registerPamResourceRouter = async (server: FastifyZodProvider) => { schema: { description: "List PAM resources", querystring: z.object({ - projectId: z.string().uuid() + projectId: z.string().uuid(), + offset: z.coerce.number().min(0).default(0), + limit: z.coerce.number().min(1).max(100).default(100), + orderBy: z.nativeEnum(PamResourceOrderBy).default(PamResourceOrderBy.Name), + orderDirection: z.nativeEnum(OrderByDirection).default(OrderByDirection.ASC), + search: z.string().trim().optional(), + 
filterResourceTypes: z + .string() + .transform((val) => + val + .split(",") + .map((s) => s.trim()) + .filter(Boolean) + ) + .optional() }), response: { 200: z.object({ - resources: SanitizedResourceSchema.array() + resources: SanitizedResourceSchema.array(), + totalCount: z.number().default(0) }) } }, onRequest: verifyAuth([AuthMode.JWT]), handler: async (req) => { - const response = await server.services.pamResource.list(req.query.projectId, req.permission); + const { projectId, limit, offset, search, orderBy, orderDirection, filterResourceTypes } = req.query; + + const { resources, totalCount } = await server.services.pamResource.list({ + actorId: req.permission.id, + actor: req.permission.type, + actorAuthMethod: req.permission.authMethod, + actorOrgId: req.permission.orgId, + projectId, + limit, + offset, + search, + orderBy, + orderDirection, + filterResourceTypes + }); await server.services.auditLog.createAuditLog({ ...req.auditLogInfo, @@ -71,12 +111,12 @@ export const registerPamResourceRouter = async (server: FastifyZodProvider) => { event: { type: EventType.PAM_RESOURCE_LIST, metadata: { - count: response.resources.length + count: resources.length } } }); - return response; + return { resources, totalCount }; } }); }; diff --git a/backend/src/ee/routes/v1/pam-session-router.ts b/backend/src/ee/routes/v1/pam-session-router.ts index 5fe10e434b..3c39a9516e 100644 --- a/backend/src/ee/routes/v1/pam-session-router.ts +++ b/backend/src/ee/routes/v1/pam-session-router.ts @@ -4,12 +4,21 @@ import { PamSessionsSchema } from "@app/db/schemas"; import { EventType } from "@app/ee/services/audit-log/audit-log-types"; import { MySQLSessionCredentialsSchema } from "@app/ee/services/pam-resource/mysql/mysql-resource-schemas"; import { PostgresSessionCredentialsSchema } from "@app/ee/services/pam-resource/postgres/postgres-resource-schemas"; -import { PamSessionCommandLogSchema, SanitizedSessionSchema } from "@app/ee/services/pam-session/pam-session-schemas"; +import { 
SSHSessionCredentialsSchema } from "@app/ee/services/pam-resource/ssh/ssh-resource-schemas"; +import { + PamSessionCommandLogSchema, + SanitizedSessionSchema, + TerminalEventSchema +} from "@app/ee/services/pam-session/pam-session-schemas"; import { readLimit, writeLimit } from "@app/server/config/rateLimiter"; import { verifyAuth } from "@app/server/plugins/auth/verify-auth"; import { AuthMode } from "@app/services/auth/auth-type"; -const SessionCredentialsSchema = z.union([PostgresSessionCredentialsSchema, MySQLSessionCredentialsSchema]); +const SessionCredentialsSchema = z.union([ + SSHSessionCredentialsSchema, + PostgresSessionCredentialsSchema, + MySQLSessionCredentialsSchema +]); export const registerPamSessionRouter = async (server: FastifyZodProvider) => { // Meant to be hit solely by gateway identities @@ -32,17 +41,15 @@ export const registerPamSessionRouter = async (server: FastifyZodProvider) => { }, onRequest: verifyAuth([AuthMode.IDENTITY_ACCESS_TOKEN]), handler: async (req) => { - const { credentials, projectId, account } = await server.services.pamAccount.getSessionCredentials( - req.params.sessionId, - req.permission - ); + const { credentials, projectId, account, sessionStarted } = + await server.services.pamAccount.getSessionCredentials(req.params.sessionId, req.permission); await server.services.auditLog.createAuditLog({ ...req.auditLogInfo, orgId: req.permission.orgId, projectId, event: { - type: EventType.PAM_SESSION_START, + type: EventType.PAM_SESSION_CREDENTIALS_GET, metadata: { sessionId: req.params.sessionId, accountName: account.name @@ -50,7 +57,22 @@ export const registerPamSessionRouter = async (server: FastifyZodProvider) => { } }); - return { credentials }; + if (sessionStarted) { + await server.services.auditLog.createAuditLog({ + ...req.auditLogInfo, + orgId: req.permission.orgId, + projectId, + event: { + type: EventType.PAM_SESSION_START, + metadata: { + sessionId: req.params.sessionId, + accountName: account.name + } + } + }); 
+ } + + return { credentials: credentials as z.infer }; } }); @@ -67,7 +89,7 @@ export const registerPamSessionRouter = async (server: FastifyZodProvider) => { sessionId: z.string().uuid() }), body: z.object({ - logs: PamSessionCommandLogSchema.array() + logs: z.array(z.union([PamSessionCommandLogSchema, TerminalEventSchema])) }), response: { 200: z.object({ diff --git a/backend/src/ee/services/audit-log/audit-log-types.ts b/backend/src/ee/services/audit-log/audit-log-types.ts index de3ce9af6a..ab5b126c67 100644 --- a/backend/src/ee/services/audit-log/audit-log-types.ts +++ b/backend/src/ee/services/audit-log/audit-log-types.ts @@ -186,6 +186,7 @@ export enum EventType { CREATE_TOKEN_IDENTITY_TOKEN_AUTH = "create-token-identity-token-auth", UPDATE_TOKEN_IDENTITY_TOKEN_AUTH = "update-token-identity-token-auth", GET_TOKENS_IDENTITY_TOKEN_AUTH = "get-tokens-identity-token-auth", + GET_TOKEN_IDENTITY_TOKEN_AUTH = "get-token-identity-token-auth", ADD_IDENTITY_TOKEN_AUTH = "add-identity-token-auth", UPDATE_IDENTITY_TOKEN_AUTH = "update-identity-token-auth", @@ -535,6 +536,7 @@ export enum EventType { DASHBOARD_GET_SECRET_VALUE = "dashboard-get-secret-value", DASHBOARD_GET_SECRET_VERSION_VALUE = "dashboard-get-secret-version-value", + PAM_SESSION_CREDENTIALS_GET = "pam-session-credentials-get", PAM_SESSION_START = "pam-session-start", PAM_SESSION_LOGS_UPDATE = "pam-session-logs-update", PAM_SESSION_END = "pam-session-end", @@ -1029,6 +1031,15 @@ interface GetTokensIdentityTokenAuthEvent { }; } +interface GetTokenIdentityTokenAuthEvent { + type: EventType.GET_TOKEN_IDENTITY_TOKEN_AUTH; + metadata: { + identityId: string; + identityName: string; + tokenId: string; + }; +} + interface AddIdentityTokenAuthEvent { type: EventType.ADD_IDENTITY_TOKEN_AUTH; metadata: { @@ -3978,6 +3989,14 @@ interface OrgRoleDeleteEvent { }; } +interface PamSessionCredentialsGetEvent { + type: EventType.PAM_SESSION_CREDENTIALS_GET; + metadata: { + sessionId: string; + accountName: string; + }; +} 
+ interface PamSessionStartEvent { type: EventType.PAM_SESSION_START; metadata: { @@ -4214,6 +4233,7 @@ export type Event = | CreateTokenIdentityTokenAuthEvent | UpdateTokenIdentityTokenAuthEvent | GetTokensIdentityTokenAuthEvent + | GetTokenIdentityTokenAuthEvent | AddIdentityTokenAuthEvent | UpdateIdentityTokenAuthEvent | GetIdentityTokenAuthEvent @@ -4531,6 +4551,7 @@ export type Event = | OrgRoleCreateEvent | OrgRoleUpdateEvent | OrgRoleDeleteEvent + | PamSessionCredentialsGetEvent | PamSessionStartEvent | PamSessionLogsUpdateEvent | PamSessionEndEvent diff --git a/backend/src/ee/services/license/__mocks__/license-fns.ts b/backend/src/ee/services/license/__mocks__/license-fns.ts index d303859bb3..2f29e4812a 100644 --- a/backend/src/ee/services/license/__mocks__/license-fns.ts +++ b/backend/src/ee/services/license/__mocks__/license-fns.ts @@ -39,3 +39,9 @@ export const getDefaultOnPremFeatures = () => { }; export const setupLicenseRequestWithStore = () => {}; + +export const getLicenseKeyConfig = () => { + return { + isValid: false + }; +}; diff --git a/backend/src/ee/services/license/license-fns.ts b/backend/src/ee/services/license/license-fns.ts index 14b7bcfbd8..09ff9e1081 100644 --- a/backend/src/ee/services/license/license-fns.ts +++ b/backend/src/ee/services/license/license-fns.ts @@ -1,13 +1,56 @@ import axios, { AxiosError } from "axios"; import { TLicenseServiceFactory } from "@app/ee/services/license/license-service"; -import { getConfig } from "@app/lib/config/env"; +import { getConfig, TEnvConfig } from "@app/lib/config/env"; import { request } from "@app/lib/config/request"; import { BadRequestError } from "@app/lib/errors"; import { logger } from "@app/lib/logger"; import { UserAliasType } from "@app/services/user-alias/user-alias-types"; -import { TFeatureSet } from "./license-types"; +import { LicenseType, TFeatureSet, TLicenseKeyConfig, TOfflineLicenseContents } from "./license-types"; + +export const isOfflineLicenseKey = (licenseKey: string): 
boolean => { + try { + const contents = JSON.parse(Buffer.from(licenseKey, "base64").toString("utf8")) as TOfflineLicenseContents; + + return "signature" in contents && "license" in contents; + } catch (error) { + return false; + } +}; + +export const getLicenseKeyConfig = ( + config?: Pick +): TLicenseKeyConfig => { + const cfg = config || getConfig(); + + if (!cfg) { + return { isValid: false }; + } + + const licenseKey = cfg.LICENSE_KEY; + + if (licenseKey) { + if (isOfflineLicenseKey(licenseKey)) { + return { isValid: true, licenseKey, type: LicenseType.Offline }; + } + + return { isValid: true, licenseKey, type: LicenseType.Online }; + } + + const offlineLicenseKey = cfg.LICENSE_KEY_OFFLINE; + + // backwards compatibility + if (offlineLicenseKey) { + if (isOfflineLicenseKey(offlineLicenseKey)) { + return { isValid: true, licenseKey: offlineLicenseKey, type: LicenseType.Offline }; + } + + return { isValid: false }; + } + + return { isValid: false }; +}; export const getDefaultOnPremFeatures = (): TFeatureSet => ({ _id: null, diff --git a/backend/src/ee/services/license/license-service.ts b/backend/src/ee/services/license/license-service.ts index bbd6147ed3..3bbd58831a 100644 --- a/backend/src/ee/services/license/license-service.ts +++ b/backend/src/ee/services/license/license-service.ts @@ -22,9 +22,10 @@ import { OrgPermissionBillingActions, OrgPermissionSubjects } from "../permissio import { TPermissionServiceFactory } from "../permission/permission-service-types"; import { BillingPlanRows, BillingPlanTableHead } from "./licence-enums"; import { TLicenseDALFactory } from "./license-dal"; -import { getDefaultOnPremFeatures, setupLicenseRequestWithStore } from "./license-fns"; +import { getDefaultOnPremFeatures, getLicenseKeyConfig, setupLicenseRequestWithStore } from "./license-fns"; import { InstanceType, + LicenseType, TAddOrgPmtMethodDTO, TAddOrgTaxIdDTO, TCreateOrgPortalSession, @@ -77,6 +78,7 @@ export const licenseServiceFactory = ({ let instanceType = 
InstanceType.OnPrem; let onPremFeatures: TFeatureSet = getDefaultOnPremFeatures(); let selfHostedLicense: TOfflineLicense | null = null; + const licenseKeyConfig = getLicenseKeyConfig(envConfig); const licenseServerCloudApi = setupLicenseRequestWithStore( envConfig.LICENSE_SERVER_URL || "", @@ -85,10 +87,13 @@ export const licenseServiceFactory = ({ envConfig.INTERNAL_REGION ); + const onlineLicenseKey = + licenseKeyConfig.isValid && licenseKeyConfig.type === LicenseType.Online ? licenseKeyConfig.licenseKey : ""; + const licenseServerOnPremApi = setupLicenseRequestWithStore( envConfig.LICENSE_SERVER_URL || "", LICENSE_SERVER_ON_PREM_LOGIN, - envConfig.LICENSE_KEY || "", + onlineLicenseKey, envConfig.INTERNAL_REGION ); @@ -131,7 +136,7 @@ export const licenseServiceFactory = ({ return; } - if (envConfig.LICENSE_KEY) { + if (licenseKeyConfig.isValid && licenseKeyConfig.type === LicenseType.Online) { const token = await licenseServerOnPremApi.refreshLicense(); if (token) { await syncLicenseKeyOnPremFeatures(true); @@ -142,10 +147,10 @@ export const licenseServiceFactory = ({ return; } - if (envConfig.LICENSE_KEY_OFFLINE) { + if (licenseKeyConfig.isValid && licenseKeyConfig.type === LicenseType.Offline) { let isValidOfflineLicense = true; const contents: TOfflineLicenseContents = JSON.parse( - Buffer.from(envConfig.LICENSE_KEY_OFFLINE, "base64").toString("utf8") + Buffer.from(licenseKeyConfig.licenseKey, "base64").toString("utf8") ); const isVerified = await verifyOfflineLicense(JSON.stringify(contents.license), contents.signature); @@ -184,7 +189,7 @@ export const licenseServiceFactory = ({ }; const initializeBackgroundSync = async () => { - if (envConfig.LICENSE_KEY) { + if (licenseKeyConfig?.isValid && licenseKeyConfig?.type === LicenseType.Online) { logger.info("Setting up background sync process for refresh onPremFeatures"); const job = new CronJob("*/10 * * * *", syncLicenseKeyOnPremFeatures); job.start(); diff --git 
a/backend/src/ee/services/license/license-types.ts b/backend/src/ee/services/license/license-types.ts index 5157b0730d..8897eaabcf 100644 --- a/backend/src/ee/services/license/license-types.ts +++ b/backend/src/ee/services/license/license-types.ts @@ -136,3 +136,18 @@ export type TDelOrgTaxIdDTO = TOrgPermission & { taxId: string }; export type TOrgInvoiceDTO = TOrgPermission; export type TOrgLicensesDTO = TOrgPermission; + +export enum LicenseType { + Offline = "offline", + Online = "online" +} + +export type TLicenseKeyConfig = + | { + isValid: false; + } + | { + isValid: true; + licenseKey: string; + type: LicenseType; + }; diff --git a/backend/src/ee/services/pam-account/pam-account-dal.ts b/backend/src/ee/services/pam-account/pam-account-dal.ts index 6ef7df76eb..5fa2426278 100644 --- a/backend/src/ee/services/pam-account/pam-account-dal.ts +++ b/backend/src/ee/services/pam-account/pam-account-dal.ts @@ -1,46 +1,109 @@ import { Knex } from "knex"; import { TDbClient } from "@app/db"; -import { TableName, TPamAccounts } from "@app/db/schemas"; -import { buildFindFilter, ormify, prependTableNameToFindFilter, selectAllTableCols } from "@app/lib/knex"; +import { TableName } from "@app/db/schemas"; +import { DatabaseError } from "@app/lib/errors"; +import { ormify, selectAllTableCols } from "@app/lib/knex"; +import { OrderByDirection } from "@app/lib/types"; + +import { PamAccountOrderBy, PamAccountView } from "./pam-account-enums"; export type TPamAccountDALFactory = ReturnType; -type PamAccountFindFilter = Parameters>[0]; - export const pamAccountDALFactory = (db: TDbClient) => { const orm = ormify(db, TableName.PamAccount); - const findWithResourceDetails = async (filter: PamAccountFindFilter, tx?: Knex) => { - const query = (tx || db.replicaNode())(TableName.PamAccount) - .leftJoin(TableName.PamResource, `${TableName.PamAccount}.resourceId`, `${TableName.PamResource}.id`) - .select(selectAllTableCols(TableName.PamAccount)) - .select( + const 
findByProjectIdWithResourceDetails = async ( + { + projectId, + folderId, + accountView = PamAccountView.Nested, + search, + limit, + offset = 0, + orderBy = PamAccountOrderBy.Name, + orderDirection = OrderByDirection.ASC, + filterResourceIds + }: { + projectId: string; + folderId?: string | null; + accountView?: PamAccountView; + search?: string; + limit?: number; + offset?: number; + orderBy?: PamAccountOrderBy; + orderDirection?: OrderByDirection; + filterResourceIds?: string[]; + }, + tx?: Knex + ) => { + try { + const dbInstance = tx || db.replicaNode(); + const query = dbInstance(TableName.PamAccount) + .leftJoin(TableName.PamResource, `${TableName.PamAccount}.resourceId`, `${TableName.PamResource}.id`) + .where(`${TableName.PamAccount}.projectId`, projectId); + + if (accountView === PamAccountView.Nested) { + if (folderId) { + void query.where(`${TableName.PamAccount}.folderId`, folderId); + } else { + void query.whereNull(`${TableName.PamAccount}.folderId`); + } + } + + if (search) { + // escape special characters (`%`, `_`) and the escape character itself (`\`) + const escapedSearch = search.replace(/\\/g, "\\\\").replace(/%/g, "\\%").replace(/_/g, "\\_"); + const pattern = `%${escapedSearch}%`; + void query.where((q) => { + void q + .whereRaw(`??.?? ILIKE ? ESCAPE '\\'`, [TableName.PamAccount, "name", pattern]) + .orWhereRaw(`??.?? ILIKE ? ESCAPE '\\'`, [TableName.PamResource, "name", pattern]) + .orWhereRaw(`??.?? ILIKE ? 
ESCAPE '\\'`, [TableName.PamAccount, "description", pattern]); + }); + } + + if (filterResourceIds && filterResourceIds.length) { + void query.whereIn(`${TableName.PamAccount}.resourceId`, filterResourceIds); + } + + const countQuery = query.clone().count("*", { as: "count" }).first(); + + void query.select(selectAllTableCols(TableName.PamAccount)).select( // resource db.ref("name").withSchema(TableName.PamResource).as("resourceName"), db.ref("resourceType").withSchema(TableName.PamResource), db.ref("encryptedRotationAccountCredentials").withSchema(TableName.PamResource) ); - if (filter) { - /* eslint-disable @typescript-eslint/no-misused-promises */ - void query.where(buildFindFilter(prependTableNameToFindFilter(TableName.PamAccount, filter))); + const direction = orderDirection === OrderByDirection.ASC ? "ASC" : "DESC"; + + void query.orderByRaw(`${TableName.PamAccount}.?? COLLATE "en-x-icu" ${direction}`, [orderBy]); + + if (typeof limit === "number") { + void query.limit(limit).offset(offset); + } + + const [results, countResult] = await Promise.all([query, countQuery]); + const totalCount = Number(countResult?.count || 0); + + const accounts = results.map( + // @ts-expect-error resourceName, resourceType, encryptedRotationAccountCredentials are from joined table + ({ resourceId, resourceName, resourceType, encryptedRotationAccountCredentials, ...account }) => ({ + ...account, + resourceId, + resource: { + id: resourceId, + name: resourceName as string, + resourceType, + encryptedRotationAccountCredentials + } + }) + ); + return { accounts, totalCount }; + } catch (error) { + throw new DatabaseError({ error, name: "Find PAM accounts with resource details" }); } - - const accounts = await query; - - return accounts.map( - ({ resourceId, resourceName, resourceType, encryptedRotationAccountCredentials, ...account }) => ({ - ...account, - resourceId, - resource: { - id: resourceId, - name: resourceName, - resourceType, - encryptedRotationAccountCredentials - } - }) 
- ); }; const findAccountsDueForRotation = async (tx?: Knex) => { @@ -59,5 +122,9 @@ export const pamAccountDALFactory = (db: TDbClient) => { return accounts; }; - return { ...orm, findWithResourceDetails, findAccountsDueForRotation }; + return { + ...orm, + findByProjectIdWithResourceDetails, + findAccountsDueForRotation + }; }; diff --git a/backend/src/ee/services/pam-account/pam-account-enums.ts b/backend/src/ee/services/pam-account/pam-account-enums.ts new file mode 100644 index 0000000000..92b95df947 --- /dev/null +++ b/backend/src/ee/services/pam-account/pam-account-enums.ts @@ -0,0 +1,8 @@ +export enum PamAccountOrderBy { + Name = "name" +} + +export enum PamAccountView { + Flat = "flat", + Nested = "nested" +} diff --git a/backend/src/ee/services/pam-account/pam-account-service.ts b/backend/src/ee/services/pam-account/pam-account-service.ts index 2f66d28d74..1eae8df15c 100644 --- a/backend/src/ee/services/pam-account/pam-account-service.ts +++ b/backend/src/ee/services/pam-account/pam-account-service.ts @@ -1,6 +1,6 @@ import { ForbiddenError, subject } from "@casl/ability"; -import { ActionProjectType, OrganizationActionScope, TPamAccounts, TPamResources } from "@app/db/schemas"; +import { ActionProjectType, OrganizationActionScope, TPamAccounts, TPamFolders, TPamResources } from "@app/db/schemas"; import { PAM_RESOURCE_FACTORY_MAP } from "@app/ee/services/pam-resource/pam-resource-factory"; import { decryptResource, decryptResourceConnectionDetails } from "@app/ee/services/pam-resource/pam-resource-fns"; import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service-types"; @@ -27,12 +27,14 @@ import { getFullPamFolderPath } from "../pam-folder/pam-folder-fns"; import { TPamResourceDALFactory } from "../pam-resource/pam-resource-dal"; import { PamResource } from "../pam-resource/pam-resource-enums"; import { TPamAccountCredentials } from "../pam-resource/pam-resource-types"; +import { TSqlResourceConnectionDetails } from 
"../pam-resource/shared/sql/sql-resource-types"; import { TPamSessionDALFactory } from "../pam-session/pam-session-dal"; import { PamSessionStatus } from "../pam-session/pam-session-enums"; import { OrgPermissionGatewayActions, OrgPermissionSubjects } from "../permission/org-permission"; import { TPamAccountDALFactory } from "./pam-account-dal"; +import { PamAccountView } from "./pam-account-enums"; import { decryptAccount, decryptAccountCredentials, encryptAccountCredentials } from "./pam-account-fns"; -import { TAccessAccountDTO, TCreateAccountDTO, TUpdateAccountDTO } from "./pam-account-types"; +import { TAccessAccountDTO, TCreateAccountDTO, TListAccountsDTO, TUpdateAccountDTO } from "./pam-account-types"; type TPamAccountServiceFactoryDep = { pamResourceDAL: TPamResourceDALFactory; @@ -251,17 +253,17 @@ export const pamAccountServiceFactory = ({ gatewayV2Service ); - // Logic to prevent overwriting unedited censored values - const finalCredentials = { ...credentials }; - if (credentials.password === "__INFISICAL_UNCHANGED__") { - const decryptedCredentials = await decryptAccountCredentials({ - encryptedCredentials: account.encryptedCredentials, - projectId: account.projectId, - kmsService - }); + const decryptedCredentials = await decryptAccountCredentials({ + encryptedCredentials: account.encryptedCredentials, + projectId: account.projectId, + kmsService + }); - finalCredentials.password = decryptedCredentials.password; - } + // Logic to prevent overwriting unedited censored values + const finalCredentials = await factory.handleOverwritePreventionForCensoredValues( + credentials, + decryptedCredentials + ); const validatedCredentials = await factory.validateAccountCredentials(finalCredentials); const encryptedCredentials = await encryptAccountCredentials({ @@ -334,21 +336,96 @@ export const pamAccountServiceFactory = ({ }; }; - const list = async (projectId: string, actor: OrgServiceActor) => { + const list = async ({ + projectId, + accountPath, + accountView, 
+ actor, + actorId, + actorAuthMethod, + actorOrgId, + ...params + }: TListAccountsDTO) => { const { permission } = await permissionService.getProjectPermission({ - actor: actor.type, - actorAuthMethod: actor.authMethod, - actorId: actor.id, - actorOrgId: actor.orgId, + actor, + actorId, projectId, + actorAuthMethod, + actorOrgId, actionProjectType: ActionProjectType.PAM }); - const accountsWithResourceDetails = await pamAccountDAL.findWithResourceDetails({ projectId }); + const limit = params.limit || 20; + const offset = params.offset || 0; const canReadFolders = permission.can(ProjectPermissionActions.Read, ProjectPermissionSub.PamFolders); - const folders = canReadFolders ? await pamFolderDAL.find({ projectId }) : []; + const folder = accountPath === "/" ? null : await pamFolderDAL.findByPath(projectId, accountPath); + if (accountPath !== "/" && !folder) { + return { accounts: [], folders: [], totalCount: 0, folderPaths: {} }; + } + const folderId = folder?.id; + + let totalFolderCount = 0; + if (canReadFolders && accountView === PamAccountView.Nested) { + const { totalCount } = await pamFolderDAL.findByProjectId({ + projectId, + parentId: folderId, + search: params.search + }); + totalFolderCount = totalCount; + } + + let folders: TPamFolders[] = []; + if (canReadFolders && accountView === PamAccountView.Nested && offset < totalFolderCount) { + const folderLimit = Math.min(limit, totalFolderCount - offset); + const { folders: foldersResp } = await pamFolderDAL.findByProjectId({ + projectId, + parentId: folderId, + limit: folderLimit, + offset, + search: params.search, + orderBy: params.orderBy, + orderDirection: params.orderDirection + }); + + folders = foldersResp; + } + + let accountsWithResourceDetails: Awaited< + ReturnType + >["accounts"] = []; + let totalAccountCount = 0; + + const accountsToFetch = limit - folders.length; + if (accountsToFetch > 0) { + const accountOffset = Math.max(0, offset - totalFolderCount); + const { accounts, totalCount } = await 
pamAccountDAL.findByProjectIdWithResourceDetails({ + projectId, + folderId, + accountView, + offset: accountOffset, + limit: accountsToFetch, + search: params.search, + orderBy: params.orderBy, + orderDirection: params.orderDirection, + filterResourceIds: params.filterResourceIds + }); + accountsWithResourceDetails = accounts; + totalAccountCount = totalCount; + } else { + // if no accounts are to be fetched for the current page, we still need the total count for pagination + const { totalCount } = await pamAccountDAL.findByProjectIdWithResourceDetails({ + projectId, + folderId, + accountView, + search: params.search, + filterResourceIds: params.filterResourceIds + }); + totalAccountCount = totalCount; + } + + const totalCount = totalFolderCount + totalAccountCount; const decryptedAndPermittedAccounts: Array< TPamAccounts & { @@ -359,12 +436,6 @@ export const pamAccountServiceFactory = ({ > = []; for await (const account of accountsWithResourceDetails) { - const accountPath = await getFullPamFolderPath({ - pamFolderDAL, - folderId: account.folderId, - projectId: account.projectId - }); - // Check permission for each individual account if ( permission.can( @@ -391,9 +462,27 @@ export const pamAccountServiceFactory = ({ } } + const folderPaths: Record = {}; + const accountFolderIds = [ + ...new Set(decryptedAndPermittedAccounts.flatMap((a) => (a.folderId ? 
[a.folderId] : []))) + ]; + + await Promise.all( + accountFolderIds.map(async (fId) => { + folderPaths[fId] = await getFullPamFolderPath({ + pamFolderDAL, + folderId: fId, + projectId + }); + }) + ); + return { accounts: decryptedAndPermittedAccounts, - folders + folders, + totalCount, + folderId, + folderPaths }; }; @@ -486,11 +575,11 @@ export const pamAccountServiceFactory = ({ case PamResource.Postgres: case PamResource.MySQL: { - const connectionCredentials = await decryptResourceConnectionDetails({ + const connectionCredentials = (await decryptResourceConnectionDetails({ encryptedConnectionDetails: resource.encryptedConnectionDetails, kmsService, projectId: account.projectId - }); + })) as TSqlResourceConnectionDetails; const credentials = await decryptAccountCredentials({ encryptedCredentials: account.encryptedCredentials, @@ -506,6 +595,19 @@ export const pamAccountServiceFactory = ({ }; } break; + case PamResource.SSH: + { + const credentials = await decryptAccountCredentials({ + encryptedCredentials: account.encryptedCredentials, + kmsService, + projectId: account.projectId + }); + + metadata = { + username: credentials.username + }; + } + break; default: break; } @@ -566,11 +668,6 @@ export const pamAccountServiceFactory = ({ throw new BadRequestError({ message: "Session has ended or expired" }); } - // Verify that the session has not already had credentials fetched - if (session.status !== PamSessionStatus.Starting) { - throw new BadRequestError({ message: "Session has already been started" }); - } - const account = await pamAccountDAL.findById(session.accountId); if (!account) throw new NotFoundError({ message: `Account with ID '${session.accountId}' not found` }); @@ -587,11 +684,16 @@ export const pamAccountServiceFactory = ({ const decryptedResource = await decryptResource(resource, session.projectId, kmsService); + let sessionStarted = false; + // Mark session as started - await pamSessionDAL.updateById(sessionId, { - status: 
PamSessionStatus.Active, - startedAt: new Date() - }); + if (session.status === PamSessionStatus.Starting) { + await pamSessionDAL.updateById(sessionId, { + status: PamSessionStatus.Active, + startedAt: new Date() + }); + sessionStarted = true; + } return { credentials: { @@ -599,7 +701,8 @@ export const pamAccountServiceFactory = ({ ...decryptedAccount.credentials }, projectId: project.id, - account + account, + sessionStarted }; }; diff --git a/backend/src/ee/services/pam-account/pam-account-types.ts b/backend/src/ee/services/pam-account/pam-account-types.ts index 4bbccc6faf..b8498036e1 100644 --- a/backend/src/ee/services/pam-account/pam-account-types.ts +++ b/backend/src/ee/services/pam-account/pam-account-types.ts @@ -1,4 +1,7 @@ +import { OrderByDirection, TProjectPermission } from "@app/lib/types"; + import { TPamAccount } from "../pam-resource/pam-resource-types"; +import { PamAccountOrderBy, PamAccountView } from "./pam-account-enums"; // DTOs export type TCreateAccountDTO = Pick< @@ -18,3 +21,14 @@ export type TAccessAccountDTO = { actorUserAgent: string; duration: number; }; + +export type TListAccountsDTO = { + accountPath: string; + accountView: PamAccountView; + search?: string; + orderBy?: PamAccountOrderBy; + orderDirection?: OrderByDirection; + limit?: number; + offset?: number; + filterResourceIds?: string[]; +} & TProjectPermission; diff --git a/backend/src/ee/services/pam-folder/pam-folder-dal.ts b/backend/src/ee/services/pam-folder/pam-folder-dal.ts index aa334618d9..0b8aa8f60c 100644 --- a/backend/src/ee/services/pam-folder/pam-folder-dal.ts +++ b/backend/src/ee/services/pam-folder/pam-folder-dal.ts @@ -1,9 +1,106 @@ +import { Knex } from "knex"; + import { TDbClient } from "@app/db"; import { TableName } from "@app/db/schemas"; -import { ormify } from "@app/lib/knex"; +import { DatabaseError } from "@app/lib/errors"; +import { ormify, selectAllTableCols } from "@app/lib/knex"; +import { OrderByDirection } from "@app/lib/types"; + +import { 
PamAccountOrderBy } from "../pam-account/pam-account-enums"; export type TPamFolderDALFactory = ReturnType; export const pamFolderDALFactory = (db: TDbClient) => { const orm = ormify(db, TableName.PamFolder); - return { ...orm }; + + const findByProjectId = async ( + { + projectId, + parentId, + search, + limit, + offset = 0, + orderBy = PamAccountOrderBy.Name, + orderDirection = OrderByDirection.ASC + }: { + projectId: string; + parentId?: string | null; + search?: string; + limit?: number; + offset?: number; + orderBy?: PamAccountOrderBy; + orderDirection?: OrderByDirection; + }, + tx?: Knex + ) => { + try { + const dbInstance = tx || db.replicaNode(); + const query = dbInstance(TableName.PamFolder).where(`${TableName.PamFolder}.projectId`, projectId); + + if (parentId) { + void query.where(`${TableName.PamFolder}.parentId`, parentId); + } else { + void query.whereNull(`${TableName.PamFolder}.parentId`); + } + + if (search) { + // escape special characters (`%`, `_`) and the escape character itself (`\`) + const escapedSearch = search.replace(/\\/g, "\\\\").replace(/%/g, "\\%").replace(/_/g, "\\_"); + void query.whereRaw(`??.?? ILIKE ? ESCAPE '\\'`, [TableName.PamFolder, "name", `%${escapedSearch}%`]); + } + + const countQuery = query.clone().count("*", { as: "count" }).first(); + + void query.select(selectAllTableCols(TableName.PamFolder)); + const direction = orderDirection === OrderByDirection.ASC ? "ASC" : "DESC"; + + void query.orderByRaw(`${TableName.PamFolder}.?? 
COLLATE "en-x-icu" ${direction}`, [orderBy]); + + if (typeof limit === "number") { + void query.limit(limit).offset(offset); + } + + const [folders, countResult] = await Promise.all([query, countQuery]); + const totalCount = Number(countResult?.count || 0); + + return { folders, totalCount }; + } catch (error) { + throw new DatabaseError({ error, name: "Find PAM folders" }); + } + }; + + const findByPath = async (projectId: string, path: string, tx?: Knex) => { + try { + const dbInstance = tx || db.replicaNode(); + const pathSegments = path.split("/").filter(Boolean); + + let parentId: string | null = null; + let currentFolder: Awaited> | undefined; + + for await (const segment of pathSegments) { + const query = dbInstance(TableName.PamFolder) + .where(`${TableName.PamFolder}.projectId`, projectId) + .where(`${TableName.PamFolder}.name`, segment); + + if (parentId) { + void query.where(`${TableName.PamFolder}.parentId`, parentId); + } else { + void query.whereNull(`${TableName.PamFolder}.parentId`); + } + + currentFolder = await query.first(); + + if (!currentFolder) { + return undefined; + } + + parentId = currentFolder.id; + } + + return currentFolder; + } catch (error) { + throw new DatabaseError({ error, name: "Find PAM folder by path" }); + } + }; + + return { ...orm, findByProjectId, findByPath }; }; diff --git a/backend/src/ee/services/pam-resource/pam-resource-dal.ts b/backend/src/ee/services/pam-resource/pam-resource-dal.ts index 1a408ca27a..9e5cbc9857 100644 --- a/backend/src/ee/services/pam-resource/pam-resource-dal.ts +++ b/backend/src/ee/services/pam-resource/pam-resource-dal.ts @@ -2,7 +2,11 @@ import { Knex } from "knex"; import { TDbClient } from "@app/db"; import { TableName } from "@app/db/schemas"; +import { DatabaseError } from "@app/lib/errors"; import { ormify, selectAllTableCols } from "@app/lib/knex"; +import { OrderByDirection } from "@app/lib/types"; + +import { PamResourceOrderBy } from "./pam-resource-enums"; export type 
TPamResourceDALFactory = ReturnType; export const pamResourceDALFactory = (db: TDbClient) => { @@ -20,5 +24,65 @@ export const pamResourceDALFactory = (db: TDbClient) => { return doc; }; - return { ...orm, findById }; + const findByProjectId = async ( + { + projectId, + search, + limit, + offset = 0, + orderBy = PamResourceOrderBy.Name, + orderDirection = OrderByDirection.ASC, + filterResourceTypes + }: { + projectId: string; + search?: string; + limit?: number; + offset?: number; + orderBy?: PamResourceOrderBy; + orderDirection?: OrderByDirection; + filterResourceTypes?: string[]; + }, + tx?: Knex + ) => { + try { + const dbInstance = tx || db.replicaNode(); + const query = dbInstance(TableName.PamResource).where(`${TableName.PamResource}.projectId`, projectId); + + if (search) { + // escape special characters (`%`, `_`) and the escape character itself (`\`) + const escapedSearch = search.replace(/\\/g, "\\\\").replace(/%/g, "\\%").replace(/_/g, "\\_"); + const pattern = `%${escapedSearch}%`; + void query.where((q) => { + void q + .whereRaw(`??.?? ILIKE ? ESCAPE '\\'`, [TableName.PamResource, "name", pattern]) + .orWhereRaw(`??.?? ILIKE ? ESCAPE '\\'`, [TableName.PamResource, "resourceType", pattern]); + }); + } + + if (filterResourceTypes && filterResourceTypes.length) { + void query.whereIn(`${TableName.PamResource}.resourceType`, filterResourceTypes); + } + + const countQuery = query.clone().count("*", { as: "count" }).first(); + + void query.select(selectAllTableCols(TableName.PamResource)); + + const direction = orderDirection === OrderByDirection.ASC ? "ASC" : "DESC"; + + void query.orderByRaw(`${TableName.PamResource}.?? 
COLLATE "en-x-icu" ${direction}`, [orderBy]); + + if (typeof limit === "number") { + void query.limit(limit).offset(offset); + } + + const [resources, countResult] = await Promise.all([query, countQuery]); + const totalCount = Number(countResult?.count || 0); + + return { resources, totalCount }; + } catch (error) { + throw new DatabaseError({ error, name: "Find PAM resources" }); + } + }; + + return { ...orm, findById, findByProjectId }; }; diff --git a/backend/src/ee/services/pam-resource/pam-resource-enums.ts b/backend/src/ee/services/pam-resource/pam-resource-enums.ts index dff1cc650c..e4ec043e14 100644 --- a/backend/src/ee/services/pam-resource/pam-resource-enums.ts +++ b/backend/src/ee/services/pam-resource/pam-resource-enums.ts @@ -1,4 +1,9 @@ export enum PamResource { Postgres = "postgres", - MySQL = "mysql" + MySQL = "mysql", + SSH = "ssh" +} + +export enum PamResourceOrderBy { + Name = "name" } diff --git a/backend/src/ee/services/pam-resource/pam-resource-factory.ts b/backend/src/ee/services/pam-resource/pam-resource-factory.ts index 151fa7ea13..e2d0a50f81 100644 --- a/backend/src/ee/services/pam-resource/pam-resource-factory.ts +++ b/backend/src/ee/services/pam-resource/pam-resource-factory.ts @@ -1,10 +1,12 @@ import { PamResource } from "./pam-resource-enums"; import { TPamAccountCredentials, TPamResourceConnectionDetails, TPamResourceFactory } from "./pam-resource-types"; import { sqlResourceFactory } from "./shared/sql/sql-resource-factory"; +import { sshResourceFactory } from "./ssh/ssh-resource-factory"; type TPamResourceFactoryImplementation = TPamResourceFactory; export const PAM_RESOURCE_FACTORY_MAP: Record = { [PamResource.Postgres]: sqlResourceFactory as TPamResourceFactoryImplementation, - [PamResource.MySQL]: sqlResourceFactory as TPamResourceFactoryImplementation + [PamResource.MySQL]: sqlResourceFactory as TPamResourceFactoryImplementation, + [PamResource.SSH]: sshResourceFactory as TPamResourceFactoryImplementation }; diff --git 
a/backend/src/ee/services/pam-resource/pam-resource-service.ts b/backend/src/ee/services/pam-resource/pam-resource-service.ts index d97905dbe1..0ebca02b57 100644 --- a/backend/src/ee/services/pam-resource/pam-resource-service.ts +++ b/backend/src/ee/services/pam-resource/pam-resource-service.ts @@ -20,7 +20,7 @@ import { encryptResourceConnectionDetails, listResourceOptions } from "./pam-resource-fns"; -import { TCreateResourceDTO, TUpdateResourceDTO } from "./pam-resource-types"; +import { TCreateResourceDTO, TListResourcesDTO, TUpdateResourceDTO } from "./pam-resource-types"; type TPamResourceServiceFactoryDep = { pamResourceDAL: TPamResourceDALFactory; @@ -192,19 +192,18 @@ export const pamResourceServiceFactory = ({ gatewayV2Service ); - // Logic to prevent overwriting unedited censored values - const finalCredentials = { ...rotationAccountCredentials }; - if ( - resource.encryptedRotationAccountCredentials && - rotationAccountCredentials.password === "__INFISICAL_UNCHANGED__" - ) { + let finalCredentials = { ...rotationAccountCredentials }; + if (resource.encryptedRotationAccountCredentials) { const decryptedCredentials = await decryptAccountCredentials({ encryptedCredentials: resource.encryptedRotationAccountCredentials, projectId: resource.projectId, kmsService }); - finalCredentials.password = decryptedCredentials.password; + finalCredentials = await factory.handleOverwritePreventionForCensoredValues( + rotationAccountCredentials, + decryptedCredentials + ); } try { @@ -268,22 +267,23 @@ export const pamResourceServiceFactory = ({ } }; - const list = async (projectId: string, actor: OrgServiceActor) => { + const list = async ({ projectId, actor, actorId, actorAuthMethod, actorOrgId, ...params }: TListResourcesDTO) => { const { permission } = await permissionService.getProjectPermission({ - actor: actor.type, - actorAuthMethod: actor.authMethod, - actorId: actor.id, - actorOrgId: actor.orgId, + actor, + actorId, + actorAuthMethod, + actorOrgId, projectId, 
actionProjectType: ActionProjectType.PAM }); ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.PamResources); - const resources = await pamResourceDAL.find({ projectId }); + const { resources, totalCount } = await pamResourceDAL.findByProjectId({ projectId, ...params }); return { - resources: await Promise.all(resources.map((resource) => decryptResource(resource, projectId, kmsService))) + resources: await Promise.all(resources.map((resource) => decryptResource(resource, projectId, kmsService))), + totalCount }; }; diff --git a/backend/src/ee/services/pam-resource/pam-resource-types.ts b/backend/src/ee/services/pam-resource/pam-resource-types.ts index 1ca9db3e29..9da0948018 100644 --- a/backend/src/ee/services/pam-resource/pam-resource-types.ts +++ b/backend/src/ee/services/pam-resource/pam-resource-types.ts @@ -1,3 +1,5 @@ +import { OrderByDirection, TProjectPermission } from "@app/lib/types"; + import { TGatewayV2ServiceFactory } from "../gateway-v2/gateway-v2-service"; import { TMySQLAccount, @@ -5,22 +7,31 @@ import { TMySQLResource, TMySQLResourceConnectionDetails } from "./mysql/mysql-resource-types"; -import { PamResource } from "./pam-resource-enums"; +import { PamResource, PamResourceOrderBy } from "./pam-resource-enums"; import { TPostgresAccount, TPostgresAccountCredentials, TPostgresResource, TPostgresResourceConnectionDetails } from "./postgres/postgres-resource-types"; +import { + TSSHAccount, + TSSHAccountCredentials, + TSSHResource, + TSSHResourceConnectionDetails +} from "./ssh/ssh-resource-types"; // Resource types -export type TPamResource = TPostgresResource | TMySQLResource; -export type TPamResourceConnectionDetails = TPostgresResourceConnectionDetails | TMySQLResourceConnectionDetails; +export type TPamResource = TPostgresResource | TMySQLResource | TSSHResource; +export type TPamResourceConnectionDetails = + | TPostgresResourceConnectionDetails + | TMySQLResourceConnectionDetails + | 
TSSHResourceConnectionDetails; // Account types -export type TPamAccount = TPostgresAccount | TMySQLAccount; +export type TPamAccount = TPostgresAccount | TMySQLAccount | TSSHAccount; // eslint-disable-next-line @typescript-eslint/no-duplicate-type-constituents -export type TPamAccountCredentials = TPostgresAccountCredentials | TMySQLAccountCredentials; +export type TPamAccountCredentials = TPostgresAccountCredentials | TMySQLAccountCredentials | TSSHAccountCredentials; // Resource DTOs export type TCreateResourceDTO = Pick< @@ -32,6 +43,15 @@ export type TUpdateResourceDTO = Partial = () => Promise; export type TPamResourceFactoryValidateAccountCredentials = ( @@ -51,4 +71,5 @@ export type TPamResourceFactory; validateAccountCredentials: TPamResourceFactoryValidateAccountCredentials; rotateAccountCredentials: TPamResourceFactoryRotateAccountCredentials; + handleOverwritePreventionForCensoredValues: (updatedAccountCredentials: C, currentCredentials: C) => Promise; }; diff --git a/backend/src/ee/services/pam-resource/shared/sql/sql-resource-factory.ts b/backend/src/ee/services/pam-resource/shared/sql/sql-resource-factory.ts index 7dd7948ef6..b3128c4228 100644 --- a/backend/src/ee/services/pam-resource/shared/sql/sql-resource-factory.ts +++ b/backend/src/ee/services/pam-resource/shared/sql/sql-resource-factory.ts @@ -337,9 +337,24 @@ export const sqlResourceFactory: TPamResourceFactory { + if (updatedAccountCredentials.password === "__INFISICAL_UNCHANGED__") { + return { + ...updatedAccountCredentials, + password: currentCredentials.password + }; + } + + return updatedAccountCredentials; + }; + return { validateConnection, validateAccountCredentials, - rotateAccountCredentials + rotateAccountCredentials, + handleOverwritePreventionForCensoredValues }; }; diff --git a/backend/src/ee/services/pam-resource/ssh/ssh-resource-enums.ts b/backend/src/ee/services/pam-resource/ssh/ssh-resource-enums.ts new file mode 100644 index 0000000000..9b6ed1f157 --- /dev/null +++ 
b/backend/src/ee/services/pam-resource/ssh/ssh-resource-enums.ts @@ -0,0 +1,5 @@ +export enum SSHAuthMethod { + Password = "password", + PublicKey = "public-key", + Certificate = "certificate" +} diff --git a/backend/src/ee/services/pam-resource/ssh/ssh-resource-factory.ts b/backend/src/ee/services/pam-resource/ssh/ssh-resource-factory.ts new file mode 100644 index 0000000000..b90aa00c6c --- /dev/null +++ b/backend/src/ee/services/pam-resource/ssh/ssh-resource-factory.ts @@ -0,0 +1,265 @@ +import { Client } from "ssh2"; + +import { BadRequestError } from "@app/lib/errors"; +import { GatewayProxyProtocol } from "@app/lib/gateway"; +import { withGatewayV2Proxy } from "@app/lib/gateway-v2/gateway-v2"; +import { logger } from "@app/lib/logger"; + +import { verifyHostInputValidity } from "../../dynamic-secret/dynamic-secret-fns"; +import { TGatewayV2ServiceFactory } from "../../gateway-v2/gateway-v2-service"; +import { PamResource } from "../pam-resource-enums"; +import { + TPamResourceFactory, + TPamResourceFactoryRotateAccountCredentials, + TPamResourceFactoryValidateAccountCredentials +} from "../pam-resource-types"; +import { SSHAuthMethod } from "./ssh-resource-enums"; +import { TSSHAccountCredentials, TSSHResourceConnectionDetails } from "./ssh-resource-types"; + +const EXTERNAL_REQUEST_TIMEOUT = 10 * 1000; + +export const executeWithGateway = async ( + config: { + connectionDetails: TSSHResourceConnectionDetails; + resourceType: PamResource; + gatewayId: string; + }, + gatewayV2Service: Pick, + operation: (proxyPort: number) => Promise +): Promise => { + const { connectionDetails, gatewayId } = config; + const [targetHost] = await verifyHostInputValidity(connectionDetails.host, true); + const platformConnectionDetails = await gatewayV2Service.getPlatformConnectionDetailsByGatewayId({ + gatewayId, + targetHost, + targetPort: connectionDetails.port + }); + + if (!platformConnectionDetails) { + throw new BadRequestError({ message: "Unable to connect to gateway, no 
platform connection details found" }); + } + + return withGatewayV2Proxy( + async (proxyPort) => { + return operation(proxyPort); + }, + { + protocol: GatewayProxyProtocol.Tcp, + relayHost: platformConnectionDetails.relayHost, + gateway: platformConnectionDetails.gateway, + relay: platformConnectionDetails.relay + } + ); +}; + +export const sshResourceFactory: TPamResourceFactory = ( + resourceType, + connectionDetails, + gatewayId, + gatewayV2Service +) => { + const validateConnection = async () => { + try { + await executeWithGateway({ connectionDetails, gatewayId, resourceType }, gatewayV2Service, async (proxyPort) => { + return new Promise((resolve, reject) => { + const client = new Client(); + let handshakeComplete = false; + + client.on("error", (err) => { + logger.info( + { error: err.message, handshakeComplete }, + "[SSH Resource Factory] SSH client error event received" + ); + // If we got an authentication error, it means we successfully reached the SSH server + // and completed the SSH handshake - that's good enough for connection validation + if (handshakeComplete || err.message.includes("authentication") || err.message.includes("publickey")) { + logger.info( + { handshakeComplete, errorMessage: err.message }, + "[SSH Resource Factory] SSH connection validation succeeded (auth error after handshake)" + ); + client.end(); + resolve(); + } else { + logger.error( + { error: err.message, handshakeComplete }, + "[SSH Resource Factory] SSH connection validation failed" + ); + reject(err); + } + }); + + client.on("handshake", () => { + // SSH handshake completed - the server is reachable and responding + logger.info("[SSH Resource Factory] SSH handshake event received - setting handshakeComplete to true"); + handshakeComplete = true; + client.end(); + resolve(); + }); + + client.on("timeout", () => { + logger.error("[SSH Resource Factory] SSH connection timeout"); + reject(new Error("Connection timeout")); + }); + + // Attempt connection with a dummy username 
(we don't care about auth success) + // The goal is just to verify SSH server is reachable and responding + client.connect({ + host: "localhost", + port: proxyPort, + username: "infisical-connection-test", + password: "infisical-connection-test-password", + readyTimeout: EXTERNAL_REQUEST_TIMEOUT, + tryKeyboard: false, + // We want to fail fast on auth, we're just testing reachability + authHandler: () => { + // If authHandler is called, SSH handshake succeeded + handshakeComplete = true; + return false; // Don't continue with auth + } + }); + }); + }); + return connectionDetails; + } catch (error) { + throw new BadRequestError({ + message: `Unable to validate connection to ${resourceType}: ${(error as Error).message || String(error)}` + }); + } + }; + + const validateAccountCredentials: TPamResourceFactoryValidateAccountCredentials = async ( + credentials + ) => { + try { + await executeWithGateway({ connectionDetails, gatewayId, resourceType }, gatewayV2Service, async (proxyPort) => { + return new Promise((resolve, reject) => { + const client = new Client(); + + client.on("ready", () => { + logger.info( + { username: credentials.username, authMethod: credentials.authMethod }, + "[SSH Resource Factory] SSH authentication successful" + ); + client.end(); + resolve(); + }); + + client.on("error", (err) => { + logger.error( + { error: err.message, username: credentials.username, authMethod: credentials.authMethod }, + "[SSH Resource Factory] SSH authentication failed" + ); + reject(err); + }); + + client.on("timeout", () => { + logger.error( + { username: credentials.username, authMethod: credentials.authMethod }, + "[SSH Resource Factory] SSH authentication timeout" + ); + reject(new Error("Connection timeout")); + }); + + // Build connection config based on auth method + const baseConfig = { + host: "localhost", + port: proxyPort, + username: credentials.username, + readyTimeout: EXTERNAL_REQUEST_TIMEOUT + }; + + switch (credentials.authMethod) { + case 
SSHAuthMethod.Password: + client.connect({ + ...baseConfig, + password: credentials.password, + tryKeyboard: false + }); + break; + case SSHAuthMethod.PublicKey: + client.connect({ + ...baseConfig, + privateKey: credentials.privateKey, + tryKeyboard: false + }); + break; + default: + reject(new Error(`Unsupported SSH auth method: ${(credentials as TSSHAccountCredentials).authMethod}`)); + } + }); + }); + return credentials; + } catch (error) { + if (error instanceof Error) { + // Check for common authentication failure messages + if ( + error.message.includes("authentication") || + error.message.includes("All configured authentication methods failed") || + error.message.includes("publickey") + ) { + throw new BadRequestError({ + message: "Account credentials invalid." + }); + } + + if (error.message === "Connection timeout") { + throw new BadRequestError({ + message: "Connection timeout. Verify that the SSH server is reachable" + }); + } + } + + throw new BadRequestError({ + message: `Unable to validate account credentials for ${resourceType}: ${(error as Error).message || String(error)}` + }); + } + }; + + const rotateAccountCredentials: TPamResourceFactoryRotateAccountCredentials = async ( + rotationAccountCredentials + ) => { + return rotationAccountCredentials; + }; + + const handleOverwritePreventionForCensoredValues = async ( + updatedAccountCredentials: TSSHAccountCredentials, + currentCredentials: TSSHAccountCredentials + ) => { + if (updatedAccountCredentials.authMethod !== currentCredentials.authMethod) { + return updatedAccountCredentials; + } + + if ( + updatedAccountCredentials.authMethod === SSHAuthMethod.Password && + currentCredentials.authMethod === SSHAuthMethod.Password + ) { + if (updatedAccountCredentials.password === "__INFISICAL_UNCHANGED__") { + return { + ...updatedAccountCredentials, + password: currentCredentials.password + }; + } + } + + if ( + updatedAccountCredentials.authMethod === SSHAuthMethod.PublicKey && + 
currentCredentials.authMethod === SSHAuthMethod.PublicKey + ) { + if (updatedAccountCredentials.privateKey === "__INFISICAL_UNCHANGED__") { + return { + ...updatedAccountCredentials, + privateKey: currentCredentials.privateKey + }; + } + } + + return updatedAccountCredentials; + }; + + return { + validateConnection, + validateAccountCredentials, + rotateAccountCredentials, + handleOverwritePreventionForCensoredValues + }; +}; diff --git a/backend/src/ee/services/pam-resource/ssh/ssh-resource-schemas.ts b/backend/src/ee/services/pam-resource/ssh/ssh-resource-schemas.ts new file mode 100644 index 0000000000..97d462369a --- /dev/null +++ b/backend/src/ee/services/pam-resource/ssh/ssh-resource-schemas.ts @@ -0,0 +1,117 @@ +import { z } from "zod"; + +import { PamResource } from "../pam-resource-enums"; +import { + BaseCreatePamAccountSchema, + BaseCreatePamResourceSchema, + BasePamAccountSchema, + BasePamAccountSchemaWithResource, + BasePamResourceSchema, + BaseUpdatePamAccountSchema, + BaseUpdatePamResourceSchema +} from "../pam-resource-schemas"; +import { SSHAuthMethod } from "./ssh-resource-enums"; + +export const BaseSSHResourceSchema = BasePamResourceSchema.extend({ resourceType: z.literal(PamResource.SSH) }); + +export const SSHResourceListItemSchema = z.object({ + name: z.literal("SSH"), + resource: z.literal(PamResource.SSH) +}); + +export const SSHResourceConnectionDetailsSchema = z.object({ + host: z.string().trim().max(255), + port: z.number() +}); + +export const SSHPasswordCredentialsSchema = z.object({ + authMethod: z.literal(SSHAuthMethod.Password), + username: z.string().trim().max(255), + password: z.string().trim().max(255) +}); + +export const SSHPublicKeyCredentialsSchema = z.object({ + authMethod: z.literal(SSHAuthMethod.PublicKey), + username: z.string().trim().max(255), + privateKey: z.string().trim().max(5000) +}); + +export const SSHCertificateCredentialsSchema = z.object({ + authMethod: z.literal(SSHAuthMethod.Certificate), + username: 
z.string().trim().max(255) +}); + +export const SSHAccountCredentialsSchema = z.discriminatedUnion("authMethod", [ + SSHPasswordCredentialsSchema, + SSHPublicKeyCredentialsSchema, + SSHCertificateCredentialsSchema +]); + +export const SSHResourceSchema = BaseSSHResourceSchema.extend({ + connectionDetails: SSHResourceConnectionDetailsSchema, + rotationAccountCredentials: SSHAccountCredentialsSchema.nullable().optional() +}); + +export const SanitizedSSHResourceSchema = BaseSSHResourceSchema.extend({ + connectionDetails: SSHResourceConnectionDetailsSchema, + rotationAccountCredentials: z + .discriminatedUnion("authMethod", [ + z.object({ + authMethod: z.literal(SSHAuthMethod.Password), + username: z.string() + }), + z.object({ + authMethod: z.literal(SSHAuthMethod.PublicKey), + username: z.string() + }), + z.object({ + authMethod: z.literal(SSHAuthMethod.Certificate), + username: z.string() + }) + ]) + .nullable() + .optional() +}); + +export const CreateSSHResourceSchema = BaseCreatePamResourceSchema.extend({ + connectionDetails: SSHResourceConnectionDetailsSchema, + rotationAccountCredentials: SSHAccountCredentialsSchema.nullable().optional() +}); + +export const UpdateSSHResourceSchema = BaseUpdatePamResourceSchema.extend({ + connectionDetails: SSHResourceConnectionDetailsSchema.optional(), + rotationAccountCredentials: SSHAccountCredentialsSchema.nullable().optional() +}); + +// Accounts +export const SSHAccountSchema = BasePamAccountSchema.extend({ + credentials: SSHAccountCredentialsSchema +}); + +export const CreateSSHAccountSchema = BaseCreatePamAccountSchema.extend({ + credentials: SSHAccountCredentialsSchema +}); + +export const UpdateSSHAccountSchema = BaseUpdatePamAccountSchema.extend({ + credentials: SSHAccountCredentialsSchema.optional() +}); + +export const SanitizedSSHAccountWithResourceSchema = BasePamAccountSchemaWithResource.extend({ + credentials: z.discriminatedUnion("authMethod", [ + z.object({ + authMethod: z.literal(SSHAuthMethod.Password), + 
username: z.string() + }), + z.object({ + authMethod: z.literal(SSHAuthMethod.PublicKey), + username: z.string() + }), + z.object({ + authMethod: z.literal(SSHAuthMethod.Certificate), + username: z.string() + }) + ]) +}); + +// Sessions +export const SSHSessionCredentialsSchema = SSHResourceConnectionDetailsSchema.and(SSHAccountCredentialsSchema); diff --git a/backend/src/ee/services/pam-resource/ssh/ssh-resource-types.ts b/backend/src/ee/services/pam-resource/ssh/ssh-resource-types.ts new file mode 100644 index 0000000000..920dc42746 --- /dev/null +++ b/backend/src/ee/services/pam-resource/ssh/ssh-resource-types.ts @@ -0,0 +1,16 @@ +import { z } from "zod"; + +import { + SSHAccountCredentialsSchema, + SSHAccountSchema, + SSHResourceConnectionDetailsSchema, + SSHResourceSchema +} from "./ssh-resource-schemas"; + +// Resources +export type TSSHResource = z.infer; +export type TSSHResourceConnectionDetails = z.infer; + +// Accounts +export type TSSHAccount = z.infer; +export type TSSHAccountCredentials = z.infer; diff --git a/backend/src/ee/services/pam-session/pam-session-fns.ts b/backend/src/ee/services/pam-session/pam-session-fns.ts index 4afe205b5a..38e9f6239b 100644 --- a/backend/src/ee/services/pam-session/pam-session-fns.ts +++ b/backend/src/ee/services/pam-session/pam-session-fns.ts @@ -2,7 +2,7 @@ import { TPamSessions } from "@app/db/schemas"; import { TKmsServiceFactory } from "@app/services/kms/kms-service"; import { KmsDataKey } from "@app/services/kms/kms-types"; -import { TPamSanitizedSession, TPamSessionCommandLog } from "./pam-session.types"; +import { TPamSanitizedSession, TPamSessionCommandLog, TTerminalEvent } from "./pam-session-types"; export const decryptSessionCommandLogs = async ({ projectId, @@ -22,7 +22,7 @@ export const decryptSessionCommandLogs = async ({ cipherTextBlob: encryptedLogs }); - return JSON.parse(decryptedPlainTextBlob.toString()) as TPamSessionCommandLog; + return JSON.parse(decryptedPlainTextBlob.toString()) as 
(TPamSessionCommandLog | TTerminalEvent)[]; }; export const decryptSession = async ( @@ -32,7 +32,7 @@ export const decryptSession = async ( ) => { return { ...session, - commandLogs: session.encryptedLogsBlob + logs: session.encryptedLogsBlob ? await decryptSessionCommandLogs({ projectId, encryptedLogs: session.encryptedLogsBlob, diff --git a/backend/src/ee/services/pam-session/pam-session-schemas.ts b/backend/src/ee/services/pam-session/pam-session-schemas.ts index 2bc1d53452..db24931966 100644 --- a/backend/src/ee/services/pam-session/pam-session-schemas.ts +++ b/backend/src/ee/services/pam-session/pam-session-schemas.ts @@ -8,8 +8,18 @@ export const PamSessionCommandLogSchema = z.object({ timestamp: z.coerce.date() }); +// SSH Terminal Event schemas +export const TerminalEventTypeSchema = z.enum(["input", "output", "resize", "error"]); + +export const TerminalEventSchema = z.object({ + timestamp: z.coerce.date(), + eventType: TerminalEventTypeSchema, + data: z.string(), // Base64 encoded binary data + elapsedTime: z.number() // Seconds since session start (for replay) +}); + export const SanitizedSessionSchema = PamSessionsSchema.omit({ encryptedLogsBlob: true }).extend({ - commandLogs: PamSessionCommandLogSchema.array() + logs: z.array(z.union([PamSessionCommandLogSchema, TerminalEventSchema])) }); diff --git a/backend/src/ee/services/pam-session/pam-session-service.ts b/backend/src/ee/services/pam-session/pam-session-service.ts index 26ff7daa6b..18c185cacf 100644 --- a/backend/src/ee/services/pam-session/pam-session-service.ts +++ b/backend/src/ee/services/pam-session/pam-session-service.ts @@ -12,10 +12,10 @@ import { TProjectDALFactory } from "@app/services/project/project-dal"; import { TLicenseServiceFactory } from "../license/license-service"; import { OrgPermissionGatewayActions, OrgPermissionSubjects } from "../permission/org-permission"; import { ProjectPermissionPamSessionActions, ProjectPermissionSub } from "../permission/project-permission"; 
-import { TUpdateSessionLogsDTO } from "./pam-session.types"; import { TPamSessionDALFactory } from "./pam-session-dal"; import { PamSessionStatus } from "./pam-session-enums"; import { decryptSession } from "./pam-session-fns"; +import { TUpdateSessionLogsDTO } from "./pam-session-types"; type TPamSessionServiceFactoryDep = { pamSessionDAL: TPamSessionDALFactory; diff --git a/backend/src/ee/services/pam-session/pam-session.types.ts b/backend/src/ee/services/pam-session/pam-session-types.ts similarity index 52% rename from backend/src/ee/services/pam-session/pam-session.types.ts rename to backend/src/ee/services/pam-session/pam-session-types.ts index 0c87a9fa4d..893f930e51 100644 --- a/backend/src/ee/services/pam-session/pam-session.types.ts +++ b/backend/src/ee/services/pam-session/pam-session-types.ts @@ -1,12 +1,13 @@ import { z } from "zod"; -import { PamSessionCommandLogSchema, SanitizedSessionSchema } from "./pam-session-schemas"; +import { PamSessionCommandLogSchema, SanitizedSessionSchema, TerminalEventSchema } from "./pam-session-schemas"; export type TPamSessionCommandLog = z.infer; +export type TTerminalEvent = z.infer; export type TPamSanitizedSession = z.infer; // DTOs export type TUpdateSessionLogsDTO = { sessionId: string; - logs: TPamSessionCommandLog[]; + logs: (TPamSessionCommandLog | TTerminalEvent)[]; }; diff --git a/backend/src/ee/services/pki-acme/pki-acme-challenge-service.ts b/backend/src/ee/services/pki-acme/pki-acme-challenge-service.ts index 9148b03366..7a3747fedf 100644 --- a/backend/src/ee/services/pki-acme/pki-acme-challenge-service.ts +++ b/backend/src/ee/services/pki-acme/pki-acme-challenge-service.ts @@ -74,7 +74,12 @@ export const pkiAcmeChallengeServiceFactory = ({ // Notice: well, we are in a transaction, ideally we should not hold transaction and perform // a long running operation for long time. But assuming we are not performing a tons of // challenge validation at the same time, it should be fine. 
- const challengeResponse = await fetch(challengeUrl, { signal: AbortSignal.timeout(timeoutMs) }); + const challengeResponse = await fetch(challengeUrl, { + // In case if we override the host in the development mode, still provide the original host in the header + // to help the upstream server to validate the request + headers: { Host: host }, + signal: AbortSignal.timeout(timeoutMs) + }); if (challengeResponse.status !== 200) { throw new AcmeIncorrectResponseError({ message: `ACME challenge response is not 200: ${challengeResponse.status}` diff --git a/backend/src/ee/services/pki-acme/pki-acme-schemas.ts b/backend/src/ee/services/pki-acme/pki-acme-schemas.ts index 58ca7e8333..23b86d172c 100644 --- a/backend/src/ee/services/pki-acme/pki-acme-schemas.ts +++ b/backend/src/ee/services/pki-acme/pki-acme-schemas.ts @@ -58,7 +58,15 @@ export const GetAcmeDirectoryResponseSchema = z.object({ newNonce: z.string(), newAccount: z.string(), newOrder: z.string(), - revokeCert: z.string().optional() + revokeCert: z.string().optional(), + meta: z + .object({ + termsOfService: z.string().optional(), + website: z.string().optional(), + caaIdentities: z.array(z.string()).optional(), + externalAccountRequired: z.boolean().optional() + }) + .optional() }); // New Account payload schema diff --git a/backend/src/ee/services/pki-acme/pki-acme-service.ts b/backend/src/ee/services/pki-acme/pki-acme-service.ts index 43da08b1cd..4f560ade76 100644 --- a/backend/src/ee/services/pki-acme/pki-acme-service.ts +++ b/backend/src/ee/services/pki-acme/pki-acme-service.ts @@ -206,6 +206,9 @@ export const pkiAcmeServiceFactory = ({ const { protectedHeader: rawProtectedHeader, payload: rawPayload } = result; try { const protectedHeader = ProtectedHeaderSchema.parse(rawProtectedHeader); + if (protectedHeader.jwk && protectedHeader.kid) { + throw new AcmeMalformedError({ message: "Both JWK and KID are provided in the protected header" }); + } const parsedUrl = (() => { try { return new 
URL(protectedHeader.url); @@ -288,6 +291,7 @@ export const pkiAcmeServiceFactory = ({ url, rawJwsPayload, getJWK: async (protectedHeader) => { + // get jwk instead of kid if (!protectedHeader.kid) { throw new AcmeMalformedError({ message: "KID is required in the protected header" }); } @@ -353,7 +357,10 @@ export const pkiAcmeServiceFactory = ({ return { newNonce: buildUrl(profile.id, "/new-nonce"), newAccount: buildUrl(profile.id, "/new-account"), - newOrder: buildUrl(profile.id, "/new-order") + newOrder: buildUrl(profile.id, "/new-order"), + meta: { + externalAccountRequired: true + } }; }; @@ -386,11 +393,61 @@ export const pkiAcmeServiceFactory = ({ payload: TCreateAcmeAccountPayload; }): Promise> => { const profile = await validateAcmeProfile(profileId); + const publicKeyThumbprint = await calculateJwkThumbprint(jwk, "sha256"); + + const existingAccount: TPkiAcmeAccounts | null = await acmeAccountDAL.findByProfileIdAndPublicKeyThumbprintAndAlg( + profileId, + alg, + publicKeyThumbprint + ); + if (onlyReturnExisting) { + if (!existingAccount) { + throw new AcmeAccountDoesNotExistError({ message: "ACME account not found" }); + } + return { + status: 200, + body: { + status: "valid", + contact: existingAccount.emails, + orders: buildUrl(profile.id, `/accounts/${existingAccount.id}/orders`) + }, + headers: { + Location: buildUrl(profile.id, `/accounts/${existingAccount.id}`), + Link: `<${buildUrl(profile.id, "/directory")}>;rel="index"` + } + }; + } + + // Note: We only check EAB for the new account request. This is a very special case for cert-manager. + // There's a bug in their ACME client implementation, they don't take the account KID value they have + // and relying on a '{"onlyReturnExisting": true}' new-account request to find out their KID value. + // But the problem is, that new-account request doesn't come with EAB. And while the get existing account operation + // fails, they just discard the error and proceed to request a new order. 
Since no KID provided, their ACME + // client will send JWK instead. As a result, we are seeing KID not provide in header error for the new-order + // endpoint. + // + // To solve the problem, we lose the check for EAB a bit for the onlyReturnExisting new account request. + // It should be fine as we've already checked EAB when they created the account. + // And the private key ownership indicating they are the same user. + // ref: https://github.com/cert-manager/cert-manager/issues/7388#issuecomment-3535630925 if (!externalAccountBinding) { throw new AcmeExternalAccountRequiredError({ message: "External account binding is required" }); } + if (existingAccount) { + return { + status: 200, + body: { + status: "valid", + contact: existingAccount.emails, + orders: buildUrl(profile.id, `/accounts/${existingAccount.id}/orders`) + }, + headers: { + Location: buildUrl(profile.id, `/accounts/${existingAccount.id}`), + Link: `<${buildUrl(profile.id, "/directory")}>;rel="index"` + } + }; + } - const publicKeyThumbprint = await calculateJwkThumbprint(jwk, "sha256"); const certificateManagerKmsId = await getProjectKmsCertificateKeyId({ projectId: profile.projectId, projectDAL, @@ -441,30 +498,7 @@ export const pkiAcmeServiceFactory = ({ }); } - const existingAccount: TPkiAcmeAccounts | null = await acmeAccountDAL.findByProfileIdAndPublicKeyThumbprintAndAlg( - profileId, - alg, - publicKeyThumbprint - ); - if (onlyReturnExisting && !existingAccount) { - throw new AcmeAccountDoesNotExistError({ message: "ACME account not found" }); - } - if (existingAccount) { - // With the same public key, we found an existing account, just return it - return { - status: 200, - body: { - status: "valid", - contact: existingAccount.emails, - orders: buildUrl(profile.id, `/accounts/${existingAccount.id}/orders`) - }, - headers: { - Location: buildUrl(profile.id, `/accounts/${existingAccount.id}`), - Link: `<${buildUrl(profile.id, "/directory")}>;rel="index"` - } - }; - } - + // TODO: handle 
unique constraint violation error, should be very very rare const newAccount = await acmeAccountDAL.create({ profileId: profile.id, alg, diff --git a/backend/src/ee/services/secret-approval-request/secret-approval-request-dal.ts b/backend/src/ee/services/secret-approval-request/secret-approval-request-dal.ts index 2610d9324d..9f9b0604c8 100644 --- a/backend/src/ee/services/secret-approval-request/secret-approval-request-dal.ts +++ b/backend/src/ee/services/secret-approval-request/secret-approval-request-dal.ts @@ -354,16 +354,21 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => { (tx || db.replicaNode())(TableName.SecretApprovalRequest) .join(TableName.SecretFolder, `${TableName.SecretApprovalRequest}.folderId`, `${TableName.SecretFolder}.id`) .join(TableName.Environment, `${TableName.SecretFolder}.envId`, `${TableName.Environment}.id`) - .join( - TableName.SecretApprovalPolicyApprover, - `${TableName.SecretApprovalRequest}.policyId`, - `${TableName.SecretApprovalPolicyApprover}.policyId` - ) .join( TableName.SecretApprovalPolicy, `${TableName.SecretApprovalRequest}.policyId`, `${TableName.SecretApprovalPolicy}.id` ) + .leftJoin( + TableName.SecretApprovalPolicyApprover, + `${TableName.SecretApprovalPolicy}.id`, + `${TableName.SecretApprovalPolicyApprover}.policyId` + ) + .leftJoin( + TableName.UserGroupMembership, + `${TableName.SecretApprovalPolicyApprover}.approverGroupId`, + `${TableName.UserGroupMembership}.groupId` + ) .where({ projectId }) .where((qb) => { if (policyId) void qb.where(`${TableName.SecretApprovalPolicy}.id`, policyId); @@ -373,10 +378,10 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => { void bd .where(`${TableName.SecretApprovalPolicyApprover}.approverUserId`, userId) .orWhere(`${TableName.SecretApprovalRequest}.committerUserId`, userId) + .orWhere(`${TableName.UserGroupMembership}.userId`, userId) ) .select("status", `${TableName.SecretApprovalRequest}.id`) 
.groupBy(`${TableName.SecretApprovalRequest}.id`, "status") - .count("status") ) .select("status") .from("temp") @@ -499,7 +504,6 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => { const query = (tx || db.replicaNode()) .select("*") - .select(db.raw("count(*) OVER() as total_count")) .from(innerQuery) .orderBy("createdAt", "desc") as typeof innerQuery; @@ -519,6 +523,14 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => { }); } + // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment + const countResult = await (tx || db.replicaNode()) + .count({ count: "*" }) + .from(query.clone().as("count_query")) + .first(); + // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access + const totalCount = Number(countResult?.count || 0); + const docs = await (tx || db) .with("w", query) .select("*") @@ -526,9 +538,6 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => { .where("w.rank", ">=", offset) .andWhere("w.rank", "<", offset + limit); - // @ts-expect-error knex does not infer - const totalCount = Number(docs[0]?.total_count || 0); - const formattedDoc = sqlNestRelationships({ data: docs, key: "id", diff --git a/backend/src/lib/api-docs/constants.ts b/backend/src/lib/api-docs/constants.ts index 15ec747ef2..8bd2827fcb 100644 --- a/backend/src/lib/api-docs/constants.ts +++ b/backend/src/lib/api-docs/constants.ts @@ -584,6 +584,10 @@ export const TOKEN_AUTH = { offset: "The offset to start from. If you enter 10, it will start from the 10th token.", limit: "The number of tokens to return." }, + GET_TOKEN: { + identityId: "The ID of the machine identity to get the token for.", + tokenId: "The ID of the token to get metadata for." + }, CREATE_TOKEN: { identityId: "The ID of the machine identity to create the token for.", name: "The name of the token to create." 
diff --git a/backend/src/lib/config/env.ts b/backend/src/lib/config/env.ts index 96107306f7..11de576670 100644 --- a/backend/src/lib/config/env.ts +++ b/backend/src/lib/config/env.ts @@ -400,7 +400,7 @@ const envSchema = z isAcmeDevelopmentMode: data.NODE_ENV === "development" && data.ACME_DEVELOPMENT_MODE, isProductionMode: data.NODE_ENV === "production" || IS_PACKAGED, isRedisSentinelMode: Boolean(data.REDIS_SENTINEL_HOSTS), - isBddNockApiEnabled: data.NODE_ENV === "development" && data.BDD_NOCK_API_ENABLED, + isBddNockApiEnabled: data.NODE_ENV !== "production" && data.BDD_NOCK_API_ENABLED, REDIS_SENTINEL_HOSTS: data.REDIS_SENTINEL_HOSTS?.trim() ?.split(",") .map((el) => { diff --git a/backend/src/server/routes/bdd/bdd-nock-router.dev.ts b/backend/src/server/routes/bdd/bdd-nock-router.dev.ts new file mode 100644 index 0000000000..c5f6001f55 --- /dev/null +++ b/backend/src/server/routes/bdd/bdd-nock-router.dev.ts @@ -0,0 +1,104 @@ +import type { Definition } from "nock"; +import { z } from "zod"; + +import { getConfig } from "@app/lib/config/env"; +import { ForbiddenRequestError } from "@app/lib/errors"; +import { logger } from "@app/lib/logger"; +import { verifyAuth } from "@app/server/plugins/auth/verify-auth"; +import { AuthMode } from "@app/services/auth/auth-type"; + +// When running in production, we don't want to even import nock, because it's not needed and it increases memory usage a lots. +// It once caused an outage in the production environment. +// This is why we would rather to crash the app if it's not in development mode (in that case, Kubernetes should stop it from rolling out). 
+if (process.env.NODE_ENV === "production") { + throw new Error("BDD Nock API can only be enabled in development or test mode"); +} + +export const registerBddNockRouter = async (server: FastifyZodProvider) => { + const appCfg = getConfig(); + const importNock = async () => { + // eslint-disable-next-line import/no-extraneous-dependencies + const { default: nock } = await import("nock"); + return nock; + }; + + const checkIfBddNockApiEnabled = () => { + // Note: Please note that this API is only available in development mode and only for BDD tests. + // This endpoint should NEVER BE ENABLED IN PRODUCTION! + if (appCfg.NODE_ENV === "production" || !appCfg.isBddNockApiEnabled) { + throw new ForbiddenRequestError({ message: "BDD Nock API is not enabled" }); + } + }; + + server.route({ + method: "POST", + url: "/define", + schema: { + body: z.object({ definitions: z.unknown().array() }), + response: { + 200: z.object({ status: z.string() }) + } + }, + onRequest: verifyAuth([AuthMode.JWT]), + handler: async (req) => { + checkIfBddNockApiEnabled(); + const { body } = req; + const { definitions } = body; + logger.info(definitions, "Defining nock"); + const processedDefinitions = definitions.map((definition: unknown) => { + const { path, ...rest } = definition as Definition; + return { + ...rest, + path: + path !== undefined && typeof path === "string" + ? path + : new RegExp((path as unknown as { regex: string }).regex ?? "") + } as Definition; + }); + + const nock = await importNock(); + nock.define(processedDefinitions); + // Ensure we are activating the nocks, because we could have called `nock.restore()` before this call. 
+ if (!nock.isActive()) { + nock.activate(); + } + return { status: "ok" }; + } + }); + + server.route({ + method: "POST", + url: "/clean-all", + schema: { + response: { + 200: z.object({ status: z.string() }) + } + }, + onRequest: verifyAuth([AuthMode.JWT]), + handler: async () => { + checkIfBddNockApiEnabled(); + logger.info("Cleaning all nocks"); + const nock = await importNock(); + nock.cleanAll(); + return { status: "ok" }; + } + }); + + server.route({ + method: "POST", + url: "/restore", + schema: { + response: { + 200: z.object({ status: z.string() }) + } + }, + onRequest: verifyAuth([AuthMode.JWT]), + handler: async () => { + checkIfBddNockApiEnabled(); + logger.info("Restore network requests from nock"); + const nock = await importNock(); + nock.restore(); + return { status: "ok" }; + } + }); +}; diff --git a/backend/src/server/routes/bdd/bdd-nock-router.ts b/backend/src/server/routes/bdd/bdd-nock-router.ts new file mode 100644 index 0000000000..90f2ed00c6 --- /dev/null +++ b/backend/src/server/routes/bdd/bdd-nock-router.ts @@ -0,0 +1,6 @@ +export const registerBddNockRouter = async () => { + // This route is only available in development or test mode. + // The actual implementation is in the dev.ts file and will be aliased to that file in development or test mode. + // And if somehow we try to enable it in production, we will throw an error. 
+ throw new Error("BDD Nock should not be enabled in production"); +}; diff --git a/backend/src/server/routes/index.ts b/backend/src/server/routes/index.ts index 5dd7a1c22a..2b2023eb2b 100644 --- a/backend/src/server/routes/index.ts +++ b/backend/src/server/routes/index.ts @@ -1,3 +1,4 @@ +import { registerBddNockRouter } from "@bdd_routes/bdd-nock-router"; import { CronJob } from "cron"; import { Knex } from "knex"; import { monitorEventLoopDelay } from "perf_hooks"; @@ -2431,6 +2432,7 @@ export const registerRoutes = async ( } } + await kmsService.startService(hsmStatus); await telemetryQueue.startTelemetryCheck(); await telemetryQueue.startAggregatedEventsJob(); await dailyResourceCleanUp.init(); @@ -2443,7 +2445,6 @@ export const registerRoutes = async ( await pkiSubscriberQueue.startDailyAutoRenewalJob(); await pkiAlertV2Queue.init(); await certificateV3Queue.init(); - await kmsService.startService(hsmStatus); await microsoftTeamsService.start(); await dynamicSecretQueueService.init(); await eventBusService.init(); @@ -2698,6 +2699,12 @@ export const registerRoutes = async ( await server.register(registerV3Routes, { prefix: "/api/v3" }); await server.register(registerV4Routes, { prefix: "/api/v4" }); + // Note: This is a special route for BDD tests. It's only available in development mode and only for BDD tests. + // This route should NEVER BE ENABLED IN PRODUCTION! 
+ if (getConfig().isBddNockApiEnabled) { + await server.register(registerBddNockRouter, { prefix: "/api/__bdd_nock__" }); + } + server.addHook("onClose", async () => { cronJobs.forEach((job) => job.stop()); await telemetryService.flushAll(); diff --git a/backend/src/server/routes/v1/admin-router.ts b/backend/src/server/routes/v1/admin-router.ts index ddb3f23264..f6ec36f6f0 100644 --- a/backend/src/server/routes/v1/admin-router.ts +++ b/backend/src/server/routes/v1/admin-router.ts @@ -9,6 +9,8 @@ import { SuperAdminSchema, UsersSchema } from "@app/db/schemas"; +import { getLicenseKeyConfig } from "@app/ee/services/license/license-fns"; +import { LicenseType } from "@app/ee/services/license/license-types"; import { getConfig, overridableKeys } from "@app/lib/config/env"; import { crypto } from "@app/lib/crypto/cryptography"; import { BadRequestError } from "@app/lib/errors"; @@ -65,6 +67,9 @@ export const registerAdminRouter = async (server: FastifyZodProvider) => { const config = await getServerCfg(); const serverEnvs = getConfig(); + const licenseKeyConfig = getLicenseKeyConfig(); + const hasOfflineLicense = licenseKeyConfig.isValid && licenseKeyConfig.type === LicenseType.Offline; + return { config: { ...config, @@ -73,7 +78,7 @@ export const registerAdminRouter = async (server: FastifyZodProvider) => { isSecretScanningDisabled: serverEnvs.DISABLE_SECRET_SCANNING, kubernetesAutoFetchServiceAccountToken: serverEnvs.KUBERNETES_AUTO_FETCH_SERVICE_ACCOUNT_TOKEN, paramsFolderSecretDetectionEnabled: serverEnvs.PARAMS_FOLDER_SECRET_DETECTION_ENABLED, - isOfflineUsageReportsEnabled: !!serverEnvs.LICENSE_KEY_OFFLINE + isOfflineUsageReportsEnabled: hasOfflineLicense } }; } diff --git a/backend/src/server/routes/v1/bdd-nock-router.ts b/backend/src/server/routes/v1/bdd-nock-router.ts deleted file mode 100644 index 6a32cac20a..0000000000 --- a/backend/src/server/routes/v1/bdd-nock-router.ts +++ /dev/null @@ -1,87 +0,0 @@ -// import { z } from "zod"; - -// import { getConfig } 
from "@app/lib/config/env"; -// import { ForbiddenRequestError } from "@app/lib/errors"; -// import { logger } from "@app/lib/logger"; -// import { verifyAuth } from "@app/server/plugins/auth/verify-auth"; -// import { AuthMode } from "@app/services/auth/auth-type"; - -// export const registerBddNockRouter = async (server: FastifyZodProvider) => { -// const checkIfBddNockApiEnabled = () => { -// const appCfg = getConfig(); -// // Note: Please note that this API is only available in development mode and only for BDD tests. -// // This endpoint should NEVER BE ENABLED IN PRODUCTION! -// if (appCfg.NODE_ENV !== "development" || !appCfg.isBddNockApiEnabled) { -// throw new ForbiddenRequestError({ message: "BDD Nock API is not enabled" }); -// } -// }; - -// server.route({ -// method: "POST", -// url: "/define", -// schema: { -// body: z.object({ definitions: z.unknown().array() }), -// response: { -// 200: z.object({ status: z.string() }) -// } -// }, -// onRequest: verifyAuth([AuthMode.JWT]), -// handler: async (req) => { -// checkIfBddNockApiEnabled(); -// const { body } = req; -// const { definitions } = body; -// logger.info(definitions, "Defining nock"); -// const processedDefinitions = definitions.map((definition: unknown) => { -// const { path, ...rest } = definition as Definition; -// return { -// ...rest, -// path: -// path !== undefined && typeof path === "string" -// ? path -// : new RegExp((path as unknown as { regex: string }).regex ?? "") -// } as Definition; -// }); - -// nock.define(processedDefinitions); -// // Ensure we are activating the nocks, because we could have called `nock.restore()` before this call. 
-// if (!nock.isActive()) { -// nock.activate(); -// } -// return { status: "ok" }; -// } -// }); - -// server.route({ -// method: "POST", -// url: "/clean-all", -// schema: { -// response: { -// 200: z.object({ status: z.string() }) -// } -// }, -// onRequest: verifyAuth([AuthMode.JWT]), -// handler: async () => { -// checkIfBddNockApiEnabled(); -// logger.info("Cleaning all nocks"); -// nock.cleanAll(); -// return { status: "ok" }; -// } -// }); - -// server.route({ -// method: "POST", -// url: "/restore", -// schema: { -// response: { -// 200: z.object({ status: z.string() }) -// } -// }, -// onRequest: verifyAuth([AuthMode.JWT]), -// handler: async () => { -// checkIfBddNockApiEnabled(); -// logger.info("Restore network requests from nock"); -// nock.restore(); -// return { status: "ok" }; -// } -// }); -// }; diff --git a/backend/src/server/routes/v1/identity-token-auth-router.ts b/backend/src/server/routes/v1/identity-token-auth-router.ts index aafffdfdb1..d7cd86330b 100644 --- a/backend/src/server/routes/v1/identity-token-auth-router.ts +++ b/backend/src/server/routes/v1/identity-token-auth-router.ts @@ -314,7 +314,8 @@ export const registerIdentityTokenAuthRouter = async (server: FastifyZodProvider accessToken: z.string(), expiresIn: z.coerce.number(), accessTokenMaxTTL: z.coerce.number(), - tokenType: z.literal("Bearer") + tokenType: z.literal("Bearer"), + tokenData: IdentityAccessTokensSchema }) } }, @@ -346,7 +347,8 @@ export const registerIdentityTokenAuthRouter = async (server: FastifyZodProvider accessToken, tokenType: "Bearer" as const, expiresIn: identityTokenAuth.accessTokenTTL, - accessTokenMaxTTL: identityTokenAuth.accessTokenMaxTTL + accessTokenMaxTTL: identityTokenAuth.accessTokenMaxTTL, + tokenData: identityAccessToken }; } }); @@ -406,6 +408,60 @@ export const registerIdentityTokenAuthRouter = async (server: FastifyZodProvider } }); + server.route({ + method: "GET", + url: "/token-auth/identities/:identityId/tokens/:tokenId", + config: { + 
rateLimit: readLimit + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + schema: { + hide: false, + tags: [ApiDocsTags.TokenAuth], + description: "Get token for machine identity with Token Auth", + security: [ + { + bearerAuth: [] + } + ], + params: z.object({ + identityId: z.string().describe(TOKEN_AUTH.GET_TOKEN.identityId), + tokenId: z.string().describe(TOKEN_AUTH.GET_TOKEN.tokenId) + }), + response: { + 200: z.object({ + token: IdentityAccessTokensSchema + }) + } + }, + handler: async (req) => { + const { token, identityMembershipOrg } = await server.services.identityTokenAuth.getTokenAuthTokenById({ + identityId: req.params.identityId, + tokenId: req.params.tokenId, + actor: req.permission.type, + actorId: req.permission.id, + actorOrgId: req.permission.orgId, + actorAuthMethod: req.permission.authMethod, + isActorSuperAdmin: isSuperAdmin(req.auth) + }); + + await server.services.auditLog.createAuditLog({ + ...req.auditLogInfo, + orgId: identityMembershipOrg.scopeOrgId, + event: { + type: EventType.GET_TOKEN_IDENTITY_TOKEN_AUTH, + metadata: { + identityId: token.identityId, + identityName: identityMembershipOrg.identity.name, + tokenId: token.id + } + } + }); + + return { token }; + } + }); + server.route({ method: "PATCH", url: "/token-auth/tokens/:tokenId", diff --git a/backend/src/server/routes/v1/index.ts b/backend/src/server/routes/v1/index.ts index 68099e50e4..b480a5144a 100644 --- a/backend/src/server/routes/v1/index.ts +++ b/backend/src/server/routes/v1/index.ts @@ -8,7 +8,6 @@ import { registerSecretSyncRouter, SECRET_SYNC_REGISTER_ROUTER_MAP } from "@app/ import { registerAdminRouter } from "./admin-router"; import { registerAuthRoutes } from "./auth-router"; -// import { registerBddNockRouter } from "./bdd-nock-router"; import { registerProjectBotRouter } from "./bot-router"; import { registerCaRouter } from "./certificate-authority-router"; import { CERTIFICATE_AUTHORITY_REGISTER_ROUTER_MAP } from 
"./certificate-authority-routers"; @@ -238,10 +237,4 @@ export const registerV1Routes = async (server: FastifyZodProvider) => { await server.register(registerEventRouter, { prefix: "/events" }); await server.register(registerUpgradePathRouter, { prefix: "/upgrade-path" }); - - // Note: This is a special route for BDD tests. It's only available in development mode and only for BDD tests. - // This route should NEVER BE ENABLED IN PRODUCTION! - // if (getConfig().isBddNockApiEnabled) { - // await server.register(registerBddNockRouter, { prefix: "/bdd-nock" }); - // } }; diff --git a/backend/src/services/certificate-profile/certificate-profile-service.test.ts b/backend/src/services/certificate-profile/certificate-profile-service.test.ts index 327de96269..3b75c10883 100644 --- a/backend/src/services/certificate-profile/certificate-profile-service.test.ts +++ b/backend/src/services/certificate-profile/certificate-profile-service.test.ts @@ -430,7 +430,10 @@ describe("CertificateProfileService", () => { projectId: "project-123", data: { ...validProfileData, - enrollmentType: EnrollmentType.ACME + enrollmentType: EnrollmentType.ACME, + acmeConfig: {}, + apiConfig: undefined, + estConfig: undefined } }) ).rejects.toThrowError( diff --git a/backend/src/services/certificate-profile/certificate-profile-service.ts b/backend/src/services/certificate-profile/certificate-profile-service.ts index 87063c6dae..12e272ad65 100644 --- a/backend/src/services/certificate-profile/certificate-profile-service.ts +++ b/backend/src/services/certificate-profile/certificate-profile-service.ts @@ -213,7 +213,7 @@ export const certificateProfileServiceFactory = ({ throw new NotFoundError({ message: "Project not found" }); } const plan = await licenseService.getPlan(project.orgId); - if (!plan.pkiAcme) { + if (!plan.pkiAcme && data.enrollmentType === EnrollmentType.ACME) { throw new BadRequestError({ message: "Failed to create certificate profile: Plan restriction. 
Upgrade plan to continue" }); diff --git a/backend/src/services/identity-access-token/identity-access-token-dal.ts b/backend/src/services/identity-access-token/identity-access-token-dal.ts index ffdb786459..74b624a7e1 100644 --- a/backend/src/services/identity-access-token/identity-access-token-dal.ts +++ b/backend/src/services/identity-access-token/identity-access-token-dal.ts @@ -18,7 +18,6 @@ export const identityAccessTokenDALFactory = (db: TDbClient) => { .where(filter) .join(TableName.Identity, `${TableName.Identity}.id`, `${TableName.IdentityAccessToken}.identityId`) .select(selectAllTableCols(TableName.IdentityAccessToken)) - .select(db.ref("name").withSchema(TableName.Identity)) .select(db.ref("orgId").withSchema(TableName.Identity).as("identityScopeOrgId")) .first(); diff --git a/backend/src/services/identity-token-auth/identity-token-auth-service.ts b/backend/src/services/identity-token-auth/identity-token-auth-service.ts index 20c692134b..bdc8ab1c1d 100644 --- a/backend/src/services/identity-token-auth/identity-token-auth-service.ts +++ b/backend/src/services/identity-token-auth/identity-token-auth-service.ts @@ -38,6 +38,7 @@ import { TAttachTokenAuthDTO, TCreateTokenAuthTokenDTO, TGetTokenAuthDTO, + TGetTokenAuthTokenByIdDTO, TGetTokenAuthTokensDTO, TRevokeTokenAuthDTO, TRevokeTokenAuthTokenDTO, @@ -618,6 +619,52 @@ export const identityTokenAuthServiceFactory = ({ return { tokens, identityMembershipOrg }; }; + const getTokenAuthTokenById = async ({ + tokenId, + identityId, + isActorSuperAdmin, + actorId, + actor, + actorAuthMethod, + actorOrgId + }: TGetTokenAuthTokenByIdDTO) => { + await validateIdentityUpdateForSuperAdminPrivileges(identityId, isActorSuperAdmin); + + const identityMembershipOrg = await membershipIdentityDAL.getIdentityById({ + scopeData: { + scope: AccessScope.Organization, + orgId: actorOrgId + }, + identityId + }); + if (!identityMembershipOrg) throw new NotFoundError({ message: `Failed to find identity with ID ${identityId}` }); 
+ + if (!identityMembershipOrg.identity.authMethods.includes(IdentityAuthMethod.TOKEN_AUTH)) { + throw new BadRequestError({ + message: "The identity does not have Token Auth" + }); + } + const { permission } = await permissionService.getOrgPermission({ + scope: OrganizationActionScope.Any, + actor, + actorId, + orgId: identityMembershipOrg.scopeOrgId, + actorAuthMethod, + actorOrgId + }); + ForbiddenError.from(permission).throwUnlessCan(OrgPermissionIdentityActions.Read, OrgPermissionSubjects.Identity); + + const token = await identityAccessTokenDAL.findOne({ + [`${TableName.IdentityAccessToken}.id` as "id"]: tokenId, + [`${TableName.IdentityAccessToken}.authMethod` as "authMethod"]: IdentityAuthMethod.TOKEN_AUTH, + [`${TableName.IdentityAccessToken}.identityId` as "identityId"]: identityId + }); + + if (!token) throw new NotFoundError({ message: `Token with ID ${tokenId} not found` }); + + return { token, identityMembershipOrg }; + }; + const updateTokenAuthToken = async ({ tokenId, name, @@ -797,6 +844,7 @@ export const identityTokenAuthServiceFactory = ({ revokeIdentityTokenAuth, createTokenAuthToken, getTokenAuthTokens, + getTokenAuthTokenById, updateTokenAuthToken, revokeTokenAuthToken }; diff --git a/backend/src/services/identity-token-auth/identity-token-auth-types.ts b/backend/src/services/identity-token-auth/identity-token-auth-types.ts index 16cd60db77..fdecc6d4c9 100644 --- a/backend/src/services/identity-token-auth/identity-token-auth-types.ts +++ b/backend/src/services/identity-token-auth/identity-token-auth-types.ts @@ -40,6 +40,12 @@ export type TGetTokenAuthTokensDTO = { isActorSuperAdmin?: boolean; } & Omit; +export type TGetTokenAuthTokenByIdDTO = { + tokenId: string; + identityId: string; + isActorSuperAdmin?: boolean; +} & Omit; + export type TUpdateTokenAuthTokenDTO = { tokenId: string; name?: string; diff --git a/backend/src/services/offline-usage-report/offline-usage-report-service.ts 
b/backend/src/services/offline-usage-report/offline-usage-report-service.ts index 179232aa4a..1c34425a22 100644 --- a/backend/src/services/offline-usage-report/offline-usage-report-service.ts +++ b/backend/src/services/offline-usage-report/offline-usage-report-service.ts @@ -1,7 +1,8 @@ import crypto from "crypto"; +import { getLicenseKeyConfig } from "@app/ee/services/license/license-fns"; import { TLicenseServiceFactory } from "@app/ee/services/license/license-service"; -import { getConfig } from "@app/lib/config/env"; +import { LicenseType } from "@app/ee/services/license/license-types"; import { BadRequestError } from "@app/lib/errors"; import { TOfflineUsageReportDALFactory } from "./offline-usage-report-dal"; @@ -30,10 +31,13 @@ export const offlineUsageReportServiceFactory = ({ }; const generateUsageReportCSV = async () => { - const cfg = getConfig(); - if (!cfg.LICENSE_KEY_OFFLINE) { + const licenseKeyConfig = getLicenseKeyConfig(); + const hasOfflineLicense = licenseKeyConfig.isValid && licenseKeyConfig.type === LicenseType.Offline; + + if (!hasOfflineLicense) { throw new BadRequestError({ - message: "Offline usage reports are not enabled. LICENSE_KEY_OFFLINE must be configured." + message: + "Offline usage reports are not enabled. 
Usage reports are only available for self-hosted offline instances" }); } diff --git a/backend/src/services/secret-sync/secret-sync-maps.ts b/backend/src/services/secret-sync/secret-sync-maps.ts index 529634a6dc..8829d46224 100644 --- a/backend/src/services/secret-sync/secret-sync-maps.ts +++ b/backend/src/services/secret-sync/secret-sync-maps.ts @@ -112,7 +112,7 @@ export const SECRET_SYNC_PLAN_MAP: Record = { export const SECRET_SYNC_SKIP_FIELDS_MAP: Record = { [SecretSync.AWSParameterStore]: [], - [SecretSync.AWSSecretsManager]: ["mappingBehavior", "secretName"], + [SecretSync.AWSSecretsManager]: ["mappingBehavior"], [SecretSync.GitHub]: [], [SecretSync.GCPSecretManager]: [], [SecretSync.AzureKeyVault]: [], diff --git a/backend/tsconfig.dev.json b/backend/tsconfig.dev.json new file mode 100644 index 0000000000..4bcbcd5e1c --- /dev/null +++ b/backend/tsconfig.dev.json @@ -0,0 +1,9 @@ +{ + "extends": "./tsconfig.json", + "compilerOptions": { + "paths": { + "@app/*": ["./src/*"], + "@bdd_routes/bdd-nock-router": ["./src/server/routes/bdd/bdd-nock-router.dev.ts"] + } + } +} diff --git a/backend/tsconfig.json b/backend/tsconfig.json index 523e6de5be..db076a30d6 100644 --- a/backend/tsconfig.json +++ b/backend/tsconfig.json @@ -24,7 +24,8 @@ "skipLibCheck": true, "baseUrl": ".", "paths": { - "@app/*": ["./src/*"] + "@app/*": ["./src/*"], + "@bdd_routes/*": ["./src/server/routes/bdd/*"] }, "jsx": "react-jsx" }, diff --git a/backend/tsup.config.js b/backend/tsup.config.js index e09a21ff23..80ec73a14f 100644 --- a/backend/tsup.config.js +++ b/backend/tsup.config.js @@ -2,8 +2,8 @@ import path from "node:path"; import fs from "fs/promises"; -import {replaceTscAliasPaths} from "tsc-alias"; -import {defineConfig} from "tsup"; +import { replaceTscAliasPaths } from "tsc-alias"; +import { defineConfig } from "tsup"; // Instead of using tsx or tsc for building, consider using tsup. 
// TSX serves as an alternative to Node.js, allowing you to build directly on the Node.js runtime. @@ -29,7 +29,7 @@ export default defineConfig({ external: ["../../../frontend/node_modules/next/dist/server/next-server.js"], outDir: "dist", tsconfig: "./tsconfig.json", - entry: ["./src"], + entry: ["./src", "!./src/**/*.dev.ts"], sourceMap: true, skipNodeModulesBundle: true, esbuildPlugins: [ @@ -45,22 +45,22 @@ export default defineConfig({ const isRelativePath = args.path.startsWith("."); const absPath = isRelativePath ? path.join(args.resolveDir, args.path) - : path.join(args.path.replace("@app", "./src")); + : path.join(args.path.replace("@app", "./src").replace("@bdd_routes", "./src/server/routes/bdd")); const isFile = await fs .stat(`${absPath}.ts`) .then((el) => el.isFile) - .catch(async (err) => { - if (err.code === "ENOTDIR") { - return true; - } + .catch(async (err) => { + if (err.code === "ENOTDIR") { + return true; + } - // If .ts file doesn't exist, try checking for .tsx file - return fs - .stat(`${absPath}.tsx`) - .then((el) => el.isFile) - .catch((err) => err.code === "ENOTDIR"); - }); + // If .ts file doesn't exist, try checking for .tsx file + return fs + .stat(`${absPath}.tsx`) + .then((el) => el.isFile) + .catch((err) => err.code === "ENOTDIR"); + }); return { path: isFile ? 
`${args.path}.mjs` : `${args.path}/index.mjs`, diff --git a/backend/vitest.e2e.config.mts b/backend/vitest.e2e.config.mts index 83554b818c..a37ca9518e 100644 --- a/backend/vitest.e2e.config.mts +++ b/backend/vitest.e2e.config.mts @@ -28,7 +28,8 @@ export default defineConfig({ }, resolve: { alias: { - "@app": path.resolve(__dirname, "./src") + "@app": path.resolve(__dirname, "./src"), + "@bdd_routes/bdd-nock-router": path.resolve(__dirname, "./src/server/routes/bdd/bdd-nock-router.dev.ts") } } }); diff --git a/backend/vitest.unit.config.mts b/backend/vitest.unit.config.mts index 97862d2884..aa56063a9c 100644 --- a/backend/vitest.unit.config.mts +++ b/backend/vitest.unit.config.mts @@ -11,7 +11,8 @@ export default defineConfig({ }, resolve: { alias: { - "@app": path.resolve(__dirname, "./src") + "@app": path.resolve(__dirname, "./src"), + "@bdd_routes/bdd-nock-router": path.resolve(__dirname, "./src/server/routes/bdd/bdd-nock-router.dev.ts") } } }); diff --git a/docker-compose.dev.yml b/docker-compose.dev.yml index e60ef1ba56..b75b6df221 100644 --- a/docker-compose.dev.yml +++ b/docker-compose.dev.yml @@ -71,6 +71,7 @@ services: ports: - 4000:4000 - 9464:9464 # for OTEL collection of Prometheus metrics + - 9229:9229 # For debugger access environment: - NODE_ENV=development - DB_CONNECTION_URI=postgres://infisical:infisical@db/infisical?sslmode=disable diff --git a/docs/docs.json b/docs/docs.json index 72182232e9..57f44596fd 100644 --- a/docs/docs.json +++ b/docs/docs.json @@ -742,6 +742,7 @@ "pages": [ "documentation/platform/pki/enrollment-methods/overview", "documentation/platform/pki/enrollment-methods/api", + "documentation/platform/pki/enrollment-methods/acme", "documentation/platform/pki/enrollment-methods/est" ] }, @@ -774,6 +775,8 @@ "group": "External CA Integrations", "pages": [ "documentation/platform/pki/ca/acme-ca", + "documentation/platform/pki/ca/lets-encrypt", + "documentation/platform/pki/ca/digicert", "documentation/platform/pki/ca/azure-adcs" ] } 
@@ -889,15 +892,30 @@ ] }, { - "group": "Endpoints", + "group": "Organization", "pages": [ { - "group": "Admin", - "pages": ["api-reference/endpoints/admin/bootstrap-instance"] + "group": "Organization Users", + "pages": [ + "api-reference/endpoints/organizations/memberships", + "api-reference/endpoints/organizations/update-membership", + "api-reference/endpoints/organizations/delete-membership", + "api-reference/endpoints/organizations/bulk-delete-memberships", + "api-reference/endpoints/organizations/list-identity-memberships" + ] }, { - "group": "Events", - "pages": ["api-reference/endpoints/events/project-events"] + "group": "Organization Groups", + "pages": [ + "api-reference/endpoints/groups/create", + "api-reference/endpoints/groups/update", + "api-reference/endpoints/groups/delete", + "api-reference/endpoints/groups/get", + "api-reference/endpoints/groups/get-by-id", + "api-reference/endpoints/groups/add-group-user", + "api-reference/endpoints/groups/remove-group-user", + "api-reference/endpoints/groups/list-group-users" + ] }, { "group": "Organization Identities", @@ -911,187 +929,34 @@ ] }, { - "group": "Token Auth", + "group": "OIDC SSO", "pages": [ - "api-reference/endpoints/token-auth/attach", - "api-reference/endpoints/token-auth/retrieve", - "api-reference/endpoints/token-auth/update", - "api-reference/endpoints/token-auth/revoke", - "api-reference/endpoints/token-auth/get-tokens", - "api-reference/endpoints/token-auth/create-token", - "api-reference/endpoints/token-auth/update-token", - "api-reference/endpoints/token-auth/revoke-token" + "api-reference/endpoints/organizations/oidc-sso/get-oidc-config", + "api-reference/endpoints/organizations/oidc-sso/update-oidc-config", + "api-reference/endpoints/organizations/oidc-sso/create-oidc-config" ] }, { - "group": "Universal Auth", + "group": "LDAP SSO", "pages": [ - "api-reference/endpoints/universal-auth/login", - "api-reference/endpoints/universal-auth/attach", - 
"api-reference/endpoints/universal-auth/retrieve", - "api-reference/endpoints/universal-auth/update", - "api-reference/endpoints/universal-auth/revoke", - "api-reference/endpoints/universal-auth/create-client-secret", - "api-reference/endpoints/universal-auth/list-client-secrets", - "api-reference/endpoints/universal-auth/revoke-client-secret", - "api-reference/endpoints/universal-auth/get-client-secret-by-id", - "api-reference/endpoints/universal-auth/renew-access-token", - "api-reference/endpoints/universal-auth/revoke-access-token" + "api-reference/endpoints/organizations/ldap-sso/get-ldap-config", + "api-reference/endpoints/organizations/ldap-sso/update-ldap-config", + "api-reference/endpoints/organizations/ldap-sso/create-ldap-config" ] }, { - "group": "GCP Auth", + "group": "SAML SSO", "pages": [ - "api-reference/endpoints/gcp-auth/login", - "api-reference/endpoints/gcp-auth/attach", - "api-reference/endpoints/gcp-auth/retrieve", - "api-reference/endpoints/gcp-auth/update", - "api-reference/endpoints/gcp-auth/revoke" + "api-reference/endpoints/organizations/saml-sso/get-saml-config", + "api-reference/endpoints/organizations/saml-sso/update-saml-config", + "api-reference/endpoints/organizations/saml-sso/create-saml-config" ] - }, - { - "group": "Alibaba Cloud Auth", - "pages": [ - "api-reference/endpoints/alicloud-auth/login", - "api-reference/endpoints/alicloud-auth/attach", - "api-reference/endpoints/alicloud-auth/retrieve", - "api-reference/endpoints/alicloud-auth/update", - "api-reference/endpoints/alicloud-auth/revoke" - ] - }, - { - "group": "TLS Certificate Auth", - "pages": [ - "api-reference/endpoints/tls-cert-auth/login", - "api-reference/endpoints/tls-cert-auth/attach", - "api-reference/endpoints/tls-cert-auth/retrieve", - "api-reference/endpoints/tls-cert-auth/update", - "api-reference/endpoints/tls-cert-auth/revoke" - ] - }, - { - "group": "AWS Auth", - "pages": [ - "api-reference/endpoints/aws-auth/login", - 
"api-reference/endpoints/aws-auth/attach", - "api-reference/endpoints/aws-auth/retrieve", - "api-reference/endpoints/aws-auth/update", - "api-reference/endpoints/aws-auth/revoke" - ] - }, - { - "group": "OCI Auth", - "pages": [ - "api-reference/endpoints/oci-auth/login", - "api-reference/endpoints/oci-auth/attach", - "api-reference/endpoints/oci-auth/retrieve", - "api-reference/endpoints/oci-auth/update", - "api-reference/endpoints/oci-auth/revoke" - ] - }, - { - "group": "Azure Auth", - "pages": [ - "api-reference/endpoints/azure-auth/login", - "api-reference/endpoints/azure-auth/attach", - "api-reference/endpoints/azure-auth/retrieve", - "api-reference/endpoints/azure-auth/update", - "api-reference/endpoints/azure-auth/revoke" - ] - }, - { - "group": "Kubernetes Auth", - "pages": [ - "api-reference/endpoints/kubernetes-auth/login", - "api-reference/endpoints/kubernetes-auth/attach", - "api-reference/endpoints/kubernetes-auth/retrieve", - "api-reference/endpoints/kubernetes-auth/update", - "api-reference/endpoints/kubernetes-auth/revoke" - ] - }, - { - "group": "OIDC Auth", - "pages": [ - "api-reference/endpoints/oidc-auth/login", - "api-reference/endpoints/oidc-auth/attach", - "api-reference/endpoints/oidc-auth/retrieve", - "api-reference/endpoints/oidc-auth/update", - "api-reference/endpoints/oidc-auth/revoke" - ] - }, - { - "group": "JWT Auth", - "pages": [ - "api-reference/endpoints/jwt-auth/login", - "api-reference/endpoints/jwt-auth/attach", - "api-reference/endpoints/jwt-auth/retrieve", - "api-reference/endpoints/jwt-auth/update", - "api-reference/endpoints/jwt-auth/revoke" - ] - }, - { - "group": "LDAP Auth", - "pages": [ - "api-reference/endpoints/ldap-auth/login", - "api-reference/endpoints/ldap-auth/attach", - "api-reference/endpoints/ldap-auth/retrieve", - "api-reference/endpoints/ldap-auth/update", - "api-reference/endpoints/ldap-auth/revoke" - ] - }, - { - "group": "Groups", - "pages": [ - "api-reference/endpoints/groups/create", - 
"api-reference/endpoints/groups/update", - "api-reference/endpoints/groups/delete", - "api-reference/endpoints/groups/get", - "api-reference/endpoints/groups/get-by-id", - "api-reference/endpoints/groups/add-group-user", - "api-reference/endpoints/groups/remove-group-user", - "api-reference/endpoints/groups/list-group-users" - ] - }, - { - "group": "Organizations", - "pages": [ - { - "group": "OIDC SSO", - "pages": [ - "api-reference/endpoints/organizations/oidc-sso/get-oidc-config", - "api-reference/endpoints/organizations/oidc-sso/update-oidc-config", - "api-reference/endpoints/organizations/oidc-sso/create-oidc-config" - ] - }, - { - "group": "LDAP SSO", - "pages": [ - "api-reference/endpoints/organizations/ldap-sso/get-ldap-config", - "api-reference/endpoints/organizations/ldap-sso/update-ldap-config", - "api-reference/endpoints/organizations/ldap-sso/create-ldap-config" - ] - }, - { - "group": "SAML SSO", - "pages": [ - "api-reference/endpoints/organizations/saml-sso/get-saml-config", - "api-reference/endpoints/organizations/saml-sso/update-saml-config", - "api-reference/endpoints/organizations/saml-sso/create-saml-config" - ] - }, - "api-reference/endpoints/organizations/memberships", - "api-reference/endpoints/organizations/update-membership", - "api-reference/endpoints/organizations/delete-membership", - "api-reference/endpoints/organizations/bulk-delete-memberships", - "api-reference/endpoints/organizations/list-identity-memberships", - { - "group": "Legacy", - "pages": [ - "api-reference/endpoints/deprecated/organizations/projects" - ] - } - ] - }, + } + ] + }, + { + "group": "Project", + "pages": [ { "group": "Projects", "pages": [ @@ -1109,6 +974,7 @@ "api-reference/endpoints/deprecated/projects/create-project", "api-reference/endpoints/deprecated/projects/delete-project", "api-reference/endpoints/deprecated/projects/get-project", + "api-reference/endpoints/deprecated/organizations/projects", "api-reference/endpoints/deprecated/projects/update-project", 
"api-reference/endpoints/deprecated/projects/secret-snapshots" ] @@ -1185,6 +1051,16 @@ } ] }, + { + "group": "Project Templates", + "pages": [ + "api-reference/endpoints/project-templates/create", + "api-reference/endpoints/project-templates/update", + "api-reference/endpoints/project-templates/delete", + "api-reference/endpoints/project-templates/get-by-id", + "api-reference/endpoints/project-templates/list" + ] + }, { "group": "Project Roles", "pages": [ @@ -1205,349 +1081,6 @@ } ] }, - { - "group": "Project Templates", - "pages": [ - "api-reference/endpoints/project-templates/create", - "api-reference/endpoints/project-templates/update", - "api-reference/endpoints/project-templates/delete", - "api-reference/endpoints/project-templates/get-by-id", - "api-reference/endpoints/project-templates/list" - ] - }, - { - "group": "Environments", - "pages": [ - "api-reference/endpoints/environments/create", - "api-reference/endpoints/environments/update", - "api-reference/endpoints/environments/delete", - { - "group": "Legacy", - "pages": [ - "api-reference/endpoints/deprecated/environments/create", - "api-reference/endpoints/deprecated/environments/update", - "api-reference/endpoints/deprecated/environments/delete" - ] - } - ] - }, - { - "group": "Folders", - "pages": [ - "api-reference/endpoints/folders/list", - "api-reference/endpoints/folders/get-by-id", - "api-reference/endpoints/folders/create", - "api-reference/endpoints/folders/update", - "api-reference/endpoints/folders/delete", - { - "group": "Legacy", - "pages": [ - "api-reference/endpoints/deprecated/folders/list", - "api-reference/endpoints/deprecated/folders/get-by-id", - "api-reference/endpoints/deprecated/folders/create", - "api-reference/endpoints/deprecated/folders/update", - "api-reference/endpoints/deprecated/folders/delete" - ] - } - ] - }, - { - "group": "Secret Tags", - "pages": [ - "api-reference/endpoints/secret-tags/list", - "api-reference/endpoints/secret-tags/get-by-id", - 
"api-reference/endpoints/secret-tags/get-by-slug", - "api-reference/endpoints/secret-tags/create", - "api-reference/endpoints/secret-tags/update", - "api-reference/endpoints/secret-tags/delete", - { - "group": "Legacy", - "pages": [ - "api-reference/endpoints/deprecated/secret-tags/list", - "api-reference/endpoints/deprecated/secret-tags/get-by-id", - "api-reference/endpoints/deprecated/secret-tags/get-by-slug", - "api-reference/endpoints/deprecated/secret-tags/create", - "api-reference/endpoints/deprecated/secret-tags/update", - "api-reference/endpoints/deprecated/secret-tags/delete" - ] - } - ] - }, - { - "group": "Secrets", - "pages": [ - "api-reference/endpoints/secrets/list", - "api-reference/endpoints/secrets/create", - "api-reference/endpoints/secrets/read", - "api-reference/endpoints/secrets/update", - "api-reference/endpoints/secrets/delete", - "api-reference/endpoints/secrets/create-many", - "api-reference/endpoints/secrets/update-many", - "api-reference/endpoints/secrets/delete-many", - { - "group": "Legacy", - "pages": [ - "api-reference/endpoints/deprecated/secrets/list", - "api-reference/endpoints/deprecated/secrets/create", - "api-reference/endpoints/deprecated/secrets/read", - "api-reference/endpoints/deprecated/secrets/update", - "api-reference/endpoints/deprecated/secrets/delete", - "api-reference/endpoints/deprecated/secrets/create-many", - "api-reference/endpoints/deprecated/secrets/update-many", - "api-reference/endpoints/deprecated/secrets/delete-many", - "api-reference/endpoints/deprecated/secrets/attach-tags", - "api-reference/endpoints/deprecated/secrets/detach-tags" - ] - } - ] - }, - { - "group": "Dynamic Secrets", - "pages": [ - { - "group": "Kubernetes", - "pages": [ - "api-reference/endpoints/dynamic-secrets/kubernetes/create-lease" - ] - }, - "api-reference/endpoints/dynamic-secrets/create", - "api-reference/endpoints/dynamic-secrets/update", - "api-reference/endpoints/dynamic-secrets/delete", - 
"api-reference/endpoints/dynamic-secrets/get", - "api-reference/endpoints/dynamic-secrets/list", - "api-reference/endpoints/dynamic-secrets/list-leases", - "api-reference/endpoints/dynamic-secrets/create-lease", - "api-reference/endpoints/dynamic-secrets/delete-lease", - "api-reference/endpoints/dynamic-secrets/renew-lease", - "api-reference/endpoints/dynamic-secrets/get-lease" - ] - }, - { - "group": "Secret Imports", - "pages": [ - "api-reference/endpoints/secret-imports/list", - "api-reference/endpoints/secret-imports/create", - "api-reference/endpoints/secret-imports/update", - "api-reference/endpoints/secret-imports/delete", - { - "group": "Legacy", - "pages": [ - "api-reference/endpoints/deprecated/secret-imports/list", - "api-reference/endpoints/deprecated/secret-imports/create", - "api-reference/endpoints/deprecated/secret-imports/update", - "api-reference/endpoints/deprecated/secret-imports/delete" - ] - } - ] - }, - { - "group": "Secret Rotations", - "pages": [ - "api-reference/endpoints/secret-rotations/list", - "api-reference/endpoints/secret-rotations/options", - { - "group": "Auth0 Client Secret", - "pages": [ - "api-reference/endpoints/secret-rotations/auth0-client-secret/create", - "api-reference/endpoints/secret-rotations/auth0-client-secret/delete", - "api-reference/endpoints/secret-rotations/auth0-client-secret/get-by-id", - "api-reference/endpoints/secret-rotations/auth0-client-secret/get-by-name", - "api-reference/endpoints/secret-rotations/auth0-client-secret/get-generated-credentials-by-id", - "api-reference/endpoints/secret-rotations/auth0-client-secret/list", - "api-reference/endpoints/secret-rotations/auth0-client-secret/rotate-secrets", - "api-reference/endpoints/secret-rotations/auth0-client-secret/update" - ] - }, - { - "group": "AWS IAM User Secret", - "pages": [ - "api-reference/endpoints/secret-rotations/aws-iam-user-secret/create", - "api-reference/endpoints/secret-rotations/aws-iam-user-secret/delete", - 
"api-reference/endpoints/secret-rotations/aws-iam-user-secret/get-by-id", - "api-reference/endpoints/secret-rotations/aws-iam-user-secret/get-by-name", - "api-reference/endpoints/secret-rotations/aws-iam-user-secret/get-generated-credentials-by-id", - "api-reference/endpoints/secret-rotations/aws-iam-user-secret/list", - "api-reference/endpoints/secret-rotations/aws-iam-user-secret/rotate-secrets", - "api-reference/endpoints/secret-rotations/aws-iam-user-secret/update" - ] - }, - { - "group": "Azure Client Secret", - "pages": [ - "api-reference/endpoints/secret-rotations/azure-client-secret/create", - "api-reference/endpoints/secret-rotations/azure-client-secret/delete", - "api-reference/endpoints/secret-rotations/azure-client-secret/get-by-id", - "api-reference/endpoints/secret-rotations/azure-client-secret/get-by-name", - "api-reference/endpoints/secret-rotations/azure-client-secret/get-generated-credentials-by-id", - "api-reference/endpoints/secret-rotations/azure-client-secret/list", - "api-reference/endpoints/secret-rotations/azure-client-secret/rotate-secrets", - "api-reference/endpoints/secret-rotations/azure-client-secret/update" - ] - }, - { - "group": "LDAP Password", - "pages": [ - "api-reference/endpoints/secret-rotations/ldap-password/create", - "api-reference/endpoints/secret-rotations/ldap-password/delete", - "api-reference/endpoints/secret-rotations/ldap-password/get-by-id", - "api-reference/endpoints/secret-rotations/ldap-password/get-by-name", - "api-reference/endpoints/secret-rotations/ldap-password/get-generated-credentials-by-id", - "api-reference/endpoints/secret-rotations/ldap-password/list", - "api-reference/endpoints/secret-rotations/ldap-password/rotate-secrets", - "api-reference/endpoints/secret-rotations/ldap-password/update" - ] - }, - { - "group": "Microsoft SQL Server Credentials", - "pages": [ - "api-reference/endpoints/secret-rotations/mssql-credentials/create", - "api-reference/endpoints/secret-rotations/mssql-credentials/delete", 
- "api-reference/endpoints/secret-rotations/mssql-credentials/get-by-id", - "api-reference/endpoints/secret-rotations/mssql-credentials/get-by-name", - "api-reference/endpoints/secret-rotations/mssql-credentials/get-generated-credentials-by-id", - "api-reference/endpoints/secret-rotations/mssql-credentials/list", - "api-reference/endpoints/secret-rotations/mssql-credentials/rotate-secrets", - "api-reference/endpoints/secret-rotations/mssql-credentials/update" - ] - }, - { - "group": "MySQL Credentials", - "pages": [ - "api-reference/endpoints/secret-rotations/mysql-credentials/create", - "api-reference/endpoints/secret-rotations/mysql-credentials/delete", - "api-reference/endpoints/secret-rotations/mysql-credentials/get-by-id", - "api-reference/endpoints/secret-rotations/mysql-credentials/get-by-name", - "api-reference/endpoints/secret-rotations/mysql-credentials/get-generated-credentials-by-id", - "api-reference/endpoints/secret-rotations/mysql-credentials/list", - "api-reference/endpoints/secret-rotations/mysql-credentials/rotate-secrets", - "api-reference/endpoints/secret-rotations/mysql-credentials/update" - ] - }, - { - "group": "Okta Client Secret", - "pages": [ - "api-reference/endpoints/secret-rotations/okta-client-secret/create", - "api-reference/endpoints/secret-rotations/okta-client-secret/delete", - "api-reference/endpoints/secret-rotations/okta-client-secret/get-by-id", - "api-reference/endpoints/secret-rotations/okta-client-secret/get-by-name", - "api-reference/endpoints/secret-rotations/okta-client-secret/get-generated-credentials-by-id", - "api-reference/endpoints/secret-rotations/okta-client-secret/list", - "api-reference/endpoints/secret-rotations/okta-client-secret/rotate-secrets", - "api-reference/endpoints/secret-rotations/okta-client-secret/update" - ] - }, - { - "group": "OracleDB Credentials", - "pages": [ - "api-reference/endpoints/secret-rotations/oracledb-credentials/create", - 
"api-reference/endpoints/secret-rotations/oracledb-credentials/delete", - "api-reference/endpoints/secret-rotations/oracledb-credentials/get-by-id", - "api-reference/endpoints/secret-rotations/oracledb-credentials/get-by-name", - "api-reference/endpoints/secret-rotations/oracledb-credentials/get-generated-credentials-by-id", - "api-reference/endpoints/secret-rotations/oracledb-credentials/list", - "api-reference/endpoints/secret-rotations/oracledb-credentials/rotate-secrets", - "api-reference/endpoints/secret-rotations/oracledb-credentials/update" - ] - }, - { - "group": "PostgreSQL Credentials", - "pages": [ - "api-reference/endpoints/secret-rotations/postgres-credentials/create", - "api-reference/endpoints/secret-rotations/postgres-credentials/delete", - "api-reference/endpoints/secret-rotations/postgres-credentials/get-by-id", - "api-reference/endpoints/secret-rotations/postgres-credentials/get-by-name", - "api-reference/endpoints/secret-rotations/postgres-credentials/get-generated-credentials-by-id", - "api-reference/endpoints/secret-rotations/postgres-credentials/list", - "api-reference/endpoints/secret-rotations/postgres-credentials/rotate-secrets", - "api-reference/endpoints/secret-rotations/postgres-credentials/update" - ] - }, - { - "group": "Redis Credentials", - "pages": [ - "api-reference/endpoints/secret-rotations/redis-credentials/create", - "api-reference/endpoints/secret-rotations/redis-credentials/delete", - "api-reference/endpoints/secret-rotations/redis-credentials/get-by-id", - "api-reference/endpoints/secret-rotations/redis-credentials/get-by-name", - "api-reference/endpoints/secret-rotations/redis-credentials/get-generated-credentials-by-id", - "api-reference/endpoints/secret-rotations/redis-credentials/list", - "api-reference/endpoints/secret-rotations/redis-credentials/rotate-secrets", - "api-reference/endpoints/secret-rotations/redis-credentials/update" - ] - } - ] - }, - { - "group": "Secret Scanning", - "pages": [ - { - "group": "Data 
Sources", - "pages": [ - "api-reference/endpoints/secret-scanning/data-sources/list", - "api-reference/endpoints/secret-scanning/data-sources/options", - { - "group": "Bitbucket", - "pages": [ - "api-reference/endpoints/secret-scanning/data-sources/bitbucket/list", - "api-reference/endpoints/secret-scanning/data-sources/bitbucket/get-by-id", - "api-reference/endpoints/secret-scanning/data-sources/bitbucket/get-by-name", - "api-reference/endpoints/secret-scanning/data-sources/bitbucket/list-resources", - "api-reference/endpoints/secret-scanning/data-sources/bitbucket/list-scans", - "api-reference/endpoints/secret-scanning/data-sources/bitbucket/create", - "api-reference/endpoints/secret-scanning/data-sources/bitbucket/update", - "api-reference/endpoints/secret-scanning/data-sources/bitbucket/delete", - "api-reference/endpoints/secret-scanning/data-sources/bitbucket/scan", - "api-reference/endpoints/secret-scanning/data-sources/bitbucket/scan-resource" - ] - }, - { - "group": "GitHub", - "pages": [ - "api-reference/endpoints/secret-scanning/data-sources/github/list", - "api-reference/endpoints/secret-scanning/data-sources/github/get-by-id", - "api-reference/endpoints/secret-scanning/data-sources/github/get-by-name", - "api-reference/endpoints/secret-scanning/data-sources/github/list-resources", - "api-reference/endpoints/secret-scanning/data-sources/github/list-scans", - "api-reference/endpoints/secret-scanning/data-sources/github/create", - "api-reference/endpoints/secret-scanning/data-sources/github/update", - "api-reference/endpoints/secret-scanning/data-sources/github/delete", - "api-reference/endpoints/secret-scanning/data-sources/github/scan", - "api-reference/endpoints/secret-scanning/data-sources/github/scan-resource" - ] - }, - { - "group": "GitLab", - "pages": [ - "api-reference/endpoints/secret-scanning/data-sources/gitlab/list", - "api-reference/endpoints/secret-scanning/data-sources/gitlab/get-by-id", - 
"api-reference/endpoints/secret-scanning/data-sources/gitlab/get-by-name", - "api-reference/endpoints/secret-scanning/data-sources/gitlab/list-resources", - "api-reference/endpoints/secret-scanning/data-sources/gitlab/list-scans", - "api-reference/endpoints/secret-scanning/data-sources/gitlab/create", - "api-reference/endpoints/secret-scanning/data-sources/gitlab/update", - "api-reference/endpoints/secret-scanning/data-sources/gitlab/delete", - "api-reference/endpoints/secret-scanning/data-sources/gitlab/scan", - "api-reference/endpoints/secret-scanning/data-sources/gitlab/scan-resource" - ] - } - ] - }, - { - "group": "Findings", - "pages": [ - "api-reference/endpoints/secret-scanning/findings/list", - "api-reference/endpoints/secret-scanning/findings/update" - ] - }, - { - "group": "Configuration", - "pages": [ - "api-reference/endpoints/secret-scanning/config/get-by-project-id", - "api-reference/endpoints/secret-scanning/config/update" - ] - } - ] - }, { "group": "Identity Specific Privilege", "pages": [ @@ -1575,6 +1108,15 @@ } ] }, + { + "group": "Service Tokens", + "pages": ["api-reference/endpoints/service-tokens/get"] + } + ] + }, + { + "group": "Shared", + "pages": [ { "group": "App Connections", "pages": [ @@ -2085,6 +1627,415 @@ } ] }, + { + "group": "Audit Logs", + "pages": ["api-reference/endpoints/audit-logs/export-audit-log"] + }, + { + "group": "Events", + "pages": ["api-reference/endpoints/events/project-events"] + } + ] + }, + { + "group": "Identity Auth", + "pages": [ + { + "group": "Token Auth", + "pages": [ + "api-reference/endpoints/token-auth/attach", + "api-reference/endpoints/token-auth/retrieve", + "api-reference/endpoints/token-auth/update", + "api-reference/endpoints/token-auth/revoke", + "api-reference/endpoints/token-auth/get-tokens", + "api-reference/endpoints/token-auth/create-token", + "api-reference/endpoints/token-auth/update-token", + "api-reference/endpoints/token-auth/revoke-token" + ] + }, + { + "group": "Universal Auth", + 
"pages": [ + "api-reference/endpoints/universal-auth/login", + "api-reference/endpoints/universal-auth/attach", + "api-reference/endpoints/universal-auth/retrieve", + "api-reference/endpoints/universal-auth/update", + "api-reference/endpoints/universal-auth/revoke", + "api-reference/endpoints/universal-auth/create-client-secret", + "api-reference/endpoints/universal-auth/list-client-secrets", + "api-reference/endpoints/universal-auth/revoke-client-secret", + "api-reference/endpoints/universal-auth/get-client-secret-by-id", + "api-reference/endpoints/universal-auth/renew-access-token", + "api-reference/endpoints/universal-auth/revoke-access-token" + ] + }, + { + "group": "GCP Auth", + "pages": [ + "api-reference/endpoints/gcp-auth/login", + "api-reference/endpoints/gcp-auth/attach", + "api-reference/endpoints/gcp-auth/retrieve", + "api-reference/endpoints/gcp-auth/update", + "api-reference/endpoints/gcp-auth/revoke" + ] + }, + { + "group": "Alibaba Cloud Auth", + "pages": [ + "api-reference/endpoints/alicloud-auth/login", + "api-reference/endpoints/alicloud-auth/attach", + "api-reference/endpoints/alicloud-auth/retrieve", + "api-reference/endpoints/alicloud-auth/update", + "api-reference/endpoints/alicloud-auth/revoke" + ] + }, + { + "group": "TLS Certificate Auth", + "pages": [ + "api-reference/endpoints/tls-cert-auth/login", + "api-reference/endpoints/tls-cert-auth/attach", + "api-reference/endpoints/tls-cert-auth/retrieve", + "api-reference/endpoints/tls-cert-auth/update", + "api-reference/endpoints/tls-cert-auth/revoke" + ] + }, + { + "group": "AWS Auth", + "pages": [ + "api-reference/endpoints/aws-auth/login", + "api-reference/endpoints/aws-auth/attach", + "api-reference/endpoints/aws-auth/retrieve", + "api-reference/endpoints/aws-auth/update", + "api-reference/endpoints/aws-auth/revoke" + ] + }, + { + "group": "OCI Auth", + "pages": [ + "api-reference/endpoints/oci-auth/login", + "api-reference/endpoints/oci-auth/attach", + 
"api-reference/endpoints/oci-auth/retrieve", + "api-reference/endpoints/oci-auth/update", + "api-reference/endpoints/oci-auth/revoke" + ] + }, + { + "group": "Azure Auth", + "pages": [ + "api-reference/endpoints/azure-auth/login", + "api-reference/endpoints/azure-auth/attach", + "api-reference/endpoints/azure-auth/retrieve", + "api-reference/endpoints/azure-auth/update", + "api-reference/endpoints/azure-auth/revoke" + ] + }, + { + "group": "Kubernetes Auth", + "pages": [ + "api-reference/endpoints/kubernetes-auth/login", + "api-reference/endpoints/kubernetes-auth/attach", + "api-reference/endpoints/kubernetes-auth/retrieve", + "api-reference/endpoints/kubernetes-auth/update", + "api-reference/endpoints/kubernetes-auth/revoke" + ] + }, + { + "group": "OIDC Auth", + "pages": [ + "api-reference/endpoints/oidc-auth/login", + "api-reference/endpoints/oidc-auth/attach", + "api-reference/endpoints/oidc-auth/retrieve", + "api-reference/endpoints/oidc-auth/update", + "api-reference/endpoints/oidc-auth/revoke" + ] + }, + { + "group": "JWT Auth", + "pages": [ + "api-reference/endpoints/jwt-auth/login", + "api-reference/endpoints/jwt-auth/attach", + "api-reference/endpoints/jwt-auth/retrieve", + "api-reference/endpoints/jwt-auth/update", + "api-reference/endpoints/jwt-auth/revoke" + ] + }, + { + "group": "LDAP Auth", + "pages": [ + "api-reference/endpoints/ldap-auth/login", + "api-reference/endpoints/ldap-auth/attach", + "api-reference/endpoints/ldap-auth/retrieve", + "api-reference/endpoints/ldap-auth/update", + "api-reference/endpoints/ldap-auth/revoke" + ] + } + ] + }, + { + "group": "Secrets Management", + "pages": [ + { + "group": "Environments", + "pages": [ + "api-reference/endpoints/environments/create", + "api-reference/endpoints/environments/update", + "api-reference/endpoints/environments/delete", + { + "group": "Legacy", + "pages": [ + "api-reference/endpoints/deprecated/environments/create", + "api-reference/endpoints/deprecated/environments/update", + 
"api-reference/endpoints/deprecated/environments/delete" + ] + } + ] + }, + { + "group": "Folders", + "pages": [ + "api-reference/endpoints/folders/list", + "api-reference/endpoints/folders/get-by-id", + "api-reference/endpoints/folders/create", + "api-reference/endpoints/folders/update", + "api-reference/endpoints/folders/delete", + { + "group": "Legacy", + "pages": [ + "api-reference/endpoints/deprecated/folders/list", + "api-reference/endpoints/deprecated/folders/get-by-id", + "api-reference/endpoints/deprecated/folders/create", + "api-reference/endpoints/deprecated/folders/update", + "api-reference/endpoints/deprecated/folders/delete" + ] + } + ] + }, + { + "group": "Secrets", + "pages": [ + "api-reference/endpoints/secrets/list", + "api-reference/endpoints/secrets/create", + "api-reference/endpoints/secrets/read", + "api-reference/endpoints/secrets/update", + "api-reference/endpoints/secrets/delete", + "api-reference/endpoints/secrets/create-many", + "api-reference/endpoints/secrets/update-many", + "api-reference/endpoints/secrets/delete-many", + { + "group": "Legacy", + "pages": [ + "api-reference/endpoints/deprecated/secrets/list", + "api-reference/endpoints/deprecated/secrets/create", + "api-reference/endpoints/deprecated/secrets/read", + "api-reference/endpoints/deprecated/secrets/update", + "api-reference/endpoints/deprecated/secrets/delete", + "api-reference/endpoints/deprecated/secrets/create-many", + "api-reference/endpoints/deprecated/secrets/update-many", + "api-reference/endpoints/deprecated/secrets/delete-many", + "api-reference/endpoints/deprecated/secrets/attach-tags", + "api-reference/endpoints/deprecated/secrets/detach-tags" + ] + } + ] + }, + { + "group": "Secret Tags", + "pages": [ + "api-reference/endpoints/secret-tags/list", + "api-reference/endpoints/secret-tags/get-by-id", + "api-reference/endpoints/secret-tags/get-by-slug", + "api-reference/endpoints/secret-tags/create", + "api-reference/endpoints/secret-tags/update", + 
"api-reference/endpoints/secret-tags/delete", + { + "group": "Legacy", + "pages": [ + "api-reference/endpoints/deprecated/secret-tags/list", + "api-reference/endpoints/deprecated/secret-tags/get-by-id", + "api-reference/endpoints/deprecated/secret-tags/get-by-slug", + "api-reference/endpoints/deprecated/secret-tags/create", + "api-reference/endpoints/deprecated/secret-tags/update", + "api-reference/endpoints/deprecated/secret-tags/delete" + ] + } + ] + }, + { + "group": "Secret Imports", + "pages": [ + "api-reference/endpoints/secret-imports/list", + "api-reference/endpoints/secret-imports/create", + "api-reference/endpoints/secret-imports/update", + "api-reference/endpoints/secret-imports/delete", + { + "group": "Legacy", + "pages": [ + "api-reference/endpoints/deprecated/secret-imports/list", + "api-reference/endpoints/deprecated/secret-imports/create", + "api-reference/endpoints/deprecated/secret-imports/update", + "api-reference/endpoints/deprecated/secret-imports/delete" + ] + } + ] + }, + { + "group": "Dynamic Secrets", + "pages": [ + { + "group": "Kubernetes", + "pages": [ + "api-reference/endpoints/dynamic-secrets/kubernetes/create-lease" + ] + }, + "api-reference/endpoints/dynamic-secrets/create", + "api-reference/endpoints/dynamic-secrets/update", + "api-reference/endpoints/dynamic-secrets/delete", + "api-reference/endpoints/dynamic-secrets/get", + "api-reference/endpoints/dynamic-secrets/list", + "api-reference/endpoints/dynamic-secrets/list-leases", + "api-reference/endpoints/dynamic-secrets/create-lease", + "api-reference/endpoints/dynamic-secrets/delete-lease", + "api-reference/endpoints/dynamic-secrets/renew-lease", + "api-reference/endpoints/dynamic-secrets/get-lease" + ] + }, + { + "group": "Secret Rotations", + "pages": [ + "api-reference/endpoints/secret-rotations/list", + "api-reference/endpoints/secret-rotations/options", + { + "group": "Auth0 Client Secret", + "pages": [ + "api-reference/endpoints/secret-rotations/auth0-client-secret/create", 
+ "api-reference/endpoints/secret-rotations/auth0-client-secret/delete", + "api-reference/endpoints/secret-rotations/auth0-client-secret/get-by-id", + "api-reference/endpoints/secret-rotations/auth0-client-secret/get-by-name", + "api-reference/endpoints/secret-rotations/auth0-client-secret/get-generated-credentials-by-id", + "api-reference/endpoints/secret-rotations/auth0-client-secret/list", + "api-reference/endpoints/secret-rotations/auth0-client-secret/rotate-secrets", + "api-reference/endpoints/secret-rotations/auth0-client-secret/update" + ] + }, + { + "group": "AWS IAM User Secret", + "pages": [ + "api-reference/endpoints/secret-rotations/aws-iam-user-secret/create", + "api-reference/endpoints/secret-rotations/aws-iam-user-secret/delete", + "api-reference/endpoints/secret-rotations/aws-iam-user-secret/get-by-id", + "api-reference/endpoints/secret-rotations/aws-iam-user-secret/get-by-name", + "api-reference/endpoints/secret-rotations/aws-iam-user-secret/get-generated-credentials-by-id", + "api-reference/endpoints/secret-rotations/aws-iam-user-secret/list", + "api-reference/endpoints/secret-rotations/aws-iam-user-secret/rotate-secrets", + "api-reference/endpoints/secret-rotations/aws-iam-user-secret/update" + ] + }, + { + "group": "Azure Client Secret", + "pages": [ + "api-reference/endpoints/secret-rotations/azure-client-secret/create", + "api-reference/endpoints/secret-rotations/azure-client-secret/delete", + "api-reference/endpoints/secret-rotations/azure-client-secret/get-by-id", + "api-reference/endpoints/secret-rotations/azure-client-secret/get-by-name", + "api-reference/endpoints/secret-rotations/azure-client-secret/get-generated-credentials-by-id", + "api-reference/endpoints/secret-rotations/azure-client-secret/list", + "api-reference/endpoints/secret-rotations/azure-client-secret/rotate-secrets", + "api-reference/endpoints/secret-rotations/azure-client-secret/update" + ] + }, + { + "group": "LDAP Password", + "pages": [ + 
"api-reference/endpoints/secret-rotations/ldap-password/create", + "api-reference/endpoints/secret-rotations/ldap-password/delete", + "api-reference/endpoints/secret-rotations/ldap-password/get-by-id", + "api-reference/endpoints/secret-rotations/ldap-password/get-by-name", + "api-reference/endpoints/secret-rotations/ldap-password/get-generated-credentials-by-id", + "api-reference/endpoints/secret-rotations/ldap-password/list", + "api-reference/endpoints/secret-rotations/ldap-password/rotate-secrets", + "api-reference/endpoints/secret-rotations/ldap-password/update" + ] + }, + { + "group": "Microsoft SQL Server Credentials", + "pages": [ + "api-reference/endpoints/secret-rotations/mssql-credentials/create", + "api-reference/endpoints/secret-rotations/mssql-credentials/delete", + "api-reference/endpoints/secret-rotations/mssql-credentials/get-by-id", + "api-reference/endpoints/secret-rotations/mssql-credentials/get-by-name", + "api-reference/endpoints/secret-rotations/mssql-credentials/get-generated-credentials-by-id", + "api-reference/endpoints/secret-rotations/mssql-credentials/list", + "api-reference/endpoints/secret-rotations/mssql-credentials/rotate-secrets", + "api-reference/endpoints/secret-rotations/mssql-credentials/update" + ] + }, + { + "group": "MySQL Credentials", + "pages": [ + "api-reference/endpoints/secret-rotations/mysql-credentials/create", + "api-reference/endpoints/secret-rotations/mysql-credentials/delete", + "api-reference/endpoints/secret-rotations/mysql-credentials/get-by-id", + "api-reference/endpoints/secret-rotations/mysql-credentials/get-by-name", + "api-reference/endpoints/secret-rotations/mysql-credentials/get-generated-credentials-by-id", + "api-reference/endpoints/secret-rotations/mysql-credentials/list", + "api-reference/endpoints/secret-rotations/mysql-credentials/rotate-secrets", + "api-reference/endpoints/secret-rotations/mysql-credentials/update" + ] + }, + { + "group": "Okta Client Secret", + "pages": [ + 
"api-reference/endpoints/secret-rotations/okta-client-secret/create", + "api-reference/endpoints/secret-rotations/okta-client-secret/delete", + "api-reference/endpoints/secret-rotations/okta-client-secret/get-by-id", + "api-reference/endpoints/secret-rotations/okta-client-secret/get-by-name", + "api-reference/endpoints/secret-rotations/okta-client-secret/get-generated-credentials-by-id", + "api-reference/endpoints/secret-rotations/okta-client-secret/list", + "api-reference/endpoints/secret-rotations/okta-client-secret/rotate-secrets", + "api-reference/endpoints/secret-rotations/okta-client-secret/update" + ] + }, + { + "group": "OracleDB Credentials", + "pages": [ + "api-reference/endpoints/secret-rotations/oracledb-credentials/create", + "api-reference/endpoints/secret-rotations/oracledb-credentials/delete", + "api-reference/endpoints/secret-rotations/oracledb-credentials/get-by-id", + "api-reference/endpoints/secret-rotations/oracledb-credentials/get-by-name", + "api-reference/endpoints/secret-rotations/oracledb-credentials/get-generated-credentials-by-id", + "api-reference/endpoints/secret-rotations/oracledb-credentials/list", + "api-reference/endpoints/secret-rotations/oracledb-credentials/rotate-secrets", + "api-reference/endpoints/secret-rotations/oracledb-credentials/update" + ] + }, + { + "group": "PostgreSQL Credentials", + "pages": [ + "api-reference/endpoints/secret-rotations/postgres-credentials/create", + "api-reference/endpoints/secret-rotations/postgres-credentials/delete", + "api-reference/endpoints/secret-rotations/postgres-credentials/get-by-id", + "api-reference/endpoints/secret-rotations/postgres-credentials/get-by-name", + "api-reference/endpoints/secret-rotations/postgres-credentials/get-generated-credentials-by-id", + "api-reference/endpoints/secret-rotations/postgres-credentials/list", + "api-reference/endpoints/secret-rotations/postgres-credentials/rotate-secrets", + "api-reference/endpoints/secret-rotations/postgres-credentials/update" + ] 
+ }, + { + "group": "Redis Credentials", + "pages": [ + "api-reference/endpoints/secret-rotations/redis-credentials/create", + "api-reference/endpoints/secret-rotations/redis-credentials/delete", + "api-reference/endpoints/secret-rotations/redis-credentials/get-by-id", + "api-reference/endpoints/secret-rotations/redis-credentials/get-by-name", + "api-reference/endpoints/secret-rotations/redis-credentials/get-generated-credentials-by-id", + "api-reference/endpoints/secret-rotations/redis-credentials/list", + "api-reference/endpoints/secret-rotations/redis-credentials/rotate-secrets", + "api-reference/endpoints/secret-rotations/redis-credentials/update" + ] + } + ] + }, { "group": "Secret Syncs", "pages": [ @@ -2553,14 +2504,6 @@ "api-reference/endpoints/integrations/delete", "api-reference/endpoints/integrations/list-project-integrations" ] - }, - { - "group": "Service Tokens", - "pages": ["api-reference/endpoints/service-tokens/get"] - }, - { - "group": "Audit Logs", - "pages": ["api-reference/endpoints/audit-logs/export-audit-log"] } ] }, @@ -2716,6 +2659,77 @@ } ] }, + { + "group": "Secret Scanning", + "pages": [ + { + "group": "Data Sources", + "pages": [ + "api-reference/endpoints/secret-scanning/data-sources/list", + "api-reference/endpoints/secret-scanning/data-sources/options", + { + "group": "Bitbucket", + "pages": [ + "api-reference/endpoints/secret-scanning/data-sources/bitbucket/list", + "api-reference/endpoints/secret-scanning/data-sources/bitbucket/get-by-id", + "api-reference/endpoints/secret-scanning/data-sources/bitbucket/get-by-name", + "api-reference/endpoints/secret-scanning/data-sources/bitbucket/list-resources", + "api-reference/endpoints/secret-scanning/data-sources/bitbucket/list-scans", + "api-reference/endpoints/secret-scanning/data-sources/bitbucket/create", + "api-reference/endpoints/secret-scanning/data-sources/bitbucket/update", + "api-reference/endpoints/secret-scanning/data-sources/bitbucket/delete", + 
"api-reference/endpoints/secret-scanning/data-sources/bitbucket/scan", + "api-reference/endpoints/secret-scanning/data-sources/bitbucket/scan-resource" + ] + }, + { + "group": "GitHub", + "pages": [ + "api-reference/endpoints/secret-scanning/data-sources/github/list", + "api-reference/endpoints/secret-scanning/data-sources/github/get-by-id", + "api-reference/endpoints/secret-scanning/data-sources/github/get-by-name", + "api-reference/endpoints/secret-scanning/data-sources/github/list-resources", + "api-reference/endpoints/secret-scanning/data-sources/github/list-scans", + "api-reference/endpoints/secret-scanning/data-sources/github/create", + "api-reference/endpoints/secret-scanning/data-sources/github/update", + "api-reference/endpoints/secret-scanning/data-sources/github/delete", + "api-reference/endpoints/secret-scanning/data-sources/github/scan", + "api-reference/endpoints/secret-scanning/data-sources/github/scan-resource" + ] + }, + { + "group": "GitLab", + "pages": [ + "api-reference/endpoints/secret-scanning/data-sources/gitlab/list", + "api-reference/endpoints/secret-scanning/data-sources/gitlab/get-by-id", + "api-reference/endpoints/secret-scanning/data-sources/gitlab/get-by-name", + "api-reference/endpoints/secret-scanning/data-sources/gitlab/list-resources", + "api-reference/endpoints/secret-scanning/data-sources/gitlab/list-scans", + "api-reference/endpoints/secret-scanning/data-sources/gitlab/create", + "api-reference/endpoints/secret-scanning/data-sources/gitlab/update", + "api-reference/endpoints/secret-scanning/data-sources/gitlab/delete", + "api-reference/endpoints/secret-scanning/data-sources/gitlab/scan", + "api-reference/endpoints/secret-scanning/data-sources/gitlab/scan-resource" + ] + } + ] + }, + { + "group": "Findings", + "pages": [ + "api-reference/endpoints/secret-scanning/findings/list", + "api-reference/endpoints/secret-scanning/findings/update" + ] + }, + { + "group": "Configuration", + "pages": [ + 
"api-reference/endpoints/secret-scanning/config/get-by-project-id", + "api-reference/endpoints/secret-scanning/config/update" + ] + } + ] + }, { "group": "Infisical SSH", "pages": [ @@ -2809,6 +2823,15 @@ ] } ] + }, + { + "group": "Other", + "pages": [ + { + "group": "Admin", + "pages": ["api-reference/endpoints/admin/bootstrap-instance"] + } + ] } ] }, diff --git a/docs/documentation/getting-started/introduction.mdx b/docs/documentation/getting-started/introduction.mdx index e10d594dad..77342e36cb 100644 --- a/docs/documentation/getting-started/introduction.mdx +++ b/docs/documentation/getting-started/introduction.mdx @@ -35,7 +35,7 @@ Infisical consists of several tightly integrated products, each designed to solv - [Secrets Management](/documentation/platform/secrets-mgmt/overview): Securely store, access, and distribute secrets across environments with fine-grained controls, automatic rotation, and audit logging. - [Secrets Scanning](/documentation/platform/secret-scanning/overview): Detect hardcoded secrets in code, CI pipelines, and infrastructure—integrated with GitHub, GitLab, Bitbucket, and more. -- [Infisical PKI](/documentation/platform/pki/overview): Issue and manage X.509 certificates using protocols like EST, with support for internal and external CAs. +- [Certificate Management](/documentation/platform/pki/overview): Issue and manage X.509 certificates using protocols like EST, with support for internal and external CAs. - [Infisical SSH](/documentation/platform/ssh/overview): Provide short-lived SSH access to servers using certificate-based authentication, replacing static keys with policy-driven, time-bound control. - [Infisical KMS](/documentation/platform/kms/overview): Encrypt and decrypt data using centrally managed keys with enforced access policies and full audit visibility. - [Infisical PAM](/documentation/platform/pam/overview): Manage access to resources like databases, servers, and accounts with policy-based controls and approvals. 
diff --git a/docs/documentation/platform/identities/machine-identities.mdx b/docs/documentation/platform/identities/machine-identities.mdx index d7b7663a9c..964a3ad7c7 100644 --- a/docs/documentation/platform/identities/machine-identities.mdx +++ b/docs/documentation/platform/identities/machine-identities.mdx @@ -16,15 +16,37 @@ Key Features: - Role Assignment: Identities must be assigned [roles](/documentation/platform/access-controls/role-based-access-controls). These roles determine the scope of access to resources, either at the organization level or project level. - Auth/Token Configuration: Identities must be configured with corresponding authentication methods and access token properties to securely interact with the Infisical API. +## Scopes + +Identities can be created either at the organization-level or the project-level. Outside of identity management and scope of operation, organization and project identities are functionally identical. + +- Project identities are managed at the project-level and can only operate within their respective project. +Project-level identities are useful for organizations that delegate responsibility to autonomous teams via projects. + +- Organization identities are managed at the organization-level and can be assigned to one or more projects, as well as +perform organization-level operations. Organization-level identities are useful for organizations that have cross-project operations. + ## Workflow -A typical workflow for using identities consists of four steps: + + + A typical workflow for using project identities consists of three steps: -1. Creating the identity with a name and [role](/documentation/platform/access-controls/role-based-access-controls) in Organization Access Control > Machine Identities. - This step also involves configuring an authentication method for it. -2. Adding the identity to the project(s) you want it to have access to. -3. 
Authenticating the identity with the Infisical API based on the configured authentication method on it and receiving a short-lived access token back. -4. Authenticating subsequent requests with the Infisical API using the short-lived access token. + 1. Creating the identity with a name and [role](/documentation/platform/access-controls/role-based-access-controls) in Project > Access Control > Machine Identities. + This step also involves configuring an authentication method for it. + 2. Authenticating the identity with the Infisical API based on the configured authentication method on it and receiving a short-lived access token back. + 3. Authenticating subsequent requests with the Infisical API using the short-lived access token. + + + A typical workflow for using organization identities consists of four steps: + + 1. Creating the identity with a name and [role](/documentation/platform/access-controls/role-based-access-controls) in Organization > Access Control > Machine Identities. + This step also involves configuring an authentication method for it. + 2. Adding the identity to the project(s) you want it to have access to. + 3. Authenticating the identity with the Infisical API based on the configured authentication method on it and receiving a short-lived access token back. + 4. Authenticating subsequent requests with the Infisical API using the short-lived access token. + + ## Authentication Methods diff --git a/docs/documentation/platform/pki/ca/acme-ca.mdx b/docs/documentation/platform/pki/ca/acme-ca.mdx index 76f5cdc8f5..774590c738 100644 --- a/docs/documentation/platform/pki/ca/acme-ca.mdx +++ b/docs/documentation/platform/pki/ca/acme-ca.mdx @@ -1,66 +1,63 @@ --- title: "ACME-compatible CA" -description: "Learn how to automatically provision and manage TLS certificates using ACME Certificate Authorities like Let's Encrypt with Infisical PKI" +description: "Learn how to connect Infisical to an ACME-compatible CA to issue certificates." 
---

## Concept

-The Infisical ACME integration allows you to connect with ACME (Automatic Certificate Management Environment) Certificate Authorities to automatically issue and manage publicly trusted TLS certificates for your [subscribers](/documentation/platform/pki/subscribers). This integration enables you to leverage established public CA infrastructure like Let's Encrypt while centralizing your certificate management within Infisical. +Infisical can connect to any upstream ACME-compatible CA (e.g. Let's Encrypt, DigiCert, etc.) supporting the [ACME protocol](https://en.wikipedia.org/wiki/Automatic_Certificate_Management_Environment) to issue certificates back to your end-entities. This integration uses the [DNS-01 challenge](https://letsencrypt.org/docs/challenge-types/#dns-01-challenge) method as part of the ACME domain validation challenge workflow for a requested certificate. -ACME is a protocol that automates the process of certificate issuance and renewal through domain validation challenges. The integration is perfect for obtaining trusted X.509 certificates for public-facing services and is capable of automatically renewing certificates as needed. +The upstream ACME-compatible CA integration lets you connect Infisical to providers by specifying +their **ACME Directory URL** such as: + +- [Let's Encrypt](/documentation/platform/pki/ca/lets-encrypt): `https://acme-v02.api.letsencrypt.org/directory`. +- [DigiCert](/documentation/platform/pki/ca/digicert): `https://acme.digicert.com/v2/acme/directory`. +- Google GTS: `https://dv.acme-v02.api.pki.goog/directory`. +- Buypass: `https://api.buypass.com/acme/directory`. +- ZeroSSL: `https://acme.zerossl.com/v2/DV90`. +- SSL.com: `https://acme.ssl.com/sslcom-dv-rsa`. + +When Infisical requests a certificate from an ACME-compatible CA, it creates a TXT record at `_acme-challenge.{your-domain}` in your configured DNS provider (e.g. 
Route53, Cloudflare, etc.); this TXT record contains the challenge token issued by the ACME-compatible CA to validate domain control for the requested certificate. +The ACME provider checks for the existence of this TXT record to verify domain control before issuing the certificate back to Infisical. + +After validation completes successfully, Infisical automatically removes the TXT record from your DNS provider.
```mermaid graph TD - A[ACME CA Provider
e.g., Let's Encrypt] <-->|ACME v2 Protocol| B[Infisical] - B -->|Creates TXT Records
via Route53/Cloudflare| C[DNS Validation] - B -->|Manages Certificates| D[Subscribers] + A[ACME-compatible CA] <-->|ACME v2 Protocol| B[Infisical] + B -->|Creates TXT Records
via DNS Provider| C[DNS Validation] + B -->|Manages Certificates| D[End-Entities] ```
-As part of the workflow, you configure DNS provider credentials, register an ACME CA provider with Infisical, and create subscribers to represent the certificates you wish to issue. Each issued certificate is automatically managed through its lifecycle, including renewal before expiration. - -We recommend reading about [ACME protocol](https://tools.ietf.org/html/rfc8555) and [DNS-01 challenges](https://letsencrypt.org/docs/challenge-types/#dns-01-challenge) for a fuller understanding of the underlying technology. +We recommend reading about [ACME protocol](https://tools.ietf.org/html/rfc8555) and [DNS-01 challenges](https://letsencrypt.org/docs/challenge-types/#dns-01-challenge) for a fuller understanding of the underlying workflow. ## Workflow -A typical workflow for using Infisical with ACME Certificate Authorities consists of the following steps: +A typical workflow for using Infisical with an external ACME-compatible CA consists of the following steps: -1. Setting up AWS Route53 or Cloudflare credentials with appropriate DNS permissions. -2. Creating an AWS/Cloudflare connection in Infisical to store the credentials. -3. Registering an ACME Certificate Authority (like Let's Encrypt) with Infisical. -4. Creating subscribers that use the ACME CA as their issuing authority. -5. Managing certificate lifecycle events such as issuance, renewal, and revocation through Infisical. +1. Setting up your DNS provider (e.g. Route53, Cloudflare, etc.) with appropriate DNS permissions. +2. Creating an [App Connection](/integrations/app-connections/overview) in Infisical to store credentials for Infisical to connect to your DNS provider and create/remove DNS records as part of the DNS-01 challenge. +3. Registering an [External CA](/documentation/platform/pki/ca/external-ca) in Infisical with the ACME type and inputting required configuration including the **ACME Directory URL** of the upstream ACME-compatible CA and the **App Connection** for your DNS provider. 
-## Understanding ACME DNS-01 Challenge +Once this is complete, you can create a [certificate profile](/documentation/platform/pki/certificates/profiles) linked to the External CA and proceed to request a certificate against it. -The DNS-01 challenge is the method used by ACME CA providers to verify that you control a domain before issuing a certificate. Here's how Infisical handles this process: +## Guide to Connecting Infisical to an ACME-compatible CA -1. **Challenge Request**: When you request a certificate, the ACME provider (like Let's Encrypt) issues a challenge token. - -2. **DNS Record Creation**: Infisical creates a TXT record at `_acme-challenge.` with a value derived from the challenge token. - -3. **DNS Propagation**: The TXT record must propagate through the DNS system (usually takes a few minutes, depending on TTL settings). - -4. **Validation**: The ACME provider checks for the existence of this TXT record to verify domain control. - -5. **Cleanup**: After validation completes successfully, Infisical automatically removes the TXT record from your DNS. - -This automated process eliminates the need for manual intervention in domain validation, streamlining certificate issuance. - -## Guide - -In the following steps, we explore how to set up ACME Certificate Authority integration with Infisical using Let's Encrypt as an example. +In the following steps, we explore how to connect Infisical to an ACME-compatible CA. - - Before proceeding with the ACME CA registration, you need to set up an App Connection with the appropriate permissions for DNS validation: + + Before registering an ACME-compatible CA with Infisical, you need to set up an [App Connection](/integrations/app-connections/overview) with the appropriate permissions for Infisical to perform the DNS-01 challenge with your DNS provider. + + If you don’t see a specific DNS provider listed below or need a dedicated one, please reach out to sales@infisical.com and we’ll help get that enabled for you. 
- 1. Navigate to your Organization Settings > App Connections and create a new AWS connection. + 1. Navigate to your Certificate Management Project > App Connections and create a new AWS connection. 2. Ensure your AWS connection has the following minimum permissions for Route53 DNS validation: @@ -112,7 +109,7 @@ In the following steps, we explore how to set up ACME Certificate Authority inte For detailed instructions on setting up an AWS connection, see the [AWS Connection](/integrations/app-connections/aws) documentation. - 1. Navigate to your Organization Settings > App Connections and create a new Cloudflare connection. + 1. Navigate to your Certificate Management Project > App Connections and create a new Cloudflare connection. 2. Ensure your Cloudflare token has the following minimum permissions for DNS validation: @@ -125,51 +122,33 @@ In the following steps, we explore how to set up ACME Certificate Authority inte - + - - - To register an ACME CA, head to your Project > Internal PKI > Certificate Authorities and press the **+** button in the External Certificate Authorities section. + To register an ACME-compatible CA, head to your Certificate Management Project > Certificate Authorities > External Certificate Authorities and press **Create CA**. - ![pki register external ca](/images/platform/pki/ca/external-ca/create-external-ca-button.png) + ![pki register external ca](/images/platform/pki/ca/external-ca/create-external-ca-button.png) - Fill out the details for the ACME CA registration: + Here, set the **CA Type** to **ACME** and fill out details for it. - ![pki register external ca details](/images/platform/pki/ca/external-ca/create-external-ca-form.png) + ![pki register external ca details](/images/platform/pki/ca/external-ca/create-external-ca-form.png) - Here's guidance on each field: + Here's some guidance for each field: - - **Type**: Select "ACME" as the External CA type. - - **Name**: Enter a name for the ACME CA (e.g., "lets-encrypt-production"). 
- - **DNS App Connection**: Select from available DNS app connections or configure a new one. This connection provides Infisical with the credentials needed to create and remove DNS records for ACME validation. - - **Zone ID**: Enter the Zone ID for the domain(s) you'll be requesting certificates for. - - **Directory URL**: Enter the ACME v2 directory URL for your chosen CA provider (e.g., `https://acme-v02.api.letsencrypt.org/directory` for Let's Encrypt). - - **Account Email**: Email address to associate with your ACME account. This email will receive important notifications about your certificates. - - **Enable Direct Issuance**: Toggle on to allow direct certificate issuance without requiring subscribers. - - **EAB Key Identifier (KID)**: (Optional) The Key Identifier (KID) provided by your ACME CA for External Account Binding (EAB). This is required by some ACME providers (e.g., ZeroSSL, DigiCert) to link your ACME account to an external account you've pre-registered with them. - - **EAB HMAC Key**: (Optional) The HMAC Key provided by your ACME CA for External Account Binding (EAB). This key is used in conjunction with the KID to prove ownership of the external account during ACME account registration. + - Name: A slug-friendly name for the ACME-compatible CA such as `lets-encrypt-production`. + - DNS App Connection: The App Connection from Step 1 used for Infisical to connect to your DNS provider and create/remove DNS records as part of the DNS-01 challenge in ACME. + - Zone / Zone ID: Enter the Zone / Zone ID for the domain(s) you'll be requesting certificates for. + - Directory URL: Enter the **ACME Directory URL** for your desired upstream ACME-compatible CA such as `https://acme-v02.api.letsencrypt.org/directory` for Let's Encrypt. + - Account Email: The email address to associate with your ACME account. This email will receive important notifications about your certificates. 
+ - EAB Key Identifier (KID): (Optional) The Key Identifier (KID) provided by your ACME CA for External Account Binding (EAB). This is required by some ACME providers (e.g., ZeroSSL, DigiCert) to link your ACME account to an external account you've pre-registered with them. + - EAB HMAC Key: (Optional) The HMAC Key provided by your ACME CA for External Account Binding (EAB). This key is used in conjunction with the KID to prove ownership of the external account during ACME account registration. - Finally, press **Create** to register the ACME CA with Infisical. - - - Once registered, your ACME CA will appear in the External Certificate Authorities section. + Finally, press **Create** to register the ACME-compatible CA with Infisical. - ![pki external ca list](/images/platform/pki/ca/external-ca/external-ca-list.png) - - From here, you can: - - - View the status of the ACME CA registration - - Edit the configuration settings - - Disable or re-enable the ACME CA - - Delete the ACME CA registration if no longer needed - - You can now use this ACME CA to issue certificates for your subscribers. - - + Great! You’ve successfully registered an external ACME-compatible CA with Infisical. Now check out the [Certificates](/documentation/platform/pki/certificates/overview) section to learn more about how to issue X.509 certificates using the ACME-compatible CA. - To register an ACME CA with Infisical using the API, make a request to the Create External CA endpoint: + To register an ACME CA with Infisical using the API, make a request to the [Create External CA](https://infisical.com/docs/api-reference/endpoints/certificate-authorities/acme/create) endpoint: ### Sample request @@ -227,78 +206,9 @@ In the following steps, we explore how to set up ACME Certificate Authority inte - - Next, create a subscriber that uses your ACME CA for certificate issuance. Navigate to your Project > Subscribers and create a new subscriber. 
- - Configure the subscriber with: - - **Issuing CA**: Select your registered ACME CA - - **Common Name**: The domain for which you want to issue certificates (e.g., `example.com`) - - **Alternative Names**: Additional domains to include in the certificate - - Check out the [Subscribers](/documentation/platform/pki/subscribers) page for detailed instructions on creating and managing subscribers. - - - Once your subscriber is configured, you can issue certificates either through the Infisical UI or programmatically via the API. - - When you request a certificate: - 1. Infisical generates a key pair for the certificate - 2. Sends a Certificate Signing Request (CSR) to the ACME CA - 3. Receives a DNS-01 challenge from the ACME provider - 4. Creates a TXT record in Route53/Cloudflare to satisfy the challenge - 5. Notifies the ACME provider that the challenge is ready for validation - 6. Once validated, the ACME provider issues the certificate - 7. Infisical stores and manages the certificate for your subscriber - - The certificate will be automatically renewed before expiration according to your subscriber configuration. - - - The issued certificate and private key are now available through Infisical and can be: - - - Downloaded directly from the Infisical UI - - Retrieved via the Infisical API for programmatic access using the [latest certificate bundle endpoint](/api-reference/endpoints/certificate-profiles/get-latest-active-bundle) - -## Example: Let's Encrypt Integration - -Let's Encrypt is a free, automated, and open Certificate Authority that provides domain-validated SSL/TLS certificates. 
Here's how the integration works with Infisical: - -### Production Environment - -- **Directory URL**: `https://acme-v02.api.letsencrypt.org/directory` -- **Rate Limits**: 50 certificates per registered domain per week -- **Certificate Validity**: 90 days with automatic renewal -- **Trusted By**: All major browsers and operating systems - -### Staging Environment (for testing) - -- **Directory URL**: `https://acme-staging-v02.api.letsencrypt.org/directory` -- **Rate Limits**: Much higher limits for testing -- **Certificate Validity**: 90 days (not trusted by browsers) -- **Use Case**: Testing your ACME integration without hitting production rate limits - - - Always test your ACME integration using Let's Encrypt's staging environment - first. This allows you to verify your DNS configuration and certificate - issuance process without consuming your production rate limits. - - -## Example: DigiCert Integration - -DigiCert is a leading commercial Certificate Authority providing a wide range of trusted SSL/TLS certificates. Infisical can integrate with [DigiCert's ACME](https://docs.digicert.com/en/certcentral/certificate-tools/certificate-lifecycle-automation-guides/third-party-acme-integration/request-and-manage-certificates-with-acme.html) service to automate the provisioning and management of these certificates. - -- **Directory URL**: `https://acme.digicert.com/v2/acme/directory` -- **External Account Binding (EAB)**: Required. You will need a Key Identifier (KID) and HMAC Key from your DigiCert account to register the ACME CA in Infisical. -- **Certificate Validity**: Typically 90 days, with automatic renewal through Infisical. -- **Trusted By**: All major browsers and operating systems. - - - When integrating with DigiCert ACME, ensure you have obtained the necessary - External Account Binding (EAB) Key Identifier (KID) and HMAC Key from your - DigiCert account. 
- - ## FAQ @@ -325,17 +235,8 @@ DigiCert is a leading commercial Certificate Authority providing a wide range of - Reduce the impact of compromised certificates - Ensure systems stay up-to-date with certificate management practices - When configured, Infisical automatically handles certificate renewal for subscribers. - - Yes! You can register multiple ACME CAs in the same project: - - - Different providers for different domains or use cases - - Staging and production environments for the same provider - - Backup providers for redundancy - - Each subscriber can be configured to use a specific ACME CA based on your requirements. - + Yes. You can register multiple ACME CAs in the same project. diff --git a/docs/documentation/platform/pki/ca/digicert.mdx b/docs/documentation/platform/pki/ca/digicert.mdx new file mode 100644 index 0000000000..b83c863200 --- /dev/null +++ b/docs/documentation/platform/pki/ca/digicert.mdx @@ -0,0 +1,16 @@ +--- +title: "DigiCert" +description: "Learn how to connect Infisical to DigiCert to issue certificates." +--- + +## Concept + +Infisical can connect to [DigiCert](https://www.digicert.com/) using the [ACME-compatible CA integration](/documentation/platform/pki/ca/acme-ca) to issue certificates back to your end-entities. + +## Guide to Connecting Infisical to DigiCert CA + +To connect Infisical to DigiCert, follow the steps in the [ACME-compatible CA integration](/documentation/platform/pki/ca/acme-ca) guide but use the DigiCert **ACME Directory URL**: `https://acme.digicert.com/v2/acme/directory`. + +DigiCert requires **External Account Binding (EAB)** for all ACME registrations. You will need to obtain both a Key Identifier (KID) and an HMAC Key from your DigiCert account before registering the ACME CA in Infisical. + +DigiCert typically issues certificates with a 90-day validity period. 
diff --git a/docs/documentation/platform/pki/ca/external-ca.mdx b/docs/documentation/platform/pki/ca/external-ca.mdx index 1dc89ec96c..7e57d50a14 100644 --- a/docs/documentation/platform/pki/ca/external-ca.mdx +++ b/docs/documentation/platform/pki/ca/external-ca.mdx @@ -6,7 +6,7 @@ description: "Learn how to connect External Certificate Authorities with Infisic ## Concept -Infisical lets you integrate with External Certificate Authorities (CAs), allowing you to use existing PKI infrastructure or connect to public CAs to issue digital certificates for your end-entities. +Infisical lets you integrate with External Certificate Authorities (CAs), allowing you to use existing PKI infrastructure or connect to public CAs to issue certificates for your end-entities.
@@ -23,7 +23,7 @@ As shown above, these CAs commonly fall under two categories: - External Private CAs: CAs like AWS Private CA, HashiCorp Vault PKI, Azure ADCS, etc. that are privately owned and are used to issue certificates for internal services; these are often either cloud-hosted private CAs or on-prem / enterprise CAs. - External Public CAs: CAs like Let's Encrypt, DigiCert, GlobalSign, etc. that are publicly trusted and are used to issue certificates for public-facing services. -Note that Infisical can also act as an _ACME client_, allowing you to integrate upstream with any ACME-compatible CA to automate certificate issuance and renewal. +Note that Infisical can act as an _ACME client_, allowing you to integrate upstream with any [ACME-compatible CA](/documentation/platform/pki/ca/acme-ca) to automate certificate issuance and renewal. ## Workflow diff --git a/docs/documentation/platform/pki/ca/lets-encrypt.mdx b/docs/documentation/platform/pki/ca/lets-encrypt.mdx new file mode 100644 index 0000000000..75c7988de5 --- /dev/null +++ b/docs/documentation/platform/pki/ca/lets-encrypt.mdx @@ -0,0 +1,16 @@ +--- +title: "Let's Encrypt" +description: "Learn how to connect Infisical to Let's Encrypt to issue certificates." +--- + +## Concept + +Infisical can connect to [Let's Encrypt](https://letsencrypt.org/) using the [ACME-compatible CA integration](/documentation/platform/pki/ca/acme-ca) to issue certificates back to your end-entities. + +## Guide to Connecting Infisical to Let's Encrypt CA + +To connect Infisical to Let's Encrypt, follow the steps in the [ACME-compatible CA integration](/documentation/platform/pki/ca/acme-ca) guide but use the Let's Encrypt **ACME Directory URL**: `https://acme-v02.api.letsencrypt.org/directory`. + +Note that Let’s Encrypt issues 90-day certificates and enforces a limit of 50 certificates per registered domain per week. 
+ +We strongly recommend testing your setup against the Let's Encrypt staging environment first at the **ACME Directory URL** `https://acme-staging-v02.api.letsencrypt.org/directory` prior to switching to the production environment. This allows you to verify your DNS configuration and certificate issuance process without consuming production rate limits. diff --git a/docs/documentation/platform/pki/certificates/certificates.mdx b/docs/documentation/platform/pki/certificates/certificates.mdx index 05703beeee..abe1987500 100644 --- a/docs/documentation/platform/pki/certificates/certificates.mdx +++ b/docs/documentation/platform/pki/certificates/certificates.mdx @@ -22,10 +22,7 @@ where you can manage various aspects of its lifecycle including deployment to cl To issue a certificate, you must first create a [certificate profile](/documentation/platform/pki/certificates/profiles) and a [certificate template](/documentation/platform/pki/certificates/templates) to go along with it. The [enrollment method](/documentation/platform/pki/enrollment-methods/overview) configured on the certificate profile determines how a certificate is issued for it. -Refer to the documentation for each enrollment method below to learn more about how to issue certificates using it. - -- [API](/documentation/platform/pki/enrollment-methods/api): Issue a certificate over UI or by making an API request to Infisical. -- [EST](/documentation/platform/pki/enrollment-methods/est): Issue a certificate over the EST protocol. +Refer to the documentation for each enrollment method to learn more about how to issue certificates using it. 
## Guide to Renewing Certificates @@ -49,24 +46,33 @@ Note that server-driven certificate renewal is only available for certificates i A certificate can be considered for auto-renewal at time of issuance if the **Enable Auto-Renewal By Default** option is selected on its [certificate profile](/documentation/platform/pki/certificates/profiles) or after issuance by toggling this option manually. - For server-driven certificate renewal workflows, you can programmatically fetch the latest active certificate bundle for a certificate profile using the [Get Latest Active Certificate Bundle](/api-reference/endpoints/certificate-profiles/get-latest-active-bundle) API endpoint. - - This ensures you always retrieve the most current valid certificate, including any that have been automatically renewed, making it particularly useful for deployment pipelines and automation workflows where you don't want to track individual serial numbers. + For server-driven certificate renewal workflows, you can programmatically + fetch the latest active certificate bundle for a certificate profile using the + [Get Latest Active Certificate + Bundle](/api-reference/endpoints/certificate-profiles/get-latest-active-bundle) + API endpoint. This ensures you always retrieve the most current valid + certificate, including any that have been automatically renewed, making it + particularly useful for deployment pipelines and automation workflows where + you don't want to track individual serial numbers. The following examples demonstrate different approaches to certificate renewal: -- Using the ACME enrollment method, you may connect an ACME client like [certbot](https://certbot.eff.org/) to fetch back and renew certificates for Apache, Nginx, or other server. The ACME client will pursue a client-driven approach and submit certificate requests upon certificate expiration for you, saving renewed certificates back to the server's configuration. 
-- Using the ACME enrollment method, you may use [cert-manager](https://cert-manager.io/) with Infisical to issue and renew certificates for Kubernetes workloads; cert-manager will pursue a client-driven approach and submit certificate requests upon certificate expiration for you, saving renewed certificates back to Kubernetes secrets. -- Using the API enrollment method, you may push and auto-renew certificates to AWS and Azure using [certificate syncs](/documentation/platform/pki/certificate-syncs/overview). Certificates issued over the API enrollment method, where key pairs are generated server-side, are also eligible for server-side auto-renewal; once renewed, certificates are automatically pushed back to their sync destination. +- Using the [ACME enrollment method](/documentation/platform/pki/enrollment-methods/acme), you may connect an ACME client like [certbot](https://certbot.eff.org/) to fetch back and renew certificates for [Apache](/documentation/platform/pki/integration-guides/apache-certbot), [Nginx](/documentation/platform/pki/integration-guides/nginx-certbot), or other servers. The ACME client will pursue a client-driven approach and submit certificate requests upon certificate expiration for you, saving renewed certificates back to the server's configuration. +- Using the [ACME enrollment method](/documentation/platform/pki/enrollment-methods/acme), you may use [cert-manager](https://cert-manager.io/) with Infisical to issue and renew certificates for Kubernetes workloads; cert-manager will pursue a client-driven approach and submit certificate requests upon certificate expiration for you, saving renewed certificates back to Kubernetes secrets. +- Using the [API enrollment method](/documentation/platform/pki/enrollment-methods/api), you may push and auto-renew certificates to AWS and Azure using [certificate syncs](/documentation/platform/pki/certificate-syncs/overview). 
Certificates issued over the API enrollment method, where key pairs are generated server-side, are also eligible for server-side auto-renewal; once renewed, certificates are automatically pushed back to their sync destination. -## Guide to Exporting Certificates +## Guide to Downloading Certificates -In the following steps, we explore how to export certificates from Infisical in different formats for use in your applications and infrastructure. +In the following steps, we explore different options for exporting already-issued certificates from Infisical in different formats for use in your applications and infrastructure. -### Accessing the Export Certificate Modal +### Download Latest Profile Certificate -To export any certificate, first navigate to your project's certificate inventory and locate the certificate you want to export. Click on the **Export Certificate** option from the certificate's action menu. +You can download the latest certificate issued against a [certificate profile](/documentation/platform/pki/certificates/profiles) using the [latest certificate bundle](/api-reference/endpoints/certificate-profiles/get-latest-active-bundle) endpoint. + +### Download Specific Certificate + +To export a specific certificate, first navigate to your project's certificate inventory and locate the certificate you want to export. Click on the **Export Certificate** option from the certificate's action menu. 
![pki export certificate option](/images/platform/pki/certificate/cert-export-option.png) @@ -108,6 +114,7 @@ To export any certificate, first navigate to your project's certificate inventor ``` + @@ -158,6 +165,7 @@ To export any certificate, first navigate to your project's certificate inventor + diff --git a/docs/documentation/platform/pki/certificates/templates.mdx b/docs/documentation/platform/pki/certificates/templates.mdx index 38b5570ddc..b8d976961d 100644 --- a/docs/documentation/platform/pki/certificates/templates.mdx +++ b/docs/documentation/platform/pki/certificates/templates.mdx @@ -7,7 +7,7 @@ sidebarTitle: "Templates" A certificate template is a policy structure specifying permitted attributes for requested certificates. This includes constraints around subject naming conventions, SAN fields, key usages, and extended key usages. -Each certificate requested against a profile is validated against the template bound to that profile. If the request fails any criteria included in the template, the certificate is not issued. This helps administrators enforce uniformity and security standards across all issued certificates. +Each certificate requested against a [certificate profile](/documentation/platform/pki/certificates/profiles) is validated against the template bound to that profile. If the request fails any criteria included in the template, the certificate is not issued. This helps administrators enforce uniformity and security standards across all issued certificates. 
## Guide to Creating a Certificate Template diff --git a/docs/documentation/platform/pki/enrollment-methods/acme.mdx b/docs/documentation/platform/pki/enrollment-methods/acme.mdx index 559b0cab81..12c4779b5e 100644 --- a/docs/documentation/platform/pki/enrollment-methods/acme.mdx +++ b/docs/documentation/platform/pki/enrollment-methods/acme.mdx @@ -3,6 +3,62 @@ title: "Certificate Enrollment via ACME" sidebarTitle: "ACME" --- - - ACME-based certificate enrollment is currently under development and will be included in a future release. - +## Concept + +The ACME enrollment method allows you to issue and manage certificates against a specific [certificate profile](/documentation/platform/pki/certificates/profiles) using the [ACME protocol](https://en.wikipedia.org/wiki/Automatic_Certificate_Management_Environment). +This method is suitable for web servers, load balancers, and other general-purpose servers that can run an [ACME client](https://letsencrypt.org/docs/client-options/) for automated certificate management. + +Infisical's ACME enrollment method is based on [RFC 8555](https://datatracker.ietf.org/doc/html/rfc8555/). + +## Prerequisites + +Install an [ACME client](https://letsencrypt.org/docs/client-options/) onto your server. This client will handle [ACME challenges](https://letsencrypt.org/docs/challenge-types/) and request/renew certificates from Infisical. + +## Guide to Certificate Enrollment via ACME + +In the following steps, we explore how to issue a X.509 certificate using the ACME enrollment method. + + + + Create a [certificate + profile](/documentation/platform/pki/certificates/profiles) with **ACME** + selected as the enrollment method. + + ![pki acme config](/images/platform/pki/enrollment-methods/acme/acme-config.png) + + + + Once you've created the certificate profile, you can obtain its ACME configuration details by clicking the **Reveal ACME EAB** option on the profile. 
+ + ![pki acme eab config](/images/platform/pki/enrollment-methods/acme/acme-eab.png) + + From the ACME configuration, gather the following values: + + - ACME Directory URL: The URL that the ACME client will use to communicate with Infisical's ACME server. + - EAB Key Identifier (KID): A unique identifier that tells Infisical which ACME account is making the request. + - EAB Secret: A secret key that authenticates your ACME client with Infisical. + + + + Provide the **ACME Directory URL**, **EAB KID**, and **EAB Secret** from Step 2 to your ACME client to authenticate with Infisical and request a certificate. + + For example, if using [Certbot](https://certbot.eff.org/) as an ACME client, you can configure and start requesting certificates with the following command: + + ```bash + sudo certbot certonly \ + --standalone \ + --server "https://your-infisical-instance.com/api/v1/pki/certificate-profiles/{profile-id}/acme/directory" \ + --eab-kid "your-eab-kid" \ + --eab-hmac-key "your-eab-secret" \ + -d example.infisical.com \ + --email admin@example.com \ + --agree-tos \ + --non-interactive + ``` + + Certbot stores the private key and resulting leaf certificate and full certificate chain in `/etc/letsencrypt/live/{domain-name}/`. + + For client-specific setup and usage instructions, refer to the documentation for your ACME client. + + + diff --git a/docs/documentation/platform/pki/enrollment-methods/api.mdx b/docs/documentation/platform/pki/enrollment-methods/api.mdx index 4adcdc01b0..304bafefc1 100644 --- a/docs/documentation/platform/pki/enrollment-methods/api.mdx +++ b/docs/documentation/platform/pki/enrollment-methods/api.mdx @@ -5,7 +5,7 @@ sidebarTitle: "API" ## Concept -The API enrollment method allows you to issue certificates against a specific certificate profile over Web UI or by making an API request to Infisical. 
+The API enrollment method allows you to issue certificates against a specific [certificate profile](/documentation/platform/pki/certificates/profiles) over Web UI or by making an API request to Infisical. ## Guide to Certificate Enrollment via API @@ -15,7 +15,7 @@ In the following steps, we explore how to issue a X.509 certificate using the AP - + Create a [certificate profile](/documentation/platform/pki/certificates/profiles) with **API** selected as the enrollment method. @@ -54,7 +54,7 @@ Here, select the certificate profile from step 1 that will be used to issue the - + To create a certificate [profile](/documentation/platform/pki/certificates/profiles), make an API request to the [Create Certificate Profile](/api-reference/endpoints/certificate-profiles/create) API endpoint. diff --git a/docs/documentation/platform/pki/enrollment-methods/est.mdx b/docs/documentation/platform/pki/enrollment-methods/est.mdx index a4e463a2f4..35e7705955 100644 --- a/docs/documentation/platform/pki/enrollment-methods/est.mdx +++ b/docs/documentation/platform/pki/enrollment-methods/est.mdx @@ -5,7 +5,7 @@ sidebarTitle: "EST" ## Concept -The API enrollment method allows you to issue and manage certificates against a specific certificate profile using the [EST protocol](https://en.wikipedia.org/wiki/Enrollment_over_Secure_Transport). +The EST enrollment method allows you to issue and manage certificates against a specific [certificate profile](/documentation/platform/pki/certificates/profiles) using the [EST protocol](https://en.wikipedia.org/wiki/Enrollment_over_Secure_Transport). This method is suitable for environments requiring strong authentication and encrypted communication, such as in IoT, enterprise networks, and secure web services. 
Infisical's EST service is based on [RFC 7030](https://datatracker.ietf.org/doc/html/rfc7030) and implements the following endpoints: @@ -32,7 +32,7 @@ and structured under `https://app.infisical.com:8443/.well-known/est/{profile_id In the following steps, we explore how to issue a X.509 certificate using the EST enrollment method. - + Create a [certificate profile](/documentation/platform/pki/certificates/profiles) with **EST** selected as the enrollment method and fill in EST-specific configuration. diff --git a/docs/documentation/platform/pki/enrollment-methods/overview.mdx b/docs/documentation/platform/pki/enrollment-methods/overview.mdx index f1af9375d1..df203c35cc 100644 --- a/docs/documentation/platform/pki/enrollment-methods/overview.mdx +++ b/docs/documentation/platform/pki/enrollment-methods/overview.mdx @@ -5,7 +5,10 @@ sidebarTitle: "Overview" Enrollment methods determine how certificates are issued and managed for a [certificate profile](/documentation/platform/pki/certificates/profiles). -Refer to the documentation for each enrollment method to learn more about how to enroll certificates using it. +Refer to the documentation for each enrollment method below to learn more about how to enroll certificates using it. - [API](/documentation/platform/pki/enrollment-methods/api): Enroll certificates via API. -- [EST](/documentation/platform/pki/enrollment-methods/est): Enroll certificates via EST protocol. +- [ACME](/documentation/platform/pki/enrollment-methods/acme): Enroll certificates using the ACME protocol. +- [EST](/documentation/platform/pki/enrollment-methods/est): Enroll certificates using the EST protocol. + +Note that beyond using an enrollment method, you can also deliver a certificate to a target destination using supported [certificate syncs](https://infisical.com/docs/documentation/platform/pki/certificate-syncs/overview). 
diff --git a/docs/documentation/platform/pki/overview.mdx b/docs/documentation/platform/pki/overview.mdx index 9aba032b1f..9d208039b5 100644 --- a/docs/documentation/platform/pki/overview.mdx +++ b/docs/documentation/platform/pki/overview.mdx @@ -12,7 +12,7 @@ Core capabilities include: - [Private CA](/documentation/platform/pki/ca/private-ca): Create and manage your own private CA hierarchy including root and intermediate CAs. - [External CA integration](/documentation/platform/pki/ca/external-ca): Integrate with external public and private CAs including [Azure ADCS](/documentation/platform/pki/ca/azure-adcs) and [ACME-compatible CAs](/documentation/platform/pki/ca/acme-ca) like Let's Encrypt and DigiCert. -- [Certificate Enrollment](/documentation/platform/pki/enrollment-methods/overview): Support enrollment methods including [API](/documentation/platform/pki/enrollment-methods/api), ACME, [EST](/documentation/platform/pki/enrollment-methods/est), and more to automate certificate issuance for services, devices, and workloads. +- [Certificate Enrollment](/documentation/platform/pki/enrollment-methods/overview): Support enrollment methods including [API](/documentation/platform/pki/enrollment-methods/api), [ACME](/documentation/platform/pki/enrollment-methods/acme), [EST](/documentation/platform/pki/enrollment-methods/est), and more to automate certificate issuance for services, devices, and workloads. - Certificate Inventory: Track and monitor issued X.509 certificates, maintaining a comprehensive inventory of all active and expired certificates. - Certificate Lifecycle Automation: Automate issuance, [renewal](/documentation/platform/pki/certificates/certificates#guide-to-renewing-certificates), and [revocation](/documentation/platform/pki/certificates/certificates#guide-to-revoking-certificates) with policy-based workflows, ensuring certificates remain valid, compliant, and up to date across your infrastructure. 
- [Certificate Syncs](/documentation/platform/pki/certificate-syncs/overview): Push certificates to cloud certificate managers like [AWS Certificate Manager](/documentation/platform/pki/certificate-syncs/aws-certificate-manager) and [Azure Key Vault](/documentation/platform/pki/certificate-syncs/azure-key-vault). diff --git a/docs/images/platform/pki/ca/external-ca/create-external-ca-button.png b/docs/images/platform/pki/ca/external-ca/create-external-ca-button.png index bda7822a0b..67df534cb2 100644 Binary files a/docs/images/platform/pki/ca/external-ca/create-external-ca-button.png and b/docs/images/platform/pki/ca/external-ca/create-external-ca-button.png differ diff --git a/docs/images/platform/pki/ca/external-ca/create-external-ca-form.png b/docs/images/platform/pki/ca/external-ca/create-external-ca-form.png index bc32c23f69..dc993d9f0a 100644 Binary files a/docs/images/platform/pki/ca/external-ca/create-external-ca-form.png and b/docs/images/platform/pki/ca/external-ca/create-external-ca-form.png differ diff --git a/docs/images/platform/pki/ca/external-ca/external-ca-list.png b/docs/images/platform/pki/ca/external-ca/external-ca-list.png deleted file mode 100644 index 4ef05c0598..0000000000 Binary files a/docs/images/platform/pki/ca/external-ca/external-ca-list.png and /dev/null differ diff --git a/docs/images/platform/pki/enrollment-methods/acme/acme-config.png b/docs/images/platform/pki/enrollment-methods/acme/acme-config.png new file mode 100644 index 0000000000..11ea8b075b Binary files /dev/null and b/docs/images/platform/pki/enrollment-methods/acme/acme-config.png differ diff --git a/docs/images/platform/pki/enrollment-methods/acme/acme-eab.png b/docs/images/platform/pki/enrollment-methods/acme/acme-eab.png new file mode 100644 index 0000000000..d2bffd0012 Binary files /dev/null and b/docs/images/platform/pki/enrollment-methods/acme/acme-eab.png differ diff --git a/docs/integrations/secret-syncs/vercel.mdx b/docs/integrations/secret-syncs/vercel.mdx 
index 74cffbc11b..de83a20681 100644 --- a/docs/integrations/secret-syncs/vercel.mdx +++ b/docs/integrations/secret-syncs/vercel.mdx @@ -43,6 +43,9 @@ description: "Learn how to configure a Vercel Sync for Infisical." - **Overwrite Destination Secrets**: Removes any secrets at the destination endpoint not present in Infisical. - **Import Secrets (Prioritize Infisical)**: Imports secrets from the destination endpoint before syncing, prioritizing values from Infisical over Vercel when keys conflict. - **Import Secrets (Prioritize Vercel)**: Imports secrets from the destination endpoint before syncing, prioritizing values from Vercel over Infisical when keys conflict. + + Vercel does not expose the values of [sensitive environment variables](https://vercel.com/docs/environment-variables/sensitive-environment-variables), so Infisical cannot import them during the initial sync. As a result, these secrets are created in Infisical with empty values. After the first sync, you'll need to manually re-enter their values in Infisical to ensure both platforms stay aligned. + - **Key Schema**: Template that determines how secret names are transformed when syncing, using `{{secretKey}}` as a placeholder for the original secret name and `{{environment}}` for the environment. We highly recommend using a Key Schema to ensure that Infisical only manages the specific keys you intend, keeping everything else untouched. @@ -149,4 +152,5 @@ description: "Learn how to configure a Vercel Sync for Infisical." } ``` + diff --git a/docs/sdks/languages/dotnet.mdx b/docs/sdks/languages/dotnet.mdx index 7b3d128853..b02aa9d752 100644 --- a/docs/sdks/languages/dotnet.mdx +++ b/docs/sdks/languages/dotnet.mdx @@ -118,6 +118,22 @@ var _ = await sdk.Auth().UniversalAuth().LoginAsync( - `clientId` (string): The client ID of your Machine Identity. - `clientSecret` (string): The client secret of your Machine Identity. 
+### LDAP Auth + +#### Authenticating +```cs +var _ = await sdk.Auth().LdapAuth().LoginAsync( + "IDENTITY_ID", + "USERNAME", + "PASSWORD" +); +``` + +**Parameters:** +- `identityId` (string): The ID of your Machine Identity . +- `username` (string): The LDAP username for authentication. +- `password` (string): The LDAP password for authentication. + ### `Secrets()` The `Secrets()` sub-class handles operations related to the Infisical secrets management product. diff --git a/docs/sdks/languages/go.mdx b/docs/sdks/languages/go.mdx index 26f8a36d3d..88dc6cf3b1 100644 --- a/docs/sdks/languages/go.mdx +++ b/docs/sdks/languages/go.mdx @@ -284,6 +284,114 @@ if err != nil { } ``` +#### JWT Auth + + + Please note that this authentication method requires a valid JWT token from your JWT issuer. Please [read + more](/documentation/platform/identities/jwt-auth) about this authentication + method. + + +**Using the SDK** + +```go +credential, err := client.Auth().JwtAuthLogin("MACHINE_IDENTITY_ID", "JWT_TOKEN") + +if err != nil { + fmt.Println(err) + os.Exit(1) +} +``` + +#### LDAP Auth + + + Please note that this authentication method requires LDAP credentials. Please [read + more](/documentation/platform/identities/ldap-auth/general) about this authentication + method. + + +**Using environment variables** + +You can set the `INFISICAL_LDAP_AUTH_IDENTITY_ID` environment variable and pass empty string for the identity ID: + +```go +credential, err := client.Auth().LdapAuthLogin("", "LDAP_USERNAME", "LDAP_PASSWORD") + +if err != nil { + fmt.Println(err) + os.Exit(1) +} +``` + +**Using the SDK directly** + +```go +credential, err := client.Auth().LdapAuthLogin("MACHINE_IDENTITY_ID", "LDAP_USERNAME", "LDAP_PASSWORD") + +if err != nil { + fmt.Println(err) + os.Exit(1) +} +``` + +#### OCI Auth + + + Please note that this authentication method will only work if you're running + your application on Oracle Cloud Infrastructure. 
Please [read + more](/documentation/platform/identities/oci-auth) about this authentication + method. + + +**Using environment variables** + +You can set the `INFISICAL_OCI_AUTH_IDENTITY_ID` environment variable and omit the `IdentityID` field: + +```go +credential, err := client.Auth().OciAuthLogin(infisical.OciAuthLoginOptions{ + UserID: "USER_OCID", + TenancyID: "TENANCY_OCID", + Fingerprint: "FINGERPRINT", + PrivateKey: "PRIVATE_KEY", + Region: "REGION", +}) + +if err != nil { + fmt.Println(err) + os.Exit(1) +} +``` + +**Using the SDK directly** + +```go +credential, err := client.Auth().OciAuthLogin(infisical.OciAuthLoginOptions{ + IdentityID: "MACHINE_IDENTITY_ID", + UserID: "USER_OCID", + TenancyID: "TENANCY_OCID", + Fingerprint: "FINGERPRINT", + PrivateKey: "PRIVATE_KEY", + Region: "REGION", + Passphrase: nil, // Optional: pointer to string if your private key has a passphrase +}) + +if err != nil { + fmt.Println(err) + os.Exit(1) +} +``` + +**OciAuthLoginOptions fields:** + +- `IdentityID` (string) - Your Infisical Machine Identity ID. Can be set via `INFISICAL_OCI_AUTH_IDENTITY_ID` environment variable. +- `UserID` (string) - Your OCI user OCID. +- `TenancyID` (string) - Your OCI tenancy OCID. +- `Fingerprint` (string) - Your OCI API key fingerprint. +- `PrivateKey` (string) - Your OCI private key (PEM format). +- `Region` (string) - Your OCI region (e.g., `us-ashburn-1`). +- `Passphrase` (*string) - Optional: pointer to passphrase string if your private key is encrypted. + ## Secrets ### List Secrets diff --git a/docs/self-hosting/ee.mdx b/docs/self-hosting/ee.mdx index 83b2772e49..9c3e7d644c 100644 --- a/docs/self-hosting/ee.mdx +++ b/docs/self-hosting/ee.mdx @@ -14,14 +14,13 @@ This guide walks through how you can use these paid features on a self-hosted in Once purchased, you will be issued a license key. 
- Depending on whether or not the environment where Infisical is deployed has internet access, you may be issued a regular license or an offline license. + Set your license key as the value of the **LICENSE_KEY** environment variable within your Infisical instance. - - Assign the issued license key to the `LICENSE_KEY` environment variable in your Infisical instance. - - Your Infisical instance will need to communicate with the Infisical license server to validate the license key. + - Your Infisical instance will need to communicate with the Infisical license server to validate the license key. If you want to limit outgoing connections only to the Infisical license server, you can use the following IP addresses: `13.248.249.247` and `35.71.190.59` @@ -29,16 +28,18 @@ This guide walks through how you can use these paid features on a self-hosted in - - Assign the issued license key to the `LICENSE_KEY_OFFLINE` environment variable in your Infisical instance. + - Assign the issued offline license key to the `LICENSE_KEY` environment variable in your Infisical instance. + + - The system will automatically detect that it's an offline license based on the key format. - How you set the environment variable will depend on the deployment method you used. Please refer to the documentation of your deployment method for specific instructions. + While the LICENSE_KEY_OFFLINE environment variable continues to be supported for compatibility with existing configurations, we recommend transitioning to LICENSE_KEY for all license types going forward. - Once your instance starts up, the license key will be validated and you’ll be able to use the paid features. + Once your instance starts up, the license key will be validated and you'll be able to use the paid features. However, when the license expires, Infisical will continue to run, but EE features will be disabled until the license is renewed or a new one is purchased. 
- + diff --git a/docs/self-hosting/guides/production-hardening.mdx b/docs/self-hosting/guides/production-hardening.mdx index dfd7b575fd..8982d6b622 100644 --- a/docs/self-hosting/guides/production-hardening.mdx +++ b/docs/self-hosting/guides/production-hardening.mdx @@ -694,4 +694,20 @@ For enterprise deployments requiring compliance certifications: ### Standards Compliance -**FIPS 140-3 Compliance**. Infisical is actively working on FIPS 140-3 compliance to meet U.S. and Canadian government cryptographic standards. This will provide validated cryptographic modules for organizations requiring certified encryption implementations. +#### FIPS 140-3 Compliance + +Infisical is compliant with FIPS 140-3, meeting U.S. and Canadian government cryptographic standards through validated cryptographic modules. +This certification is designed for organizations that require government-approved encryption implementations. +To deploy a FIPS-compliant instance, use the [infisical/infisical-fips](https://hub.docker.com/r/infisical/infisical-fips) Docker image, available to Enterprise customers. +Our FIPS 140-3 attestation letter is available in the [Infisical Trust Center](https://trust.infisical.com/). + +#### SOC 2 Compliance + +Infisical is SOC 2 compliant, demonstrating adherence to rigorous security, availability, and confidentiality standards established by the American Institute of CPAs (AICPA). +This certification validates our security controls and operational practices for organizations requiring third-party audited security assurance. Our SOC 2 report is available in the [Infisical Trust Center](https://trust.infisical.com/). + +#### HIPAA Compliance + +Infisical is HIPAA compliant, meeting the security and privacy requirements of the Health Insurance Portability and Accountability Act. +This compliance framework ensures appropriate safeguards for protected health information (PHI) for healthcare organizations and their business associates. 
+Our HIPAA certification is available in the [Infisical Trust Center](https://trust.infisical.com/). \ No newline at end of file diff --git a/frontend/public/locales/en/translations.json b/frontend/public/locales/en/translations.json index de3b99f11a..fa23b9630b 100644 --- a/frontend/public/locales/en/translations.json +++ b/frontend/public/locales/en/translations.json @@ -41,7 +41,6 @@ "common": { "head-title": "{{title}} | Infisical", "error_project-already-exists": "A project with this name already exists.", - "no-mobile": " To use Infisical, please log in through a device with larger dimensions. ", "email": "Email", "password": "Password", "first-name": "First Name", diff --git a/frontend/public/locales/es/translations.json b/frontend/public/locales/es/translations.json index d7ee331f41..1d217b89d5 100644 --- a/frontend/public/locales/es/translations.json +++ b/frontend/public/locales/es/translations.json @@ -41,7 +41,6 @@ "common": { "head-title": "{{title}} | Infisical", "error_project-already-exists": "Ya existe un proyecto con este nombre.", - "no-mobile": "Para usar Infisical, inicia sesión con un dispositivo de mayores dimesiones.", "email": "Correo electrónico", "password": "Contraseña", "first-name": "Nombre", diff --git a/frontend/public/locales/fr/translations.json b/frontend/public/locales/fr/translations.json index 60d5cf8cf0..538b700c65 100644 --- a/frontend/public/locales/fr/translations.json +++ b/frontend/public/locales/fr/translations.json @@ -41,7 +41,6 @@ "common": { "head-title": "{{title}} | Infisical", "error_project-already-exists": "Un projet avec ce nom existe déjà.", - "no-mobile": " Pour utiliser Infisical, veuillez vous connecter avec un appareil avec des dimensions plus grandes. 
", "email": "Email", "password": "Mot de passe", "first-name": "Prénom", diff --git a/frontend/public/locales/ko/translations.json b/frontend/public/locales/ko/translations.json index e8169eabad..a9eb653cc2 100644 --- a/frontend/public/locales/ko/translations.json +++ b/frontend/public/locales/ko/translations.json @@ -30,7 +30,6 @@ "common": { "head-title": "{{title}} | Infisical", "error_project-already-exists": "동일한 이름을 가진 프로젝트가 이미 존재해요.", - "no-mobile": " Infisical을 사용하려면, 큰 화면을 가진 디바이스로 로그인하여 주세요.", "email": "메일", "password": "비밀번호", "first-name": "이름", diff --git a/frontend/public/locales/pt-BR/translations.json b/frontend/public/locales/pt-BR/translations.json index a491d3d74a..9a0ab6768f 100644 --- a/frontend/public/locales/pt-BR/translations.json +++ b/frontend/public/locales/pt-BR/translations.json @@ -41,7 +41,6 @@ "common": { "head-title": "{{title}} | Infisical", "error_project-already-exists": "Já exite um projeto com este nome.", - "no-mobile": "Para usar o Infisical, faça o login através de um dispositivo com dimensões maiores.", "email": "Email", "password": "Senha", "first-name": "Primeiro Nome", diff --git a/frontend/public/locales/tr/translations.json b/frontend/public/locales/tr/translations.json index 93f228f96f..5a564eb605 100644 --- a/frontend/public/locales/tr/translations.json +++ b/frontend/public/locales/tr/translations.json @@ -41,7 +41,6 @@ "common": { "head-title": "{{title}} | Infisical", "error_project-already-exists": "Bu isimle bir proje zaten mevcut.", - "no-mobile": " Infisical'ı kullanmak için, lütfen daha büyük boyutlara sahip bir cihaz üzerinden giriş yapın. 
", "email": "Email", "password": "Şifre", "first-name": "Adınız", diff --git a/frontend/src/components/project/ProjectOverviewChangeSection.tsx b/frontend/src/components/project/ProjectOverviewChangeSection.tsx index 455b0806a9..dba9179151 100644 --- a/frontend/src/components/project/ProjectOverviewChangeSection.tsx +++ b/frontend/src/components/project/ProjectOverviewChangeSection.tsx @@ -76,8 +76,8 @@ export const ProjectOverviewChangeSection = ({ showSlugField = false }: Props) = return (
-
-

Project Overview

+
+

Project Overview

-
- -

- {` ${t("common.no-mobile")} `} -

-
); diff --git a/frontend/src/layouts/KmsLayout/KmsLayout.tsx b/frontend/src/layouts/KmsLayout/KmsLayout.tsx index c4fdf32eb0..eef78000a4 100644 --- a/frontend/src/layouts/KmsLayout/KmsLayout.tsx +++ b/frontend/src/layouts/KmsLayout/KmsLayout.tsx @@ -13,7 +13,7 @@ export const KmsLayout = () => { const location = useLocation(); return ( -
+
{ const { popUp, handlePopUpToggle } = usePopUp(["createOrg"] as const); - const { t } = useTranslation(); - const containerHeight = config.pageFrameContent ? "h-[94vh]" : "h-screen"; const { data: serverDetails, isLoading } = useFetchServerStatus(); @@ -38,7 +33,7 @@ export const OrganizationLayout = () => { <>