Merge remote-tracking branch 'origin/main' into feat/PKI-29

This commit is contained in:
Carlos Monastyrski
2025-11-20 16:10:49 -03:00
171 changed files with 5153 additions and 1991 deletions

.gitignore (vendored)
View File

@@ -74,3 +74,4 @@ cli/test/infisical-merge
backend/bdd/.bdd-infisical-bootstrap-result.json
/npm/bin
__pycache__

View File

@@ -3,6 +3,7 @@ import os
import pathlib
import typing
from copy import deepcopy
import httpx
from behave.runner import Context
@@ -185,28 +186,33 @@ def bootstrap_infisical(context: Context):
def before_all(context: Context):
base_vars = {
"BASE_URL": BASE_URL,
"PEBBLE_URL": PEBBLE_URL,
}
if BOOTSTRAP_INFISICAL:
details = bootstrap_infisical(context)
- context.vars = {
- "BASE_URL": BASE_URL,
- "PEBBLE_URL": PEBBLE_URL,
+ vars = base_vars | {
"PROJECT_ID": details["project"]["id"],
"CERT_CA_ID": details["ca"]["id"],
"CERT_TEMPLATE_ID": details["cert_template"]["id"],
"AUTH_TOKEN": details["auth_token"],
}
else:
- context.vars = {
- "BASE_URL": BASE_URL,
- "PEBBLE_URL": PEBBLE_URL,
+ vars = base_vars | {
"PROJECT_ID": PROJECT_ID,
"CERT_CA_ID": CERT_CA_ID,
"CERT_TEMPLATE_ID": CERT_TEMPLATE_ID,
"AUTH_TOKEN": AUTH_TOKEN,
}
context._initial_vars = vars
context.http_client = httpx.Client(base_url=BASE_URL)
def before_scenario(context: Context, scenario: typing.Any):
context.vars = deepcopy(context._initial_vars)
def after_scenario(context: Context, scenario: typing.Any):
if hasattr(context, "web_server"):
context.web_server.shutdown_and_server_close()

View File

@@ -221,7 +221,6 @@ Feature: Access Control
| order | .authorizations[0].uri | auth_uri | {auth_uri} | |
| order | .authorizations[0].body.challenges[0].url | challenge_uri | {challenge_uri} | {} |
Scenario Outline: URL mismatch
Given I have an ACME cert profile as "acme_profile"
When I have an ACME client connecting to "{BASE_URL}/api/v1/pki/acme/profiles/{acme_profile.id}/directory"
@@ -271,3 +270,52 @@ Feature: Access Control
| order | .authorizations[0].uri | auth_uri | {auth_uri} | https://example.com/acmes/auths/FOOBAR | URL mismatch in the protected header |
| order | .authorizations[0].body.challenges[0].url | challenge_uri | {challenge_uri} | BAD | Invalid URL in the protected header |
| order | .authorizations[0].body.challenges[0].url | challenge_uri | {challenge_uri} | https://example.com/acmes/challenges/FOOBAR | URL mismatch in the protected header |
Scenario Outline: Send KID and JWK at the same time
Given I have an ACME cert profile as "acme_profile"
When I have an ACME client connecting to "{BASE_URL}/api/v1/pki/acme/profiles/{acme_profile.id}/directory"
Then I register a new ACME account with email fangpen@infisical.com and EAB key id "{acme_profile.eab_kid}" with secret "{acme_profile.eab_secret}" as acme_account
And I memorize acme_account.uri with jq "capture("/(?<id>[^/]+)$") | .id" as account_id
When I create certificate signing request as csr
Then I add names to certificate signing request csr
"""
{
"COMMON_NAME": "localhost"
}
"""
Then I create a RSA private key pair as cert_key
And I sign the certificate signing request csr with private key cert_key and output it as csr_pem in PEM format
And I submit the certificate signing request PEM csr_pem certificate order to the ACME server as order
And I peak and memorize the next nonce as nonce_value
And I memorize <src_var> with jq "<jq>" as <dest_var>
When I send a raw ACME request to "<url>"
"""
{
"protected": {
"alg": "RS256",
"nonce": "{nonce_value}",
"url": "<url>",
"kid": "{acme_account.uri}",
"jwk": {
"n": "mmEWxUv2lUYDZe_M2FXJ_WDXgHoEG7PVvg-dfz1STzyMwx0qvM66KMenXSyVA0r-_Ssb6p8VexSWGOFKskM4ryKUihn2KNH5e8nXZBqzqYeKQ8vqaCdaWzTxFI1dg0xhk0CWptkZHxpRpLalztFJ1Pq7L2qvQOM2YT7wPYbwQhpaSiVNXAb1W4FwAPyC04v1mHehvST-esaDT7j_5-eU5cCcmyi4_g5nBawcinOjj5o3VCg4X8UjK--AjhAyYHx1nRMr-7xk4x-0VIpQ_OODjLB3WzN8s1YEb0Jx5Bv1JyeCw35zahqs3fAFyRje-p5ENk9NCxfz5x9ZGkszkkNt0Q",
"e": "AQAB",
"kty": "RSA"
}
},
"payload": {}
}
"""
Then the value response.status_code should be equal to 400
And the value response with jq ".status" should be equal to 400
And the value response with jq ".type" should be equal to "urn:ietf:params:acme:error:malformed"
And the value response with jq ".detail" should be equal to "Both JWK and KID are provided in the protected header"
Examples: Endpoints
| src_var | jq | dest_var | url |
| order | . | not_used | {BASE_URL}/api/v1/pki/acme/profiles/{acme_profile.id}/accounts/{account_id}/orders |
| order | . | not_used | {BASE_URL}/api/v1/pki/acme/profiles/{acme_profile.id}/new-order |
| order | . | not_used | {order.uri} |
| order | . | not_used | {order.uri}/finalize |
| order | . | not_used | {order.uri}/certificate |
| order | .authorizations[0].uri | auth_uri | {auth_uri} |
| order | .authorizations[0].body.challenges[0].url | challenge_uri | {challenge_uri} |
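These scenarios pin down RFC 8555 §6.2 behavior: a JWS protected header must carry exactly one of `jwk` or `kid`. A minimal TypeScript sketch of the kind of server-side check they exercise (type and function names here are illustrative, not the actual handler):

```ts
// Illustrative mutual-exclusion check for the ACME protected header (RFC 8555 §6.2).
type AcmeProtectedHeader = {
  alg: string;
  nonce: string;
  url: string;
  jwk?: Record<string, string>;
  kid?: string;
};

const checkAccountBinding = (header: AcmeProtectedHeader) => {
  if (header.jwk && header.kid) {
    // Matches the assertions above: HTTP 400 with an ACME "malformed" problem document.
    return {
      status: 400,
      type: "urn:ietf:params:acme:error:malformed",
      detail: "Both JWK and KID are provided in the protected header"
    };
  }
  return null; // at most one of jwk/kid present — acceptable here
};
```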

View File

@@ -6,13 +6,32 @@ Feature: Account
Then I register a new ACME account with email fangpen@infisical.com and EAB key id "{acme_profile.eab_kid}" with secret "{acme_profile.eab_secret}" as acme_account
And the value acme_account.uri with jq "." should match pattern {BASE_URL}/api/v1/pki/acme/profiles/{acme_profile.id}/accounts/(.+)
Scenario: Create a new account with the same key pair twice
Given I have an ACME cert profile as "acme_profile"
When I have an ACME client connecting to "{BASE_URL}/api/v1/pki/acme/profiles/{acme_profile.id}/directory"
Then I register a new ACME account with email fangpen@infisical.com and EAB key id "{acme_profile.eab_kid}" with secret "{acme_profile.eab_secret}" as acme_account
And I memorize acme_account.uri as kid
And I register a new ACME account with email fangpen@infisical.com and EAB key id "{acme_profile.eab_kid}" with secret "{acme_profile.eab_secret}" as acme_account2
And the value error.__class__.__name__ should be equal to "ConflictError"
And the value error.location should be equal to "{kid}"
Scenario: Find an existing account
Given I have an ACME cert profile as "acme_profile"
When I have an ACME client connecting to "{BASE_URL}/api/v1/pki/acme/profiles/{acme_profile.id}/directory"
Then I register a new ACME account with email fangpen@infisical.com and EAB key id "{acme_profile.eab_kid}" with secret "{acme_profile.eab_secret}" as acme_account
And I memorize acme_account.uri as account_uri
- And I find the existing ACME account with email fangpen@infisical.com and EAB key id "{acme_profile.eab_kid}" with secret "{acme_profile.eab_secret}" as acme_account
- And the value acme_account.uri should be equal to "{account_uri}"
+ And I find the existing ACME account with email fangpen@infisical.com and EAB key id "{acme_profile.eab_kid}" with secret "{acme_profile.eab_secret}" as retrieved_account
+ And the value retrieved_account.uri should be equal to "{account_uri}"
# Note: This is a very special case for cert-manager.
Scenario: Create a new account with EAB then retrieve it without EAB
Given I have an ACME cert profile as "acme_profile"
When I have an ACME client connecting to "{BASE_URL}/api/v1/pki/acme/profiles/{acme_profile.id}/directory"
Then I register a new ACME account with email fangpen@infisical.com and EAB key id "{acme_profile.eab_kid}" with secret "{acme_profile.eab_secret}" as acme_account
And I memorize acme_account.uri as account_uri
And I find the existing ACME account without EAB as retrieved_account
And the value error with should be absent
And the value retrieved_account.uri should be equal to "{account_uri}"
Scenario: Create a new account without EAB
Given I have an ACME cert profile as "acme_profile"

View File

@@ -9,6 +9,9 @@ Feature: Directory
{
"newNonce": "{BASE_URL}/api/v1/pki/acme/profiles/{acme_profile.id}/new-nonce",
"newAccount": "{BASE_URL}/api/v1/pki/acme/profiles/{acme_profile.id}/new-account",
"newOrder": "{BASE_URL}/api/v1/pki/acme/profiles/{acme_profile.id}/new-order"
"newOrder": "{BASE_URL}/api/v1/pki/acme/profiles/{acme_profile.id}/new-order",
"meta": {
"externalAccountRequired": true
}
}
"""

View File

@@ -387,6 +387,9 @@ def register_account_with_eab(
):
acme_client = context.acme_client
account_public_key = acme_client.net.key.public_key()
if not only_return_existing:
# clear the account in case we want to register twice
acme_client.net.account = None
if hasattr(context, "alt_eab_url"):
eab_directory = messages.Directory.from_json(
{"newAccount": context.alt_eab_url}
@@ -406,8 +409,14 @@ def register_account_with_eab(
only_return_existing=only_return_existing,
)
try:
- context.vars[account_var] = acme_client.new_account(registration)
+ if not only_return_existing:
+     context.vars[account_var] = acme_client.new_account(registration)
+ else:
+     context.vars[account_var] = acme_client.query_registration(
+         acme_client.net.account
+     )
except Exception as exp:
logger.error(f"Failed to register: {exp}", exc_info=True)
context.vars["error"] = exp
@@ -434,6 +443,17 @@ def step_impl(context: Context, email: str, kid: str, secret: str, account_var:
)
@then("I find the existing ACME account without EAB as {account_var}")
def step_impl(context: Context, account_var: str):
acme_client = context.acme_client
# registration = messages.RegistrationResource.from_json(dict(uri=""))
registration = acme_client.net.account
try:
context.vars[account_var] = acme_client.query_registration(registration)
except Exception as exp:
context.vars["error"] = exp
@then("I register a new ACME account with email {email} without EAB")
def step_impl(context: Context, email: str):
acme_client = context.acme_client
@@ -600,6 +620,19 @@ def step_impl(context: Context, var_path: str, jq_query: str):
)
@then("the value {var_path} with should be absent")
def step_impl(context: Context, var_path: str):
try:
value = eval_var(context, var_path)
except Exception as exp:
if isinstance(exp, KeyError):
return
raise
assert False, (
f"value at {var_path!r} should be absent, but we got this instead: {value!r}"
)
@then('the value {var_path} with jq "{jq_query}" should be equal to {expected}')
def step_impl(context: Context, var_path: str, jq_query: str, expected: str):
value, result = apply_value_with_jq(
@@ -615,13 +648,14 @@ def step_impl(context: Context, var_path: str, jq_query: str, expected: str):
@then('the value {var_path} with jq "{jq_query}" should match pattern {regex}')
def step_impl(context: Context, var_path: str, jq_query: str, regex: str):
actual_regex = replace_vars(regex, context.vars)
value, result = apply_value_with_jq(
context=context,
var_path=var_path,
jq_query=jq_query,
)
- assert re.match(replace_vars(regex, context.vars), result), (
-     f"{json.dumps(value)!r} with jq {jq_query!r}, the result {json.dumps(result)!r} does not match {regex!r}"
+ assert re.match(actual_regex, result), (
+     f"{json.dumps(value)!r} with jq {jq_query!r}, the result {json.dumps(result)!r} does not match {actual_regex!r}"
)

View File

@@ -15,6 +15,7 @@ from josepy import JSONObjectWithFields
ACC_KEY_BITS = 2048
ACC_KEY_PUBLIC_EXPONENT = 65537
NOCK_API_PREFIX = "/api/__bdd_nock__"
logger = logging.getLogger(__name__)
faker = Faker()
@@ -265,7 +266,7 @@ def x509_cert_to_dict(cert: x509.Certificate) -> dict:
def define_nock(context: Context, definitions: list[dict]):
jwt_token = context.vars["AUTH_TOKEN"]
response = context.http_client.post(
"/api/v1/bdd-nock/define",
f"{NOCK_API_PREFIX}/define",
headers=dict(authorization="Bearer {}".format(jwt_token)),
json=dict(definitions=definitions),
)
@@ -275,7 +276,7 @@ def define_nock(context: Context, definitions: list[dict]):
def restore_nock(context: Context):
jwt_token = context.vars["AUTH_TOKEN"]
response = context.http_client.post(
"/api/v1/bdd-nock/restore",
f"{NOCK_API_PREFIX}/restore",
headers=dict(authorization="Bearer {}".format(jwt_token)),
json=dict(),
)
@@ -285,7 +286,7 @@ def restore_nock(context: Context):
def clean_all_nock(context: Context):
jwt_token = context.vars["AUTH_TOKEN"]
response = context.http_client.post(
"/api/v1/bdd-nock/clean-all",
f"{NOCK_API_PREFIX}/clean-all",
headers=dict(authorization="Bearer {}".format(jwt_token)),
json=dict(),
)

View File

@@ -1,6 +1,8 @@
{
"watch": ["src"],
"watch": [
"src"
],
"ext": ".ts,.js",
"ignore": [],
"exec": "tsx ./src/main.ts | pino-pretty --colorize --colorizeObjects --singleLine"
}
"exec": "tsx --tsconfig=./tsconfig.dev.json --inspect=0.0.0.0:9229 ./src/main.ts | pino-pretty --colorize --colorizeObjects --singleLine"
}

View File

@@ -128,6 +128,7 @@
"sjcl": "^1.0.8",
"smee-client": "^2.0.0",
"snowflake-sdk": "^1.14.0",
"ssh2": "^1.17.0",
"tedious": "^18.2.1",
"tweetnacl": "^1.0.3",
"tweetnacl-util": "^0.15.1",
@@ -164,6 +165,7 @@
"@types/resolve": "^1.20.6",
"@types/safe-regex": "^1.1.6",
"@types/sjcl": "^1.0.34",
"@types/ssh2": "^1.15.5",
"@types/uuid": "^9.0.7",
"@typescript-eslint/eslint-plugin": "^6.20.0",
"@typescript-eslint/parser": "^6.20.0",
@@ -15634,6 +15636,33 @@
"dev": true,
"license": "MIT"
},
"node_modules/@types/ssh2": {
"version": "1.15.5",
"resolved": "https://registry.npmjs.org/@types/ssh2/-/ssh2-1.15.5.tgz",
"integrity": "sha512-N1ASjp/nXH3ovBHddRJpli4ozpk6UdDYIX4RJWFa9L1YKnzdhTlVmiGHm4DZnj/jLbqZpes4aeR30EFGQtvhQQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"@types/node": "^18.11.18"
}
},
"node_modules/@types/ssh2/node_modules/@types/node": {
"version": "18.19.130",
"resolved": "https://registry.npmjs.org/@types/node/-/node-18.19.130.tgz",
"integrity": "sha512-GRaXQx6jGfL8sKfaIDD6OupbIHBr9jv7Jnaml9tB7l4v068PAOXqfcujMMo5PhbIs6ggR1XODELqahT2R8v0fg==",
"dev": true,
"license": "MIT",
"dependencies": {
"undici-types": "~5.26.4"
}
},
"node_modules/@types/ssh2/node_modules/undici-types": {
"version": "5.26.5",
"resolved": "https://registry.npmjs.org/undici-types/-/undici-types-5.26.5.tgz",
"integrity": "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==",
"dev": true,
"license": "MIT"
},
"node_modules/@types/sshpk": {
"version": "1.10.3",
"resolved": "https://registry.npmjs.org/@types/sshpk/-/sshpk-1.10.3.tgz",
@@ -18061,6 +18090,15 @@
"dev": true,
"license": "MIT"
},
"node_modules/buildcheck": {
"version": "0.0.6",
"resolved": "https://registry.npmjs.org/buildcheck/-/buildcheck-0.0.6.tgz",
"integrity": "sha512-8f9ZJCUXyT1M35Jx7MkBgmBMo3oHTTBIPLiY9xyL0pl3T5RwcPEY8cUHr5LBNfu/fk6c2T4DJZuVM/8ZZT2D2A==",
"optional": true,
"engines": {
"node": ">=10.0.0"
}
},
"node_modules/bullmq": {
"version": "5.4.2",
"resolved": "https://registry.npmjs.org/bullmq/-/bullmq-5.4.2.tgz",
@@ -18901,6 +18939,20 @@
"node": ">= 0.10"
}
},
"node_modules/cpu-features": {
"version": "0.0.10",
"resolved": "https://registry.npmjs.org/cpu-features/-/cpu-features-0.0.10.tgz",
"integrity": "sha512-9IkYqtX3YHPCzoVg1Py+o9057a3i0fp7S530UWokCSaFVTc7CwXPRiOjRjBQQ18ZCNafx78YfnG+HALxtVmOGA==",
"hasInstallScript": true,
"optional": true,
"dependencies": {
"buildcheck": "~0.0.6",
"nan": "^2.19.0"
},
"engines": {
"node": ">=10.0.0"
}
},
"node_modules/create-hash": {
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/create-hash/-/create-hash-1.2.0.tgz",
@@ -24996,9 +25048,9 @@
}
},
"node_modules/nan": {
"version": "2.22.2",
"resolved": "https://registry.npmjs.org/nan/-/nan-2.22.2.tgz",
"integrity": "sha512-DANghxFkS1plDdRsX0X9pm0Z6SJNN6gBdtXfanwoZ8hooC5gosGFSBGRYHUVPz1asKA/kMRqDRdHrluZ61SpBQ==",
"version": "2.23.1",
"resolved": "https://registry.npmjs.org/nan/-/nan-2.23.1.tgz",
"integrity": "sha512-r7bBUGKzlqk8oPBDYxt6Z0aEdF1G1rwlMcLk8LCOMbOzf0mG+JUfUzG4fIMWwHWP0iyaLWEQZJmtB7nOHEm/qw==",
"license": "MIT"
},
"node_modules/nanoid": {
@@ -31492,6 +31544,23 @@
"node": ">= 0.6"
}
},
"node_modules/ssh2": {
"version": "1.17.0",
"resolved": "https://registry.npmjs.org/ssh2/-/ssh2-1.17.0.tgz",
"integrity": "sha512-wPldCk3asibAjQ/kziWQQt1Wh3PgDFpC0XpwclzKcdT1vql6KeYxf5LIt4nlFkUeR8WuphYMKqUA56X4rjbfgQ==",
"hasInstallScript": true,
"dependencies": {
"asn1": "^0.2.6",
"bcrypt-pbkdf": "^1.0.2"
},
"engines": {
"node": ">=10.16.0"
},
"optionalDependencies": {
"cpu-features": "~0.0.10",
"nan": "^2.23.0"
}
},
"node_modules/sshpk": {
"version": "1.16.1",
"resolved": "https://registry.npmjs.org/sshpk/-/sshpk-1.16.1.tgz",

View File

@@ -32,7 +32,7 @@
"binary:clean": "rm -rf ./dist && rm -rf ./binary",
"binary:rename-imports": "ts-node ./scripts/rename-mjs.ts",
"test": "echo \"Error: no test specified\" && exit 1",
"dev": "tsx watch --clear-screen=false ./src/main.ts | pino-pretty --colorize --colorizeObjects --singleLine",
"dev": "tsx watch --clear-screen=false ./src/main.ts --config tsconfig.dev.json | pino-pretty --colorize --colorizeObjects --singleLine",
"dev:docker": "nodemon",
"build": "tsup --sourcemap",
"build:frontend": "npm run build --prefix ../frontend",
@@ -110,6 +110,7 @@
"@types/resolve": "^1.20.6",
"@types/safe-regex": "^1.1.6",
"@types/sjcl": "^1.0.34",
"@types/ssh2": "^1.15.5",
"@types/uuid": "^9.0.7",
"@typescript-eslint/eslint-plugin": "^6.20.0",
"@typescript-eslint/parser": "^6.20.0",
@@ -257,6 +258,7 @@
"sjcl": "^1.0.8",
"smee-client": "^2.0.0",
"snowflake-sdk": "^1.14.0",
"ssh2": "^1.17.0",
"tedious": "^18.2.1",
"tweetnacl": "^1.0.3",
"tweetnacl-util": "^0.15.1",
@@ -264,4 +266,4 @@
"zod": "^3.22.4",
"zod-to-json-schema": "^3.24.5"
}
- }
+ }

View File

@@ -0,0 +1,32 @@
import { Knex } from "knex";
import { dropConstraintIfExists } from "@app/db/migrations/utils/dropConstraintIfExists";
import { TableName } from "@app/db/schemas";
const CONSTRAINT_NAME = "unique_pki_acme_account_public_key_and_profile_id";
export async function up(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.PkiAcmeAccount)) {
const hasProfileId = await knex.schema.hasColumn(TableName.PkiAcmeAccount, "profileId");
const hasPublicKeyThumbprint = await knex.schema.hasColumn(TableName.PkiAcmeAccount, "publicKeyThumbprint");
if (hasProfileId && hasPublicKeyThumbprint) {
await knex.schema.alterTable(TableName.PkiAcmeAccount, (table) => {
table.unique(["profileId", "publicKeyThumbprint"], { indexName: CONSTRAINT_NAME });
});
}
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.PkiAcmeAccount)) {
const hasProfileId = await knex.schema.hasColumn(TableName.PkiAcmeAccount, "profileId");
const hasPublicKeyThumbprint = await knex.schema.hasColumn(TableName.PkiAcmeAccount, "publicKeyThumbprint");
await knex.schema.alterTable(TableName.PkiAcmeAccount, async () => {
if (hasProfileId && hasPublicKeyThumbprint) {
await dropConstraintIfExists(TableName.PkiAcmeAccount, CONSTRAINT_NAME, knex);
}
});
}
}
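The `down` migration leans on a `dropConstraintIfExists` helper whose body isn't shown in this diff. On Postgres it plausibly reduces to a guarded `ALTER TABLE` — a sketch under that assumption, not the repo's actual implementation:

```ts
import { Knex } from "knex";

// Hypothetical shape of dropConstraintIfExists: Postgres supports
// DROP CONSTRAINT IF EXISTS, so no separate existence query is needed.
// ?? is knex's identifier placeholder.
export const dropConstraintIfExists = async (tableName: string, constraintName: string, knex: Knex) => {
  await knex.raw(`ALTER TABLE ?? DROP CONSTRAINT IF EXISTS ??`, [tableName, constraintName]);
};
```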

View File

@@ -9,6 +9,11 @@ import {
SanitizedPostgresAccountWithResourceSchema,
UpdatePostgresAccountSchema
} from "@app/ee/services/pam-resource/postgres/postgres-resource-schemas";
import {
CreateSSHAccountSchema,
SanitizedSSHAccountWithResourceSchema,
UpdateSSHAccountSchema
} from "@app/ee/services/pam-resource/ssh/ssh-resource-schemas";
import { registerPamResourceEndpoints } from "./pam-account-endpoints";
@@ -30,5 +35,14 @@ export const PAM_ACCOUNT_REGISTER_ROUTER_MAP: Record<PamResource, (server: Fasti
createAccountSchema: CreateMySQLAccountSchema,
updateAccountSchema: UpdateMySQLAccountSchema
});
},
[PamResource.SSH]: async (server: FastifyZodProvider) => {
registerPamResourceEndpoints({
server,
resourceType: PamResource.SSH,
accountResponseSchema: SanitizedSSHAccountWithResourceSchema,
createAccountSchema: CreateSSHAccountSchema,
updateAccountSchema: UpdateSSHAccountSchema
});
}
};

View File

@@ -2,16 +2,21 @@ import { z } from "zod";
import { PamFoldersSchema } from "@app/db/schemas";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { PamAccountOrderBy, PamAccountView } from "@app/ee/services/pam-account/pam-account-enums";
import { SanitizedMySQLAccountWithResourceSchema } from "@app/ee/services/pam-resource/mysql/mysql-resource-schemas";
import { PamResource } from "@app/ee/services/pam-resource/pam-resource-enums";
import { SanitizedPostgresAccountWithResourceSchema } from "@app/ee/services/pam-resource/postgres/postgres-resource-schemas";
import { SanitizedSSHAccountWithResourceSchema } from "@app/ee/services/pam-resource/ssh/ssh-resource-schemas";
import { BadRequestError } from "@app/lib/errors";
import { removeTrailingSlash } from "@app/lib/fn";
import { ms } from "@app/lib/ms";
import { OrderByDirection } from "@app/lib/types";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
const SanitizedAccountSchema = z.union([
SanitizedSSHAccountWithResourceSchema, // ORDER MATTERS
SanitizedPostgresAccountWithResourceSchema,
SanitizedMySQLAccountWithResourceSchema
]);
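The `// ORDER MATTERS` comment is about how `z.union` parses: members are tried left to right and the first success wins, and since `z.object` strips unknown keys by default, a looser member listed first can silently swallow fields a stricter one would keep. A standalone illustration (the shapes are made up, not the real account schemas):

```ts
import { z } from "zod";

const SSHAccount = z.object({ resourceType: z.literal("ssh"), username: z.string() });
const GenericAccount = z.object({ resourceType: z.string() });

const looseFirst = z.union([GenericAccount, SSHAccount]);
const strictFirst = z.union([SSHAccount, GenericAccount]);

const input = { resourceType: "ssh", username: "root" };
console.log(looseFirst.parse(input)); // { resourceType: "ssh" } — username stripped
console.log(strictFirst.parse(input)); // { resourceType: "ssh", username: "root" }
```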
@@ -26,33 +31,69 @@ export const registerPamAccountRouter = async (server: FastifyZodProvider) => {
schema: {
description: "List PAM accounts",
querystring: z.object({
- projectId: z.string().uuid()
+ projectId: z.string().uuid(),
accountPath: z.string().trim().default("/").transform(removeTrailingSlash),
accountView: z.nativeEnum(PamAccountView).default(PamAccountView.Flat),
offset: z.coerce.number().min(0).default(0),
limit: z.coerce.number().min(1).max(100).default(100),
orderBy: z.nativeEnum(PamAccountOrderBy).default(PamAccountOrderBy.Name),
orderDirection: z.nativeEnum(OrderByDirection).default(OrderByDirection.ASC),
search: z.string().trim().optional(),
filterResourceIds: z
.string()
.transform((val) =>
val
.split(",")
.map((s) => s.trim())
.filter(Boolean)
)
.optional()
}),
response: {
200: z.object({
accounts: SanitizedAccountSchema.array(),
- folders: PamFoldersSchema.array()
+ folders: PamFoldersSchema.array(),
totalCount: z.number().default(0),
folderId: z.string().optional(),
folderPaths: z.record(z.string(), z.string())
})
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
- const response = await server.services.pamAccount.list(req.query.projectId, req.permission);
const { projectId, accountPath, accountView, limit, offset, search, orderBy, orderDirection, filterResourceIds } =
req.query;
const { accounts, folders, totalCount, folderId, folderPaths } = await server.services.pamAccount.list({
actorId: req.permission.id,
actor: req.permission.type,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
projectId,
accountPath,
accountView,
limit,
offset,
search,
orderBy,
orderDirection,
filterResourceIds
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
orgId: req.permission.orgId,
- projectId: req.query.projectId,
+ projectId,
event: {
type: EventType.PAM_ACCOUNT_LIST,
metadata: {
- accountCount: response.accounts.length,
- folderCount: response.folders.length
+ accountCount: accounts.length,
+ folderCount: folders.length
}
}
});
- return response;
+ return { accounts, folders, totalCount, folderId, folderPaths };
}
});
@@ -93,7 +134,7 @@ export const registerPamAccountRouter = async (server: FastifyZodProvider) => {
gatewayClientPrivateKey: z.string(),
gatewayServerCertificateChain: z.string(),
relayHost: z.string(),
- metadata: z.record(z.string(), z.string()).optional()
+ metadata: z.record(z.string(), z.string().optional()).optional()
})
}
},
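The `filterResourceIds` querystring above accepts a comma-separated list and normalizes it into a string array. Extracted as a standalone schema, the transform behaves like this:

```ts
import { z } from "zod";

// Same transform as in the querystring: split on commas, trim, drop empties.
const CsvFilterSchema = z
  .string()
  .transform((val) =>
    val
      .split(",")
      .map((s) => s.trim())
      .filter(Boolean)
  )
  .optional();

console.log(CsvFilterSchema.parse("id1, id2,,id3")); // ["id1", "id2", "id3"]
console.log(CsvFilterSchema.parse(undefined)); // undefined — filter omitted
```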

View File

@@ -9,6 +9,11 @@ import {
SanitizedPostgresResourceSchema,
UpdatePostgresResourceSchema
} from "@app/ee/services/pam-resource/postgres/postgres-resource-schemas";
import {
CreateSSHResourceSchema,
SanitizedSSHResourceSchema,
UpdateSSHResourceSchema
} from "@app/ee/services/pam-resource/ssh/ssh-resource-schemas";
import { registerPamResourceEndpoints } from "./pam-resource-endpoints";
@@ -30,5 +35,14 @@ export const PAM_RESOURCE_REGISTER_ROUTER_MAP: Record<PamResource, (server: Fast
createResourceSchema: CreateMySQLResourceSchema,
updateResourceSchema: UpdateMySQLResourceSchema
});
},
[PamResource.SSH]: async (server: FastifyZodProvider) => {
registerPamResourceEndpoints({
server,
resourceType: PamResource.SSH,
resourceResponseSchema: SanitizedSSHResourceSchema,
createResourceSchema: CreateSSHResourceSchema,
updateResourceSchema: UpdateSSHResourceSchema
});
}
};

View File

@@ -5,19 +5,30 @@ import {
MySQLResourceListItemSchema,
SanitizedMySQLResourceSchema
} from "@app/ee/services/pam-resource/mysql/mysql-resource-schemas";
import { PamResourceOrderBy } from "@app/ee/services/pam-resource/pam-resource-enums";
import {
PostgresResourceListItemSchema,
SanitizedPostgresResourceSchema
} from "@app/ee/services/pam-resource/postgres/postgres-resource-schemas";
import {
SanitizedSSHResourceSchema,
SSHResourceListItemSchema
} from "@app/ee/services/pam-resource/ssh/ssh-resource-schemas";
import { OrderByDirection } from "@app/lib/types";
import { readLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
- const SanitizedResourceSchema = z.union([SanitizedPostgresResourceSchema, SanitizedMySQLResourceSchema]);
+ const SanitizedResourceSchema = z.union([
+ SanitizedPostgresResourceSchema,
+ SanitizedMySQLResourceSchema,
+ SanitizedSSHResourceSchema
+ ]);
const ResourceOptionsSchema = z.discriminatedUnion("resource", [
PostgresResourceListItemSchema,
- MySQLResourceListItemSchema
+ MySQLResourceListItemSchema,
+ SSHResourceListItemSchema
]);
export const registerPamResourceRouter = async (server: FastifyZodProvider) => {
@@ -52,17 +63,46 @@ export const registerPamResourceRouter = async (server: FastifyZodProvider) => {
schema: {
description: "List PAM resources",
querystring: z.object({
- projectId: z.string().uuid()
+ projectId: z.string().uuid(),
offset: z.coerce.number().min(0).default(0),
limit: z.coerce.number().min(1).max(100).default(100),
orderBy: z.nativeEnum(PamResourceOrderBy).default(PamResourceOrderBy.Name),
orderDirection: z.nativeEnum(OrderByDirection).default(OrderByDirection.ASC),
search: z.string().trim().optional(),
filterResourceTypes: z
.string()
.transform((val) =>
val
.split(",")
.map((s) => s.trim())
.filter(Boolean)
)
.optional()
}),
response: {
200: z.object({
- resources: SanitizedResourceSchema.array()
+ resources: SanitizedResourceSchema.array(),
+ totalCount: z.number().default(0)
})
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
- const response = await server.services.pamResource.list(req.query.projectId, req.permission);
const { projectId, limit, offset, search, orderBy, orderDirection, filterResourceTypes } = req.query;
const { resources, totalCount } = await server.services.pamResource.list({
actorId: req.permission.id,
actor: req.permission.type,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
projectId,
limit,
offset,
search,
orderBy,
orderDirection,
filterResourceTypes
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
@@ -71,12 +111,12 @@ export const registerPamResourceRouter = async (server: FastifyZodProvider) => {
event: {
type: EventType.PAM_RESOURCE_LIST,
metadata: {
- count: response.resources.length
+ count: resources.length
}
}
});
- return response;
+ return { resources, totalCount };
}
});
};

View File

@@ -4,12 +4,21 @@ import { PamSessionsSchema } from "@app/db/schemas";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { MySQLSessionCredentialsSchema } from "@app/ee/services/pam-resource/mysql/mysql-resource-schemas";
import { PostgresSessionCredentialsSchema } from "@app/ee/services/pam-resource/postgres/postgres-resource-schemas";
- import { PamSessionCommandLogSchema, SanitizedSessionSchema } from "@app/ee/services/pam-session/pam-session-schemas";
+ import { SSHSessionCredentialsSchema } from "@app/ee/services/pam-resource/ssh/ssh-resource-schemas";
+ import {
+ PamSessionCommandLogSchema,
+ SanitizedSessionSchema,
+ TerminalEventSchema
+ } from "@app/ee/services/pam-session/pam-session-schemas";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
- const SessionCredentialsSchema = z.union([PostgresSessionCredentialsSchema, MySQLSessionCredentialsSchema]);
+ const SessionCredentialsSchema = z.union([
+ SSHSessionCredentialsSchema,
+ PostgresSessionCredentialsSchema,
+ MySQLSessionCredentialsSchema
+ ]);
export const registerPamSessionRouter = async (server: FastifyZodProvider) => {
// Meant to be hit solely by gateway identities
@@ -32,17 +41,15 @@ export const registerPamSessionRouter = async (server: FastifyZodProvider) => {
},
onRequest: verifyAuth([AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
- const { credentials, projectId, account } = await server.services.pamAccount.getSessionCredentials(
- req.params.sessionId,
- req.permission
- );
+ const { credentials, projectId, account, sessionStarted } =
+ await server.services.pamAccount.getSessionCredentials(req.params.sessionId, req.permission);
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
orgId: req.permission.orgId,
projectId,
event: {
- type: EventType.PAM_SESSION_START,
+ type: EventType.PAM_SESSION_CREDENTIALS_GET,
metadata: {
sessionId: req.params.sessionId,
accountName: account.name
@@ -50,7 +57,22 @@ export const registerPamSessionRouter = async (server: FastifyZodProvider) => {
}
});
- return { credentials };
if (sessionStarted) {
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
orgId: req.permission.orgId,
projectId,
event: {
type: EventType.PAM_SESSION_START,
metadata: {
sessionId: req.params.sessionId,
accountName: account.name
}
}
});
}
return { credentials: credentials as z.infer<typeof SessionCredentialsSchema> };
}
});
@@ -67,7 +89,7 @@ export const registerPamSessionRouter = async (server: FastifyZodProvider) => {
sessionId: z.string().uuid()
}),
body: z.object({
- logs: PamSessionCommandLogSchema.array()
+ logs: z.array(z.union([PamSessionCommandLogSchema, TerminalEventSchema]))
}),
response: {
200: z.object({

View File

@@ -186,6 +186,7 @@ export enum EventType {
CREATE_TOKEN_IDENTITY_TOKEN_AUTH = "create-token-identity-token-auth",
UPDATE_TOKEN_IDENTITY_TOKEN_AUTH = "update-token-identity-token-auth",
GET_TOKENS_IDENTITY_TOKEN_AUTH = "get-tokens-identity-token-auth",
GET_TOKEN_IDENTITY_TOKEN_AUTH = "get-token-identity-token-auth",
ADD_IDENTITY_TOKEN_AUTH = "add-identity-token-auth",
UPDATE_IDENTITY_TOKEN_AUTH = "update-identity-token-auth",
@@ -535,6 +536,7 @@ export enum EventType {
DASHBOARD_GET_SECRET_VALUE = "dashboard-get-secret-value",
DASHBOARD_GET_SECRET_VERSION_VALUE = "dashboard-get-secret-version-value",
PAM_SESSION_CREDENTIALS_GET = "pam-session-credentials-get",
PAM_SESSION_START = "pam-session-start",
PAM_SESSION_LOGS_UPDATE = "pam-session-logs-update",
PAM_SESSION_END = "pam-session-end",
@@ -1029,6 +1031,15 @@ interface GetTokensIdentityTokenAuthEvent {
};
}
interface GetTokenIdentityTokenAuthEvent {
type: EventType.GET_TOKEN_IDENTITY_TOKEN_AUTH;
metadata: {
identityId: string;
identityName: string;
tokenId: string;
};
}
interface AddIdentityTokenAuthEvent {
type: EventType.ADD_IDENTITY_TOKEN_AUTH;
metadata: {
@@ -3978,6 +3989,14 @@ interface OrgRoleDeleteEvent {
};
}
interface PamSessionCredentialsGetEvent {
type: EventType.PAM_SESSION_CREDENTIALS_GET;
metadata: {
sessionId: string;
accountName: string;
};
}
interface PamSessionStartEvent {
type: EventType.PAM_SESSION_START;
metadata: {
@@ -4214,6 +4233,7 @@ export type Event =
| CreateTokenIdentityTokenAuthEvent
| UpdateTokenIdentityTokenAuthEvent
| GetTokensIdentityTokenAuthEvent
| GetTokenIdentityTokenAuthEvent
| AddIdentityTokenAuthEvent
| UpdateIdentityTokenAuthEvent
| GetIdentityTokenAuthEvent
@@ -4531,6 +4551,7 @@ export type Event =
| OrgRoleCreateEvent
| OrgRoleUpdateEvent
| OrgRoleDeleteEvent
| PamSessionCredentialsGetEvent
| PamSessionStartEvent
| PamSessionLogsUpdateEvent
| PamSessionEndEvent
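Each new audit event in this diff follows the same three-step pattern: an `EventType` member, a metadata interface discriminated on that member, and a new arm in the `Event` union. Condensed to just the two PAM session events (an illustrative miniature, not the full union):

```ts
enum EventType {
  PamSessionCredentialsGet = "pam-session-credentials-get",
  PamSessionStart = "pam-session-start"
}

interface PamSessionCredentialsGetEvent {
  type: EventType.PamSessionCredentialsGet;
  metadata: { sessionId: string; accountName: string };
}

interface PamSessionStartEvent {
  type: EventType.PamSessionStart;
  metadata: { sessionId: string; accountName: string };
}

type Event = PamSessionCredentialsGetEvent | PamSessionStartEvent;

// The literal `type` field lets handlers narrow the metadata safely.
const describe = (event: Event): string =>
  event.type === EventType.PamSessionCredentialsGet
    ? `credentials fetched for ${event.metadata.accountName}`
    : `session ${event.metadata.sessionId} started`;
```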

View File

@@ -39,3 +39,9 @@ export const getDefaultOnPremFeatures = () => {
};
export const setupLicenseRequestWithStore = () => {};
export const getLicenseKeyConfig = () => {
return {
isValid: false
};
};

View File

@@ -1,13 +1,56 @@
import axios, { AxiosError } from "axios";
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
- import { getConfig } from "@app/lib/config/env";
+ import { getConfig, TEnvConfig } from "@app/lib/config/env";
import { request } from "@app/lib/config/request";
import { BadRequestError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";
import { UserAliasType } from "@app/services/user-alias/user-alias-types";
- import { TFeatureSet } from "./license-types";
+ import { LicenseType, TFeatureSet, TLicenseKeyConfig, TOfflineLicenseContents } from "./license-types";
export const isOfflineLicenseKey = (licenseKey: string): boolean => {
try {
const contents = JSON.parse(Buffer.from(licenseKey, "base64").toString("utf8")) as TOfflineLicenseContents;
return "signature" in contents && "license" in contents;
} catch (error) {
return false;
}
};
export const getLicenseKeyConfig = (
config?: Pick<TEnvConfig, "LICENSE_KEY" | "LICENSE_KEY_OFFLINE">
): TLicenseKeyConfig => {
const cfg = config || getConfig();
if (!cfg) {
return { isValid: false };
}
const licenseKey = cfg.LICENSE_KEY;
if (licenseKey) {
if (isOfflineLicenseKey(licenseKey)) {
return { isValid: true, licenseKey, type: LicenseType.Offline };
}
return { isValid: true, licenseKey, type: LicenseType.Online };
}
const offlineLicenseKey = cfg.LICENSE_KEY_OFFLINE;
// backwards compatibility
if (offlineLicenseKey) {
if (isOfflineLicenseKey(offlineLicenseKey)) {
return { isValid: true, licenseKey: offlineLicenseKey, type: LicenseType.Offline };
}
return { isValid: false };
}
return { isValid: false };
};
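An offline key, per `isOfflineLicenseKey`, is just a base64-encoded JSON envelope with `license` and `signature` fields; any other non-empty `LICENSE_KEY` is treated as an online key. A quick illustration (the key contents here are fabricated):

```ts
// Fabricated offline-style envelope: base64(JSON with "license" and "signature").
const offlineKey = Buffer.from(
  JSON.stringify({ license: { customer: "example" }, signature: "abc123" })
).toString("base64");

console.log(isOfflineLicenseKey(offlineKey)); // true
console.log(isOfflineLicenseKey("plain-online-key")); // false — not a parseable envelope
```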
export const getDefaultOnPremFeatures = (): TFeatureSet => ({
_id: null,

View File

@@ -22,9 +22,10 @@ import { OrgPermissionBillingActions, OrgPermissionSubjects } from "../permissio
import { TPermissionServiceFactory } from "../permission/permission-service-types";
import { BillingPlanRows, BillingPlanTableHead } from "./licence-enums";
import { TLicenseDALFactory } from "./license-dal";
- import { getDefaultOnPremFeatures, setupLicenseRequestWithStore } from "./license-fns";
+ import { getDefaultOnPremFeatures, getLicenseKeyConfig, setupLicenseRequestWithStore } from "./license-fns";
import {
InstanceType,
LicenseType,
TAddOrgPmtMethodDTO,
TAddOrgTaxIdDTO,
TCreateOrgPortalSession,
@@ -77,6 +78,7 @@ export const licenseServiceFactory = ({
let instanceType = InstanceType.OnPrem;
let onPremFeatures: TFeatureSet = getDefaultOnPremFeatures();
let selfHostedLicense: TOfflineLicense | null = null;
const licenseKeyConfig = getLicenseKeyConfig(envConfig);
const licenseServerCloudApi = setupLicenseRequestWithStore(
envConfig.LICENSE_SERVER_URL || "",
@@ -85,10 +87,13 @@ export const licenseServiceFactory = ({
envConfig.INTERNAL_REGION
);
const onlineLicenseKey =
licenseKeyConfig.isValid && licenseKeyConfig.type === LicenseType.Online ? licenseKeyConfig.licenseKey : "";
const licenseServerOnPremApi = setupLicenseRequestWithStore(
envConfig.LICENSE_SERVER_URL || "",
LICENSE_SERVER_ON_PREM_LOGIN,
envConfig.LICENSE_KEY || "",
onlineLicenseKey,
envConfig.INTERNAL_REGION
);
@@ -131,7 +136,7 @@ export const licenseServiceFactory = ({
return;
}
- if (envConfig.LICENSE_KEY) {
+ if (licenseKeyConfig.isValid && licenseKeyConfig.type === LicenseType.Online) {
const token = await licenseServerOnPremApi.refreshLicense();
if (token) {
await syncLicenseKeyOnPremFeatures(true);
@@ -142,10 +147,10 @@ export const licenseServiceFactory = ({
return;
}
- if (envConfig.LICENSE_KEY_OFFLINE) {
+ if (licenseKeyConfig.isValid && licenseKeyConfig.type === LicenseType.Offline) {
let isValidOfflineLicense = true;
const contents: TOfflineLicenseContents = JSON.parse(
- Buffer.from(envConfig.LICENSE_KEY_OFFLINE, "base64").toString("utf8")
+ Buffer.from(licenseKeyConfig.licenseKey, "base64").toString("utf8")
);
const isVerified = await verifyOfflineLicense(JSON.stringify(contents.license), contents.signature);
@@ -184,7 +189,7 @@ export const licenseServiceFactory = ({
};
const initializeBackgroundSync = async () => {
- if (envConfig.LICENSE_KEY) {
+ if (licenseKeyConfig?.isValid && licenseKeyConfig?.type === LicenseType.Online) {
logger.info("Setting up background sync process for refresh onPremFeatures");
const job = new CronJob("*/10 * * * *", syncLicenseKeyOnPremFeatures);
job.start();

View File

@@ -136,3 +136,18 @@ export type TDelOrgTaxIdDTO = TOrgPermission & { taxId: string };
export type TOrgInvoiceDTO = TOrgPermission;
export type TOrgLicensesDTO = TOrgPermission;
export enum LicenseType {
Offline = "offline",
Online = "online"
}
export type TLicenseKeyConfig =
| {
isValid: false;
}
| {
isValid: true;
licenseKey: string;
type: LicenseType;
};
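`TLicenseKeyConfig` is a discriminated union on `isValid`, which is why the service code above always checks `licenseKeyConfig.isValid && licenseKeyConfig.type === ...` before touching `licenseKey`. A self-contained sketch of the narrowing:

```ts
enum LicenseType {
  Offline = "offline",
  Online = "online"
}

type TLicenseKeyConfig = { isValid: false } | { isValid: true; licenseKey: string; type: LicenseType };

const describeLicense = (cfg: TLicenseKeyConfig): string => {
  if (!cfg.isValid) return "no valid license key";
  // Narrowed: licenseKey and type are only available on this branch.
  return `${cfg.type} license (${cfg.licenseKey.length} chars)`;
};

console.log(describeLicense({ isValid: false }));
console.log(describeLicense({ isValid: true, licenseKey: "key-123", type: LicenseType.Online }));
```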

View File

@@ -1,46 +1,109 @@
import { Knex } from "knex";
import { TDbClient } from "@app/db";
- import { TableName, TPamAccounts } from "@app/db/schemas";
- import { buildFindFilter, ormify, prependTableNameToFindFilter, selectAllTableCols } from "@app/lib/knex";
+ import { TableName } from "@app/db/schemas";
+ import { DatabaseError } from "@app/lib/errors";
+ import { ormify, selectAllTableCols } from "@app/lib/knex";
import { OrderByDirection } from "@app/lib/types";
import { PamAccountOrderBy, PamAccountView } from "./pam-account-enums";
export type TPamAccountDALFactory = ReturnType<typeof pamAccountDALFactory>;
- type PamAccountFindFilter = Parameters<typeof buildFindFilter<TPamAccounts>>[0];
export const pamAccountDALFactory = (db: TDbClient) => {
const orm = ormify(db, TableName.PamAccount);
- const findWithResourceDetails = async (filter: PamAccountFindFilter, tx?: Knex) => {
- const query = (tx || db.replicaNode())(TableName.PamAccount)
- .leftJoin(TableName.PamResource, `${TableName.PamAccount}.resourceId`, `${TableName.PamResource}.id`)
- .select(selectAllTableCols(TableName.PamAccount))
- .select(
+ const findByProjectIdWithResourceDetails = async (
{
projectId,
folderId,
accountView = PamAccountView.Nested,
search,
limit,
offset = 0,
orderBy = PamAccountOrderBy.Name,
orderDirection = OrderByDirection.ASC,
filterResourceIds
}: {
projectId: string;
folderId?: string | null;
accountView?: PamAccountView;
search?: string;
limit?: number;
offset?: number;
orderBy?: PamAccountOrderBy;
orderDirection?: OrderByDirection;
filterResourceIds?: string[];
},
tx?: Knex
) => {
try {
const dbInstance = tx || db.replicaNode();
const query = dbInstance(TableName.PamAccount)
.leftJoin(TableName.PamResource, `${TableName.PamAccount}.resourceId`, `${TableName.PamResource}.id`)
.where(`${TableName.PamAccount}.projectId`, projectId);
if (accountView === PamAccountView.Nested) {
if (folderId) {
void query.where(`${TableName.PamAccount}.folderId`, folderId);
} else {
void query.whereNull(`${TableName.PamAccount}.folderId`);
}
}
if (search) {
// escape special characters (`%`, `_`) and the escape character itself (`\`)
const escapedSearch = search.replace(/\\/g, "\\\\").replace(/%/g, "\\%").replace(/_/g, "\\_");
const pattern = `%${escapedSearch}%`;
void query.where((q) => {
void q
.whereRaw(`??.?? ILIKE ? ESCAPE '\\'`, [TableName.PamAccount, "name", pattern])
.orWhereRaw(`??.?? ILIKE ? ESCAPE '\\'`, [TableName.PamResource, "name", pattern])
.orWhereRaw(`??.?? ILIKE ? ESCAPE '\\'`, [TableName.PamAccount, "description", pattern]);
});
}
if (filterResourceIds && filterResourceIds.length) {
void query.whereIn(`${TableName.PamAccount}.resourceId`, filterResourceIds);
}
const countQuery = query.clone().count("*", { as: "count" }).first();
void query.select(selectAllTableCols(TableName.PamAccount)).select(
// resource
db.ref("name").withSchema(TableName.PamResource).as("resourceName"),
db.ref("resourceType").withSchema(TableName.PamResource),
db.ref("encryptedRotationAccountCredentials").withSchema(TableName.PamResource)
);
- if (filter) {
- /* eslint-disable @typescript-eslint/no-misused-promises */
- void query.where(buildFindFilter(prependTableNameToFindFilter(TableName.PamAccount, filter)));
+ const direction = orderDirection === OrderByDirection.ASC ? "ASC" : "DESC";
void query.orderByRaw(`${TableName.PamAccount}.?? COLLATE "en-x-icu" ${direction}`, [orderBy]);
if (typeof limit === "number") {
void query.limit(limit).offset(offset);
}
const [results, countResult] = await Promise.all([query, countQuery]);
const totalCount = Number(countResult?.count || 0);
const accounts = results.map(
// @ts-expect-error resourceName, resourceType, encryptedRotationAccountCredentials are from joined table
({ resourceId, resourceName, resourceType, encryptedRotationAccountCredentials, ...account }) => ({
...account,
resourceId,
resource: {
id: resourceId,
name: resourceName as string,
resourceType,
encryptedRotationAccountCredentials
}
})
);
return { accounts, totalCount };
} catch (error) {
throw new DatabaseError({ error, name: "Find PAM accounts with resource details" });
}
- const accounts = await query;
- return accounts.map(
- ({ resourceId, resourceName, resourceType, encryptedRotationAccountCredentials, ...account }) => ({
- ...account,
- resourceId,
- resource: {
- id: resourceId,
- name: resourceName,
- resourceType,
- encryptedRotationAccountCredentials
- }
- })
- );
};
const findAccountsDueForRotation = async (tx?: Knex) => {
@@ -59,5 +122,9 @@ export const pamAccountDALFactory = (db: TDbClient) => {
return accounts;
};
- return { ...orm, findWithResourceDetails, findAccountsDueForRotation };
+ return {
+ ...orm,
+ findByProjectIdWithResourceDetails,
+ findAccountsDueForRotation
+ };
};
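The search handling in this DAL (and in the folder and resource DALs below) escapes `%`, `_`, and `\` before building an `ILIKE` pattern, so user input like `50%_off` matches literally instead of acting as a wildcard. The escaping in isolation:

```ts
// % and _ are wildcards in SQL LIKE/ILIKE; \ is the declared escape character,
// so all three must be escaped before wrapping the input in %...%.
const escapeLikePattern = (input: string): string =>
  input.replace(/\\/g, "\\\\").replace(/%/g, "\\%").replace(/_/g, "\\_");

console.log(escapeLikePattern("50%_off")); // 50\%\_off
console.log(`%${escapeLikePattern("a_b")}%`); // %a\_b% — used with ILIKE ? ESCAPE '\'
```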

View File

@@ -0,0 +1,8 @@
export enum PamAccountOrderBy {
Name = "name"
}
export enum PamAccountView {
Flat = "flat",
Nested = "nested"
}

View File

@@ -1,6 +1,6 @@
import { ForbiddenError, subject } from "@casl/ability";
- import { ActionProjectType, OrganizationActionScope, TPamAccounts, TPamResources } from "@app/db/schemas";
+ import { ActionProjectType, OrganizationActionScope, TPamAccounts, TPamFolders, TPamResources } from "@app/db/schemas";
import { PAM_RESOURCE_FACTORY_MAP } from "@app/ee/services/pam-resource/pam-resource-factory";
import { decryptResource, decryptResourceConnectionDetails } from "@app/ee/services/pam-resource/pam-resource-fns";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service-types";
@@ -27,12 +27,14 @@ import { getFullPamFolderPath } from "../pam-folder/pam-folder-fns";
import { TPamResourceDALFactory } from "../pam-resource/pam-resource-dal";
import { PamResource } from "../pam-resource/pam-resource-enums";
import { TPamAccountCredentials } from "../pam-resource/pam-resource-types";
import { TSqlResourceConnectionDetails } from "../pam-resource/shared/sql/sql-resource-types";
import { TPamSessionDALFactory } from "../pam-session/pam-session-dal";
import { PamSessionStatus } from "../pam-session/pam-session-enums";
import { OrgPermissionGatewayActions, OrgPermissionSubjects } from "../permission/org-permission";
import { TPamAccountDALFactory } from "./pam-account-dal";
import { PamAccountView } from "./pam-account-enums";
import { decryptAccount, decryptAccountCredentials, encryptAccountCredentials } from "./pam-account-fns";
- import { TAccessAccountDTO, TCreateAccountDTO, TUpdateAccountDTO } from "./pam-account-types";
+ import { TAccessAccountDTO, TCreateAccountDTO, TListAccountsDTO, TUpdateAccountDTO } from "./pam-account-types";
type TPamAccountServiceFactoryDep = {
pamResourceDAL: TPamResourceDALFactory;
@@ -251,17 +253,17 @@ export const pamAccountServiceFactory = ({
gatewayV2Service
);
- // Logic to prevent overwriting unedited censored values
- const finalCredentials = { ...credentials };
- if (credentials.password === "__INFISICAL_UNCHANGED__") {
- const decryptedCredentials = await decryptAccountCredentials({
- encryptedCredentials: account.encryptedCredentials,
- projectId: account.projectId,
- kmsService
- });
+ const decryptedCredentials = await decryptAccountCredentials({
+ encryptedCredentials: account.encryptedCredentials,
+ projectId: account.projectId,
+ kmsService
+ });
- finalCredentials.password = decryptedCredentials.password;
- }
+ // Logic to prevent overwriting unedited censored values
+ const finalCredentials = await factory.handleOverwritePreventionForCensoredValues(
+ credentials,
+ decryptedCredentials
+ );
const validatedCredentials = await factory.validateAccountCredentials(finalCredentials);
const encryptedCredentials = await encryptAccountCredentials({
@@ -334,21 +336,96 @@ export const pamAccountServiceFactory = ({
};
};
- const list = async (projectId: string, actor: OrgServiceActor) => {
+ const list = async ({
+ projectId,
+ accountPath,
+ accountView,
+ actor,
+ actorId,
+ actorAuthMethod,
+ actorOrgId,
+ ...params
+ }: TListAccountsDTO) => {
const { permission } = await permissionService.getProjectPermission({
- actor: actor.type,
- actorAuthMethod: actor.authMethod,
- actorId: actor.id,
- actorOrgId: actor.orgId,
+ actor,
+ actorId,
projectId,
+ actorAuthMethod,
+ actorOrgId,
actionProjectType: ActionProjectType.PAM
});
- const accountsWithResourceDetails = await pamAccountDAL.findWithResourceDetails({ projectId });
const limit = params.limit || 20;
const offset = params.offset || 0;
const canReadFolders = permission.can(ProjectPermissionActions.Read, ProjectPermissionSub.PamFolders);
- const folders = canReadFolders ? await pamFolderDAL.find({ projectId }) : [];
const folder = accountPath === "/" ? null : await pamFolderDAL.findByPath(projectId, accountPath);
if (accountPath !== "/" && !folder) {
return { accounts: [], folders: [], totalCount: 0, folderPaths: {} };
}
const folderId = folder?.id;
let totalFolderCount = 0;
if (canReadFolders && accountView === PamAccountView.Nested) {
const { totalCount } = await pamFolderDAL.findByProjectId({
projectId,
parentId: folderId,
search: params.search
});
totalFolderCount = totalCount;
}
let folders: TPamFolders[] = [];
if (canReadFolders && accountView === PamAccountView.Nested && offset < totalFolderCount) {
const folderLimit = Math.min(limit, totalFolderCount - offset);
const { folders: foldersResp } = await pamFolderDAL.findByProjectId({
projectId,
parentId: folderId,
limit: folderLimit,
offset,
search: params.search,
orderBy: params.orderBy,
orderDirection: params.orderDirection
});
folders = foldersResp;
}
let accountsWithResourceDetails: Awaited<
ReturnType<typeof pamAccountDAL.findByProjectIdWithResourceDetails>
>["accounts"] = [];
let totalAccountCount = 0;
const accountsToFetch = limit - folders.length;
if (accountsToFetch > 0) {
const accountOffset = Math.max(0, offset - totalFolderCount);
const { accounts, totalCount } = await pamAccountDAL.findByProjectIdWithResourceDetails({
projectId,
folderId,
accountView,
offset: accountOffset,
limit: accountsToFetch,
search: params.search,
orderBy: params.orderBy,
orderDirection: params.orderDirection,
filterResourceIds: params.filterResourceIds
});
accountsWithResourceDetails = accounts;
totalAccountCount = totalCount;
} else {
// if no accounts are to be fetched for the current page, we still need the total count for pagination
const { totalCount } = await pamAccountDAL.findByProjectIdWithResourceDetails({
projectId,
folderId,
accountView,
search: params.search,
filterResourceIds: params.filterResourceIds
});
totalAccountCount = totalCount;
}
const totalCount = totalFolderCount + totalAccountCount;
const decryptedAndPermittedAccounts: Array<
TPamAccounts & {
@@ -359,12 +436,6 @@ export const pamAccountServiceFactory = ({
> = [];
for await (const account of accountsWithResourceDetails) {
- const accountPath = await getFullPamFolderPath({
- pamFolderDAL,
- folderId: account.folderId,
- projectId: account.projectId
- });
// Check permission for each individual account
if (
permission.can(
@@ -391,9 +462,27 @@ export const pamAccountServiceFactory = ({
}
}
const folderPaths: Record<string, string> = {};
const accountFolderIds = [
...new Set(decryptedAndPermittedAccounts.flatMap((a) => (a.folderId ? [a.folderId] : [])))
];
await Promise.all(
accountFolderIds.map(async (fId) => {
folderPaths[fId] = await getFullPamFolderPath({
pamFolderDAL,
folderId: fId,
projectId
});
})
);
return {
accounts: decryptedAndPermittedAccounts,
- folders
+ folders,
totalCount,
folderId,
folderPaths
};
};
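The folder-first paging in `list` is easiest to verify with numbers: folders occupy the leading slots of the global ordering, accounts fill whatever remains of the page, and the account offset is the page offset minus the total folder count. A standalone sketch of that arithmetic (illustrative numbers):

```ts
// Given a global ordering of [all folders..., all accounts...], compute how a
// page of `limit` items starting at `offset` splits between folders and accounts.
const pageSplit = (offset: number, limit: number, totalFolderCount: number) => {
  const folderCount = Math.max(0, Math.min(limit, totalFolderCount - offset));
  const accountLimit = limit - folderCount;
  const accountOffset = Math.max(0, offset - totalFolderCount);
  return { folderCount, accountLimit, accountOffset };
};

// 7 folders, pages of 5:
console.log(pageSplit(0, 5, 7)); // { folderCount: 5, accountLimit: 0, accountOffset: 0 }
console.log(pageSplit(5, 5, 7)); // { folderCount: 2, accountLimit: 3, accountOffset: 0 }
console.log(pageSplit(10, 5, 7)); // { folderCount: 0, accountLimit: 5, accountOffset: 3 }
```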
@@ -486,11 +575,11 @@ export const pamAccountServiceFactory = ({
case PamResource.Postgres:
case PamResource.MySQL:
{
- const connectionCredentials = await decryptResourceConnectionDetails({
+ const connectionCredentials = (await decryptResourceConnectionDetails({
encryptedConnectionDetails: resource.encryptedConnectionDetails,
kmsService,
projectId: account.projectId
- });
+ })) as TSqlResourceConnectionDetails;
const credentials = await decryptAccountCredentials({
encryptedCredentials: account.encryptedCredentials,
@@ -506,6 +595,19 @@ export const pamAccountServiceFactory = ({
};
}
break;
case PamResource.SSH:
{
const credentials = await decryptAccountCredentials({
encryptedCredentials: account.encryptedCredentials,
kmsService,
projectId: account.projectId
});
metadata = {
username: credentials.username
};
}
break;
default:
break;
}
@@ -566,11 +668,6 @@ export const pamAccountServiceFactory = ({
throw new BadRequestError({ message: "Session has ended or expired" });
}
- // Verify that the session has not already had credentials fetched
- if (session.status !== PamSessionStatus.Starting) {
- throw new BadRequestError({ message: "Session has already been started" });
- }
const account = await pamAccountDAL.findById(session.accountId);
if (!account) throw new NotFoundError({ message: `Account with ID '${session.accountId}' not found` });
@@ -587,11 +684,16 @@ export const pamAccountServiceFactory = ({
const decryptedResource = await decryptResource(resource, session.projectId, kmsService);
let sessionStarted = false;
// Mark session as started
- await pamSessionDAL.updateById(sessionId, {
- status: PamSessionStatus.Active,
- startedAt: new Date()
- });
+ if (session.status === PamSessionStatus.Starting) {
+ await pamSessionDAL.updateById(sessionId, {
+ status: PamSessionStatus.Active,
+ startedAt: new Date()
+ });
+ sessionStarted = true;
+ }
return {
credentials: {
@@ -599,7 +701,8 @@ export const pamAccountServiceFactory = ({
...decryptedAccount.credentials
},
projectId: project.id,
- account
+ account,
+ sessionStarted
};
};

View File

@@ -1,4 +1,7 @@
import { OrderByDirection, TProjectPermission } from "@app/lib/types";
import { TPamAccount } from "../pam-resource/pam-resource-types";
import { PamAccountOrderBy, PamAccountView } from "./pam-account-enums";
// DTOs
export type TCreateAccountDTO = Pick<
@@ -18,3 +21,14 @@ export type TAccessAccountDTO = {
actorUserAgent: string;
duration: number;
};
export type TListAccountsDTO = {
accountPath: string;
accountView: PamAccountView;
search?: string;
orderBy?: PamAccountOrderBy;
orderDirection?: OrderByDirection;
limit?: number;
offset?: number;
filterResourceIds?: string[];
} & TProjectPermission;

View File

@@ -1,9 +1,106 @@
import { Knex } from "knex";
import { TDbClient } from "@app/db";
import { TableName } from "@app/db/schemas";
- import { ormify } from "@app/lib/knex";
+ import { DatabaseError } from "@app/lib/errors";
+ import { ormify, selectAllTableCols } from "@app/lib/knex";
import { OrderByDirection } from "@app/lib/types";
import { PamAccountOrderBy } from "../pam-account/pam-account-enums";
export type TPamFolderDALFactory = ReturnType<typeof pamFolderDALFactory>;
export const pamFolderDALFactory = (db: TDbClient) => {
const orm = ormify(db, TableName.PamFolder);
- return { ...orm };
const findByProjectId = async (
{
projectId,
parentId,
search,
limit,
offset = 0,
orderBy = PamAccountOrderBy.Name,
orderDirection = OrderByDirection.ASC
}: {
projectId: string;
parentId?: string | null;
search?: string;
limit?: number;
offset?: number;
orderBy?: PamAccountOrderBy;
orderDirection?: OrderByDirection;
},
tx?: Knex
) => {
try {
const dbInstance = tx || db.replicaNode();
const query = dbInstance(TableName.PamFolder).where(`${TableName.PamFolder}.projectId`, projectId);
if (parentId) {
void query.where(`${TableName.PamFolder}.parentId`, parentId);
} else {
void query.whereNull(`${TableName.PamFolder}.parentId`);
}
if (search) {
// escape special characters (`%`, `_`) and the escape character itself (`\`)
const escapedSearch = search.replace(/\\/g, "\\\\").replace(/%/g, "\\%").replace(/_/g, "\\_");
void query.whereRaw(`??.?? ILIKE ? ESCAPE '\\'`, [TableName.PamFolder, "name", `%${escapedSearch}%`]);
}
const countQuery = query.clone().count("*", { as: "count" }).first();
void query.select(selectAllTableCols(TableName.PamFolder));
const direction = orderDirection === OrderByDirection.ASC ? "ASC" : "DESC";
void query.orderByRaw(`${TableName.PamFolder}.?? COLLATE "en-x-icu" ${direction}`, [orderBy]);
if (typeof limit === "number") {
void query.limit(limit).offset(offset);
}
const [folders, countResult] = await Promise.all([query, countQuery]);
const totalCount = Number(countResult?.count || 0);
return { folders, totalCount };
} catch (error) {
throw new DatabaseError({ error, name: "Find PAM folders" });
}
};
const findByPath = async (projectId: string, path: string, tx?: Knex) => {
try {
const dbInstance = tx || db.replicaNode();
const pathSegments = path.split("/").filter(Boolean);
let parentId: string | null = null;
let currentFolder: Awaited<ReturnType<typeof orm.findOne>> | undefined;
for await (const segment of pathSegments) {
const query = dbInstance(TableName.PamFolder)
.where(`${TableName.PamFolder}.projectId`, projectId)
.where(`${TableName.PamFolder}.name`, segment);
if (parentId) {
void query.where(`${TableName.PamFolder}.parentId`, parentId);
} else {
void query.whereNull(`${TableName.PamFolder}.parentId`);
}
currentFolder = await query.first();
if (!currentFolder) {
return undefined;
}
parentId = currentFolder.id;
}
return currentFolder;
} catch (error) {
throw new DatabaseError({ error, name: "Find PAM folder by path" });
}
};
return { ...orm, findByProjectId, findByPath };
};
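`findByPath` resolves a path like `/prod/db` by walking one segment at a time from the root, constraining each lookup to the previous segment's `parentId`. The same walk over an in-memory array (illustrative data, not the DAL itself):

```ts
type Folder = { id: string; name: string; parentId: string | null };

// Walk the path segment by segment; any missing segment ends the search.
const findByPath = (folders: Folder[], path: string): Folder | undefined => {
  let parentId: string | null = null;
  let current: Folder | undefined;
  for (const segment of path.split("/").filter(Boolean)) {
    current = folders.find((f) => f.name === segment && f.parentId === parentId);
    if (!current) return undefined;
    parentId = current.id;
  }
  return current;
};

const folders: Folder[] = [
  { id: "1", name: "prod", parentId: null },
  { id: "2", name: "db", parentId: "1" }
];
console.log(findByPath(folders, "/prod/db")?.id); // "2"
console.log(findByPath(folders, "/prod/missing")); // undefined
```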

View File

@@ -2,7 +2,11 @@ import { Knex } from "knex";
import { TDbClient } from "@app/db";
import { TableName } from "@app/db/schemas";
import { DatabaseError } from "@app/lib/errors";
import { ormify, selectAllTableCols } from "@app/lib/knex";
import { OrderByDirection } from "@app/lib/types";
import { PamResourceOrderBy } from "./pam-resource-enums";
export type TPamResourceDALFactory = ReturnType<typeof pamResourceDALFactory>;
export const pamResourceDALFactory = (db: TDbClient) => {
@@ -20,5 +24,65 @@ export const pamResourceDALFactory = (db: TDbClient) => {
return doc;
};
- return { ...orm, findById };
const findByProjectId = async (
{
projectId,
search,
limit,
offset = 0,
orderBy = PamResourceOrderBy.Name,
orderDirection = OrderByDirection.ASC,
filterResourceTypes
}: {
projectId: string;
search?: string;
limit?: number;
offset?: number;
orderBy?: PamResourceOrderBy;
orderDirection?: OrderByDirection;
filterResourceTypes?: string[];
},
tx?: Knex
) => {
try {
const dbInstance = tx || db.replicaNode();
const query = dbInstance(TableName.PamResource).where(`${TableName.PamResource}.projectId`, projectId);
if (search) {
// escape special characters (`%`, `_`) and the escape character itself (`\`)
const escapedSearch = search.replace(/\\/g, "\\\\").replace(/%/g, "\\%").replace(/_/g, "\\_");
const pattern = `%${escapedSearch}%`;
void query.where((q) => {
void q
.whereRaw(`??.?? ILIKE ? ESCAPE '\\'`, [TableName.PamResource, "name", pattern])
.orWhereRaw(`??.?? ILIKE ? ESCAPE '\\'`, [TableName.PamResource, "resourceType", pattern]);
});
}
if (filterResourceTypes && filterResourceTypes.length) {
void query.whereIn(`${TableName.PamResource}.resourceType`, filterResourceTypes);
}
const countQuery = query.clone().count("*", { as: "count" }).first();
void query.select(selectAllTableCols(TableName.PamResource));
const direction = orderDirection === OrderByDirection.ASC ? "ASC" : "DESC";
void query.orderByRaw(`${TableName.PamResource}.?? COLLATE "en-x-icu" ${direction}`, [orderBy]);
if (typeof limit === "number") {
void query.limit(limit).offset(offset);
}
const [resources, countResult] = await Promise.all([query, countQuery]);
const totalCount = Number(countResult?.count || 0);
return { resources, totalCount };
} catch (error) {
throw new DatabaseError({ error, name: "Find PAM resources" });
}
};
return { ...orm, findById, findByProjectId };
};

View File

@@ -1,4 +1,9 @@
export enum PamResource {
Postgres = "postgres",
MySQL = "mysql"
MySQL = "mysql",
SSH = "ssh"
}
export enum PamResourceOrderBy {
Name = "name"
}

View File

@@ -1,10 +1,12 @@
import { PamResource } from "./pam-resource-enums";
import { TPamAccountCredentials, TPamResourceConnectionDetails, TPamResourceFactory } from "./pam-resource-types";
import { sqlResourceFactory } from "./shared/sql/sql-resource-factory";
import { sshResourceFactory } from "./ssh/ssh-resource-factory";
type TPamResourceFactoryImplementation = TPamResourceFactory<TPamResourceConnectionDetails, TPamAccountCredentials>;
export const PAM_RESOURCE_FACTORY_MAP: Record<PamResource, TPamResourceFactoryImplementation> = {
[PamResource.Postgres]: sqlResourceFactory as TPamResourceFactoryImplementation,
- [PamResource.MySQL]: sqlResourceFactory as TPamResourceFactoryImplementation
+ [PamResource.MySQL]: sqlResourceFactory as TPamResourceFactoryImplementation,
+ [PamResource.SSH]: sshResourceFactory as TPamResourceFactoryImplementation
};
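Typing the factory map as `Record<PamResource, ...>` is what makes this diff mechanical: adding `SSH` to the enum turns every map that hasn't been extended into a compile error. In miniature:

```ts
enum PamResource {
  Postgres = "postgres",
  MySQL = "mysql",
  SSH = "ssh"
}

// Omitting any key is a compile-time error, so new resource types
// can't be forgotten in maps like PAM_RESOURCE_FACTORY_MAP.
const displayNames: Record<PamResource, string> = {
  [PamResource.Postgres]: "PostgreSQL",
  [PamResource.MySQL]: "MySQL",
  [PamResource.SSH]: "SSH"
};

console.log(displayNames[PamResource.SSH]); // "SSH"
```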

View File

@@ -20,7 +20,7 @@ import {
encryptResourceConnectionDetails,
listResourceOptions
} from "./pam-resource-fns";
import { TCreateResourceDTO, TUpdateResourceDTO } from "./pam-resource-types";
import { TCreateResourceDTO, TListResourcesDTO, TUpdateResourceDTO } from "./pam-resource-types";
type TPamResourceServiceFactoryDep = {
pamResourceDAL: TPamResourceDALFactory;
@@ -192,19 +192,18 @@ export const pamResourceServiceFactory = ({
gatewayV2Service
);
// Logic to prevent overwriting unedited censored values
const finalCredentials = { ...rotationAccountCredentials };
if (
resource.encryptedRotationAccountCredentials &&
rotationAccountCredentials.password === "__INFISICAL_UNCHANGED__"
) {
let finalCredentials = { ...rotationAccountCredentials };
if (resource.encryptedRotationAccountCredentials) {
const decryptedCredentials = await decryptAccountCredentials({
encryptedCredentials: resource.encryptedRotationAccountCredentials,
projectId: resource.projectId,
kmsService
});
finalCredentials.password = decryptedCredentials.password;
finalCredentials = await factory.handleOverwritePreventionForCensoredValues(
rotationAccountCredentials,
decryptedCredentials
);
}
try {
@@ -268,22 +267,23 @@ export const pamResourceServiceFactory = ({
}
};
const list = async (projectId: string, actor: OrgServiceActor) => {
const list = async ({ projectId, actor, actorId, actorAuthMethod, actorOrgId, ...params }: TListResourcesDTO) => {
const { permission } = await permissionService.getProjectPermission({
actor: actor.type,
actorAuthMethod: actor.authMethod,
actorId: actor.id,
actorOrgId: actor.orgId,
actor,
actorId,
actorAuthMethod,
actorOrgId,
projectId,
actionProjectType: ActionProjectType.PAM
});
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.PamResources);
const resources = await pamResourceDAL.find({ projectId });
const { resources, totalCount } = await pamResourceDAL.findByProjectId({ projectId, ...params });
return {
resources: await Promise.all(resources.map((resource) => decryptResource(resource, projectId, kmsService)))
resources: await Promise.all(resources.map((resource) => decryptResource(resource, projectId, kmsService))),
totalCount
};
};

View File

@@ -1,3 +1,5 @@
import { OrderByDirection, TProjectPermission } from "@app/lib/types";
import { TGatewayV2ServiceFactory } from "../gateway-v2/gateway-v2-service";
import {
TMySQLAccount,
@@ -5,22 +7,31 @@ import {
TMySQLResource,
TMySQLResourceConnectionDetails
} from "./mysql/mysql-resource-types";
import { PamResource } from "./pam-resource-enums";
import { PamResource, PamResourceOrderBy } from "./pam-resource-enums";
import {
TPostgresAccount,
TPostgresAccountCredentials,
TPostgresResource,
TPostgresResourceConnectionDetails
} from "./postgres/postgres-resource-types";
import {
TSSHAccount,
TSSHAccountCredentials,
TSSHResource,
TSSHResourceConnectionDetails
} from "./ssh/ssh-resource-types";
// Resource types
export type TPamResource = TPostgresResource | TMySQLResource;
export type TPamResourceConnectionDetails = TPostgresResourceConnectionDetails | TMySQLResourceConnectionDetails;
export type TPamResource = TPostgresResource | TMySQLResource | TSSHResource;
export type TPamResourceConnectionDetails =
| TPostgresResourceConnectionDetails
| TMySQLResourceConnectionDetails
| TSSHResourceConnectionDetails;
// Account types
export type TPamAccount = TPostgresAccount | TMySQLAccount;
export type TPamAccount = TPostgresAccount | TMySQLAccount | TSSHAccount;
// eslint-disable-next-line @typescript-eslint/no-duplicate-type-constituents
export type TPamAccountCredentials = TPostgresAccountCredentials | TMySQLAccountCredentials;
export type TPamAccountCredentials = TPostgresAccountCredentials | TMySQLAccountCredentials | TSSHAccountCredentials;
// Resource DTOs
export type TCreateResourceDTO = Pick<
@@ -32,6 +43,15 @@ export type TUpdateResourceDTO = Partial<Omit<TCreateResourceDTO, "resourceType"
resourceId: string;
};
export type TListResourcesDTO = {
search?: string;
orderBy?: PamResourceOrderBy;
orderDirection?: OrderByDirection;
limit?: number;
offset?: number;
filterResourceTypes?: string[];
} & TProjectPermission;
// Resource factory
export type TPamResourceFactoryValidateConnection<T extends TPamResourceConnectionDetails> = () => Promise<T>;
export type TPamResourceFactoryValidateAccountCredentials<C extends TPamAccountCredentials> = (
@@ -51,4 +71,5 @@ export type TPamResourceFactory<T extends TPamResourceConnectionDetails, C exten
validateConnection: TPamResourceFactoryValidateConnection<T>;
validateAccountCredentials: TPamResourceFactoryValidateAccountCredentials<C>;
rotateAccountCredentials: TPamResourceFactoryRotateAccountCredentials<C>;
handleOverwritePreventionForCensoredValues: (updatedAccountCredentials: C, currentCredentials: C) => Promise<C>;
};

View File

@@ -337,9 +337,24 @@ export const sqlResourceFactory: TPamResourceFactory<TSqlResourceConnectionDetai
}
};
const handleOverwritePreventionForCensoredValues = async (
updatedAccountCredentials: TSqlAccountCredentials,
currentCredentials: TSqlAccountCredentials
) => {
if (updatedAccountCredentials.password === "__INFISICAL_UNCHANGED__") {
return {
...updatedAccountCredentials,
password: currentCredentials.password
};
}
return updatedAccountCredentials;
};
return {
validateConnection,
validateAccountCredentials,
rotateAccountCredentials
rotateAccountCredentials,
handleOverwritePreventionForCensoredValues
};
};
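For context, the sentinel-based merge this factory method performs can be sketched in isolation like this (a simplified stand-in for the closure above; credential values are invented):

```typescript
// Minimal sketch of the censored-value merge done during a resource update.
type Creds = { username: string; password: string };

const mergeCensored = (updated: Creds, current: Creds): Creds =>
  updated.password === "__INFISICAL_UNCHANGED__"
    ? { ...updated, password: current.password }
    : updated;

// The UI echoes the sentinel for an unedited password field, so the stored
// value survives the update; any other value replaces it.
mergeCensored(
  { username: "app", password: "__INFISICAL_UNCHANGED__" },
  { username: "app", password: "s3cr3t-from-kms" }
); // => { username: "app", password: "s3cr3t-from-kms" }
```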

View File

@@ -0,0 +1,5 @@
export enum SSHAuthMethod {
Password = "password",
PublicKey = "public-key",
Certificate = "certificate"
}

View File

@@ -0,0 +1,265 @@
import { Client } from "ssh2";
import { BadRequestError } from "@app/lib/errors";
import { GatewayProxyProtocol } from "@app/lib/gateway";
import { withGatewayV2Proxy } from "@app/lib/gateway-v2/gateway-v2";
import { logger } from "@app/lib/logger";
import { verifyHostInputValidity } from "../../dynamic-secret/dynamic-secret-fns";
import { TGatewayV2ServiceFactory } from "../../gateway-v2/gateway-v2-service";
import { PamResource } from "../pam-resource-enums";
import {
TPamResourceFactory,
TPamResourceFactoryRotateAccountCredentials,
TPamResourceFactoryValidateAccountCredentials
} from "../pam-resource-types";
import { SSHAuthMethod } from "./ssh-resource-enums";
import { TSSHAccountCredentials, TSSHResourceConnectionDetails } from "./ssh-resource-types";
const EXTERNAL_REQUEST_TIMEOUT = 10 * 1000;
export const executeWithGateway = async <T>(
config: {
connectionDetails: TSSHResourceConnectionDetails;
resourceType: PamResource;
gatewayId: string;
},
gatewayV2Service: Pick<TGatewayV2ServiceFactory, "getPlatformConnectionDetailsByGatewayId">,
operation: (proxyPort: number) => Promise<T>
): Promise<T> => {
const { connectionDetails, gatewayId } = config;
const [targetHost] = await verifyHostInputValidity(connectionDetails.host, true);
const platformConnectionDetails = await gatewayV2Service.getPlatformConnectionDetailsByGatewayId({
gatewayId,
targetHost,
targetPort: connectionDetails.port
});
if (!platformConnectionDetails) {
throw new BadRequestError({ message: "Unable to connect to gateway, no platform connection details found" });
}
return withGatewayV2Proxy(
async (proxyPort) => {
return operation(proxyPort);
},
{
protocol: GatewayProxyProtocol.Tcp,
relayHost: platformConnectionDetails.relayHost,
gateway: platformConnectionDetails.gateway,
relay: platformConnectionDetails.relay
}
);
};
export const sshResourceFactory: TPamResourceFactory<TSSHResourceConnectionDetails, TSSHAccountCredentials> = (
resourceType,
connectionDetails,
gatewayId,
gatewayV2Service
) => {
const validateConnection = async () => {
try {
await executeWithGateway({ connectionDetails, gatewayId, resourceType }, gatewayV2Service, async (proxyPort) => {
return new Promise<void>((resolve, reject) => {
const client = new Client();
let handshakeComplete = false;
client.on("error", (err) => {
logger.info(
{ error: err.message, handshakeComplete },
"[SSH Resource Factory] SSH client error event received"
);
// If we got an authentication error, it means we successfully reached the SSH server
// and completed the SSH handshake - that's good enough for connection validation
if (handshakeComplete || err.message.includes("authentication") || err.message.includes("publickey")) {
logger.info(
{ handshakeComplete, errorMessage: err.message },
"[SSH Resource Factory] SSH connection validation succeeded (auth error after handshake)"
);
client.end();
resolve();
} else {
logger.error(
{ error: err.message, handshakeComplete },
"[SSH Resource Factory] SSH connection validation failed"
);
reject(err);
}
});
client.on("handshake", () => {
// SSH handshake completed - the server is reachable and responding
logger.info("[SSH Resource Factory] SSH handshake event received - setting handshakeComplete to true");
handshakeComplete = true;
client.end();
resolve();
});
client.on("timeout", () => {
logger.error("[SSH Resource Factory] SSH connection timeout");
reject(new Error("Connection timeout"));
});
// Attempt connection with a dummy username (we don't care about auth success)
// The goal is just to verify the SSH server is reachable and responding

client.connect({
host: "localhost",
port: proxyPort,
username: "infisical-connection-test",
password: "infisical-connection-test-password",
readyTimeout: EXTERNAL_REQUEST_TIMEOUT,
tryKeyboard: false,
// We want to fail fast on auth, we're just testing reachability
authHandler: () => {
// If authHandler is called, SSH handshake succeeded
handshakeComplete = true;
return false; // Don't continue with auth
}
});
});
});
return connectionDetails;
} catch (error) {
throw new BadRequestError({
message: `Unable to validate connection to ${resourceType}: ${(error as Error).message || String(error)}`
});
}
};
const validateAccountCredentials: TPamResourceFactoryValidateAccountCredentials<TSSHAccountCredentials> = async (
credentials
) => {
try {
await executeWithGateway({ connectionDetails, gatewayId, resourceType }, gatewayV2Service, async (proxyPort) => {
return new Promise<void>((resolve, reject) => {
const client = new Client();
client.on("ready", () => {
logger.info(
{ username: credentials.username, authMethod: credentials.authMethod },
"[SSH Resource Factory] SSH authentication successful"
);
client.end();
resolve();
});
client.on("error", (err) => {
logger.error(
{ error: err.message, username: credentials.username, authMethod: credentials.authMethod },
"[SSH Resource Factory] SSH authentication failed"
);
reject(err);
});
client.on("timeout", () => {
logger.error(
{ username: credentials.username, authMethod: credentials.authMethod },
"[SSH Resource Factory] SSH authentication timeout"
);
reject(new Error("Connection timeout"));
});
// Build connection config based on auth method
const baseConfig = {
host: "localhost",
port: proxyPort,
username: credentials.username,
readyTimeout: EXTERNAL_REQUEST_TIMEOUT
};
switch (credentials.authMethod) {
case SSHAuthMethod.Password:
client.connect({
...baseConfig,
password: credentials.password,
tryKeyboard: false
});
break;
case SSHAuthMethod.PublicKey:
client.connect({
...baseConfig,
privateKey: credentials.privateKey,
tryKeyboard: false
});
break;
default:
reject(new Error(`Unsupported SSH auth method: ${(credentials as TSSHAccountCredentials).authMethod}`));
}
});
});
return credentials;
} catch (error) {
if (error instanceof Error) {
// Check for common authentication failure messages
if (
error.message.includes("authentication") ||
error.message.includes("All configured authentication methods failed") ||
error.message.includes("publickey")
) {
throw new BadRequestError({
message: "Account credentials invalid."
});
}
if (error.message === "Connection timeout") {
throw new BadRequestError({
message: "Connection timeout. Verify that the SSH server is reachable"
});
}
}
throw new BadRequestError({
message: `Unable to validate account credentials for ${resourceType}: ${(error as Error).message || String(error)}`
});
}
};
const rotateAccountCredentials: TPamResourceFactoryRotateAccountCredentials<TSSHAccountCredentials> = async (
rotationAccountCredentials
) => {
return rotationAccountCredentials;
};
const handleOverwritePreventionForCensoredValues = async (
updatedAccountCredentials: TSSHAccountCredentials,
currentCredentials: TSSHAccountCredentials
) => {
if (updatedAccountCredentials.authMethod !== currentCredentials.authMethod) {
return updatedAccountCredentials;
}
if (
updatedAccountCredentials.authMethod === SSHAuthMethod.Password &&
currentCredentials.authMethod === SSHAuthMethod.Password
) {
if (updatedAccountCredentials.password === "__INFISICAL_UNCHANGED__") {
return {
...updatedAccountCredentials,
password: currentCredentials.password
};
}
}
if (
updatedAccountCredentials.authMethod === SSHAuthMethod.PublicKey &&
currentCredentials.authMethod === SSHAuthMethod.PublicKey
) {
if (updatedAccountCredentials.privateKey === "__INFISICAL_UNCHANGED__") {
return {
...updatedAccountCredentials,
privateKey: currentCredentials.privateKey
};
}
}
return updatedAccountCredentials;
};
return {
validateConnection,
validateAccountCredentials,
rotateAccountCredentials,
handleOverwritePreventionForCensoredValues
};
};

View File

@@ -0,0 +1,117 @@
import { z } from "zod";
import { PamResource } from "../pam-resource-enums";
import {
BaseCreatePamAccountSchema,
BaseCreatePamResourceSchema,
BasePamAccountSchema,
BasePamAccountSchemaWithResource,
BasePamResourceSchema,
BaseUpdatePamAccountSchema,
BaseUpdatePamResourceSchema
} from "../pam-resource-schemas";
import { SSHAuthMethod } from "./ssh-resource-enums";
export const BaseSSHResourceSchema = BasePamResourceSchema.extend({ resourceType: z.literal(PamResource.SSH) });
export const SSHResourceListItemSchema = z.object({
name: z.literal("SSH"),
resource: z.literal(PamResource.SSH)
});
export const SSHResourceConnectionDetailsSchema = z.object({
host: z.string().trim().max(255),
port: z.number()
});
export const SSHPasswordCredentialsSchema = z.object({
authMethod: z.literal(SSHAuthMethod.Password),
username: z.string().trim().max(255),
password: z.string().trim().max(255)
});
export const SSHPublicKeyCredentialsSchema = z.object({
authMethod: z.literal(SSHAuthMethod.PublicKey),
username: z.string().trim().max(255),
privateKey: z.string().trim().max(5000)
});
export const SSHCertificateCredentialsSchema = z.object({
authMethod: z.literal(SSHAuthMethod.Certificate),
username: z.string().trim().max(255)
});
export const SSHAccountCredentialsSchema = z.discriminatedUnion("authMethod", [
SSHPasswordCredentialsSchema,
SSHPublicKeyCredentialsSchema,
SSHCertificateCredentialsSchema
]);
export const SSHResourceSchema = BaseSSHResourceSchema.extend({
connectionDetails: SSHResourceConnectionDetailsSchema,
rotationAccountCredentials: SSHAccountCredentialsSchema.nullable().optional()
});
export const SanitizedSSHResourceSchema = BaseSSHResourceSchema.extend({
connectionDetails: SSHResourceConnectionDetailsSchema,
rotationAccountCredentials: z
.discriminatedUnion("authMethod", [
z.object({
authMethod: z.literal(SSHAuthMethod.Password),
username: z.string()
}),
z.object({
authMethod: z.literal(SSHAuthMethod.PublicKey),
username: z.string()
}),
z.object({
authMethod: z.literal(SSHAuthMethod.Certificate),
username: z.string()
})
])
.nullable()
.optional()
});
export const CreateSSHResourceSchema = BaseCreatePamResourceSchema.extend({
connectionDetails: SSHResourceConnectionDetailsSchema,
rotationAccountCredentials: SSHAccountCredentialsSchema.nullable().optional()
});
export const UpdateSSHResourceSchema = BaseUpdatePamResourceSchema.extend({
connectionDetails: SSHResourceConnectionDetailsSchema.optional(),
rotationAccountCredentials: SSHAccountCredentialsSchema.nullable().optional()
});
// Accounts
export const SSHAccountSchema = BasePamAccountSchema.extend({
credentials: SSHAccountCredentialsSchema
});
export const CreateSSHAccountSchema = BaseCreatePamAccountSchema.extend({
credentials: SSHAccountCredentialsSchema
});
export const UpdateSSHAccountSchema = BaseUpdatePamAccountSchema.extend({
credentials: SSHAccountCredentialsSchema.optional()
});
export const SanitizedSSHAccountWithResourceSchema = BasePamAccountSchemaWithResource.extend({
credentials: z.discriminatedUnion("authMethod", [
z.object({
authMethod: z.literal(SSHAuthMethod.Password),
username: z.string()
}),
z.object({
authMethod: z.literal(SSHAuthMethod.PublicKey),
username: z.string()
}),
z.object({
authMethod: z.literal(SSHAuthMethod.Certificate),
username: z.string()
})
])
});
// Sessions
export const SSHSessionCredentialsSchema = SSHResourceConnectionDetailsSchema.and(SSHAccountCredentialsSchema);
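As a quick illustration of how the discriminated union above behaves (input values are invented; `SSHAccountCredentialsSchema` is the schema exported above):

```typescript
// `authMethod` selects the variant; zod then enforces that variant's
// required fields and (in default "strip" mode) drops unknown keys.
const creds = SSHAccountCredentialsSchema.parse({
  authMethod: "password",
  username: "deploy",
  password: "hunter2"
});

if (creds.authMethod === "password") {
  // TypeScript narrows the union here, so `creds.password` is available.
  console.log(creds.username);
}

// Missing the variant's required field fails validation:
const bad = SSHAccountCredentialsSchema.safeParse({
  authMethod: "public-key",
  username: "deploy"
});
console.log(bad.success); // false — `privateKey` is required for public-key
```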

View File

@@ -0,0 +1,16 @@
import { z } from "zod";
import {
SSHAccountCredentialsSchema,
SSHAccountSchema,
SSHResourceConnectionDetailsSchema,
SSHResourceSchema
} from "./ssh-resource-schemas";
// Resources
export type TSSHResource = z.infer<typeof SSHResourceSchema>;
export type TSSHResourceConnectionDetails = z.infer<typeof SSHResourceConnectionDetailsSchema>;
// Accounts
export type TSSHAccount = z.infer<typeof SSHAccountSchema>;
export type TSSHAccountCredentials = z.infer<typeof SSHAccountCredentialsSchema>;

View File

@@ -2,7 +2,7 @@ import { TPamSessions } from "@app/db/schemas";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { TPamSanitizedSession, TPamSessionCommandLog } from "./pam-session.types";
import { TPamSanitizedSession, TPamSessionCommandLog, TTerminalEvent } from "./pam-session-types";
export const decryptSessionCommandLogs = async ({
projectId,
@@ -22,7 +22,7 @@ export const decryptSessionCommandLogs = async ({
cipherTextBlob: encryptedLogs
});
return JSON.parse(decryptedPlainTextBlob.toString()) as TPamSessionCommandLog;
return JSON.parse(decryptedPlainTextBlob.toString()) as (TPamSessionCommandLog | TTerminalEvent)[];
};
export const decryptSession = async (
@@ -32,7 +32,7 @@ export const decryptSession = async (
) => {
return {
...session,
commandLogs: session.encryptedLogsBlob
logs: session.encryptedLogsBlob
? await decryptSessionCommandLogs({
projectId,
encryptedLogs: session.encryptedLogsBlob,

View File

@@ -8,8 +8,18 @@ export const PamSessionCommandLogSchema = z.object({
timestamp: z.coerce.date()
});
// SSH Terminal Event schemas
export const TerminalEventTypeSchema = z.enum(["input", "output", "resize", "error"]);
export const TerminalEventSchema = z.object({
timestamp: z.coerce.date(),
eventType: TerminalEventTypeSchema,
data: z.string(), // Base64 encoded binary data
elapsedTime: z.number() // Seconds since session start (for replay)
});
export const SanitizedSessionSchema = PamSessionsSchema.omit({
encryptedLogsBlob: true
}).extend({
commandLogs: PamSessionCommandLogSchema.array()
logs: z.array(z.union([PamSessionCommandLogSchema, TerminalEventSchema]))
});
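For reference, one replayable terminal event validated against the schema above might look like this (values invented):

```typescript
// One output chunk from an SSH session recording. `data` is base64 so raw
// terminal bytes (including control sequences) survive JSON round-trips, and
// `elapsedTime` lets a player re-emit events with the original pacing.
const event = TerminalEventSchema.parse({
  timestamp: "2025-11-20T16:10:49.000Z", // z.coerce.date() accepts ISO strings
  eventType: "output",
  data: Buffer.from("welcome to prod-box-01\r\n").toString("base64"),
  elapsedTime: 1.25
});

console.log(event.timestamp instanceof Date); // true
```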

View File

@@ -12,10 +12,10 @@ import { TProjectDALFactory } from "@app/services/project/project-dal";
import { TLicenseServiceFactory } from "../license/license-service";
import { OrgPermissionGatewayActions, OrgPermissionSubjects } from "../permission/org-permission";
import { ProjectPermissionPamSessionActions, ProjectPermissionSub } from "../permission/project-permission";
import { TUpdateSessionLogsDTO } from "./pam-session.types";
import { TPamSessionDALFactory } from "./pam-session-dal";
import { PamSessionStatus } from "./pam-session-enums";
import { decryptSession } from "./pam-session-fns";
import { TUpdateSessionLogsDTO } from "./pam-session-types";
type TPamSessionServiceFactoryDep = {
pamSessionDAL: TPamSessionDALFactory;

View File

@@ -1,12 +1,13 @@
import { z } from "zod";
import { PamSessionCommandLogSchema, SanitizedSessionSchema } from "./pam-session-schemas";
import { PamSessionCommandLogSchema, SanitizedSessionSchema, TerminalEventSchema } from "./pam-session-schemas";
export type TPamSessionCommandLog = z.infer<typeof PamSessionCommandLogSchema>;
export type TTerminalEvent = z.infer<typeof TerminalEventSchema>;
export type TPamSanitizedSession = z.infer<typeof SanitizedSessionSchema>;
// DTOs
export type TUpdateSessionLogsDTO = {
sessionId: string;
logs: TPamSessionCommandLog[];
logs: (TPamSessionCommandLog | TTerminalEvent)[];
};

View File

@@ -74,7 +74,12 @@ export const pkiAcmeChallengeServiceFactory = ({
// Note: we are inside a transaction here; ideally we should not hold the transaction open
// while performing a long-running operation. But assuming we are not performing tons of
// challenge validations at the same time, it should be fine.
const challengeResponse = await fetch(challengeUrl, { signal: AbortSignal.timeout(timeoutMs) });
const challengeResponse = await fetch(challengeUrl, {
// In case we override the host in development mode, still provide the original host in the header
// to help the upstream server validate the request
headers: { Host: host },
signal: AbortSignal.timeout(timeoutMs)
});
if (challengeResponse.status !== 200) {
throw new AcmeIncorrectResponseError({
message: `ACME challenge response is not 200: ${challengeResponse.status}`

View File

@@ -58,7 +58,15 @@ export const GetAcmeDirectoryResponseSchema = z.object({
newNonce: z.string(),
newAccount: z.string(),
newOrder: z.string(),
revokeCert: z.string().optional()
revokeCert: z.string().optional(),
meta: z
.object({
termsOfService: z.string().optional(),
website: z.string().optional(),
caaIdentities: z.array(z.string()).optional(),
externalAccountRequired: z.boolean().optional()
})
.optional()
});
// New Account payload schema
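A directory document matching the extended schema might look roughly like this (URLs invented for illustration):

```typescript
const directory = GetAcmeDirectoryResponseSchema.parse({
  newNonce: "https://pki.example.com/acme/new-nonce",
  newAccount: "https://pki.example.com/acme/new-account",
  newOrder: "https://pki.example.com/acme/new-order",
  meta: {
    // Per RFC 8555 §7.1.1, a true value tells clients they must include an
    // external account binding (EAB) when creating an account.
    externalAccountRequired: true
  }
});
console.log(directory.meta?.externalAccountRequired); // true
```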

View File

@@ -206,6 +206,9 @@ export const pkiAcmeServiceFactory = ({
const { protectedHeader: rawProtectedHeader, payload: rawPayload } = result;
try {
const protectedHeader = ProtectedHeaderSchema.parse(rawProtectedHeader);
if (protectedHeader.jwk && protectedHeader.kid) {
throw new AcmeMalformedError({ message: "Both JWK and KID are provided in the protected header" });
}
const parsedUrl = (() => {
try {
return new URL(protectedHeader.url);
@@ -288,6 +291,7 @@ export const pkiAcmeServiceFactory = ({
url,
rawJwsPayload,
getJWK: async (protectedHeader) => {
// resolve the stored JWK from the KID (the client sent a KID rather than an inline JWK)
if (!protectedHeader.kid) {
throw new AcmeMalformedError({ message: "KID is required in the protected header" });
}
@@ -353,7 +357,10 @@ export const pkiAcmeServiceFactory = ({
return {
newNonce: buildUrl(profile.id, "/new-nonce"),
newAccount: buildUrl(profile.id, "/new-account"),
newOrder: buildUrl(profile.id, "/new-order")
newOrder: buildUrl(profile.id, "/new-order"),
meta: {
externalAccountRequired: true
}
};
};
@@ -386,11 +393,61 @@ export const pkiAcmeServiceFactory = ({
payload: TCreateAcmeAccountPayload;
}): Promise<TAcmeResponse<TCreateAcmeAccountResponse>> => {
const profile = await validateAcmeProfile(profileId);
const publicKeyThumbprint = await calculateJwkThumbprint(jwk, "sha256");
const existingAccount: TPkiAcmeAccounts | null = await acmeAccountDAL.findByProfileIdAndPublicKeyThumbprintAndAlg(
profileId,
alg,
publicKeyThumbprint
);
if (onlyReturnExisting) {
if (!existingAccount) {
throw new AcmeAccountDoesNotExistError({ message: "ACME account not found" });
}
return {
status: 200,
body: {
status: "valid",
contact: existingAccount.emails,
orders: buildUrl(profile.id, `/accounts/${existingAccount.id}/orders`)
},
headers: {
Location: buildUrl(profile.id, `/accounts/${existingAccount.id}`),
Link: `<${buildUrl(profile.id, "/directory")}>;rel="index"`
}
};
}
// Note: We only check EAB for genuinely new account requests. This is a very special case for cert-manager.
// There's a bug in their ACME client implementation: they don't keep the account KID value they already have
// and instead rely on a '{"onlyReturnExisting": true}' new-account request to find out their KID value.
// The problem is that this new-account request doesn't come with EAB. And when the get-existing-account
// operation fails, they just discard the error and proceed to request a new order. Since no KID is provided,
// their ACME client sends a JWK instead. As a result, we see a "KID not provided in header" error on the
// new-order endpoint.
//
// To solve the problem, we loosen the EAB check for the onlyReturnExisting new-account request.
// This should be fine as we've already checked EAB when the account was created,
// and ownership of the private key indicates it is the same user.
// ref: https://github.com/cert-manager/cert-manager/issues/7388#issuecomment-3535630925
if (!externalAccountBinding) {
throw new AcmeExternalAccountRequiredError({ message: "External account binding is required" });
}
if (existingAccount) {
return {
status: 200,
body: {
status: "valid",
contact: existingAccount.emails,
orders: buildUrl(profile.id, `/accounts/${existingAccount.id}/orders`)
},
headers: {
Location: buildUrl(profile.id, `/accounts/${existingAccount.id}`),
Link: `<${buildUrl(profile.id, "/directory")}>;rel="index"`
}
};
}
const publicKeyThumbprint = await calculateJwkThumbprint(jwk, "sha256");
const certificateManagerKmsId = await getProjectKmsCertificateKeyId({
projectId: profile.projectId,
projectDAL,
@@ -441,30 +498,7 @@ export const pkiAcmeServiceFactory = ({
});
}
const existingAccount: TPkiAcmeAccounts | null = await acmeAccountDAL.findByProfileIdAndPublicKeyThumbprintAndAlg(
profileId,
alg,
publicKeyThumbprint
);
if (onlyReturnExisting && !existingAccount) {
throw new AcmeAccountDoesNotExistError({ message: "ACME account not found" });
}
if (existingAccount) {
// With the same public key, we found an existing account, just return it
return {
status: 200,
body: {
status: "valid",
contact: existingAccount.emails,
orders: buildUrl(profile.id, `/accounts/${existingAccount.id}/orders`)
},
headers: {
Location: buildUrl(profile.id, `/accounts/${existingAccount.id}`),
Link: `<${buildUrl(profile.id, "/directory")}>;rel="index"`
}
};
}
// TODO: handle unique constraint violation error, should be very very rare
const newAccount = await acmeAccountDAL.create({
profileId: profile.id,
alg,

View File

@@ -354,16 +354,21 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
(tx || db.replicaNode())(TableName.SecretApprovalRequest)
.join(TableName.SecretFolder, `${TableName.SecretApprovalRequest}.folderId`, `${TableName.SecretFolder}.id`)
.join(TableName.Environment, `${TableName.SecretFolder}.envId`, `${TableName.Environment}.id`)
.join(
TableName.SecretApprovalPolicyApprover,
`${TableName.SecretApprovalRequest}.policyId`,
`${TableName.SecretApprovalPolicyApprover}.policyId`
)
.join(
TableName.SecretApprovalPolicy,
`${TableName.SecretApprovalRequest}.policyId`,
`${TableName.SecretApprovalPolicy}.id`
)
.leftJoin(
TableName.SecretApprovalPolicyApprover,
`${TableName.SecretApprovalPolicy}.id`,
`${TableName.SecretApprovalPolicyApprover}.policyId`
)
.leftJoin(
TableName.UserGroupMembership,
`${TableName.SecretApprovalPolicyApprover}.approverGroupId`,
`${TableName.UserGroupMembership}.groupId`
)
.where({ projectId })
.where((qb) => {
if (policyId) void qb.where(`${TableName.SecretApprovalPolicy}.id`, policyId);
@@ -373,10 +378,10 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
void bd
.where(`${TableName.SecretApprovalPolicyApprover}.approverUserId`, userId)
.orWhere(`${TableName.SecretApprovalRequest}.committerUserId`, userId)
.orWhere(`${TableName.UserGroupMembership}.userId`, userId)
)
.select("status", `${TableName.SecretApprovalRequest}.id`)
.groupBy(`${TableName.SecretApprovalRequest}.id`, "status")
.count("status")
)
.select("status")
.from("temp")
@@ -499,7 +504,6 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
const query = (tx || db.replicaNode())
.select("*")
.select(db.raw("count(*) OVER() as total_count"))
.from(innerQuery)
.orderBy("createdAt", "desc") as typeof innerQuery;
@@ -519,6 +523,14 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
});
}
// eslint-disable-next-line @typescript-eslint/no-unsafe-assignment
const countResult = await (tx || db.replicaNode())
.count({ count: "*" })
.from(query.clone().as("count_query"))
.first();
// eslint-disable-next-line @typescript-eslint/no-unsafe-member-access
const totalCount = Number(countResult?.count || 0);
const docs = await (tx || db)
.with("w", query)
.select("*")
@@ -526,9 +538,6 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
.where("w.rank", ">=", offset)
.andWhere("w.rank", "<", offset + limit);
// @ts-expect-error knex does not infer
const totalCount = Number(docs[0]?.total_count || 0);
const formattedDoc = sqlNestRelationships({
data: docs,
key: "id",

View File

@@ -584,6 +584,10 @@ export const TOKEN_AUTH = {
offset: "The offset to start from. If you enter 10, it will start from the 10th token.",
limit: "The number of tokens to return."
},
GET_TOKEN: {
identityId: "The ID of the machine identity to get the token for.",
tokenId: "The ID of the token to get metadata for."
},
CREATE_TOKEN: {
identityId: "The ID of the machine identity to create the token for.",
name: "The name of the token to create."

View File

@@ -400,7 +400,7 @@ const envSchema = z
isAcmeDevelopmentMode: data.NODE_ENV === "development" && data.ACME_DEVELOPMENT_MODE,
isProductionMode: data.NODE_ENV === "production" || IS_PACKAGED,
isRedisSentinelMode: Boolean(data.REDIS_SENTINEL_HOSTS),
isBddNockApiEnabled: data.NODE_ENV === "development" && data.BDD_NOCK_API_ENABLED,
isBddNockApiEnabled: data.NODE_ENV !== "production" && data.BDD_NOCK_API_ENABLED,
REDIS_SENTINEL_HOSTS: data.REDIS_SENTINEL_HOSTS?.trim()
?.split(",")
.map((el) => {

View File

@@ -0,0 +1,104 @@
import type { Definition } from "nock";
import { z } from "zod";
import { getConfig } from "@app/lib/config/env";
import { ForbiddenRequestError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
// When running in production, we don't even want to import nock, because it's not needed and it increases memory usage a lot.
// It once caused an outage in the production environment.
// This is why we would rather crash the app if this module somehow loads in production (in that case, Kubernetes should stop it from rolling out).
if (process.env.NODE_ENV === "production") {
throw new Error("BDD Nock API can only be enabled in development or test mode");
}
export const registerBddNockRouter = async (server: FastifyZodProvider) => {
const appCfg = getConfig();
const importNock = async () => {
// eslint-disable-next-line import/no-extraneous-dependencies
const { default: nock } = await import("nock");
return nock;
};
const checkIfBddNockApiEnabled = () => {
// Note: this API is only available in development mode and only for BDD tests.
// This endpoint should NEVER BE ENABLED IN PRODUCTION!
if (appCfg.NODE_ENV === "production" || !appCfg.isBddNockApiEnabled) {
throw new ForbiddenRequestError({ message: "BDD Nock API is not enabled" });
}
};
server.route({
method: "POST",
url: "/define",
schema: {
body: z.object({ definitions: z.unknown().array() }),
response: {
200: z.object({ status: z.string() })
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
checkIfBddNockApiEnabled();
const { body } = req;
const { definitions } = body;
logger.info(definitions, "Defining nock");
const processedDefinitions = definitions.map((definition: unknown) => {
const { path, ...rest } = definition as Definition;
return {
...rest,
path:
path !== undefined && typeof path === "string"
? path
: new RegExp((path as unknown as { regex: string }).regex ?? "")
} as Definition;
});
const nock = await importNock();
nock.define(processedDefinitions);
// Ensure we are activating the nocks, because we could have called `nock.restore()` before this call.
if (!nock.isActive()) {
nock.activate();
}
return { status: "ok" };
}
});
server.route({
method: "POST",
url: "/clean-all",
schema: {
response: {
200: z.object({ status: z.string() })
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async () => {
checkIfBddNockApiEnabled();
logger.info("Cleaning all nocks");
const nock = await importNock();
nock.cleanAll();
return { status: "ok" };
}
});
server.route({
method: "POST",
url: "/restore",
schema: {
response: {
200: z.object({ status: z.string() })
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async () => {
checkIfBddNockApiEnabled();
logger.info("Restore network requests from nock");
const nock = await importNock();
nock.restore();
return { status: "ok" };
}
});
};
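To make the intended usage concrete, a test client might drive these endpoints roughly like this (a sketch only; the `/api/__bdd_nock__` prefix comes from the route registration later in this diff, and the token and definition values are invented):

```typescript
// Hypothetical test-side helper: registers a mocked upstream response before a
// scenario runs. Requires a valid JWT because the routes are guarded by
// verifyAuth([AuthMode.JWT]). Call POST /clean-all afterwards to reset.
const defineNock = async (baseUrl: string, jwt: string) => {
  await fetch(`${baseUrl}/api/__bdd_nock__/define`, {
    method: "POST",
    headers: { "Content-Type": "application/json", Authorization: `Bearer ${jwt}` },
    body: JSON.stringify({
      definitions: [
        {
          scope: "https://acme.example.com",
          method: "GET",
          // Non-string paths are sent as { regex } and revived server-side.
          path: { regex: "^/directory$" },
          status: 200,
          response: { newNonce: "https://acme.example.com/new-nonce" }
        }
      ]
    })
  });
};
```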

View File

@@ -0,0 +1,6 @@
export const registerBddNockRouter = async () => {
// This route is only available in development or test mode.
// The actual implementation is in the dev.ts file and will be aliased to that file in development or test mode.
// And if somehow we try to enable it in production, we will throw an error.
throw new Error("BDD Nock should not be enabled in production");
};

View File

@@ -1,3 +1,4 @@
import { registerBddNockRouter } from "@bdd_routes/bdd-nock-router";
import { CronJob } from "cron";
import { Knex } from "knex";
import { monitorEventLoopDelay } from "perf_hooks";
@@ -2431,6 +2432,7 @@ export const registerRoutes = async (
}
}
await kmsService.startService(hsmStatus);
await telemetryQueue.startTelemetryCheck();
await telemetryQueue.startAggregatedEventsJob();
await dailyResourceCleanUp.init();
@@ -2443,7 +2445,6 @@ export const registerRoutes = async (
await pkiSubscriberQueue.startDailyAutoRenewalJob();
await pkiAlertV2Queue.init();
await certificateV3Queue.init();
await kmsService.startService(hsmStatus);
await microsoftTeamsService.start();
await dynamicSecretQueueService.init();
await eventBusService.init();
@@ -2698,6 +2699,12 @@ export const registerRoutes = async (
await server.register(registerV3Routes, { prefix: "/api/v3" });
await server.register(registerV4Routes, { prefix: "/api/v4" });
// Note: This is a special route for BDD tests and is only available in development mode.
// This route should NEVER BE ENABLED IN PRODUCTION!
if (getConfig().isBddNockApiEnabled) {
await server.register(registerBddNockRouter, { prefix: "/api/__bdd_nock__" });
}
server.addHook("onClose", async () => {
cronJobs.forEach((job) => job.stop());
await telemetryService.flushAll();

View File

@@ -9,6 +9,8 @@ import {
SuperAdminSchema,
UsersSchema
} from "@app/db/schemas";
import { getLicenseKeyConfig } from "@app/ee/services/license/license-fns";
import { LicenseType } from "@app/ee/services/license/license-types";
import { getConfig, overridableKeys } from "@app/lib/config/env";
import { crypto } from "@app/lib/crypto/cryptography";
import { BadRequestError } from "@app/lib/errors";
@@ -65,6 +67,9 @@ export const registerAdminRouter = async (server: FastifyZodProvider) => {
const config = await getServerCfg();
const serverEnvs = getConfig();
const licenseKeyConfig = getLicenseKeyConfig();
const hasOfflineLicense = licenseKeyConfig.isValid && licenseKeyConfig.type === LicenseType.Offline;
return {
config: {
...config,
@@ -73,7 +78,7 @@ export const registerAdminRouter = async (server: FastifyZodProvider) => {
isSecretScanningDisabled: serverEnvs.DISABLE_SECRET_SCANNING,
kubernetesAutoFetchServiceAccountToken: serverEnvs.KUBERNETES_AUTO_FETCH_SERVICE_ACCOUNT_TOKEN,
paramsFolderSecretDetectionEnabled: serverEnvs.PARAMS_FOLDER_SECRET_DETECTION_ENABLED,
isOfflineUsageReportsEnabled: !!serverEnvs.LICENSE_KEY_OFFLINE
isOfflineUsageReportsEnabled: hasOfflineLicense
}
};
}

View File

@@ -1,87 +0,0 @@
// import { z } from "zod";
// import { getConfig } from "@app/lib/config/env";
// import { ForbiddenRequestError } from "@app/lib/errors";
// import { logger } from "@app/lib/logger";
// import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
// import { AuthMode } from "@app/services/auth/auth-type";
// export const registerBddNockRouter = async (server: FastifyZodProvider) => {
// const checkIfBddNockApiEnabled = () => {
// const appCfg = getConfig();
// // Note: Please note that this API is only available in development mode and only for BDD tests.
// // This endpoint should NEVER BE ENABLED IN PRODUCTION!
// if (appCfg.NODE_ENV !== "development" || !appCfg.isBddNockApiEnabled) {
// throw new ForbiddenRequestError({ message: "BDD Nock API is not enabled" });
// }
// };
// server.route({
// method: "POST",
// url: "/define",
// schema: {
// body: z.object({ definitions: z.unknown().array() }),
// response: {
// 200: z.object({ status: z.string() })
// }
// },
// onRequest: verifyAuth([AuthMode.JWT]),
// handler: async (req) => {
// checkIfBddNockApiEnabled();
// const { body } = req;
// const { definitions } = body;
// logger.info(definitions, "Defining nock");
// const processedDefinitions = definitions.map((definition: unknown) => {
// const { path, ...rest } = definition as Definition;
// return {
// ...rest,
// path:
// path !== undefined && typeof path === "string"
// ? path
// : new RegExp((path as unknown as { regex: string }).regex ?? "")
// } as Definition;
// });
// nock.define(processedDefinitions);
// // Ensure we are activating the nocks, because we could have called `nock.restore()` before this call.
// if (!nock.isActive()) {
// nock.activate();
// }
// return { status: "ok" };
// }
// });
// server.route({
// method: "POST",
// url: "/clean-all",
// schema: {
// response: {
// 200: z.object({ status: z.string() })
// }
// },
// onRequest: verifyAuth([AuthMode.JWT]),
// handler: async () => {
// checkIfBddNockApiEnabled();
// logger.info("Cleaning all nocks");
// nock.cleanAll();
// return { status: "ok" };
// }
// });
// server.route({
// method: "POST",
// url: "/restore",
// schema: {
// response: {
// 200: z.object({ status: z.string() })
// }
// },
// onRequest: verifyAuth([AuthMode.JWT]),
// handler: async () => {
// checkIfBddNockApiEnabled();
// logger.info("Restore network requests from nock");
// nock.restore();
// return { status: "ok" };
// }
// });
// };

View File

@@ -314,7 +314,8 @@ export const registerIdentityTokenAuthRouter = async (server: FastifyZodProvider
accessToken: z.string(),
expiresIn: z.coerce.number(),
accessTokenMaxTTL: z.coerce.number(),
tokenType: z.literal("Bearer")
tokenType: z.literal("Bearer"),
tokenData: IdentityAccessTokensSchema
})
}
},
@@ -346,7 +347,8 @@ export const registerIdentityTokenAuthRouter = async (server: FastifyZodProvider
accessToken,
tokenType: "Bearer" as const,
expiresIn: identityTokenAuth.accessTokenTTL,
accessTokenMaxTTL: identityTokenAuth.accessTokenMaxTTL
accessTokenMaxTTL: identityTokenAuth.accessTokenMaxTTL,
tokenData: identityAccessToken
};
}
});
@@ -406,6 +408,60 @@ export const registerIdentityTokenAuthRouter = async (server: FastifyZodProvider
}
});
server.route({
method: "GET",
url: "/token-auth/identities/:identityId/tokens/:tokenId",
config: {
rateLimit: readLimit
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
hide: false,
tags: [ApiDocsTags.TokenAuth],
description: "Get token for machine identity with Token Auth",
security: [
{
bearerAuth: []
}
],
params: z.object({
identityId: z.string().describe(TOKEN_AUTH.GET_TOKEN.identityId),
tokenId: z.string().describe(TOKEN_AUTH.GET_TOKEN.tokenId)
}),
response: {
200: z.object({
token: IdentityAccessTokensSchema
})
}
},
handler: async (req) => {
const { token, identityMembershipOrg } = await server.services.identityTokenAuth.getTokenAuthTokenById({
identityId: req.params.identityId,
tokenId: req.params.tokenId,
actor: req.permission.type,
actorId: req.permission.id,
actorOrgId: req.permission.orgId,
actorAuthMethod: req.permission.authMethod,
isActorSuperAdmin: isSuperAdmin(req.auth)
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
orgId: identityMembershipOrg.scopeOrgId,
event: {
type: EventType.GET_TOKEN_IDENTITY_TOKEN_AUTH,
metadata: {
identityId: token.identityId,
identityName: identityMembershipOrg.identity.name,
tokenId: token.id
}
}
});
return { token };
}
});
server.route({
method: "PATCH",
url: "/token-auth/tokens/:tokenId",

View File

@@ -8,7 +8,6 @@ import { registerSecretSyncRouter, SECRET_SYNC_REGISTER_ROUTER_MAP } from "@app/
import { registerAdminRouter } from "./admin-router";
import { registerAuthRoutes } from "./auth-router";
// import { registerBddNockRouter } from "./bdd-nock-router";
import { registerProjectBotRouter } from "./bot-router";
import { registerCaRouter } from "./certificate-authority-router";
import { CERTIFICATE_AUTHORITY_REGISTER_ROUTER_MAP } from "./certificate-authority-routers";
@@ -238,10 +237,4 @@ export const registerV1Routes = async (server: FastifyZodProvider) => {
await server.register(registerEventRouter, { prefix: "/events" });
await server.register(registerUpgradePathRouter, { prefix: "/upgrade-path" });
// Note: This is a special route for BDD tests. It's only available in development mode and only for BDD tests.
// This route should NEVER BE ENABLED IN PRODUCTION!
// if (getConfig().isBddNockApiEnabled) {
// await server.register(registerBddNockRouter, { prefix: "/bdd-nock" });
// }
};

View File

@@ -430,7 +430,10 @@ describe("CertificateProfileService", () => {
projectId: "project-123",
data: {
...validProfileData,
enrollmentType: EnrollmentType.ACME
enrollmentType: EnrollmentType.ACME,
acmeConfig: {},
apiConfig: undefined,
estConfig: undefined
}
})
).rejects.toThrowError(

View File

@@ -213,7 +213,7 @@ export const certificateProfileServiceFactory = ({
throw new NotFoundError({ message: "Project not found" });
}
const plan = await licenseService.getPlan(project.orgId);
if (!plan.pkiAcme) {
if (!plan.pkiAcme && data.enrollmentType === EnrollmentType.ACME) {
throw new BadRequestError({
message: "Failed to create certificate profile: Plan restriction. Upgrade plan to continue"
});

View File

@@ -18,7 +18,6 @@ export const identityAccessTokenDALFactory = (db: TDbClient) => {
.where(filter)
.join(TableName.Identity, `${TableName.Identity}.id`, `${TableName.IdentityAccessToken}.identityId`)
.select(selectAllTableCols(TableName.IdentityAccessToken))
.select(db.ref("name").withSchema(TableName.Identity))
.select(db.ref("orgId").withSchema(TableName.Identity).as("identityScopeOrgId"))
.first();

View File

@@ -38,6 +38,7 @@ import {
TAttachTokenAuthDTO,
TCreateTokenAuthTokenDTO,
TGetTokenAuthDTO,
TGetTokenAuthTokenByIdDTO,
TGetTokenAuthTokensDTO,
TRevokeTokenAuthDTO,
TRevokeTokenAuthTokenDTO,
@@ -618,6 +619,52 @@ export const identityTokenAuthServiceFactory = ({
return { tokens, identityMembershipOrg };
};
const getTokenAuthTokenById = async ({
tokenId,
identityId,
isActorSuperAdmin,
actorId,
actor,
actorAuthMethod,
actorOrgId
}: TGetTokenAuthTokenByIdDTO) => {
await validateIdentityUpdateForSuperAdminPrivileges(identityId, isActorSuperAdmin);
const identityMembershipOrg = await membershipIdentityDAL.getIdentityById({
scopeData: {
scope: AccessScope.Organization,
orgId: actorOrgId
},
identityId
});
if (!identityMembershipOrg) throw new NotFoundError({ message: `Failed to find identity with ID ${identityId}` });
if (!identityMembershipOrg.identity.authMethods.includes(IdentityAuthMethod.TOKEN_AUTH)) {
throw new BadRequestError({
message: "The identity does not have Token Auth"
});
}
const { permission } = await permissionService.getOrgPermission({
scope: OrganizationActionScope.Any,
actor,
actorId,
orgId: identityMembershipOrg.scopeOrgId,
actorAuthMethod,
actorOrgId
});
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionIdentityActions.Read, OrgPermissionSubjects.Identity);
const token = await identityAccessTokenDAL.findOne({
[`${TableName.IdentityAccessToken}.id` as "id"]: tokenId,
[`${TableName.IdentityAccessToken}.authMethod` as "authMethod"]: IdentityAuthMethod.TOKEN_AUTH,
[`${TableName.IdentityAccessToken}.identityId` as "identityId"]: identityId
});
if (!token) throw new NotFoundError({ message: `Token with ID ${tokenId} not found` });
return { token, identityMembershipOrg };
};
const updateTokenAuthToken = async ({
tokenId,
name,
@@ -797,6 +844,7 @@ export const identityTokenAuthServiceFactory = ({
revokeIdentityTokenAuth,
createTokenAuthToken,
getTokenAuthTokens,
getTokenAuthTokenById,
updateTokenAuthToken,
revokeTokenAuthToken
};
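As a rough usage sketch of the new endpoint wired to this service (the host and `/api/v1/auth` prefix are assumptions about where the token-auth routes are mounted; the IDs and JWT are placeholders):

```typescript
// Hypothetical client-side call against the new GET route shown earlier.
const getTokenMetadata = async (identityId: string, tokenId: string, jwt: string) => {
  const res = await fetch(
    `https://app.infisical.example/api/v1/auth/token-auth/identities/${identityId}/tokens/${tokenId}`,
    { headers: { Authorization: `Bearer ${jwt}` } }
  );
  if (!res.ok) throw new Error(`Failed to fetch token metadata: ${res.status}`);
  // The response body matches { token: IdentityAccessTokensSchema }.
  return (await res.json()) as { token: Record<string, unknown> };
};
```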

View File

@@ -40,6 +40,12 @@ export type TGetTokenAuthTokensDTO = {
isActorSuperAdmin?: boolean;
} & Omit<TProjectPermission, "projectId">;
export type TGetTokenAuthTokenByIdDTO = {
tokenId: string;
identityId: string;
isActorSuperAdmin?: boolean;
} & Omit<TProjectPermission, "projectId">;
export type TUpdateTokenAuthTokenDTO = {
tokenId: string;
name?: string;

View File

@@ -1,7 +1,8 @@
import crypto from "crypto";
import { getLicenseKeyConfig } from "@app/ee/services/license/license-fns";
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { getConfig } from "@app/lib/config/env";
import { LicenseType } from "@app/ee/services/license/license-types";
import { BadRequestError } from "@app/lib/errors";
import { TOfflineUsageReportDALFactory } from "./offline-usage-report-dal";
@@ -30,10 +31,13 @@ export const offlineUsageReportServiceFactory = ({
};
const generateUsageReportCSV = async () => {
const cfg = getConfig();
if (!cfg.LICENSE_KEY_OFFLINE) {
const licenseKeyConfig = getLicenseKeyConfig();
const hasOfflineLicense = licenseKeyConfig.isValid && licenseKeyConfig.type === LicenseType.Offline;
if (!hasOfflineLicense) {
throw new BadRequestError({
message: "Offline usage reports are not enabled. LICENSE_KEY_OFFLINE must be configured."
message:
"Offline usage reports are not enabled. Usage reports are only available for self-hosted offline instances"
});
}

View File

@@ -112,7 +112,7 @@ export const SECRET_SYNC_PLAN_MAP: Record<SecretSync, SecretSyncPlanType> = {
export const SECRET_SYNC_SKIP_FIELDS_MAP: Record<SecretSync, string[]> = {
[SecretSync.AWSParameterStore]: [],
[SecretSync.AWSSecretsManager]: ["mappingBehavior", "secretName"],
[SecretSync.AWSSecretsManager]: ["mappingBehavior"],
[SecretSync.GitHub]: [],
[SecretSync.GCPSecretManager]: [],
[SecretSync.AzureKeyVault]: [],

View File

@@ -0,0 +1,9 @@
{
"extends": "./tsconfig.json",
"compilerOptions": {
"paths": {
"@app/*": ["./src/*"],
"@bdd_routes/bdd-nock-router": ["./src/server/routes/bdd/bdd-nock-router.dev.ts"]
}
}
}

View File

@@ -24,7 +24,8 @@
"skipLibCheck": true,
"baseUrl": ".",
"paths": {
"@app/*": ["./src/*"]
"@app/*": ["./src/*"],
"@bdd_routes/*": ["./src/server/routes/bdd/*"]
},
"jsx": "react-jsx"
},

View File

@@ -2,8 +2,8 @@
import path from "node:path";
import fs from "fs/promises";
import {replaceTscAliasPaths} from "tsc-alias";
import {defineConfig} from "tsup";
import { replaceTscAliasPaths } from "tsc-alias";
import { defineConfig } from "tsup";
// Instead of using tsx or tsc for building, consider using tsup.
// tsx serves as an alternative to Node.js, allowing you to run TypeScript directly on the Node.js runtime.
@@ -29,7 +29,7 @@ export default defineConfig({
external: ["../../../frontend/node_modules/next/dist/server/next-server.js"],
outDir: "dist",
tsconfig: "./tsconfig.json",
entry: ["./src"],
entry: ["./src", "!./src/**/*.dev.ts"],
sourceMap: true,
skipNodeModulesBundle: true,
esbuildPlugins: [
@@ -45,22 +45,22 @@ export default defineConfig({
const isRelativePath = args.path.startsWith(".");
const absPath = isRelativePath
? path.join(args.resolveDir, args.path)
: path.join(args.path.replace("@app", "./src"));
: path.join(args.path.replace("@app", "./src").replace("@bdd_routes", "./src/server/routes/bdd"));
const isFile = await fs
.stat(`${absPath}.ts`)
.then((el) => el.isFile)
.catch(async (err) => {
if (err.code === "ENOTDIR") {
return true;
}
.catch(async (err) => {
if (err.code === "ENOTDIR") {
return true;
}
// If .ts file doesn't exist, try checking for .tsx file
return fs
.stat(`${absPath}.tsx`)
.then((el) => el.isFile)
.catch((err) => err.code === "ENOTDIR");
});
// If .ts file doesn't exist, try checking for .tsx file
return fs
.stat(`${absPath}.tsx`)
.then((el) => el.isFile)
.catch((err) => err.code === "ENOTDIR");
});
return {
path: isFile ? `${args.path}.mjs` : `${args.path}/index.mjs`,

View File

@@ -28,7 +28,8 @@ export default defineConfig({
},
resolve: {
alias: {
"@app": path.resolve(__dirname, "./src")
"@app": path.resolve(__dirname, "./src"),
"@bdd_routes/bdd-nock-router": path.resolve(__dirname, "./src/server/routes/bdd/bdd-nock-router.dev.ts")
}
}
});

View File

@@ -11,7 +11,8 @@ export default defineConfig({
},
resolve: {
alias: {
"@app": path.resolve(__dirname, "./src")
"@app": path.resolve(__dirname, "./src"),
"@bdd_routes/bdd-nock-router": path.resolve(__dirname, "./src/server/routes/bdd/bdd-nock-router.dev.ts")
}
}
});

View File

@@ -71,6 +71,7 @@ services:
ports:
- 4000:4000
- 9464:9464 # for OTEL collection of Prometheus metrics
- 9229:9229 # For debugger access
environment:
- NODE_ENV=development
- DB_CONNECTION_URI=postgres://infisical:infisical@db/infisical?sslmode=disable

File diff suppressed because it is too large

View File

@@ -35,7 +35,7 @@ Infisical consists of several tightly integrated products, each designed to solv
- [Secrets Management](/documentation/platform/secrets-mgmt/overview): Securely store, access, and distribute secrets across environments with fine-grained controls, automatic rotation, and audit logging.
- [Secrets Scanning](/documentation/platform/secret-scanning/overview): Detect hardcoded secrets in code, CI pipelines, and infrastructure—integrated with GitHub, GitLab, Bitbucket, and more.
- [Infisical PKI](/documentation/platform/pki/overview): Issue and manage X.509 certificates using protocols like EST, with support for internal and external CAs.
- [Certificate Management](/documentation/platform/pki/overview): Issue and manage X.509 certificates using protocols like EST, with support for internal and external CAs.
- [Infisical SSH](/documentation/platform/ssh/overview): Provide short-lived SSH access to servers using certificate-based authentication, replacing static keys with policy-driven, time-bound control.
- [Infisical KMS](/documentation/platform/kms/overview): Encrypt and decrypt data using centrally managed keys with enforced access policies and full audit visibility.
- [Infisical PAM](/documentation/platform/pam/overview): Manage access to resources like databases, servers, and accounts with policy-based controls and approvals.

View File

@@ -16,15 +16,37 @@ Key Features:
- Role Assignment: Identities must be assigned [roles](/documentation/platform/access-controls/role-based-access-controls). These roles determine the scope of access to resources, either at the organization level or project level.
- Auth/Token Configuration: Identities must be configured with corresponding authentication methods and access token properties to securely interact with the Infisical API.
## Scopes
Identities can be created either at the organization level or at the project level. Outside of identity management and scope of operation, organization and project identities are functionally identical.
- Project identities are managed at the project level and can only operate within their respective project.
Project-level identities are useful for organizations that delegate responsibility to autonomous teams via projects.
- Organization identities are managed at the organization level and can be assigned to one or more projects, as well as
perform organization-level operations. Organization-level identities are useful for organizations that have cross-project operations.
## Workflow
A typical workflow for using identities consists of four steps:
<Tabs>
<Tab title="Project Identities">
A typical workflow for using project identities consists of three steps:
1. Creating the identity with a name and [role](/documentation/platform/access-controls/role-based-access-controls) in Organization Access Control > Machine Identities.
This step also involves configuring an authentication method for it.
2. Adding the identity to the project(s) you want it to have access to.
3. Authenticating the identity with the Infisical API based on the configured authentication method on it and receiving a short-lived access token back.
4. Authenticating subsequent requests with the Infisical API using the short-lived access token.
1. Creating the identity with a name and [role](/documentation/platform/access-controls/role-based-access-controls) in Project > Access Control > Machine Identities.
This step also involves configuring an authentication method for it.
2. Authenticating the identity with the Infisical API based on the configured authentication method on it and receiving a short-lived access token back.
3. Authenticating subsequent requests with the Infisical API using the short-lived access token.
</Tab>
<Tab title="Organization Identities">
A typical workflow for using organization identities consists of four steps:
1. Creating the identity with a name and [role](/documentation/platform/access-controls/role-based-access-controls) in Organization > Access Control > Machine Identities.
This step also involves configuring an authentication method for it.
2. Adding the identity to the project(s) you want it to have access to.
3. Authenticating the identity with the Infisical API based on the configured authentication method on it and receiving a short-lived access token back.
4. Authenticating subsequent requests with the Infisical API using the short-lived access token.
</Tab>
</Tabs>
## Authentication Methods

View File

@@ -1,66 +1,63 @@
---
title: "ACME-compatible CA"
description: "Learn how to automatically provision and manage TLS certificates using ACME Certificate Authorities like Let's Encrypt with Infisical PKI"
description: "Learn how to connect Infisical to an ACME-compatible CA to issue certificates."
---
## Concept
The Infisical ACME integration allows you to connect with ACME (Automatic Certificate Management Environment) Certificate Authorities to automatically issue and manage publicly trusted TLS certificates for your [subscribers](/documentation/platform/pki/subscribers). This integration enables you to leverage established public CA infrastructure like Let's Encrypt while centralizing your certificate management within Infisical.
Infisical can connect to any upstream ACME-compatible CA (e.g. Let's Encrypt, DigiCert, etc.) supporting the [ACME protocol](https://en.wikipedia.org/wiki/Automatic_Certificate_Management_Environment) to issue certificates back to your end-entities. This integration uses the [DNS-01 challenge](https://letsencrypt.org/docs/challenge-types/#dns-01-challenge) method as part of the ACME domain validation challenge workflow for a requested certificate.
ACME is a protocol that automates the process of certificate issuance and renewal through domain validation challenges. The integration is perfect for obtaining trusted X.509 certificates for public-facing services and is capable of automatically renewing certificates as needed.
The upstream ACME-compatible CA integration lets you connect Infisical to providers by specifying
their **ACME Directory URL** such as:
- [Let's Encrypt](/documentation/platform/pki/ca/lets-encrypt): `https://acme-v02.api.letsencrypt.org/directory`.
- [DigiCert](/documentation/platform/pki/ca/digicert): `https://acme.digicert.com/v2/acme/directory`.
- Google GTS: `https://dv.acme-v02.api.pki.goog/directory`.
- Buypass: `https://api.buypass.com/acme/directory`.
- ZeroSSL: `https://acme.zerossl.com/v2/DV90`.
- SSL.com: `https://acme.ssl.com/sslcom-dv-rsa`.
When Infisical requests a certificate from an ACME-compatible CA, it creates a TXT record at `_acme-challenge.{your-domain}` in your configured DNS provider (e.g. Route53, Cloudflare, etc.); this TXT record contains the challenge token issued by the ACME-compatible CA to validate domain control for the requested certificate.
The ACME provider checks for the existence of this TXT record to verify domain control before issuing the certificate back to Infisical.
After validation completes successfully, Infisical automatically removes the TXT record from your DNS provider.
<div align="center">
```mermaid
graph TD
A[ACME CA Provider<br>e.g., Let's Encrypt] <-->|ACME v2 Protocol| B[Infisical]
B -->|Creates TXT Records<br>via Route53/Cloudflare| C[DNS Validation]
B -->|Manages Certificates| D[Subscribers]
A[ACME-compatible CA] <-->|ACME v2 Protocol| B[Infisical]
B -->|Creates TXT Records<br>via DNS Provider| C[DNS Validation]
B -->|Manages Certificates| D[End-Entities]
```
</div>
As part of the workflow, you configure DNS provider credentials, register an ACME CA provider with Infisical, and create subscribers to represent the certificates you wish to issue. Each issued certificate is automatically managed through its lifecycle, including renewal before expiration.
We recommend reading about [ACME protocol](https://tools.ietf.org/html/rfc8555) and [DNS-01 challenges](https://letsencrypt.org/docs/challenge-types/#dns-01-challenge) for a fuller understanding of the underlying technology.
We recommend reading about [ACME protocol](https://tools.ietf.org/html/rfc8555) and [DNS-01 challenges](https://letsencrypt.org/docs/challenge-types/#dns-01-challenge) for a fuller understanding of the underlying workflow.
## Workflow
A typical workflow for using Infisical with ACME Certificate Authorities consists of the following steps:
A typical workflow for using Infisical with an external ACME-compatible CA consists of the following steps:
1. Setting up AWS Route53 or Cloudflare credentials with appropriate DNS permissions.
2. Creating an AWS/Cloudflare connection in Infisical to store the credentials.
3. Registering an ACME Certificate Authority (like Let's Encrypt) with Infisical.
4. Creating subscribers that use the ACME CA as their issuing authority.
5. Managing certificate lifecycle events such as issuance, renewal, and revocation through Infisical.
1. Setting up your DNS provider (e.g. Route53, Cloudflare, etc.) with appropriate DNS permissions.
2. Creating an [App Connection](/integrations/app-connections/overview) in Infisical to store credentials for Infisical to connect to your DNS provider and create/remove DNS records as part of the DNS-01 challenge.
3. Registering an [External CA](/documentation/platform/pki/ca/external-ca) in Infisical with the ACME type and inputting required configuration including the **ACME Directory URL** of the upstream ACME-compatible CA and the **App Connection** for your DNS provider.
## Understanding ACME DNS-01 Challenge
Once this is complete, you can create a [certificate profile](/documentation/platform/pki/certificates/profiles) linked to the External CA and proceed to request a certificate against it.
The DNS-01 challenge is the method used by ACME CA providers to verify that you control a domain before issuing a certificate. Here's how Infisical handles this process:
## Guide to Connecting Infisical to an ACME-compatible CA
1. **Challenge Request**: When you request a certificate, the ACME provider (like Let's Encrypt) issues a challenge token.
2. **DNS Record Creation**: Infisical creates a TXT record at `_acme-challenge.<YOUR_DOMAIN>` with a value derived from the challenge token.
3. **DNS Propagation**: The TXT record must propagate through the DNS system (usually takes a few minutes, depending on TTL settings).
4. **Validation**: The ACME provider checks for the existence of this TXT record to verify domain control.
5. **Cleanup**: After validation completes successfully, Infisical automatically removes the TXT record from your DNS.
This automated process eliminates the need for manual intervention in domain validation, streamlining certificate issuance.
## Guide
In the following steps, we explore how to set up ACME Certificate Authority integration with Infisical using Let's Encrypt as an example.
In the following steps, we explore how to connect Infisical to an ACME-compatible CA.
<Steps>
<Step title="Create App Connection with Required Permissions">
Before proceeding with the ACME CA registration, you need to set up an App Connection with the appropriate permissions for DNS validation:
<Step title="Create an App Connection to your DNS provider">
Before registering an ACME-compatible CA with Infisical, you need to set up an [App Connection](/integrations/app-connections/overview) with the appropriate permissions for Infisical to perform the DNS-01 challenge with your DNS provider.
If you don't see a specific DNS provider listed below or need a dedicated one, please reach out to sales@infisical.com and we'll help get that enabled for you.
<Tabs>
<Tab title="Route53">
1. Navigate to your Organization Settings > App Connections and create a new AWS connection.
1. Navigate to your Certificate Management Project > App Connections and create a new AWS connection.
2. Ensure your AWS connection has the following minimum permissions for Route53 DNS validation:
@@ -112,7 +109,7 @@ In the following steps, we explore how to set up ACME Certificate Authority inte
For detailed instructions on setting up an AWS connection, see the [AWS Connection](/integrations/app-connections/aws) documentation.
</Tab>
<Tab title="Cloudflare">
1. Navigate to your Organization Settings > App Connections and create a new Cloudflare connection.
1. Navigate to your Certificate Management Project > App Connections and create a new Cloudflare connection.
2. Ensure your Cloudflare token has the following minimum permissions for DNS validation:
@@ -125,51 +122,33 @@ In the following steps, we explore how to set up ACME Certificate Authority inte
</Tab>
</Tabs>
</Step>
<Step title="Register ACME Certificate Authority">
<Step title="Register an ACME-compatible CA">
<Tabs>
<Tab title="Infisical UI">
<Steps>
<Step title="Create ACME CA">
To register an ACME CA, head to your Project > Internal PKI > Certificate Authorities and press the **+** button in the External Certificate Authorities section.
To register an ACME-compatible CA, head to your Certificate Management Project > Certificate Authorities > External Certificate Authorities and press **Create CA**.
![pki register external ca](/images/platform/pki/ca/external-ca/create-external-ca-button.png)
![pki register external ca](/images/platform/pki/ca/external-ca/create-external-ca-button.png)
Fill out the details for the ACME CA registration:
Here, set the **CA Type** to **ACME** and fill out details for it.
![pki register external ca details](/images/platform/pki/ca/external-ca/create-external-ca-form.png)
![pki register external ca details](/images/platform/pki/ca/external-ca/create-external-ca-form.png)
Here's guidance on each field:
Here's some guidance for each field:
- **Type**: Select "ACME" as the External CA type.
- **Name**: Enter a name for the ACME CA (e.g., "lets-encrypt-production").
- **DNS App Connection**: Select from available DNS app connections or configure a new one. This connection provides Infisical with the credentials needed to create and remove DNS records for ACME validation.
- **Zone ID**: Enter the Zone ID for the domain(s) you'll be requesting certificates for.
- **Directory URL**: Enter the ACME v2 directory URL for your chosen CA provider (e.g., `https://acme-v02.api.letsencrypt.org/directory` for Let's Encrypt).
- **Account Email**: Email address to associate with your ACME account. This email will receive important notifications about your certificates.
- **Enable Direct Issuance**: Toggle on to allow direct certificate issuance without requiring subscribers.
- **EAB Key Identifier (KID)**: (Optional) The Key Identifier (KID) provided by your ACME CA for External Account Binding (EAB). This is required by some ACME providers (e.g., ZeroSSL, DigiCert) to link your ACME account to an external account you've pre-registered with them.
- **EAB HMAC Key**: (Optional) The HMAC Key provided by your ACME CA for External Account Binding (EAB). This key is used in conjunction with the KID to prove ownership of the external account during ACME account registration.
- Name: A slug-friendly name for the ACME-compatible CA such as `lets-encrypt-production`.
- DNS App Connection: The App Connection from Step 1 used for Infisical to connect to your DNS provider and create/remove DNS records as part of the DNS-01 challenge in ACME.
- Zone / Zone ID: Enter the Zone / Zone ID for the domain(s) you'll be requesting certificates for.
- Directory URL: Enter the **ACME Directory URL** for your desired upstream ACME-compatible CA such as `https://acme-v02.api.letsencrypt.org/directory` for Let's Encrypt.
- Account Email: The email address to associate with your ACME account. This email will receive important notifications about your certificates.
- EAB Key Identifier (KID): (Optional) The Key Identifier (KID) provided by your ACME CA for External Account Binding (EAB). This is required by some ACME providers (e.g., ZeroSSL, DigiCert) to link your ACME account to an external account you've pre-registered with them.
- EAB HMAC Key: (Optional) The HMAC Key provided by your ACME CA for External Account Binding (EAB). This key is used in conjunction with the KID to prove ownership of the external account during ACME account registration.
Finally, press **Create** to register the ACME CA with Infisical.
</Step>
<Step title="Verify ACME CA Registration">
Once registered, your ACME CA will appear in the External Certificate Authorities section.
Finally, press **Create** to register the ACME-compatible CA with Infisical.
![pki external ca list](/images/platform/pki/ca/external-ca/external-ca-list.png)
From here, you can:
- View the status of the ACME CA registration
- Edit the configuration settings
- Disable or re-enable the ACME CA
- Delete the ACME CA registration if no longer needed
You can now use this ACME CA to issue certificates for your subscribers.
</Step>
</Steps>
Great! You've successfully registered an external ACME-compatible CA with Infisical. Now check out the [Certificates](/documentation/platform/pki/certificates/overview) section to learn more about how to issue X.509 certificates using the ACME-compatible CA.
</Tab>
<Tab title="API">
To register an ACME CA with Infisical using the API, make a request to the Create External CA endpoint:
To register an ACME CA with Infisical using the API, make a request to the [Create External CA](https://infisical.com/docs/api-reference/endpoints/certificate-authorities/acme/create) endpoint:
### Sample request
@@ -227,78 +206,9 @@ In the following steps, we explore how to set up ACME Certificate Authority inte
</Tab>
</Tabs>
</Step>
<Step title="Create Subscriber for ACME CA">
Next, create a subscriber that uses your ACME CA for certificate issuance. Navigate to your Project > Subscribers and create a new subscriber.
Configure the subscriber with:
- **Issuing CA**: Select your registered ACME CA
- **Common Name**: The domain for which you want to issue certificates (e.g., `example.com`)
- **Alternative Names**: Additional domains to include in the certificate
Check out the [Subscribers](/documentation/platform/pki/subscribers) page for detailed instructions on creating and managing subscribers.
</Step>
<Step title="Issue Certificate">
Once your subscriber is configured, you can issue certificates either through the Infisical UI or programmatically via the API.
When you request a certificate:
1. Infisical generates a key pair for the certificate
2. Sends a Certificate Signing Request (CSR) to the ACME CA
3. Receives a DNS-01 challenge from the ACME provider
4. Creates a TXT record in Route53/Cloudflare to satisfy the challenge
5. Notifies the ACME provider that the challenge is ready for validation
6. Once validated, the ACME provider issues the certificate
7. Infisical stores and manages the certificate for your subscriber
The certificate will be automatically renewed before expiration according to your subscriber configuration.
</Step>
<Step title="Use Certificate in Your Applications">
The issued certificate and private key are now available through Infisical and can be:
- Downloaded directly from the Infisical UI
- Retrieved via the Infisical API for programmatic access using the [latest certificate bundle endpoint](/api-reference/endpoints/certificate-profiles/get-latest-active-bundle)
</Step>
</Steps>
## Example: Let's Encrypt Integration
Let's Encrypt is a free, automated, and open Certificate Authority that provides domain-validated SSL/TLS certificates. Here's how the integration works with Infisical:
### Production Environment
- **Directory URL**: `https://acme-v02.api.letsencrypt.org/directory`
- **Rate Limits**: 50 certificates per registered domain per week
- **Certificate Validity**: 90 days with automatic renewal
- **Trusted By**: All major browsers and operating systems
### Staging Environment (for testing)
- **Directory URL**: `https://acme-staging-v02.api.letsencrypt.org/directory`
- **Rate Limits**: Much higher limits for testing
- **Certificate Validity**: 90 days (not trusted by browsers)
- **Use Case**: Testing your ACME integration without hitting production rate limits
<Note>
Always test your ACME integration using Let's Encrypt's staging environment
first. This allows you to verify your DNS configuration and certificate
issuance process without consuming your production rate limits.
</Note>
## Example: DigiCert Integration
DigiCert is a leading commercial Certificate Authority providing a wide range of trusted SSL/TLS certificates. Infisical can integrate with [DigiCert's ACME](https://docs.digicert.com/en/certcentral/certificate-tools/certificate-lifecycle-automation-guides/third-party-acme-integration/request-and-manage-certificates-with-acme.html) service to automate the provisioning and management of these certificates.
- **Directory URL**: `https://acme.digicert.com/v2/acme/directory`
- **External Account Binding (EAB)**: Required. You will need a Key Identifier (KID) and HMAC Key from your DigiCert account to register the ACME CA in Infisical.
- **Certificate Validity**: Typically 90 days, with automatic renewal through Infisical.
- **Trusted By**: All major browsers and operating systems.
<Note>
When integrating with DigiCert ACME, ensure you have obtained the necessary
External Account Binding (EAB) Key Identifier (KID) and HMAC Key from your
DigiCert account.
</Note>
## FAQ
<AccordionGroup>
@@ -325,17 +235,8 @@ DigiCert is a leading commercial Certificate Authority providing a wide range of
- Reduce the impact of compromised certificates
- Ensure systems stay up-to-date with certificate management practices
When configured, Infisical automatically handles certificate renewal for subscribers.
</Accordion>
<Accordion title="Can I use multiple ACME providers?">
Yes! You can register multiple ACME CAs in the same project:
- Different providers for different domains or use cases
- Staging and production environments for the same provider
- Backup providers for redundancy
Each subscriber can be configured to use a specific ACME CA based on your requirements.
Yes. You can register multiple ACME CAs in the same project.
</Accordion>
</AccordionGroup>

View File

@@ -0,0 +1,16 @@
---
title: "DigiCert"
description: "Learn how to connect Infisical to DigiCert to issue certificates."
---
## Concept
Infisical can connect to [DigiCert](https://www.digicert.com/) using the [ACME-compatible CA integration](/documentation/platform/pki/ca/acme-ca) to issue certificates back to your end-entities.
## Guide to Connecting Infisical to DigiCert CA
To connect Infisical to DigiCert, follow the steps in the [ACME-compatible CA integration](/documentation/platform/pki/ca/acme-ca) guide but use the DigiCert **ACME Directory URL**: `https://acme.digicert.com/v2/acme/directory`.
DigiCert requires **External Account Binding (EAB)** for all ACME registrations. You will need to obtain both a Key Identifier (KID) and an HMAC Key from your DigiCert account before registering the ACME CA in Infisical.
DigiCert typically issues certificates with a 90-day validity period.

View File

@@ -6,7 +6,7 @@ description: "Learn how to connect External Certificate Authorities with Infisic
## Concept
Infisical lets you integrate with External Certificate Authorities (CAs), allowing you to use existing PKI infrastructure or connect to public CAs to issue digital certificates for your end-entities.
Infisical lets you integrate with External Certificate Authorities (CAs), allowing you to use existing PKI infrastructure or connect to public CAs to issue certificates for your end-entities.
<div align="center">
@@ -23,7 +23,7 @@ As shown above, these CAs commonly fall under two categories:
- External Private CAs: CAs like AWS Private CA, HashiCorp Vault PKI, Azure ADCS, etc. that are privately owned and are used to issue certificates for internal services; these are often either cloud-hosted private CAs or on-prem / enterprise CAs.
- External Public CAs: CAs like Let's Encrypt, DigiCert, GlobalSign, etc. that are publicly trusted and are used to issue certificates for public-facing services.
Note that Infisical can also act as an _ACME client_, allowing you to integrate upstream with any ACME-compatible CA to automate certificate issuance and renewal.
Note that Infisical can act as an _ACME client_, allowing you to integrate upstream with any [ACME-compatible CA](/documentation/platform/pki/ca/acme-ca) to automate certificate issuance and renewal.
## Workflow

View File

@@ -0,0 +1,16 @@
---
title: "Let's Encrypt"
description: "Learn how to connect Infisical to Let's Encrypt to issue certificates."
---
## Concept
Infisical can connect to [Let's Encrypt](https://letsencrypt.org/) using the [ACME-compatible CA integration](/documentation/platform/pki/ca/acme-ca) to issue certificates back to your end-entities.
## Guide to Connecting Infisical to Let's Encrypt CA
To connect Infisical to Let's Encrypt, follow the steps in the [ACME-compatible CA integration](/documentation/platform/pki/ca/acme-ca) guide but use the Let's Encrypt **ACME Directory URL**: `https://acme-v02.api.letsencrypt.org/directory`.
Note that Let's Encrypt issues 90-day certificates and enforces a limit of 50 certificates per registered domain per week.
We strongly recommend testing your setup against the Let's Encrypt staging environment first at the **ACME Directory URL** `https://acme-staging-v02.api.letsencrypt.org/directory` prior to switching to the production environment. This allows you to verify your DNS configuration and certificate issuance process without consuming production rate limits.

View File

@@ -22,10 +22,7 @@ where you can manage various aspects of its lifecycle including deployment to cl
To issue a certificate, you must first create a [certificate profile](/documentation/platform/pki/certificates/profiles) and a [certificate template](/documentation/platform/pki/certificates/templates) to go along with it.
The [enrollment method](/documentation/platform/pki/enrollment-methods/overview) configured on the certificate profile determines how a certificate is issued for it.
Refer to the documentation for each enrollment method below to learn more about how to issue certificates using it.
- [API](/documentation/platform/pki/enrollment-methods/api): Issue a certificate over UI or by making an API request to Infisical.
- [EST](/documentation/platform/pki/enrollment-methods/est): Issue a certificate over the EST protocol.
Refer to the documentation for each enrollment method to learn more about how to issue certificates using it.
## Guide to Renewing Certificates
@@ -49,24 +46,33 @@ Note that server-driven certificate renewal is only available for certificates i
A certificate can be considered for auto-renewal at time of issuance if the **Enable Auto-Renewal By Default** option is selected on its [certificate profile](/documentation/platform/pki/certificates/profiles) or after issuance by toggling this option manually.
<Info>
For server-driven certificate renewal workflows, you can programmatically fetch the latest active certificate bundle for a certificate profile using the [Get Latest Active Certificate Bundle](/api-reference/endpoints/certificate-profiles/get-latest-active-bundle) API endpoint.
This ensures you always retrieve the most current valid certificate, including any that have been automatically renewed, making it particularly useful for deployment pipelines and automation workflows where you don't want to track individual serial numbers.
For server-driven certificate renewal workflows, you can programmatically
fetch the latest active certificate bundle for a certificate profile using the
[Get Latest Active Certificate
Bundle](/api-reference/endpoints/certificate-profiles/get-latest-active-bundle)
API endpoint. This ensures you always retrieve the most current valid
certificate, including any that have been automatically renewed, making it
particularly useful for deployment pipelines and automation workflows where
you don't want to track individual serial numbers.
</Info>
The following examples demonstrate different approaches to certificate renewal:
- Using the ACME enrollment method, you may connect an ACME client like [certbot](https://certbot.eff.org/) to fetch back and renew certificates for Apache, Nginx, or other server. The ACME client will pursue a client-driven approach and submit certificate requests upon certificate expiration for you, saving renewed certificates back to the server's configuration.
- Using the ACME enrollment method, you may use [cert-manager](https://cert-manager.io/) with Infisical to issue and renew certificates for Kubernetes workloads; cert-manager will pursue a client-driven approach and submit certificate requests upon certificate expiration for you, saving renewed certificates back to Kubernetes secrets.
- Using the API enrollment method, you may push and auto-renew certificates to AWS and Azure using [certificate syncs](/documentation/platform/pki/certificate-syncs/overview). Certificates issued over the API enrollment method, where key pairs are generated server-side, are also eligible for server-side auto-renewal; once renewed, certificates are automatically pushed back to their sync destination.
- Using the [ACME enrollment method](/documentation/platform/pki/enrollment-methods/acme), you may connect an ACME client like [certbot](https://certbot.eff.org/) to fetch back and renew certificates for [Apache](/documentation/platform/pki/integration-guides/apache-certbot), [Nginx](/documentation/platform/pki/integration-guides/nginx-certbot), or other servers. The ACME client will pursue a client-driven approach and submit certificate requests upon certificate expiration for you, saving renewed certificates back to the server's configuration.
- Using the [ACME enrollment method](/documentation/platform/pki/enrollment-methods/acme), you may use [cert-manager](https://cert-manager.io/) with Infisical to issue and renew certificates for Kubernetes workloads; cert-manager will pursue a client-driven approach and submit certificate requests upon certificate expiration for you, saving renewed certificates back to Kubernetes secrets.
- Using the [API enrollment method](/documentation/platform/pki/enrollment-methods/api), you may push and auto-renew certificates to AWS and Azure using [certificate syncs](/documentation/platform/pki/certificate-syncs/overview). Certificates issued over the API enrollment method, where key pairs are generated server-side, are also eligible for server-side auto-renewal; once renewed, certificates are automatically pushed back to their sync destination.
## Guide to Exporting Certificates
## Guide to Downloading Certificates
In the following steps, we explore how to export certificates from Infisical in different formats for use in your applications and infrastructure.
In the following steps, we explore the different options for exporting already-issued certificates from Infisical in various formats for use in your applications and infrastructure.
### Accessing the Export Certificate Modal
### Download Latest Profile Certificate
To export any certificate, first navigate to your project's certificate inventory and locate the certificate you want to export. Click on the **Export Certificate** option from the certificate's action menu.
You can download the latest certificate issued against a [certificate profile](/documentation/platform/pki/certificates/profiles) using the [latest certificate bundle](/api-reference/endpoints/certificate-profiles/get-latest-active-bundle) endpoint.
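A minimal sketch of fetching the bundle with `curl`; the exact route and response shape are defined in the linked API reference, so treat the URL below as an illustrative assumption:
```bash
# Illustrative only -- confirm the exact route in the API reference above.
# PROFILE_ID and INFISICAL_TOKEN are placeholders you supply.
curl -s \
  -H "Authorization: Bearer $INFISICAL_TOKEN" \
  "https://app.infisical.com/api/v1/pki/certificate-profiles/$PROFILE_ID/latest-certificate-bundle"
```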
### Download Specific Certificate
To export a specific certificate, first navigate to your project's certificate inventory and locate the certificate you want to export. Click on the **Export Certificate** option from the certificate's action menu.
![pki export certificate option](/images/platform/pki/certificate/cert-export-option.png)
@@ -108,6 +114,7 @@ To export any certificate, first navigate to your project's certificate inventor
```
</Step>
</Steps>
</Tab>
<Tab title="PKCS12 Format">
<Steps>
@@ -158,6 +165,7 @@ To export any certificate, first navigate to your project's certificate inventor
</Info>
</Step>
</Steps>
</Tab>
</Tabs>

View File

@@ -7,7 +7,7 @@ sidebarTitle: "Templates"
A certificate template is a policy structure specifying permitted attributes for requested certificates. This includes constraints around subject naming conventions, SAN fields, key usages, and extended key usages.
Each certificate requested against a profile is validated against the template bound to that profile. If the request fails any criteria included in the template, the certificate is not issued. This helps administrators enforce uniformity and security standards across all issued certificates.
Each certificate requested against a [certificate profile](/documentation/platform/pki/certificates/profiles) is validated against the template bound to that profile. If the request fails any criteria included in the template, the certificate is not issued. This helps administrators enforce uniformity and security standards across all issued certificates.
## Guide to Creating a Certificate Template

View File

@@ -3,6 +3,62 @@ title: "Certificate Enrollment via ACME"
sidebarTitle: "ACME"
---
<Info>
ACME-based certificate enrollment is currently under development and will be included in a future release.
</Info>
## Concept
The ACME enrollment method allows you to issue and manage certificates against a specific [certificate profile](/documentation/platform/pki/certificates/profiles) using the [ACME protocol](https://en.wikipedia.org/wiki/Automatic_Certificate_Management_Environment).
This method is suitable for web servers, load balancers, and other general-purpose servers that can run an [ACME client](https://letsencrypt.org/docs/client-options/) for automated certificate management.
Infisical's ACME enrollment method is based on [RFC 8555](https://datatracker.ietf.org/doc/html/rfc8555/).
## Prerequisites
Install an [ACME client](https://letsencrypt.org/docs/client-options/) onto your server. This client will handle [ACME challenges](https://letsencrypt.org/docs/challenge-types/) and request/renew certificates from Infisical.
## Guide to Certificate Enrollment via ACME
In the following steps, we explore how to issue an X.509 certificate using the ACME enrollment method.
<Steps>
<Step title="Create a certificate profile in Infisical">
Create a [certificate
profile](/documentation/platform/pki/certificates/profiles) with **ACME**
selected as the enrollment method.
![pki acme config](/images/platform/pki/enrollment-methods/acme/acme-config.png)
</Step>
<Step title="Obtain the ACME configuration">
Once you've created the certificate profile, you can obtain its ACME configuration details by clicking the **Reveal ACME EAB** option on the profile.
![pki acme eab config](/images/platform/pki/enrollment-methods/acme/acme-eab.png)
From the ACME configuration, gather the following values:
- ACME Directory URL: The URL that the ACME client will use to communicate with Infisical's ACME server.
- EAB Key Identifier (KID): A unique identifier that tells Infisical which ACME account is making the request.
- EAB Secret: A secret key that authenticates your ACME client with Infisical.
</Step>
<Step title="Configure your ACME client">
Provide the **ACME Directory URL**, **EAB KID**, and **EAB Secret** from Step 2 to your ACME client to authenticate with Infisical and request a certificate.
For example, if using [Certbot](https://certbot.eff.org/) as an ACME client, you can configure and start requesting certificates with the following command:
```bash
sudo certbot certonly \
--standalone \
--server "https://your-infisical-instance.com/api/v1/pki/certificate-profiles/{profile-id}/acme/directory" \
--eab-kid "your-eab-kid" \
--eab-hmac-key "your-eab-secret" \
-d example.infisical.com \
--email admin@example.com \
--agree-tos \
--non-interactive
```
Certbot stores the private key, the resulting leaf certificate, and the full certificate chain in `/etc/letsencrypt/live/{domain-name}/`.
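To verify that renewal will work before certificates approach expiry, you can exercise Certbot's renewal machinery with a dry run:
```bash
# Simulates renewal for all certificates Certbot manages without issuing
# anything; real renewals are then handled by Certbot's packaged timer/cron.
sudo certbot renew --dry-run
```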
For client-specific setup and usage instructions, refer to the documentation for your ACME client.
</Step>
</Steps>

View File

@@ -5,7 +5,7 @@ sidebarTitle: "API"
## Concept
The API enrollment method allows you to issue certificates against a specific certificate profile over Web UI or by making an API request to Infisical.
The API enrollment method allows you to issue certificates against a specific [certificate profile](/documentation/platform/pki/certificates/profiles) over Web UI or by making an API request to Infisical.
## Guide to Certificate Enrollment via API
@@ -15,7 +15,7 @@ In the following steps, we explore how to issue a X.509 certificate using the AP
<Tab title="Infisical UI">
<Steps>
<Step title="Create a certificate profile">
<Step title="Create a certificate profile in Infisical">
Create a [certificate
profile](/documentation/platform/pki/certificates/profiles) with **API**
selected as the enrollment method.
@@ -54,7 +54,7 @@ Here, select the certificate profile from step 1 that will be used to issue the
<Tab title="API">
<Steps>
<Step title="Create a certificate profile">
<Step title="Create a certificate profile in Infisical">
To create a certificate [profile](/documentation/platform/pki/certificates/profiles), make an API request to the [Create Certificate Profile](/api-reference/endpoints/certificate-profiles/create) API endpoint.

View File

@@ -5,7 +5,7 @@ sidebarTitle: "EST"
## Concept
The API enrollment method allows you to issue and manage certificates against a specific certificate profile using the [EST protocol](https://en.wikipedia.org/wiki/Enrollment_over_Secure_Transport).
The EST enrollment method allows you to issue and manage certificates against a specific [certificate profile](/documentation/platform/pki/certificates/profiles) using the [EST protocol](https://en.wikipedia.org/wiki/Enrollment_over_Secure_Transport).
This method is suitable for environments requiring strong authentication and encrypted communication, such as in IoT, enterprise networks, and secure web services.
Infisical's EST service is based on [RFC 7030](https://datatracker.ietf.org/doc/html/rfc7030) and implements the following endpoints:
@@ -32,7 +32,7 @@ and structured under `https://app.infisical.com:8443/.well-known/est/{profile_id
In the following steps, we explore how to issue an X.509 certificate using the EST enrollment method.
<Steps>
<Step title="Set up up a certificate profile">
<Step title="Create a certificate profile in Infisical">
Create a [certificate
profile](/documentation/platform/pki/certificates/profiles) with **EST**
selected as the enrollment method and fill in EST-specific configuration.

View File

@@ -5,7 +5,10 @@ sidebarTitle: "Overview"
Enrollment methods determine how certificates are issued and managed for a [certificate profile](/documentation/platform/pki/certificates/profiles).
Refer to the documentation for each enrollment method to learn more about how to enroll certificates using it.
Refer to the documentation for each enrollment method below to learn more about how to enroll certificates using it.
- [API](/documentation/platform/pki/enrollment-methods/api): Enroll certificates via API.
- [EST](/documentation/platform/pki/enrollment-methods/est): Enroll certificates via EST protocol.
- [ACME](/documentation/platform/pki/enrollment-methods/acme): Enroll certificates using the ACME protocol.
- [EST](/documentation/platform/pki/enrollment-methods/est): Enroll certificates using the EST protocol.
Note that beyond using an enrollment method, you can also deliver a certificate to a target destination using supported [certificate syncs](https://infisical.com/docs/documentation/platform/pki/certificate-syncs/overview).

View File

@@ -12,7 +12,7 @@ Core capabilities include:
- [Private CA](/documentation/platform/pki/ca/private-ca): Create and manage your own private CA hierarchy including root and intermediate CAs.
- [External CA integration](/documentation/platform/pki/ca/external-ca): Integrate with external public and private CAs including [Azure ADCS](/documentation/platform/pki/ca/azure-adcs) and [ACME-compatible CAs](/documentation/platform/pki/ca/acme-ca) like Let's Encrypt and DigiCert.
- [Certificate Enrollment](/documentation/platform/pki/enrollment-methods/overview): Support enrollment methods including [API](/documentation/platform/pki/enrollment-methods/api), ACME, [EST](/documentation/platform/pki/enrollment-methods/est), and more to automate certificate issuance for services, devices, and workloads.
- [Certificate Enrollment](/documentation/platform/pki/enrollment-methods/overview): Support enrollment methods including [API](/documentation/platform/pki/enrollment-methods/api), [ACME](/documentation/platform/pki/enrollment-methods/acme), [EST](/documentation/platform/pki/enrollment-methods/est), and more to automate certificate issuance for services, devices, and workloads.
- Certificate Inventory: Track and monitor issued X.509 certificates, maintaining a comprehensive inventory of all active and expired certificates.
- Certificate Lifecycle Automation: Automate issuance, [renewal](/documentation/platform/pki/certificates/certificates#guide-to-renewing-certificates), and [revocation](/documentation/platform/pki/certificates/certificates#guide-to-revoking-certificates) with policy-based workflows, ensuring certificates remain valid, compliant, and up to date across your infrastructure.
- [Certificate Syncs](/documentation/platform/pki/certificate-syncs/overview): Push certificates to cloud certificate managers like [AWS Certificate Manager](/documentation/platform/pki/certificate-syncs/aws-certificate-manager) and [Azure Key Vault](/documentation/platform/pki/certificate-syncs/azure-key-vault).

Binary file not shown.

Before

Width:  |  Height:  |  Size: 991 KiB

After

Width:  |  Height:  |  Size: 276 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 587 KiB

After

Width:  |  Height:  |  Size: 408 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 989 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 313 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 298 KiB

View File

@@ -43,6 +43,9 @@ description: "Learn how to configure a Vercel Sync for Infisical."
- **Overwrite Destination Secrets**: Removes any secrets at the destination endpoint not present in Infisical.
- **Import Secrets (Prioritize Infisical)**: Imports secrets from the destination endpoint before syncing, prioritizing values from Infisical over Vercel when keys conflict.
- **Import Secrets (Prioritize Vercel)**: Imports secrets from the destination endpoint before syncing, prioritizing values from Vercel over Infisical when keys conflict.
<Note>
Vercel does not expose the values of [sensitive environment variables](https://vercel.com/docs/environment-variables/sensitive-environment-variables), so Infisical cannot import them during the initial sync. As a result, these secrets are created in Infisical with empty values. After the first sync, you'll need to manually re-enter their values in Infisical to ensure both platforms stay aligned.
</Note>
- **Key Schema**: Template that determines how secret names are transformed when syncing, using `{{secretKey}}` as a placeholder for the original secret name and `{{environment}}` for the environment.
<Note>
We highly recommend using a Key Schema to ensure that Infisical only manages the specific keys you intend, keeping everything else untouched.
@@ -149,4 +152,5 @@ description: "Learn how to configure a Vercel Sync for Infisical."
}
```
</Tab>
</Tabs>

View File

@@ -118,6 +118,22 @@ var _ = await sdk.Auth().UniversalAuth().LoginAsync(
- `clientId` (string): The client ID of your Machine Identity.
- `clientSecret` (string): The client secret of your Machine Identity.
### LDAP Auth
#### Authenticating
```cs
var _ = await sdk.Auth().LdapAuth().LoginAsync(
"IDENTITY_ID",
"USERNAME",
"PASSWORD"
);
```
**Parameters:**
- `identityId` (string): The ID of your Machine Identity.
- `username` (string): The LDAP username for authentication.
- `password` (string): The LDAP password for authentication.
### `Secrets()`
The `Secrets()` sub-class handles operations related to the Infisical secrets management product.

View File

@@ -284,6 +284,114 @@ if err != nil {
}
```
#### JWT Auth
<Info>
Please note that this authentication method requires a valid JWT token from your JWT issuer. Please [read
more](/documentation/platform/identities/jwt-auth) about this authentication
method.
</Info>
**Using the SDK**
```go
credential, err := client.Auth().JwtAuthLogin("MACHINE_IDENTITY_ID", "JWT_TOKEN")
if err != nil {
fmt.Println(err)
os.Exit(1)
}
```
#### LDAP Auth
<Info>
Please note that this authentication method requires LDAP credentials. Please [read
more](/documentation/platform/identities/ldap-auth/general) about this authentication
method.
</Info>
**Using environment variables**
You can set the `INFISICAL_LDAP_AUTH_IDENTITY_ID` environment variable and pass an empty string for the identity ID:
```go
credential, err := client.Auth().LdapAuthLogin("", "LDAP_USERNAME", "LDAP_PASSWORD")
if err != nil {
fmt.Println(err)
os.Exit(1)
}
```
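For completeness, setting the variable in a POSIX shell might look like this (the identity ID value is a placeholder):
```bash
# The SDK reads this when the identity ID argument is an empty string.
export INFISICAL_LDAP_AUTH_IDENTITY_ID="<machine-identity-id>"
```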
**Using the SDK directly**
```go
credential, err := client.Auth().LdapAuthLogin("MACHINE_IDENTITY_ID", "LDAP_USERNAME", "LDAP_PASSWORD")
if err != nil {
fmt.Println(err)
os.Exit(1)
}
```
#### OCI Auth
<Info>
Please note that this authentication method will only work if you're running
your application on Oracle Cloud Infrastructure. Please [read
more](/documentation/platform/identities/oci-auth) about this authentication
method.
</Info>
**Using environment variables**
You can set the `INFISICAL_OCI_AUTH_IDENTITY_ID` environment variable and omit the `IdentityID` field:
```go
credential, err := client.Auth().OciAuthLogin(infisical.OciAuthLoginOptions{
UserID: "USER_OCID",
TenancyID: "TENANCY_OCID",
Fingerprint: "FINGERPRINT",
PrivateKey: "PRIVATE_KEY",
Region: "REGION",
})
if err != nil {
fmt.Println(err)
os.Exit(1)
}
```
**Using the SDK directly**
```go
credential, err := client.Auth().OciAuthLogin(infisical.OciAuthLoginOptions{
IdentityID: "MACHINE_IDENTITY_ID",
UserID: "USER_OCID",
TenancyID: "TENANCY_OCID",
Fingerprint: "FINGERPRINT",
PrivateKey: "PRIVATE_KEY",
Region: "REGION",
Passphrase: nil, // Optional: pointer to string if your private key has a passphrase
})
if err != nil {
fmt.Println(err)
os.Exit(1)
}
```
**OciAuthLoginOptions fields:**
- `IdentityID` (string) - Your Infisical Machine Identity ID. Can be set via `INFISICAL_OCI_AUTH_IDENTITY_ID` environment variable.
- `UserID` (string) - Your OCI user OCID.
- `TenancyID` (string) - Your OCI tenancy OCID.
- `Fingerprint` (string) - Your OCI API key fingerprint.
- `PrivateKey` (string) - Your OCI private key (PEM format).
- `Region` (string) - Your OCI region (e.g., `us-ashburn-1`).
- `Passphrase` (*string) - Optional: pointer to passphrase string if your private key is encrypted.
## Secrets
### List Secrets

View File

@@ -14,14 +14,13 @@ This guide walks through how you can use these paid features on a self-hosted in
Once purchased, you will be issued a license key.
</Step>
<Step title="Activate the license">
Depending on whether or not the environment where Infisical is deployed has internet access, you may be issued a regular license or an offline license.
Set your license key as the value of the **LICENSE_KEY** environment variable within your Infisical instance.
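For example, in a shell-managed or docker-compose deployment this could look like the following; how you actually set the variable depends on your deployment method:
```bash
# Placeholder value -- use the license key you were issued.
export LICENSE_KEY="<your-license-key>"
```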
<Tabs>
<Tab title="Regular License">
- Assign the issued license key to the `LICENSE_KEY` environment variable in your Infisical instance.
- Your Infisical instance will need to communicate with the Infisical license server to validate the license key.
- Your Infisical instance will need to communicate with the Infisical license server to validate the license key.
If you want to limit outgoing connections only to the Infisical license server, you can use the following IP addresses: `13.248.249.247` and `35.71.190.59`
<Note>
@@ -29,16 +28,18 @@ This guide walks through how you can use these paid features on a self-hosted in
</Note>
</Tab>
<Tab title="Offline License">
- Assign the issued license key to the `LICENSE_KEY_OFFLINE` environment variable in your Infisical instance.
- Assign the issued offline license key to the `LICENSE_KEY` environment variable in your Infisical instance.
- The system will automatically detect that it's an offline license based on the key format.
<Note>
How you set the environment variable will depend on the deployment method you used. Please refer to the documentation of your deployment method for specific instructions.
While the `LICENSE_KEY_OFFLINE` environment variable continues to be supported for compatibility with existing configurations, we recommend transitioning to `LICENSE_KEY` for all license types going forward.
</Note>
</Tab>
</Tabs>
Once your instance starts up, the license key will be validated and you’ll be able to use the paid features.
Once your instance starts up, the license key will be validated and you'll be able to use the paid features.
However, when the license expires, Infisical will continue to run, but EE features will be disabled until the license is renewed or a new one is purchased.
</Step>
</Steps>
</Steps>

View File

@@ -694,4 +694,20 @@ For enterprise deployments requiring compliance certifications:
### Standards Compliance
**FIPS 140-3 Compliance**. Infisical is actively working on FIPS 140-3 compliance to meet U.S. and Canadian government cryptographic standards. This will provide validated cryptographic modules for organizations requiring certified encryption implementations.
#### FIPS 140-3 Compliance
Infisical is compliant with FIPS 140-3, meeting U.S. and Canadian government cryptographic standards through validated cryptographic modules.
This certification is designed for organizations that require government-approved encryption implementations.
To deploy a FIPS-compliant instance, use the [infisical/infisical-fips](https://hub.docker.com/r/infisical/infisical-fips) Docker image, available to Enterprise customers.
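A minimal sketch of pulling and starting the image; the tag, port, and env file below are assumptions, so supply the full set of environment variables your deployment normally uses:
```bash
# Hypothetical invocation; configure database, redis, and key material via
# the same environment variables as the standard Infisical image.
docker pull infisical/infisical-fips:latest
docker run --env-file .env -p 8080:8080 infisical/infisical-fips:latest
```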
Our FIPS 140-3 attestation letter is available in the [Infisical Trust Center](https://trust.infisical.com/).
#### SOC 2 Compliance
Infisical is SOC 2 compliant, demonstrating adherence to rigorous security, availability, and confidentiality standards established by the American Institute of CPAs (AICPA).
This certification validates our security controls and operational practices for organizations requiring third-party audited security assurance. Our SOC 2 report is available in the [Infisical Trust Center](https://trust.infisical.com/).
#### HIPAA Compliance
Infisical is HIPAA compliant, meeting the security and privacy requirements of the Health Insurance Portability and Accountability Act.
This compliance framework ensures appropriate safeguards for protected health information (PHI) for healthcare organizations and their business associates.
Our HIPAA certification is available in the [Infisical Trust Center](https://trust.infisical.com/).

View File

@@ -41,7 +41,6 @@
"common": {
"head-title": "{{title}} | Infisical",
"error_project-already-exists": "A project with this name already exists.",
"no-mobile": " To use Infisical, please log in through a device with larger dimensions. ",
"email": "Email",
"password": "Password",
"first-name": "First Name",

View File

@@ -41,7 +41,6 @@
"common": {
"head-title": "{{title}} | Infisical",
"error_project-already-exists": "Ya existe un proyecto con este nombre.",
"no-mobile": "Para usar Infisical, inicia sesión con un dispositivo de mayores dimesiones.",
"email": "Correo electrónico",
"password": "Contraseña",
"first-name": "Nombre",

View File

@@ -41,7 +41,6 @@
"common": {
"head-title": "{{title}} | Infisical",
"error_project-already-exists": "Un projet avec ce nom existe déjà.",
"no-mobile": " Pour utiliser Infisical, veuillez vous connecter avec un appareil avec des dimensions plus grandes. ",
"email": "Email",
"password": "Mot de passe",
"first-name": "Prénom",

View File

@@ -30,7 +30,6 @@
"common": {
"head-title": "{{title}} | Infisical",
"error_project-already-exists": "동일한 이름을 가진 프로젝트가 이미 존재해요.",
"no-mobile": " Infisical을 사용하려면, 큰 화면을 가진 디바이스로 로그인하여 주세요.",
"email": "메일",
"password": "비밀번호",
"first-name": "이름",

View File

@@ -41,7 +41,6 @@
"common": {
"head-title": "{{title}} | Infisical",
"error_project-already-exists": "Já exite um projeto com este nome.",
"no-mobile": "Para usar o Infisical, faça o login através de um dispositivo com dimensões maiores.",
"email": "Email",
"password": "Senha",
"first-name": "Primeiro Nome",

View File

@@ -41,7 +41,6 @@
"common": {
"head-title": "{{title}} | Infisical",
"error_project-already-exists": "Bu isimle bir proje zaten mevcut.",
"no-mobile": " Infisical'ı kullanmak için, lütfen daha büyük boyutlara sahip bir cihaz üzerinden giriş yapın. ",
"email": "Email",
"password": "Şifre",
"first-name": "Adınız",

View File

@@ -76,8 +76,8 @@ export const ProjectOverviewChangeSection = ({ showSlugField = false }: Props) =
return (
<div className="mb-6 rounded-lg border border-mineshaft-600 bg-mineshaft-900 p-4">
<div className="justify-betweens flex">
<h2 className="mb-8 flex-1 text-xl font-medium text-mineshaft-100">Project Overview</h2>
<div className="justify-betweens mb-8 flex flex-wrap gap-2">
<h2 className="flex-1 text-xl font-medium text-mineshaft-100">Project Overview</h2>
<div className="space-x-2">
<Button
variant="outline_bg"

View File

@@ -262,24 +262,25 @@ export const eventToNameMap: { [K in EventType]: string } = {
[EventType.UPDATE_IDENTITY_PROJECT_MEMBERSHIP]: "Update Identity Project Membership",
[EventType.DELETE_IDENTITY_PROJECT_MEMBERSHIP]: "Delete Identity Project Membership",
[EventType.PAM_SESSION_START]: "PAM Session Start",
[EventType.PAM_SESSION_LOGS_UPDATE]: "PAM Session Logs Update",
[EventType.PAM_SESSION_END]: "PAM Session End",
[EventType.PAM_SESSION_GET]: "PAM Session Get",
[EventType.PAM_SESSION_LIST]: "PAM Session List",
[EventType.PAM_FOLDER_CREATE]: "PAM Folder Create",
[EventType.PAM_FOLDER_UPDATE]: "PAM Folder Update",
[EventType.PAM_FOLDER_DELETE]: "PAM Folder Delete",
[EventType.PAM_ACCOUNT_LIST]: "PAM Account List",
[EventType.PAM_ACCOUNT_ACCESS]: "PAM Account Access",
[EventType.PAM_ACCOUNT_CREATE]: "PAM Account Create",
[EventType.PAM_ACCOUNT_UPDATE]: "PAM Account Update",
[EventType.PAM_ACCOUNT_DELETE]: "PAM Account Delete",
[EventType.PAM_RESOURCE_LIST]: "PAM Resource List",
[EventType.PAM_RESOURCE_GET]: "PAM Resource Get",
[EventType.PAM_RESOURCE_CREATE]: "PAM Resource Create",
[EventType.PAM_RESOURCE_UPDATE]: "PAM Resource Update",
[EventType.PAM_RESOURCE_DELETE]: "PAM Resource Delete",
[EventType.PAM_SESSION_CREDENTIALS_GET]: "Get PAM Session Credentials",
[EventType.PAM_SESSION_START]: "Start PAM Session",
[EventType.PAM_SESSION_LOGS_UPDATE]: "Update PAM Session Logs",
[EventType.PAM_SESSION_END]: "End PAM Session",
[EventType.PAM_SESSION_GET]: "Get PAM Session",
[EventType.PAM_SESSION_LIST]: "List PAM Sessions",
[EventType.PAM_FOLDER_CREATE]: "Create PAM Folder",
[EventType.PAM_FOLDER_UPDATE]: "Update PAM Folder",
[EventType.PAM_FOLDER_DELETE]: "Delete PAM Folder",
[EventType.PAM_ACCOUNT_LIST]: "List PAM Accounts",
[EventType.PAM_ACCOUNT_ACCESS]: "Access PAM Account",
[EventType.PAM_ACCOUNT_CREATE]: "Create PAM Account",
[EventType.PAM_ACCOUNT_UPDATE]: "Update PAM Account",
[EventType.PAM_ACCOUNT_DELETE]: "Delete PAM Account",
[EventType.PAM_RESOURCE_LIST]: "List PAM Resources",
[EventType.PAM_RESOURCE_GET]: "Get PAM Resource",
[EventType.PAM_RESOURCE_CREATE]: "Create PAM Resource",
[EventType.PAM_RESOURCE_UPDATE]: "Update PAM Resource",
[EventType.PAM_RESOURCE_DELETE]: "Delete PAM Resource",
[EventType.CREATE_CERTIFICATE_PROFILE]: "Create Certificate Profile",
[EventType.UPDATE_CERTIFICATE_PROFILE]: "Update Certificate Profile",
@@ -314,6 +315,7 @@ const sharedProjectEvents = [
export const projectToEventsMap: Partial<Record<ProjectType, EventType[]>> = {
[ProjectType.PAM]: [
...sharedProjectEvents,
EventType.PAM_SESSION_CREDENTIALS_GET,
EventType.PAM_SESSION_START,
EventType.PAM_SESSION_LOGS_UPDATE,
EventType.PAM_SESSION_END,

View File

@@ -254,6 +254,7 @@ export enum EventType {
UPDATE_IDENTITY_PROJECT_MEMBERSHIP = "update-identity-project-membership",
DELETE_IDENTITY_PROJECT_MEMBERSHIP = "delete-identity-project-membership",
PAM_SESSION_CREDENTIALS_GET = "pam-session-credentials-get",
PAM_SESSION_START = "pam-session-start",
PAM_SESSION_LOGS_UPDATE = "pam-session-logs-update",
PAM_SESSION_END = "pam-session-end",

View File

@@ -27,9 +27,18 @@ export const useDeleteCert = () => {
);
return certificate;
},
onSuccess: (_, { projectSlug }) => {
onSuccess: (_, { projectId }) => {
queryClient.invalidateQueries({
queryKey: projectKeys.forProjectCertificates(projectSlug)
queryKey: ["certificate-profiles", "list"]
});
queryClient.invalidateQueries({
queryKey: pkiSubscriberKeys.allPkiSubscriberCertificates()
});
queryClient.invalidateQueries({
queryKey: projectKeys.allProjectCertificates()
});
queryClient.invalidateQueries({
queryKey: projectKeys.forProjectCertificates(projectId)
});
}
});
@@ -49,16 +58,18 @@ export const useRevokeCert = () => {
);
return certificate;
},
onSuccess: (_, { projectSlug }) => {
onSuccess: (_, { projectId }) => {
queryClient.invalidateQueries({
queryKey: projectKeys.forProjectCertificates(projectSlug)
queryKey: ["certificate-profiles", "list"]
});
queryClient.invalidateQueries({
queryKey: pkiSubscriberKeys.allPkiSubscriberCertificates()
});
queryClient.invalidateQueries({
queryKey: ["certificate-profiles", "list"]
queryKey: projectKeys.allProjectCertificates()
});
queryClient.invalidateQueries({
queryKey: projectKeys.forProjectCertificates(projectId)
});
}
});

Some files were not shown because too many files have changed in this diff